Detailed changes
@@ -1546,7 +1546,7 @@ dependencies = [
"sum_tree",
"sysinfo",
"tempfile",
- "text",
+ "text2",
"thiserror",
"time",
"tiny_http",
@@ -3079,7 +3079,7 @@ dependencies = [
"smol",
"sum_tree",
"tempfile",
- "text",
+ "text2",
"time",
"util",
]
@@ -3371,6 +3371,26 @@ dependencies = [
"url",
]
+[[package]]
+name = "git3"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "clock",
+ "collections",
+ "futures 0.3.28",
+ "git2",
+ "lazy_static",
+ "log",
+ "parking_lot 0.11.2",
+ "smol",
+ "sum_tree",
+ "text2",
+ "unindent",
+ "util",
+]
+
[[package]]
name = "glob"
version = "0.3.1"
@@ -4212,7 +4232,7 @@ dependencies = [
"settings2",
"shellexpand",
"util",
- "workspace",
+ "workspace2",
]
[[package]]
@@ -4345,7 +4365,7 @@ dependencies = [
"env_logger 0.9.3",
"futures 0.3.28",
"fuzzy2",
- "git",
+ "git3",
"globset",
"gpui2",
"indoc",
@@ -4366,7 +4386,7 @@ dependencies = [
"smallvec",
"smol",
"sum_tree",
- "text",
+ "text2",
"theme2",
"tree-sitter",
"tree-sitter-elixir",
@@ -5081,7 +5101,7 @@ dependencies = [
"ctor",
"env_logger 0.9.3",
"futures 0.3.28",
- "git",
+ "git3",
"gpui2",
"indoc",
"itertools 0.10.5",
@@ -5095,7 +5115,7 @@ dependencies = [
"project2",
"pulldown-cmark",
"rand 0.8.5",
- "rich_text",
+ "rich_text2",
"schemars",
"serde",
"serde_derive",
@@ -5104,7 +5124,7 @@ dependencies = [
"smol",
"snippet",
"sum_tree",
- "text",
+ "text2",
"theme2",
"tree-sitter",
"tree-sitter-html",
@@ -6284,8 +6304,8 @@ dependencies = [
"fsevent",
"futures 0.3.28",
"fuzzy2",
- "git",
"git2",
+ "git3",
"globset",
"gpui2",
"ignore",
@@ -6313,7 +6333,7 @@ dependencies = [
"sum_tree",
"tempdir",
"terminal2",
- "text",
+ "text2",
"thiserror",
"toml 0.5.11",
"unindent",
@@ -6927,6 +6947,24 @@ dependencies = [
"util",
]
+[[package]]
+name = "rich_text2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "collections",
+ "futures 0.3.28",
+ "gpui2",
+ "language2",
+ "lazy_static",
+ "pulldown-cmark",
+ "smallvec",
+ "smol",
+ "sum_tree",
+ "theme2",
+ "util",
+]
+
[[package]]
name = "ring"
version = "0.16.20"
@@ -7542,7 +7580,6 @@ dependencies = [
"collections",
"editor",
"futures 0.3.28",
- "globset",
"gpui",
"language",
"log",
@@ -8856,7 +8893,7 @@ name = "theme2"
version = "0.1.0"
dependencies = [
"anyhow",
- "fs",
+ "fs2",
"gpui2",
"indexmap 1.9.3",
"parking_lot 0.11.2",
@@ -9864,6 +9901,7 @@ dependencies = [
"dirs 3.0.2",
"futures 0.3.28",
"git2",
+ "globset",
"isahc",
"lazy_static",
"log",
@@ -17,7 +17,7 @@ db = { package = "db2", path = "../db2" }
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
rpc = { package = "rpc2", path = "../rpc2" }
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
settings = { package = "settings2", path = "../settings2" }
feature_flags = { package = "feature_flags2", path = "../feature_flags2" }
sum_tree = { path = "../sum_tree" }
@@ -10,7 +10,7 @@ path = "src/fs2.rs"
[dependencies]
collections = { path = "../collections" }
rope = { path = "../rope" }
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
util = { path = "../util" }
sum_tree = { path = "../sum_tree" }
@@ -0,0 +1,30 @@
+[package]
+# git2 was already taken.
+name = "git3"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/git.rs"
+
+[dependencies]
+anyhow.workspace = true
+clock = { path = "../clock" }
+lazy_static.workspace = true
+sum_tree = { path = "../sum_tree" }
+text = { package = "text2", path = "../text2" }
+collections = { path = "../collections" }
+util = { path = "../util" }
+log.workspace = true
+smol.workspace = true
+parking_lot.workspace = true
+async-trait.workspace = true
+futures.workspace = true
+git2.workspace = true
+
+[dev-dependencies]
+unindent.workspace = true
+
+[features]
+test-support = []
@@ -0,0 +1,412 @@
+use std::{iter, ops::Range};
+use sum_tree::SumTree;
+use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point};
+
+pub use git2 as libgit;
+use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum DiffHunkStatus {
+ Added,
+ Modified,
+ Removed,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct DiffHunk<T> {
+ pub buffer_range: Range<T>,
+ pub diff_base_byte_range: Range<usize>,
+}
+
+impl DiffHunk<u32> {
+ pub fn status(&self) -> DiffHunkStatus {
+ if self.diff_base_byte_range.is_empty() {
+ DiffHunkStatus::Added
+ } else if self.buffer_range.is_empty() {
+ DiffHunkStatus::Removed
+ } else {
+ DiffHunkStatus::Modified
+ }
+ }
+}
+
+impl sum_tree::Item for DiffHunk<Anchor> {
+ type Summary = DiffHunkSummary;
+
+ fn summary(&self) -> Self::Summary {
+ DiffHunkSummary {
+ buffer_range: self.buffer_range.clone(),
+ }
+ }
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct DiffHunkSummary {
+ buffer_range: Range<Anchor>,
+}
+
+impl sum_tree::Summary for DiffHunkSummary {
+ type Context = text::BufferSnapshot;
+
+ fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
+ self.buffer_range.start = self
+ .buffer_range
+ .start
+ .min(&other.buffer_range.start, buffer);
+ self.buffer_range.end = self.buffer_range.end.max(&other.buffer_range.end, buffer);
+ }
+}
+
+#[derive(Clone)]
+pub struct BufferDiff {
+ last_buffer_version: Option<clock::Global>,
+ tree: SumTree<DiffHunk<Anchor>>,
+}
+
+impl BufferDiff {
+ pub fn new() -> BufferDiff {
+ BufferDiff {
+ last_buffer_version: None,
+ tree: SumTree::new(),
+ }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.tree.is_empty()
+ }
+
+ pub fn hunks_in_row_range<'a>(
+ &'a self,
+ range: Range<u32>,
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let start = buffer.anchor_before(Point::new(range.start, 0));
+ let end = buffer.anchor_after(Point::new(range.end, 0));
+
+ self.hunks_intersecting_range(start..end, buffer)
+ }
+
+ pub fn hunks_intersecting_range<'a>(
+ &'a self,
+ range: Range<Anchor>,
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
+ let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
+ let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
+ !before_start && !after_end
+ });
+
+ let anchor_iter = std::iter::from_fn(move || {
+ cursor.next(buffer);
+ cursor.item()
+ })
+ .flat_map(move |hunk| {
+ [
+ (&hunk.buffer_range.start, hunk.diff_base_byte_range.start),
+ (&hunk.buffer_range.end, hunk.diff_base_byte_range.end),
+ ]
+ .into_iter()
+ });
+
+ let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
+ iter::from_fn(move || {
+ let (start_point, start_base) = summaries.next()?;
+ let (end_point, end_base) = summaries.next()?;
+
+ let end_row = if end_point.column > 0 {
+ end_point.row + 1
+ } else {
+ end_point.row
+ };
+
+ Some(DiffHunk {
+ buffer_range: start_point.row..end_row,
+ diff_base_byte_range: start_base..end_base,
+ })
+ })
+ }
+
+ pub fn hunks_intersecting_range_rev<'a>(
+ &'a self,
+ range: Range<Anchor>,
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
+ let before_start = summary.buffer_range.end.cmp(&range.start, buffer).is_lt();
+ let after_end = summary.buffer_range.start.cmp(&range.end, buffer).is_gt();
+ !before_start && !after_end
+ });
+
+ std::iter::from_fn(move || {
+ cursor.prev(buffer);
+
+ let hunk = cursor.item()?;
+ let range = hunk.buffer_range.to_point(buffer);
+ let end_row = if range.end.column > 0 {
+ range.end.row + 1
+ } else {
+ range.end.row
+ };
+
+ Some(DiffHunk {
+ buffer_range: range.start.row..end_row,
+ diff_base_byte_range: hunk.diff_base_byte_range.clone(),
+ })
+ })
+ }
+
+ pub fn clear(&mut self, buffer: &text::BufferSnapshot) {
+ self.last_buffer_version = Some(buffer.version().clone());
+ self.tree = SumTree::new();
+ }
+
+ pub async fn update(&mut self, diff_base: &str, buffer: &text::BufferSnapshot) {
+ let mut tree = SumTree::new();
+
+ let buffer_text = buffer.as_rope().to_string();
+ let patch = Self::diff(&diff_base, &buffer_text);
+
+ if let Some(patch) = patch {
+ let mut divergence = 0;
+ for hunk_index in 0..patch.num_hunks() {
+ let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
+ tree.push(hunk, buffer);
+ }
+ }
+
+ self.tree = tree;
+ self.last_buffer_version = Some(buffer.version().clone());
+ }
+
+ #[cfg(test)]
+ fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let start = text.anchor_before(Point::new(0, 0));
+ let end = text.anchor_after(Point::new(u32::MAX, u32::MAX));
+ self.hunks_intersecting_range(start..end, text)
+ }
+
+ fn diff<'a>(head: &'a str, current: &'a str) -> Option<GitPatch<'a>> {
+ let mut options = GitOptions::default();
+ options.context_lines(0);
+
+ let patch = GitPatch::from_buffers(
+ head.as_bytes(),
+ None,
+ current.as_bytes(),
+ None,
+ Some(&mut options),
+ );
+
+ match patch {
+ Ok(patch) => Some(patch),
+
+ Err(err) => {
+ log::error!("`GitPatch::from_buffers` failed: {}", err);
+ None
+ }
+ }
+ }
+
+ fn process_patch_hunk<'a>(
+ patch: &GitPatch<'a>,
+ hunk_index: usize,
+ buffer: &text::BufferSnapshot,
+ buffer_row_divergence: &mut i64,
+ ) -> DiffHunk<Anchor> {
+ let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap();
+ assert!(line_item_count > 0);
+
+ let mut first_deletion_buffer_row: Option<u32> = None;
+ let mut buffer_row_range: Option<Range<u32>> = None;
+ let mut diff_base_byte_range: Option<Range<usize>> = None;
+
+ for line_index in 0..line_item_count {
+ let line = patch.line_in_hunk(hunk_index, line_index).unwrap();
+ let kind = line.origin_value();
+ let content_offset = line.content_offset() as isize;
+ let content_len = line.content().len() as isize;
+
+ if kind == GitDiffLineType::Addition {
+ *buffer_row_divergence += 1;
+ let row = line.new_lineno().unwrap().saturating_sub(1);
+
+ match &mut buffer_row_range {
+ Some(buffer_row_range) => buffer_row_range.end = row + 1,
+ None => buffer_row_range = Some(row..row + 1),
+ }
+ }
+
+ if kind == GitDiffLineType::Deletion {
+ let end = content_offset + content_len;
+
+ match &mut diff_base_byte_range {
+ Some(head_byte_range) => head_byte_range.end = end as usize,
+ None => diff_base_byte_range = Some(content_offset as usize..end as usize),
+ }
+
+ if first_deletion_buffer_row.is_none() {
+ let old_row = line.old_lineno().unwrap().saturating_sub(1);
+ let row = old_row as i64 + *buffer_row_divergence;
+ first_deletion_buffer_row = Some(row as u32);
+ }
+
+ *buffer_row_divergence -= 1;
+ }
+ }
+
+ //unwrap_or deletion without addition
+ let buffer_row_range = buffer_row_range.unwrap_or_else(|| {
+ //we cannot have an addition-less hunk without deletion(s) or else there would be no hunk
+ let row = first_deletion_buffer_row.unwrap();
+ row..row
+ });
+
+ //unwrap_or addition without deletion
+ let diff_base_byte_range = diff_base_byte_range.unwrap_or(0..0);
+
+ let start = Point::new(buffer_row_range.start, 0);
+ let end = Point::new(buffer_row_range.end, 0);
+ let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end);
+ DiffHunk {
+ buffer_range,
+ diff_base_byte_range,
+ }
+ }
+}
+
+/// Range (crossing new lines), old, new
+#[cfg(any(test, feature = "test-support"))]
+#[track_caller]
+pub fn assert_hunks<Iter>(
+ diff_hunks: Iter,
+ buffer: &BufferSnapshot,
+ diff_base: &str,
+ expected_hunks: &[(Range<u32>, &str, &str)],
+) where
+ Iter: Iterator<Item = DiffHunk<u32>>,
+{
+ let actual_hunks = diff_hunks
+ .map(|hunk| {
+ (
+ hunk.buffer_range.clone(),
+ &diff_base[hunk.diff_base_byte_range],
+ buffer
+ .text_for_range(
+ Point::new(hunk.buffer_range.start, 0)
+ ..Point::new(hunk.buffer_range.end, 0),
+ )
+ .collect::<String>(),
+ )
+ })
+ .collect::<Vec<_>>();
+
+ let expected_hunks: Vec<_> = expected_hunks
+ .iter()
+ .map(|(r, s, h)| (r.clone(), *s, h.to_string()))
+ .collect();
+
+ assert_eq!(actual_hunks, expected_hunks);
+}
+
+#[cfg(test)]
+mod tests {
+ use std::assert_eq;
+
+ use super::*;
+ use text::Buffer;
+ use unindent::Unindent as _;
+
+ #[test]
+ fn test_buffer_diff_simple() {
+ let diff_base = "
+ one
+ two
+ three
+ "
+ .unindent();
+
+ let buffer_text = "
+ one
+ HELLO
+ three
+ "
+ .unindent();
+
+ let mut buffer = Buffer::new(0, 0, buffer_text);
+ let mut diff = BufferDiff::new();
+ smol::block_on(diff.update(&diff_base, &buffer));
+ assert_hunks(
+ diff.hunks(&buffer),
+ &buffer,
+ &diff_base,
+ &[(1..2, "two\n", "HELLO\n")],
+ );
+
+ buffer.edit([(0..0, "point five\n")]);
+ smol::block_on(diff.update(&diff_base, &buffer));
+ assert_hunks(
+ diff.hunks(&buffer),
+ &buffer,
+ &diff_base,
+ &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")],
+ );
+
+ diff.clear(&buffer);
+ assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]);
+ }
+
+ #[test]
+ fn test_buffer_diff_range() {
+ let diff_base = "
+ one
+ two
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "
+ .unindent();
+
+ let buffer_text = "
+ A
+ one
+ B
+ two
+ C
+ three
+ HELLO
+ four
+ five
+ SIXTEEN
+ seven
+ eight
+ WORLD
+ nine
+
+ ten
+
+ "
+ .unindent();
+
+ let buffer = Buffer::new(0, 0, buffer_text);
+ let mut diff = BufferDiff::new();
+ smol::block_on(diff.update(&diff_base, &buffer));
+ assert_eq!(diff.hunks(&buffer).count(), 8);
+
+ assert_hunks(
+ diff.hunks_in_row_range(7..12, &buffer),
+ &buffer,
+ &diff_base,
+ &[
+ (6..7, "", "HELLO\n"),
+ (9..10, "six\n", "SIXTEEN\n"),
+ (12..13, "", "WORLD\n"),
+ ],
+ );
+ }
+}
@@ -0,0 +1,11 @@
+use std::ffi::OsStr;
+
+pub use git2 as libgit;
+pub use lazy_static::lazy_static;
+
+pub mod diff;
+
+lazy_static! {
+ pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git");
+ pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
+}
@@ -12,7 +12,7 @@ doctest = false
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
-workspace = { path = "../workspace" }
+workspace2 = { path = "../workspace2" }
settings2 = { path = "../settings2" }
anyhow.workspace = true
@@ -9,7 +9,7 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use workspace::AppState;
+use workspace2::AppState;
// use zed::AppState;
// todo!();
@@ -59,7 +59,7 @@ pub fn init(_: Arc<AppState>, cx: &mut AppContext) {
// cx.add_global_action(move |_: &NewJournalEntry, cx| new_journal_entry(app_state.clone(), cx));
}
-pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
+pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut AppContext) {
let settings = JournalSettings::get_global(cx);
let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) {
Some(journal_dir) => journal_dir,
@@ -77,7 +77,7 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
let now = now.time();
let _entry_heading = heading_entry(now, &settings.hour_format);
- let _create_entry = cx.background_executor().spawn(async move {
+ let create_entry = cx.background_executor().spawn(async move {
std::fs::create_dir_all(month_dir)?;
OpenOptions::new()
.create(true)
@@ -86,37 +86,38 @@ pub fn new_journal_entry(_: Arc<AppState>, cx: &mut AppContext) {
Ok::<_, std::io::Error>((journal_dir, entry_path))
});
- // todo!("workspace")
- // cx.spawn(|cx| async move {
- // let (journal_dir, entry_path) = create_entry.await?;
- // let (workspace, _) =
- // cx.update(|cx| workspace::open_paths(&[journal_dir], &app_state, None, cx))?;
-
- // let opened = workspace
- // .update(&mut cx, |workspace, cx| {
- // workspace.open_paths(vec![entry_path], true, cx)
- // })?
- // .await;
-
- // if let Some(Some(Ok(item))) = opened.first() {
- // if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
- // editor.update(&mut cx, |editor, cx| {
- // let len = editor.buffer().read(cx).len(cx);
- // editor.change_selections(Some(Autoscroll::center()), cx, |s| {
- // s.select_ranges([len..len])
- // });
- // if len > 0 {
- // editor.insert("\n\n", cx);
- // }
- // editor.insert(&entry_heading, cx);
- // editor.insert("\n\n", cx);
- // })?;
- // }
- // }
-
- // anyhow::Ok(())
- // })
- // .detach_and_log_err(cx);
+ cx.spawn(|mut cx| async move {
+ let (journal_dir, entry_path) = create_entry.await?;
+ let (workspace, _) = cx
+ .update(|cx| workspace2::open_paths(&[journal_dir], &app_state, None, cx))?
+ .await?;
+
+ let _opened = workspace
+ .update(&mut cx, |workspace, cx| {
+ workspace.open_paths(vec![entry_path], true, cx)
+ })?
+ .await;
+
+ // todo!("editor")
+ // if let Some(Some(Ok(item))) = opened.first() {
+ // if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {
+ // editor.update(&mut cx, |editor, cx| {
+ // let len = editor.buffer().read(cx).len(cx);
+ // editor.change_selections(Some(Autoscroll::center()), cx, |s| {
+ // s.select_ranges([len..len])
+ // });
+ // if len > 0 {
+ // editor.insert("\n\n", cx);
+ // }
+ // editor.insert(&entry_heading, cx);
+ // editor.insert("\n\n", cx);
+ // })?;
+ // }
+ // }
+
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
}
fn journal_dir(path: &str) -> Option<PathBuf> {
@@ -25,13 +25,13 @@ test-support = [
clock = { path = "../clock" }
collections = { path = "../collections" }
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
-git = { path = "../git" }
+git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
lsp = { package = "lsp2", path = "../lsp2" }
rpc = { package = "rpc2", path = "../rpc2" }
settings = { package = "settings2", path = "../settings2" }
sum_tree = { path = "../sum_tree" }
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
@@ -64,7 +64,7 @@ client = { package = "client2", path = "../client2", features = ["test-support"]
collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
-text = { path = "../text", features = ["test-support"] }
+text = { package = "text2", path = "../text2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor.workspace = true
@@ -234,7 +234,6 @@ impl SyntaxMap {
self.snapshot.interpolate(text);
}
- #[allow(dead_code)] // todo!()
#[cfg(test)]
pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
self.snapshot
@@ -786,7 +785,6 @@ impl SyntaxSnapshot {
)
}
- #[allow(dead_code)] // todo!()
#[cfg(test)]
pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
self.layers_for_range(0..buffer.len(), buffer).collect()
@@ -23,15 +23,15 @@ test-support = [
client = { package = "client2", path = "../client2" }
clock = { path = "../clock" }
collections = { path = "../collections" }
-git = { path = "../git" }
+git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
lsp = { package = "lsp2", path = "../lsp2" }
-rich_text = { path = "../rich_text" }
+rich_text = { package = "rich_text2", path = "../rich_text2" }
settings = { package = "settings2", path = "../settings2" }
snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
@@ -60,7 +60,7 @@ tree-sitter-typescript = { workspace = true, optional = true }
[dev-dependencies]
copilot = { package = "copilot2", path = "../copilot2", features = ["test-support"] }
-text = { path = "../text", features = ["test-support"] }
+text = { package = "text2", path = "../text2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
@@ -1,9 +1,8 @@
-use std::collections::VecDeque;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use anyhow::Context;
-use collections::HashMap;
+use collections::{HashMap, HashSet};
use fs::Fs;
use gpui::{AsyncAppContext, ModelHandle};
use language::language_settings::language_settings;
@@ -11,7 +10,7 @@ use language::{Buffer, Diff};
use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize};
-use util::paths::DEFAULT_PRETTIER_DIR;
+use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR};
pub enum Prettier {
Real(RealPrettier),
@@ -20,7 +19,6 @@ pub enum Prettier {
}
pub struct RealPrettier {
- worktree_id: Option<usize>,
default: bool,
prettier_dir: PathBuf,
server: Arc<LanguageServer>,
@@ -28,17 +26,10 @@ pub struct RealPrettier {
#[cfg(any(test, feature = "test-support"))]
pub struct TestPrettier {
- worktree_id: Option<usize>,
prettier_dir: PathBuf,
default: bool,
}
-#[derive(Debug)]
-pub struct LocateStart {
- pub worktree_root_path: Arc<Path>,
- pub starting_path: Arc<Path>,
-}
-
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
const PRETTIER_PACKAGE_NAME: &str = "prettier";
@@ -63,79 +54,106 @@ impl Prettier {
".editorconfig",
];
- pub async fn locate(
- starting_path: Option<LocateStart>,
- fs: Arc<dyn Fs>,
- ) -> anyhow::Result<PathBuf> {
- fn is_node_modules(path_component: &std::path::Component<'_>) -> bool {
- path_component.as_os_str().to_string_lossy() == "node_modules"
+ pub async fn locate_prettier_installation(
+ fs: &dyn Fs,
+ installed_prettiers: &HashSet<PathBuf>,
+ locate_from: &Path,
+ ) -> anyhow::Result<Option<PathBuf>> {
+ let mut path_to_check = locate_from
+ .components()
+ .take_while(|component| component.as_os_str().to_string_lossy() != "node_modules")
+ .collect::<PathBuf>();
+ let path_to_check_metadata = fs
+ .metadata(&path_to_check)
+ .await
+ .with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))?
+ .with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?;
+ if !path_to_check_metadata.is_dir {
+ path_to_check.pop();
}
- let paths_to_check = match starting_path.as_ref() {
- Some(starting_path) => {
- let worktree_root = starting_path
- .worktree_root_path
- .components()
- .into_iter()
- .take_while(|path_component| !is_node_modules(path_component))
- .collect::<PathBuf>();
- if worktree_root != starting_path.worktree_root_path.as_ref() {
- vec![worktree_root]
+ let mut project_path_with_prettier_dependency = None;
+ loop {
+ if installed_prettiers.contains(&path_to_check) {
+ log::debug!("Found prettier path {path_to_check:?} in installed prettiers");
+ return Ok(Some(path_to_check));
+ } else if let Some(package_json_contents) =
+ read_package_json(fs, &path_to_check).await?
+ {
+ if has_prettier_in_package_json(&package_json_contents) {
+ if has_prettier_in_node_modules(fs, &path_to_check).await? {
+ log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules");
+ return Ok(Some(path_to_check));
+ } else if project_path_with_prettier_dependency.is_none() {
+ project_path_with_prettier_dependency = Some(path_to_check.clone());
+ }
} else {
- if starting_path.starting_path.as_ref() == Path::new("") {
- worktree_root
- .parent()
- .map(|path| vec![path.to_path_buf()])
- .unwrap_or_default()
- } else {
- let file_to_format = starting_path.starting_path.as_ref();
- let mut paths_to_check = VecDeque::new();
- let mut current_path = worktree_root;
- for path_component in file_to_format.components().into_iter() {
- let new_path = current_path.join(path_component);
- let old_path = std::mem::replace(&mut current_path, new_path);
- paths_to_check.push_front(old_path);
- if is_node_modules(&path_component) {
- break;
+ match package_json_contents.get("workspaces") {
+ Some(serde_json::Value::Array(workspaces)) => {
+ match &project_path_with_prettier_dependency {
+ Some(project_path_with_prettier_dependency) => {
+ let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix");
+ if workspaces.iter().filter_map(|value| {
+ if let serde_json::Value::String(s) = value {
+ Some(s.clone())
+ } else {
+ log::warn!("Skipping non-string 'workspaces' value: {value:?}");
+ None
+ }
+ }).any(|workspace_definition| {
+ if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
+ path_matcher.is_match(subproject_path)
+ } else {
+ workspace_definition == subproject_path.to_string_lossy()
+ }
+ }) {
+ anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules");
+ log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}");
+ return Ok(Some(path_to_check));
+ } else {
+ log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}");
+ }
+ }
+ None => {
+ log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json");
+ }
}
- }
- Vec::from(paths_to_check)
+ },
+ Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."),
+ None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"),
}
}
}
- None => Vec::new(),
- };
- match find_closest_prettier_dir(paths_to_check, fs.as_ref())
- .await
- .with_context(|| format!("finding prettier starting with {starting_path:?}"))?
- {
- Some(prettier_dir) => Ok(prettier_dir),
- None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()),
+ if !path_to_check.pop() {
+ match project_path_with_prettier_dependency {
+ Some(closest_prettier_discovered) => {
+ anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}")
+ }
+ None => {
+ log::debug!("Found no prettier in ancestors of {locate_from:?}");
+ return Ok(None);
+ }
+ }
+ }
}
}
#[cfg(any(test, feature = "test-support"))]
pub async fn start(
- worktree_id: Option<usize>,
_: LanguageServerId,
prettier_dir: PathBuf,
_: Arc<dyn NodeRuntime>,
_: AsyncAppContext,
) -> anyhow::Result<Self> {
- Ok(
- #[cfg(any(test, feature = "test-support"))]
- Self::Test(TestPrettier {
- worktree_id,
- default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
- prettier_dir,
- }),
- )
+ Ok(Self::Test(TestPrettier {
+ default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
+ prettier_dir,
+ }))
}
#[cfg(not(any(test, feature = "test-support")))]
pub async fn start(
- worktree_id: Option<usize>,
server_id: LanguageServerId,
prettier_dir: PathBuf,
node: Arc<dyn NodeRuntime>,
@@ -143,7 +161,7 @@ impl Prettier {
) -> anyhow::Result<Self> {
use lsp::LanguageServerBinary;
- let backgroud = cx.background();
+ let background = cx.background();
anyhow::ensure!(
prettier_dir.is_dir(),
"Prettier dir {prettier_dir:?} is not a directory"
@@ -154,7 +172,7 @@ impl Prettier {
"no prettier server package found at {prettier_server:?}"
);
- let node_path = backgroud
+ let node_path = background
.spawn(async move { node.binary_path().await })
.await?;
let server = LanguageServer::new(
@@ -169,12 +187,11 @@ impl Prettier {
cx,
)
.context("prettier server creation")?;
- let server = backgroud
+ let server = background
.spawn(server.initialize(None))
.await
.context("prettier server initialization")?;
Ok(Self::Real(RealPrettier {
- worktree_id,
server,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
@@ -340,62 +357,59 @@ impl Prettier {
Self::Test(test_prettier) => &test_prettier.prettier_dir,
}
}
+}
- pub fn worktree_id(&self) -> Option<usize> {
- match self {
- Self::Real(local) => local.worktree_id,
- #[cfg(any(test, feature = "test-support"))]
- Self::Test(test_prettier) => test_prettier.worktree_id,
- }
+async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
+ let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
+ if let Some(node_modules_location_metadata) = fs
+ .metadata(&possible_node_modules_location)
+ .await
+ .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
+ {
+ return Ok(node_modules_location_metadata.is_dir);
}
+ Ok(false)
}
-async fn find_closest_prettier_dir(
- paths_to_check: Vec<PathBuf>,
+async fn read_package_json(
fs: &dyn Fs,
-) -> anyhow::Result<Option<PathBuf>> {
- for path in paths_to_check {
- let possible_package_json = path.join("package.json");
- if let Some(package_json_metadata) = fs
- .metadata(&possible_package_json)
- .await
- .with_context(|| format!("Fetching metadata for {possible_package_json:?}"))?
- {
- if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
- let package_json_contents = fs
- .load(&possible_package_json)
- .await
- .with_context(|| format!("reading {possible_package_json:?} file contents"))?;
- if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
- &package_json_contents,
- ) {
- if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
- if o.contains_key(PRETTIER_PACKAGE_NAME) {
- return Ok(Some(path));
- }
- }
- if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
- {
- if o.contains_key(PRETTIER_PACKAGE_NAME) {
- return Ok(Some(path));
- }
- }
- }
- }
+ path: &Path,
+) -> anyhow::Result<Option<HashMap<String, serde_json::Value>>> {
+ let possible_package_json = path.join("package.json");
+ if let Some(package_json_metadata) = fs
+ .metadata(&possible_package_json)
+ .await
+ .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))?
+ {
+ if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
+ let package_json_contents = fs
+ .load(&possible_package_json)
+ .await
+ .with_context(|| format!("reading {possible_package_json:?} file contents"))?;
+ return serde_json::from_str::<HashMap<String, serde_json::Value>>(
+ &package_json_contents,
+ )
+ .map(Some)
+ .with_context(|| format!("parsing {possible_package_json:?} file contents"));
}
+ }
+ Ok(None)
+}
- let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
- if let Some(node_modules_location_metadata) = fs
- .metadata(&possible_node_modules_location)
- .await
- .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
- {
- if node_modules_location_metadata.is_dir {
- return Ok(Some(path));
- }
+fn has_prettier_in_package_json(
+ package_json_contents: &HashMap<String, serde_json::Value>,
+) -> bool {
+ if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") {
+ if o.contains_key(PRETTIER_PACKAGE_NAME) {
+ return true;
}
}
- Ok(None)
+ if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") {
+ if o.contains_key(PRETTIER_PACKAGE_NAME) {
+ return true;
+ }
+ }
+ false
}
enum Format {}
@@ -436,3 +450,316 @@ impl lsp::request::Request for ClearCache {
type Result = ();
const METHOD: &'static str = "prettier/clear_cache";
}
+
+#[cfg(test)]
+mod tests {
+ use fs::FakeFs;
+ use serde_json::json;
+
+ use super::*;
+
+ #[gpui::test]
+ async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".config": {
+ "zed": {
+ "settings.json": r#"{ "formatter": "auto" }"#,
+ },
+ },
+ "work": {
+ "project": {
+ "src": {
+ "index.js": "// index.js file contents",
+ },
+ "node_modules": {
+ "expect": {
+ "build": {
+ "print.js": "// print.js file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.5.1"
+ }
+ }"#,
+ },
+ "prettier": {
+ "index.js": "// Dummy prettier package file",
+ },
+ },
+ "package.json": r#"{}"#
+ },
+ }
+ }),
+ )
+ .await;
+
+ assert!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/.config/zed/settings.json"),
+ )
+ .await
+ .unwrap()
+ .is_none(),
+ "Should successfully find no prettier for path hierarchy without it"
+ );
+ assert!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/project/src/index.js")
+ )
+ .await
+ .unwrap()
+ .is_none(),
+ "Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it"
+ );
+ assert!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/project/node_modules/expect/build/print.js")
+ )
+ .await
+ .unwrap()
+ .is_none(),
+ "Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "web_blog": {
+ "node_modules": {
+ "prettier": {
+ "index.js": "// Dummy prettier package file",
+ },
+ "expect": {
+ "build": {
+ "print.js": "// print.js file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.5.1"
+ }
+ }"#,
+ },
+ },
+ "pages": {
+ "[slug].tsx": "// [slug].tsx file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.3.0"
+ },
+ "prettier": {
+ "semi": false,
+ "printWidth": 80,
+ "htmlWhitespaceSensitivity": "strict",
+ "tabWidth": 4
+ }
+ }"#
+ }
+ }),
+ )
+ .await;
+
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/web_blog/pages/[slug].tsx")
+ )
+ .await
+ .unwrap(),
+ Some(PathBuf::from("/root/web_blog")),
+ "Should find a preinstalled prettier in the project root"
+ );
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/web_blog/node_modules/expect/build/print.js")
+ )
+ .await
+ .unwrap(),
+ Some(PathBuf::from("/root/web_blog")),
+ "Should find a preinstalled prettier in the project root even for node_modules files"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "work": {
+ "web_blog": {
+ "pages": {
+ "[slug].tsx": "// [slug].tsx file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.3.0"
+ },
+ "prettier": {
+ "semi": false,
+ "printWidth": 80,
+ "htmlWhitespaceSensitivity": "strict",
+ "tabWidth": 4
+ }
+ }"#
+ }
+ }
+ }),
+ )
+ .await;
+
+ let path = "/root/work/web_blog/node_modules/pages/[slug].tsx";
+ match Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new(path)
+ )
+ .await {
+ Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
+ Err(e) => {
+ let message = e.to_string();
+ assert!(message.contains(path), "Error message should mention which start file was used for location");
+ assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents");
+ },
+ };
+
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::from_iter(
+ [PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter()
+ ),
+ Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx")
+ )
+ .await
+ .unwrap(),
+ Some(PathBuf::from("/root/work")),
+ "Should return first cached value found without path checks"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "work": {
+ "full-stack-foundations": {
+ "exercises": {
+ "03.loading": {
+ "01.problem.loader": {
+ "app": {
+ "routes": {
+ "users+": {
+ "$username_+": {
+ "notes.tsx": "// notes.tsx file contents",
+ },
+ },
+ },
+ },
+ "node_modules": {},
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "^3.0.3"
+ }
+ }"#
+ },
+ },
+ },
+ "package.json": r#"{
+ "workspaces": ["exercises/*/*", "examples/*"]
+ }"#,
+ "node_modules": {
+ "prettier": {
+ "index.js": "// Dummy prettier package file",
+ },
+ },
+ },
+ }
+ }),
+ )
+ .await;
+
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"),
+ ).await.unwrap(),
+ Some(PathBuf::from("/root/work/full-stack-foundations")),
+ "Should ascend to the multi-workspace root and find the prettier there",
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_in_npm_workspaces_for_not_installed(
+ cx: &mut gpui::TestAppContext,
+ ) {
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "work": {
+ "full-stack-foundations": {
+ "exercises": {
+ "03.loading": {
+ "01.problem.loader": {
+ "app": {
+ "routes": {
+ "users+": {
+ "$username_+": {
+ "notes.tsx": "// notes.tsx file contents",
+ },
+ },
+ },
+ },
+ "node_modules": {},
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "^3.0.3"
+ }
+ }"#
+ },
+ },
+ },
+ "package.json": r#"{
+ "workspaces": ["exercises/*/*", "examples/*"]
+ }"#,
+ },
+ }
+ }),
+ )
+ .await;
+
+ match Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx")
+ )
+ .await {
+ Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
+ Err(e) => {
+ let message = e.to_string();
+ assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
+ assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
+ },
+ };
+ }
+}
@@ -1,11 +1,13 @@
-const { Buffer } = require('buffer');
+const { Buffer } = require("buffer");
const fs = require("fs");
const path = require("path");
-const { once } = require('events');
+const { once } = require("events");
const prettierContainerPath = process.argv[2];
if (prettierContainerPath == null || prettierContainerPath.length == 0) {
- process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`);
+ process.stderr.write(
+ `Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`,
+ );
process.exit(1);
}
fs.stat(prettierContainerPath, (err, stats) => {
@@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => {
process.exit(1);
}
});
-const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier');
+const prettierPath = path.join(prettierContainerPath, "node_modules/prettier");
class Prettier {
constructor(path, prettier, config) {
@@ -34,7 +36,7 @@ class Prettier {
let config;
try {
prettier = await loadPrettier(prettierPath);
- config = await prettier.resolveConfig(prettierPath) || {};
+ config = (await prettier.resolveConfig(prettierPath)) || {};
} catch (e) {
process.stderr.write(`Failed to load prettier: ${e}\n`);
process.exit(1);
@@ -42,7 +44,7 @@ class Prettier {
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
process.stdin.resume();
handleBuffer(new Prettier(prettierPath, prettier, config));
-})()
+})();
async function handleBuffer(prettier) {
for await (const messageText of readStdin()) {
@@ -54,25 +56,29 @@ async function handleBuffer(prettier) {
continue;
}
// allow concurrent request handling by not `await`ing the message handling promise (async function)
- handleMessage(message, prettier).catch(e => {
+ handleMessage(message, prettier).catch((e) => {
const errorMessage = message;
if ((errorMessage.params || {}).text !== undefined) {
errorMessage.params.text = "..snip..";
}
- sendResponse({ id: message.id, ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`) }); });
+ sendResponse({
+ id: message.id,
+ ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`),
+ });
+ });
}
}
const headerSeparator = "\r\n";
-const contentLengthHeaderName = 'Content-Length';
+const contentLengthHeaderName = "Content-Length";
async function* readStdin() {
let buffer = Buffer.alloc(0);
let streamEnded = false;
- process.stdin.on('end', () => {
+ process.stdin.on("end", () => {
streamEnded = true;
});
- process.stdin.on('data', (data) => {
+ process.stdin.on("data", (data) => {
buffer = Buffer.concat([buffer, data]);
});
@@ -80,7 +86,7 @@ async function* readStdin() {
sendResponse(makeError(errorMessage));
buffer = Buffer.alloc(0);
messageLength = null;
- await once(process.stdin, 'readable');
+ await once(process.stdin, "readable");
streamEnded = false;
}
@@ -91,20 +97,25 @@ async function* readStdin() {
if (messageLength === null) {
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
if (streamEnded) {
- await handleStreamEnded('Unexpected end of stream: headers not found');
+ await handleStreamEnded("Unexpected end of stream: headers not found");
continue main_loop;
} else if (buffer.length > contentLengthHeaderName.length * 10) {
- await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`);
+ await handleStreamEnded(
+ `Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`,
+ );
continue main_loop;
}
- await once(process.stdin, 'readable');
+ await once(process.stdin, "readable");
}
- const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii');
- const contentLengthHeader = headers.split(headerSeparator)
- .map(header => header.split(':'))
- .filter(header => header[2] === undefined)
- .filter(header => (header[1] || '').length > 0)
- .find(header => (header[0] || '').trim() === contentLengthHeaderName);
+ const headers = buffer
+ .subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`))
+ .toString("ascii");
+ const contentLengthHeader = headers
+ .split(headerSeparator)
+ .map((header) => header.split(":"))
+ .filter((header) => header[2] === undefined)
+ .filter((header) => (header[1] || "").length > 0)
+ .find((header) => (header[0] || "").trim() === contentLengthHeaderName);
const contentLength = (contentLengthHeader || [])[1];
if (contentLength === undefined) {
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
@@ -114,13 +125,14 @@ async function* readStdin() {
messageLength = parseInt(contentLength, 10);
}
- while (buffer.length < (headersLength + messageLength)) {
+ while (buffer.length < headersLength + messageLength) {
if (streamEnded) {
await handleStreamEnded(
- `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`);
+ `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`,
+ );
continue main_loop;
}
- await once(process.stdin, 'readable');
+ await once(process.stdin, "readable");
}
const messageEnd = headersLength + messageLength;
@@ -128,12 +140,12 @@ async function* readStdin() {
buffer = buffer.subarray(messageEnd);
headersLength = null;
messageLength = null;
- yield message.toString('utf8');
+ yield message.toString("utf8");
}
} catch (e) {
sendResponse(makeError(`Error reading stdin: ${e}`));
} finally {
- process.stdin.off('data', () => { });
+ process.stdin.off("data", () => {});
}
}
@@ -146,7 +158,7 @@ async function handleMessage(message, prettier) {
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
}
- if (method === 'prettier/format') {
+ if (method === "prettier/format") {
if (params === undefined || params.text === undefined) {
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
}
@@ -156,7 +168,7 @@ async function handleMessage(message, prettier) {
let resolvedConfig = {};
if (params.options.filepath !== undefined) {
- resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {};
+ resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {};
}
const options = {
@@ -164,21 +176,25 @@ async function handleMessage(message, prettier) {
...resolvedConfig,
parser: params.options.parser,
plugins: params.options.plugins,
- path: params.options.filepath
+ path: params.options.filepath,
};
- process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`);
+ process.stderr.write(
+ `Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${
+ params.options.filepath || ""
+ }' with options: ${JSON.stringify(options)}\n`,
+ );
const formattedText = await prettier.prettier.format(params.text, options);
sendResponse({ id, result: { text: formattedText } });
- } else if (method === 'prettier/clear_cache') {
+ } else if (method === "prettier/clear_cache") {
prettier.prettier.clearConfigCache();
- prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {};
+ prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {};
sendResponse({ id, result: null });
- } else if (method === 'initialize') {
+ } else if (method === "initialize") {
sendResponse({
- id: id || 0,
+ id,
result: {
- "capabilities": {}
- }
+ capabilities: {},
+ },
});
} else {
throw new Error(`Unknown method: ${method}`);
@@ -188,18 +204,20 @@ async function handleMessage(message, prettier) {
function makeError(message) {
return {
error: {
- "code": -32600, // invalid request code
+ code: -32600, // invalid request code
message,
- }
+ },
};
}
function sendResponse(response) {
const responsePayloadString = JSON.stringify({
jsonrpc: "2.0",
- ...response
+ ...response,
});
- const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`;
+ const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(
+ responsePayloadString,
+ )}${headerSeparator}${headerSeparator}`;
process.stdout.write(headers + responsePayloadString);
}
@@ -1,5 +1,5 @@
use anyhow::Context;
-use collections::HashMap;
+use collections::{HashMap, HashSet};
use fs::Fs;
use gpui::{AsyncAppContext, Model};
use language::{language_settings::language_settings, Buffer, Diff};
@@ -7,11 +7,10 @@ use lsp::{LanguageServer, LanguageServerId};
use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize};
use std::{
- collections::VecDeque,
path::{Path, PathBuf},
sync::Arc,
};
-use util::paths::DEFAULT_PRETTIER_DIR;
+use util::paths::{PathMatcher, DEFAULT_PRETTIER_DIR};
pub enum Prettier {
Real(RealPrettier),
@@ -20,7 +19,6 @@ pub enum Prettier {
}
pub struct RealPrettier {
- worktree_id: Option<usize>,
default: bool,
prettier_dir: PathBuf,
server: Arc<LanguageServer>,
@@ -28,17 +26,10 @@ pub struct RealPrettier {
#[cfg(any(test, feature = "test-support"))]
pub struct TestPrettier {
- worktree_id: Option<usize>,
prettier_dir: PathBuf,
default: bool,
}
-#[derive(Debug)]
-pub struct LocateStart {
- pub worktree_root_path: Arc<Path>,
- pub starting_path: Arc<Path>,
-}
-
pub const PRETTIER_SERVER_FILE: &str = "prettier_server.js";
pub const PRETTIER_SERVER_JS: &str = include_str!("./prettier_server.js");
const PRETTIER_PACKAGE_NAME: &str = "prettier";
@@ -63,79 +54,106 @@ impl Prettier {
".editorconfig",
];
- pub async fn locate(
- starting_path: Option<LocateStart>,
- fs: Arc<dyn Fs>,
- ) -> anyhow::Result<PathBuf> {
- fn is_node_modules(path_component: &std::path::Component<'_>) -> bool {
- path_component.as_os_str().to_string_lossy() == "node_modules"
+ pub async fn locate_prettier_installation(
+ fs: &dyn Fs,
+ installed_prettiers: &HashSet<PathBuf>,
+ locate_from: &Path,
+ ) -> anyhow::Result<Option<PathBuf>> {
+ let mut path_to_check = locate_from
+ .components()
+ .take_while(|component| component.as_os_str().to_string_lossy() != "node_modules")
+ .collect::<PathBuf>();
+ let path_to_check_metadata = fs
+ .metadata(&path_to_check)
+ .await
+ .with_context(|| format!("failed to get metadata for initial path {path_to_check:?}"))?
+ .with_context(|| format!("empty metadata for initial path {path_to_check:?}"))?;
+ if !path_to_check_metadata.is_dir {
+ path_to_check.pop();
}
- let paths_to_check = match starting_path.as_ref() {
- Some(starting_path) => {
- let worktree_root = starting_path
- .worktree_root_path
- .components()
- .into_iter()
- .take_while(|path_component| !is_node_modules(path_component))
- .collect::<PathBuf>();
- if worktree_root != starting_path.worktree_root_path.as_ref() {
- vec![worktree_root]
+ let mut project_path_with_prettier_dependency = None;
+ loop {
+ if installed_prettiers.contains(&path_to_check) {
+ log::debug!("Found prettier path {path_to_check:?} in installed prettiers");
+ return Ok(Some(path_to_check));
+ } else if let Some(package_json_contents) =
+ read_package_json(fs, &path_to_check).await?
+ {
+ if has_prettier_in_package_json(&package_json_contents) {
+ if has_prettier_in_node_modules(fs, &path_to_check).await? {
+ log::debug!("Found prettier path {path_to_check:?} in both package.json and node_modules");
+ return Ok(Some(path_to_check));
+ } else if project_path_with_prettier_dependency.is_none() {
+ project_path_with_prettier_dependency = Some(path_to_check.clone());
+ }
} else {
- if starting_path.starting_path.as_ref() == Path::new("") {
- worktree_root
- .parent()
- .map(|path| vec![path.to_path_buf()])
- .unwrap_or_default()
- } else {
- let file_to_format = starting_path.starting_path.as_ref();
- let mut paths_to_check = VecDeque::new();
- let mut current_path = worktree_root;
- for path_component in file_to_format.components().into_iter() {
- let new_path = current_path.join(path_component);
- let old_path = std::mem::replace(&mut current_path, new_path);
- paths_to_check.push_front(old_path);
- if is_node_modules(&path_component) {
- break;
- }
+ match package_json_contents.get("workspaces") {
+ Some(serde_json::Value::Array(workspaces)) => {
+ match &project_path_with_prettier_dependency {
+ Some(project_path_with_prettier_dependency) => {
+ let subproject_path = project_path_with_prettier_dependency.strip_prefix(&path_to_check).expect("traversing path parents, should be able to strip prefix");
+ if workspaces.iter().filter_map(|value| {
+ if let serde_json::Value::String(s) = value {
+ Some(s.clone())
+ } else {
+ log::warn!("Skipping non-string 'workspaces' value: {value:?}");
+ None
+ }
+ }).any(|workspace_definition| {
+ if let Some(path_matcher) = PathMatcher::new(&workspace_definition).ok() {
+ path_matcher.is_match(subproject_path)
+ } else {
+ workspace_definition == subproject_path.to_string_lossy()
+ }
+ }) {
+ anyhow::ensure!(has_prettier_in_node_modules(fs, &path_to_check).await?, "Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}, but it's not installed into workspace root's node_modules");
+ log::info!("Found prettier path {path_to_check:?} in the workspace root for project in {project_path_with_prettier_dependency:?}");
+ return Ok(Some(path_to_check));
+ } else {
+ log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but is not included in its package.json workspaces {workspaces:?}");
+ }
+ }
+ None => {
+ log::warn!("Skipping path {path_to_check:?} that has prettier in its 'node_modules' subdirectory, but has no prettier in its package.json");
+ }
+ }
+ },
+ Some(unknown) => log::error!("Failed to parse workspaces for {path_to_check:?} from package.json, got {unknown:?}. Skipping."),
+ None => log::warn!("Skipping path {path_to_check:?} that has no prettier dependency and no workspaces section in its package.json"),
}
- Vec::from(paths_to_check)
- }
}
}
- None => Vec::new(),
- };
- match find_closest_prettier_dir(paths_to_check, fs.as_ref())
- .await
- .with_context(|| format!("finding prettier starting with {starting_path:?}"))?
- {
- Some(prettier_dir) => Ok(prettier_dir),
- None => Ok(DEFAULT_PRETTIER_DIR.to_path_buf()),
+ if !path_to_check.pop() {
+ match project_path_with_prettier_dependency {
+ Some(closest_prettier_discovered) => {
+ anyhow::bail!("No prettier found in node_modules for ancestors of {locate_from:?}, but discovered prettier package.json dependency in {closest_prettier_discovered:?}")
+ }
+ None => {
+ log::debug!("Found no prettier in ancestors of {locate_from:?}");
+ return Ok(None);
+ }
+ }
+ }
}
}
#[cfg(any(test, feature = "test-support"))]
pub async fn start(
- worktree_id: Option<usize>,
_: LanguageServerId,
prettier_dir: PathBuf,
_: Arc<dyn NodeRuntime>,
_: AsyncAppContext,
) -> anyhow::Result<Self> {
- Ok(
- #[cfg(any(test, feature = "test-support"))]
- Self::Test(TestPrettier {
- worktree_id,
- default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
- prettier_dir,
- }),
- )
+ Ok(Self::Test(TestPrettier {
+ default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
+ prettier_dir,
+ }))
}
#[cfg(not(any(test, feature = "test-support")))]
pub async fn start(
- worktree_id: Option<usize>,
server_id: LanguageServerId,
prettier_dir: PathBuf,
node: Arc<dyn NodeRuntime>,
@@ -174,7 +192,6 @@ impl Prettier {
.await
.context("prettier server initialization")?;
Ok(Self::Real(RealPrettier {
- worktree_id,
server,
default: prettier_dir == DEFAULT_PRETTIER_DIR.as_path(),
prettier_dir,
@@ -370,62 +387,59 @@ impl Prettier {
Self::Test(test_prettier) => &test_prettier.prettier_dir,
}
}
+}
- pub fn worktree_id(&self) -> Option<usize> {
- match self {
- Self::Real(local) => local.worktree_id,
- #[cfg(any(test, feature = "test-support"))]
- Self::Test(test_prettier) => test_prettier.worktree_id,
- }
+async fn has_prettier_in_node_modules(fs: &dyn Fs, path: &Path) -> anyhow::Result<bool> {
+ let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
+ if let Some(node_modules_location_metadata) = fs
+ .metadata(&possible_node_modules_location)
+ .await
+ .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
+ {
+ return Ok(node_modules_location_metadata.is_dir);
}
+ Ok(false)
}
-async fn find_closest_prettier_dir(
- paths_to_check: Vec<PathBuf>,
+async fn read_package_json(
fs: &dyn Fs,
-) -> anyhow::Result<Option<PathBuf>> {
- for path in paths_to_check {
- let possible_package_json = path.join("package.json");
- if let Some(package_json_metadata) = fs
- .metadata(&possible_package_json)
- .await
- .with_context(|| format!("Fetching metadata for {possible_package_json:?}"))?
- {
- if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
- let package_json_contents = fs
- .load(&possible_package_json)
- .await
- .with_context(|| format!("reading {possible_package_json:?} file contents"))?;
- if let Ok(json_contents) = serde_json::from_str::<HashMap<String, serde_json::Value>>(
- &package_json_contents,
- ) {
- if let Some(serde_json::Value::Object(o)) = json_contents.get("dependencies") {
- if o.contains_key(PRETTIER_PACKAGE_NAME) {
- return Ok(Some(path));
- }
- }
- if let Some(serde_json::Value::Object(o)) = json_contents.get("devDependencies")
- {
- if o.contains_key(PRETTIER_PACKAGE_NAME) {
- return Ok(Some(path));
- }
- }
- }
- }
+ path: &Path,
+) -> anyhow::Result<Option<HashMap<String, serde_json::Value>>> {
+ let possible_package_json = path.join("package.json");
+ if let Some(package_json_metadata) = fs
+ .metadata(&possible_package_json)
+ .await
+ .with_context(|| format!("fetching metadata for package json {possible_package_json:?}"))?
+ {
+ if !package_json_metadata.is_dir && !package_json_metadata.is_symlink {
+ let package_json_contents = fs
+ .load(&possible_package_json)
+ .await
+ .with_context(|| format!("reading {possible_package_json:?} file contents"))?;
+ return serde_json::from_str::<HashMap<String, serde_json::Value>>(
+ &package_json_contents,
+ )
+ .map(Some)
+ .with_context(|| format!("parsing {possible_package_json:?} file contents"));
}
+ }
+ Ok(None)
+}
- let possible_node_modules_location = path.join("node_modules").join(PRETTIER_PACKAGE_NAME);
- if let Some(node_modules_location_metadata) = fs
- .metadata(&possible_node_modules_location)
- .await
- .with_context(|| format!("fetching metadata for {possible_node_modules_location:?}"))?
- {
- if node_modules_location_metadata.is_dir {
- return Ok(Some(path));
- }
+fn has_prettier_in_package_json(
+ package_json_contents: &HashMap<String, serde_json::Value>,
+) -> bool {
+ if let Some(serde_json::Value::Object(o)) = package_json_contents.get("dependencies") {
+ if o.contains_key(PRETTIER_PACKAGE_NAME) {
+ return true;
}
}
- Ok(None)
+ if let Some(serde_json::Value::Object(o)) = package_json_contents.get("devDependencies") {
+ if o.contains_key(PRETTIER_PACKAGE_NAME) {
+ return true;
+ }
+ }
+ false
}
enum Format {}
@@ -466,3 +480,316 @@ impl lsp::request::Request for ClearCache {
type Result = ();
const METHOD: &'static str = "prettier/clear_cache";
}
+
+#[cfg(test)]
+mod tests {
+ use fs::FakeFs;
+ use serde_json::json;
+
+ use super::*;
+
+ #[gpui::test]
+ async fn test_prettier_lookup_finds_nothing(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ ".config": {
+ "zed": {
+ "settings.json": r#"{ "formatter": "auto" }"#,
+ },
+ },
+ "work": {
+ "project": {
+ "src": {
+ "index.js": "// index.js file contents",
+ },
+ "node_modules": {
+ "expect": {
+ "build": {
+ "print.js": "// print.js file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.5.1"
+ }
+ }"#,
+ },
+ "prettier": {
+ "index.js": "// Dummy prettier package file",
+ },
+ },
+ "package.json": r#"{}"#
+ },
+ }
+ }),
+ )
+ .await;
+
+ assert!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/.config/zed/settings.json"),
+ )
+ .await
+ .unwrap()
+ .is_none(),
+ "Should successfully find no prettier for path hierarchy without it"
+ );
+ assert!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/project/src/index.js")
+ )
+ .await
+ .unwrap()
+ .is_none(),
+ "Should successfully find no prettier for path hierarchy that has node_modules with prettier, but no package.json mentions of it"
+ );
+ assert!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/project/node_modules/expect/build/print.js")
+ )
+ .await
+ .unwrap()
+ .is_none(),
+ "Even though it has package.json with prettier in it and no prettier on node_modules along the path, nothing should fail since declared inside node_modules"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_in_simple_npm_projects(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "web_blog": {
+ "node_modules": {
+ "prettier": {
+ "index.js": "// Dummy prettier package file",
+ },
+ "expect": {
+ "build": {
+ "print.js": "// print.js file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.5.1"
+ }
+ }"#,
+ },
+ },
+ "pages": {
+ "[slug].tsx": "// [slug].tsx file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.3.0"
+ },
+ "prettier": {
+ "semi": false,
+ "printWidth": 80,
+ "htmlWhitespaceSensitivity": "strict",
+ "tabWidth": 4
+ }
+ }"#
+ }
+ }),
+ )
+ .await;
+
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/web_blog/pages/[slug].tsx")
+ )
+ .await
+ .unwrap(),
+ Some(PathBuf::from("/root/web_blog")),
+ "Should find a preinstalled prettier in the project root"
+ );
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/web_blog/node_modules/expect/build/print.js")
+ )
+ .await
+ .unwrap(),
+ Some(PathBuf::from("/root/web_blog")),
+ "Should find a preinstalled prettier in the project root even for node_modules files"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_for_not_installed(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "work": {
+ "web_blog": {
+ "pages": {
+ "[slug].tsx": "// [slug].tsx file contents",
+ },
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "2.3.0"
+ },
+ "prettier": {
+ "semi": false,
+ "printWidth": 80,
+ "htmlWhitespaceSensitivity": "strict",
+ "tabWidth": 4
+ }
+ }"#
+ }
+ }
+ }),
+ )
+ .await;
+
+ let path = "/root/work/web_blog/node_modules/pages/[slug].tsx";
+ match Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new(path)
+ )
+ .await {
+ Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
+ Err(e) => {
+ let message = e.to_string();
+ assert!(message.contains(path), "Error message should mention which start file was used for location");
+ assert!(message.contains("/root/work/web_blog"), "Error message should mention potential candidates without prettier node_modules contents");
+ },
+ };
+
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::from_iter(
+ [PathBuf::from("/root"), PathBuf::from("/root/work")].into_iter()
+ ),
+ Path::new("/root/work/web_blog/node_modules/pages/[slug].tsx")
+ )
+ .await
+ .unwrap(),
+ Some(PathBuf::from("/root/work")),
+ "Should return first cached value found without path checks"
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_in_npm_workspaces(cx: &mut gpui::TestAppContext) {
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "work": {
+ "full-stack-foundations": {
+ "exercises": {
+ "03.loading": {
+ "01.problem.loader": {
+ "app": {
+ "routes": {
+ "users+": {
+ "$username_+": {
+ "notes.tsx": "// notes.tsx file contents",
+ },
+ },
+ },
+ },
+ "node_modules": {},
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "^3.0.3"
+ }
+ }"#
+ },
+ },
+ },
+ "package.json": r#"{
+ "workspaces": ["exercises/*/*", "examples/*"]
+ }"#,
+ "node_modules": {
+ "prettier": {
+ "index.js": "// Dummy prettier package file",
+ },
+ },
+ },
+ }
+ }),
+ )
+ .await;
+
+ assert_eq!(
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx"),
+ ).await.unwrap(),
+ Some(PathBuf::from("/root/work/full-stack-foundations")),
+ "Should ascend to the multi-workspace root and find the prettier there",
+ );
+ }
+
+ #[gpui::test]
+ async fn test_prettier_lookup_in_npm_workspaces_for_not_installed(
+ cx: &mut gpui::TestAppContext,
+ ) {
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/root",
+ json!({
+ "work": {
+ "full-stack-foundations": {
+ "exercises": {
+ "03.loading": {
+ "01.problem.loader": {
+ "app": {
+ "routes": {
+ "users+": {
+ "$username_+": {
+ "notes.tsx": "// notes.tsx file contents",
+ },
+ },
+ },
+ },
+ "node_modules": {},
+ "package.json": r#"{
+ "devDependencies": {
+ "prettier": "^3.0.3"
+ }
+ }"#
+ },
+ },
+ },
+ "package.json": r#"{
+ "workspaces": ["exercises/*/*", "examples/*"]
+ }"#,
+ },
+ }
+ }),
+ )
+ .await;
+
+ match Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &HashSet::default(),
+ Path::new("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader/app/routes/users+/$username_+/notes.tsx")
+ )
+ .await {
+ Ok(path) => panic!("Expected to fail for prettier in package.json but not in node_modules found, but got path {path:?}"),
+ Err(e) => {
+ let message = e.to_string();
+ assert!(message.contains("/root/work/full-stack-foundations/exercises/03.loading/01.problem.loader"), "Error message should mention which project had prettier defined");
+ assert!(message.contains("/root/work/full-stack-foundations"), "Error message should mention potential candidates without prettier node_modules contents");
+ },
+ };
+ }
+}
@@ -1,11 +1,13 @@
-const { Buffer } = require('buffer');
+const { Buffer } = require("buffer");
const fs = require("fs");
const path = require("path");
-const { once } = require('events');
+const { once } = require("events");
const prettierContainerPath = process.argv[2];
if (prettierContainerPath == null || prettierContainerPath.length == 0) {
- process.stderr.write(`Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`);
+ process.stderr.write(
+ `Prettier path argument was not specified or empty.\nUsage: ${process.argv[0]} ${process.argv[1]} prettier/path\n`,
+ );
process.exit(1);
}
fs.stat(prettierContainerPath, (err, stats) => {
@@ -19,7 +21,7 @@ fs.stat(prettierContainerPath, (err, stats) => {
process.exit(1);
}
});
-const prettierPath = path.join(prettierContainerPath, 'node_modules/prettier');
+const prettierPath = path.join(prettierContainerPath, "node_modules/prettier");
class Prettier {
constructor(path, prettier, config) {
@@ -34,7 +36,7 @@ class Prettier {
let config;
try {
prettier = await loadPrettier(prettierPath);
- config = await prettier.resolveConfig(prettierPath) || {};
+ config = (await prettier.resolveConfig(prettierPath)) || {};
} catch (e) {
process.stderr.write(`Failed to load prettier: ${e}\n`);
process.exit(1);
@@ -42,7 +44,7 @@ class Prettier {
process.stderr.write(`Prettier at path '${prettierPath}' loaded successfully, config: ${JSON.stringify(config)}\n`);
process.stdin.resume();
handleBuffer(new Prettier(prettierPath, prettier, config));
-})()
+})();
async function handleBuffer(prettier) {
for await (const messageText of readStdin()) {
@@ -54,22 +56,29 @@ async function handleBuffer(prettier) {
continue;
}
// allow concurrent request handling by not `await`ing the message handling promise (async function)
- handleMessage(message, prettier).catch(e => {
- sendResponse({ id: message.id, ...makeError(`error during message handling: ${e}`) });
+ handleMessage(message, prettier).catch((e) => {
+ const errorMessage = message;
+ if ((errorMessage.params || {}).text !== undefined) {
+ errorMessage.params.text = "..snip..";
+ }
+ sendResponse({
+ id: message.id,
+ ...makeError(`error during message '${JSON.stringify(errorMessage)}' handling: ${e}`),
+ });
});
}
}
const headerSeparator = "\r\n";
-const contentLengthHeaderName = 'Content-Length';
+const contentLengthHeaderName = "Content-Length";
async function* readStdin() {
let buffer = Buffer.alloc(0);
let streamEnded = false;
- process.stdin.on('end', () => {
+ process.stdin.on("end", () => {
streamEnded = true;
});
- process.stdin.on('data', (data) => {
+ process.stdin.on("data", (data) => {
buffer = Buffer.concat([buffer, data]);
});
@@ -77,7 +86,7 @@ async function* readStdin() {
sendResponse(makeError(errorMessage));
buffer = Buffer.alloc(0);
messageLength = null;
- await once(process.stdin, 'readable');
+ await once(process.stdin, "readable");
streamEnded = false;
}
@@ -88,20 +97,25 @@ async function* readStdin() {
if (messageLength === null) {
while (buffer.indexOf(`${headerSeparator}${headerSeparator}`) === -1) {
if (streamEnded) {
- await handleStreamEnded('Unexpected end of stream: headers not found');
+ await handleStreamEnded("Unexpected end of stream: headers not found");
continue main_loop;
} else if (buffer.length > contentLengthHeaderName.length * 10) {
- await handleStreamEnded(`Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`);
+ await handleStreamEnded(
+ `Unexpected stream of bytes: no headers end found after ${buffer.length} bytes of input`,
+ );
continue main_loop;
}
- await once(process.stdin, 'readable');
+ await once(process.stdin, "readable");
}
- const headers = buffer.subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`)).toString('ascii');
- const contentLengthHeader = headers.split(headerSeparator)
- .map(header => header.split(':'))
- .filter(header => header[2] === undefined)
- .filter(header => (header[1] || '').length > 0)
- .find(header => (header[0] || '').trim() === contentLengthHeaderName);
+ const headers = buffer
+ .subarray(0, buffer.indexOf(`${headerSeparator}${headerSeparator}`))
+ .toString("ascii");
+ const contentLengthHeader = headers
+ .split(headerSeparator)
+ .map((header) => header.split(":"))
+ .filter((header) => header[2] === undefined)
+ .filter((header) => (header[1] || "").length > 0)
+ .find((header) => (header[0] || "").trim() === contentLengthHeaderName);
const contentLength = (contentLengthHeader || [])[1];
if (contentLength === undefined) {
await handleStreamEnded(`Missing or incorrect ${contentLengthHeaderName} header: ${headers}`);
@@ -111,13 +125,14 @@ async function* readStdin() {
messageLength = parseInt(contentLength, 10);
}
- while (buffer.length < (headersLength + messageLength)) {
+ while (buffer.length < headersLength + messageLength) {
if (streamEnded) {
await handleStreamEnded(
- `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`);
+ `Unexpected end of stream: buffer length ${buffer.length} does not match expected header length ${headersLength} + body length ${messageLength}`,
+ );
continue main_loop;
}
- await once(process.stdin, 'readable');
+ await once(process.stdin, "readable");
}
const messageEnd = headersLength + messageLength;
@@ -125,12 +140,12 @@ async function* readStdin() {
buffer = buffer.subarray(messageEnd);
headersLength = null;
messageLength = null;
- yield message.toString('utf8');
+ yield message.toString("utf8");
}
} catch (e) {
sendResponse(makeError(`Error reading stdin: ${e}`));
} finally {
- process.stdin.off('data', () => { });
+ process.stdin.off("data", () => {});
}
}
@@ -143,7 +158,7 @@ async function handleMessage(message, prettier) {
throw new Error(`Message id is undefined: ${JSON.stringify(message)}`);
}
- if (method === 'prettier/format') {
+ if (method === "prettier/format") {
if (params === undefined || params.text === undefined) {
throw new Error(`Message params.text is undefined: ${JSON.stringify(message)}`);
}
@@ -153,7 +168,7 @@ async function handleMessage(message, prettier) {
let resolvedConfig = {};
if (params.options.filepath !== undefined) {
- resolvedConfig = await prettier.prettier.resolveConfig(params.options.filepath) || {};
+ resolvedConfig = (await prettier.prettier.resolveConfig(params.options.filepath)) || {};
}
const options = {
@@ -161,21 +176,25 @@ async function handleMessage(message, prettier) {
...resolvedConfig,
parser: params.options.parser,
plugins: params.options.plugins,
- path: params.options.filepath
+ path: params.options.filepath,
};
- process.stderr.write(`Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${params.options.filepath || ''}' with options: ${JSON.stringify(options)}\n`);
+ process.stderr.write(
+ `Resolved config: ${JSON.stringify(resolvedConfig)}, will format file '${
+ params.options.filepath || ""
+ }' with options: ${JSON.stringify(options)}\n`,
+ );
const formattedText = await prettier.prettier.format(params.text, options);
sendResponse({ id, result: { text: formattedText } });
- } else if (method === 'prettier/clear_cache') {
+ } else if (method === "prettier/clear_cache") {
prettier.prettier.clearConfigCache();
- prettier.config = await prettier.prettier.resolveConfig(prettier.path) || {};
+ prettier.config = (await prettier.prettier.resolveConfig(prettier.path)) || {};
sendResponse({ id, result: null });
- } else if (method === 'initialize') {
+ } else if (method === "initialize") {
sendResponse({
id,
result: {
- "capabilities": {}
- }
+ capabilities: {},
+ },
});
} else {
throw new Error(`Unknown method: ${method}`);
@@ -185,18 +204,20 @@ async function handleMessage(message, prettier) {
function makeError(message) {
return {
error: {
- "code": -32600, // invalid request code
+ code: -32600, // invalid request code
message,
- }
+ },
};
}
function sendResponse(response) {
const responsePayloadString = JSON.stringify({
jsonrpc: "2.0",
- ...response
+ ...response,
});
- const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(responsePayloadString)}${headerSeparator}${headerSeparator}`;
+ const headers = `${contentLengthHeaderName}: ${Buffer.byteLength(
+ responsePayloadString,
+ )}${headerSeparator}${headerSeparator}`;
process.stdout.write(headers + responsePayloadString);
}
@@ -54,7 +54,7 @@ use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use postage::watch;
-use prettier::{LocateStart, Prettier};
+use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
@@ -82,8 +82,11 @@ use std::{
use terminals::Terminals;
use text::Anchor;
use util::{
- debug_panic, defer, http::HttpClient, merge_json_value_into,
- paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
+ debug_panic, defer,
+ http::HttpClient,
+ merge_json_value_into,
+ paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
+ post_inc, ResultExt, TryFutureExt as _,
};
pub use fs::*;
@@ -162,17 +165,15 @@ pub struct Project {
copilot_log_subscription: Option<lsp::Subscription>,
current_lsp_settings: HashMap<Arc<str>, LspSettings>,
node: Option<Arc<dyn NodeRuntime>>,
- #[cfg(not(any(test, feature = "test-support")))]
default_prettier: Option<DefaultPrettier>,
- prettier_instances: HashMap<
- (Option<WorktreeId>, PathBuf),
- Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
- >,
+ prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
+ prettier_instances: HashMap<PathBuf, Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
}
-#[cfg(not(any(test, feature = "test-support")))]
struct DefaultPrettier {
- installation_process: Option<Shared<Task<()>>>,
+ instance: Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
+ installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
+ #[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet<&'static str>,
}
@@ -685,8 +686,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
node: Some(node),
- #[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
+ prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
}
})
@@ -786,8 +787,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
node: None,
- #[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
+ prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
};
for worktree in worktrees {
@@ -924,8 +925,7 @@ impl Project {
}
for (worktree, language, settings) in language_formatters_to_check {
- self.install_default_formatters(worktree, &language, &settings, cx)
- .detach_and_log_err(cx);
+ self.install_default_formatters(worktree, &language, &settings, cx);
}
// Start all the newly-enabled language servers.
@@ -2681,20 +2681,7 @@ impl Project {
let buffer_file = File::from_dyn(buffer_file.as_ref());
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
- let task_buffer = buffer.clone();
- let prettier_installation_task =
- self.install_default_formatters(worktree, &new_language, &settings, cx);
- cx.spawn(|project, mut cx| async move {
- prettier_installation_task.await?;
- let _ = project
- .update(&mut cx, |project, cx| {
- project.prettier_instance_for_buffer(&task_buffer, cx)
- })
- .await;
- anyhow::Ok(())
- })
- .detach_and_log_err(cx);
-
+ self.install_default_formatters(worktree, &new_language, &settings, cx);
if let Some(file) = buffer_file {
let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() {
@@ -4029,7 +4016,7 @@ impl Project {
}
pub fn format(
- &self,
+ &mut self,
buffers: HashSet<ModelHandle<Buffer>>,
push_to_history: bool,
trigger: FormatTrigger,
@@ -4049,10 +4036,10 @@ impl Project {
})
.collect::<Vec<_>>();
- cx.spawn(|this, mut cx| async move {
+ cx.spawn(|project, mut cx| async move {
// Do not allow multiple concurrent formatting requests for the
// same buffer.
- this.update(&mut cx, |this, cx| {
+ project.update(&mut cx, |this, cx| {
buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
this.buffers_being_formatted
.insert(buffer.read(cx).remote_id())
@@ -4060,7 +4047,7 @@ impl Project {
});
let _cleanup = defer({
- let this = this.clone();
+ let this = project.clone();
let mut cx = cx.clone();
let buffers = &buffers_with_paths_and_servers;
move || {
@@ -4128,7 +4115,7 @@ impl Project {
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
- &this,
+ &project,
&buffer,
buffer_abs_path,
&language_server,
@@ -4163,14 +4150,14 @@ impl Project {
}
}
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
- if let Some(prettier_task) = this
+ if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
}).await {
match prettier_task.await
{
Ok(prettier) => {
- let buffer_path = buffer.read_with(&cx, |buffer, cx| {
+ let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
});
format_operation = Some(FormatOperation::Prettier(
@@ -4180,16 +4167,35 @@ impl Project {
.context("formatting via prettier")?,
));
}
- Err(e) => anyhow::bail!(
- "Failed to create prettier instance for buffer during autoformatting: {e:#}"
- ),
+ Err(e) => {
+ project.update(&mut cx, |project, _| {
+ match &prettier_path {
+ Some(prettier_path) => {
+ project.prettier_instances.remove(prettier_path);
+ },
+ None => {
+ if let Some(default_prettier) = project.default_prettier.as_mut() {
+ default_prettier.instance = None;
+ }
+ },
+ }
+ });
+ match &prettier_path {
+ Some(prettier_path) => {
+ log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
+ },
+ None => {
+ log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
+ },
+ }
+ }
}
} else if let Some((language_server, buffer_abs_path)) =
language_server.as_ref().zip(buffer_abs_path.as_ref())
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
- &this,
+ &project,
&buffer,
buffer_abs_path,
&language_server,
@@ -4202,14 +4208,14 @@ impl Project {
}
}
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
- if let Some(prettier_task) = this
+ if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
}).await {
match prettier_task.await
{
Ok(prettier) => {
- let buffer_path = buffer.read_with(&cx, |buffer, cx| {
+ let buffer_path = buffer.update(&mut cx, |buffer, cx| {
File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
});
format_operation = Some(FormatOperation::Prettier(
@@ -4219,9 +4225,28 @@ impl Project {
.context("formatting via prettier")?,
));
}
- Err(e) => anyhow::bail!(
- "Failed to create prettier instance for buffer during formatting: {e:#}"
- ),
+ Err(e) => {
+ project.update(&mut cx, |project, _| {
+ match &prettier_path {
+ Some(prettier_path) => {
+ project.prettier_instances.remove(prettier_path);
+ },
+ None => {
+ if let Some(default_prettier) = project.default_prettier.as_mut() {
+ default_prettier.instance = None;
+ }
+ },
+ }
+ });
+ match &prettier_path {
+ Some(prettier_path) => {
+ log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
+ },
+ None => {
+ log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
+ },
+ }
+ }
}
}
}
@@ -6431,15 +6456,25 @@ impl Project {
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
);
let prettiers_to_reload = self
- .prettier_instances
+ .prettiers_per_worktree
+ .get(¤t_worktree_id)
.iter()
- .filter_map(|((worktree_id, prettier_path), prettier_task)| {
- if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
- Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
- } else {
- None
- }
+ .flat_map(|prettier_paths| prettier_paths.iter())
+ .flatten()
+ .filter_map(|prettier_path| {
+ Some((
+ current_worktree_id,
+ Some(prettier_path.clone()),
+ self.prettier_instances.get(prettier_path)?.clone(),
+ ))
})
+ .chain(self.default_prettier.iter().filter_map(|default_prettier| {
+ Some((
+ current_worktree_id,
+ None,
+ default_prettier.instance.clone()?,
+ ))
+ }))
.collect::<Vec<_>>();
cx.background()
@@ -6450,9 +6485,15 @@ impl Project {
.clear_cache()
.await
.with_context(|| {
- format!(
- "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
- )
+ match prettier_path {
+ Some(prettier_path) => format!(
+ "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
+ ),
+ None => format!(
+ "clearing default prettier cache for worktree {worktree_id:?} on prettier settings update"
+ ),
+ }
+
})
.map_err(Arc::new)
}
@@ -8364,7 +8405,12 @@ impl Project {
&mut self,
buffer: &ModelHandle<Buffer>,
cx: &mut ModelContext<Self>,
- ) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
+ ) -> Task<
+ Option<(
+ Option<PathBuf>,
+ Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
+ )>,
+ > {
let buffer = buffer.read(cx);
let buffer_file = buffer.file();
let Some(buffer_language) = buffer.language() else {
@@ -8374,136 +8420,119 @@ impl Project {
return Task::ready(None);
}
- let buffer_file = File::from_dyn(buffer_file);
- let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
- let worktree_path = buffer_file
- .as_ref()
- .and_then(|file| Some(file.worktree.read(cx).abs_path()));
- let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
- if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
+ if self.is_local() {
let Some(node) = self.node.as_ref().map(Arc::clone) else {
return Task::ready(None);
};
- cx.spawn(|this, mut cx| async move {
- let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs));
- let prettier_dir = match cx
- .background()
- .spawn(Prettier::locate(
- worktree_path.zip(buffer_path).map(
- |(worktree_root_path, starting_path)| LocateStart {
- worktree_root_path,
- starting_path,
- },
- ),
- fs,
- ))
- .await
- {
- Ok(path) => path,
- Err(e) => {
- return Some(
- Task::ready(Err(Arc::new(e.context(
- "determining prettier path for worktree {worktree_path:?}",
- ))))
- .shared(),
- );
- }
- };
-
- if let Some(existing_prettier) = this.update(&mut cx, |project, _| {
- project
- .prettier_instances
- .get(&(worktree_id, prettier_dir.clone()))
- .cloned()
- }) {
- return Some(existing_prettier);
- }
-
- log::info!("Found prettier in {prettier_dir:?}, starting.");
- let task_prettier_dir = prettier_dir.clone();
- let weak_project = this.downgrade();
- let new_server_id =
- this.update(&mut cx, |this, _| this.languages.next_language_server_id());
- let new_prettier_task = cx
- .spawn(|mut cx| async move {
- let prettier = Prettier::start(
- worktree_id.map(|id| id.to_usize()),
- new_server_id,
- task_prettier_dir,
- node,
- cx.clone(),
- )
- .await
- .context("prettier start")
- .map_err(Arc::new)?;
- log::info!("Started prettier in {:?}", prettier.prettier_dir());
-
- if let Some((project, prettier_server)) =
- weak_project.upgrade(&mut cx).zip(prettier.server())
+ match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
+ {
+ Some((worktree_id, buffer_path)) => {
+ let fs = Arc::clone(&self.fs);
+ let installed_prettiers = self.prettier_instances.keys().cloned().collect();
+ return cx.spawn(|project, mut cx| async move {
+ match cx
+ .background()
+ .spawn(async move {
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &installed_prettiers,
+ &buffer_path,
+ )
+ .await
+ })
+ .await
{
- project.update(&mut cx, |project, cx| {
- let name = if prettier.is_default() {
- LanguageServerName(Arc::from("prettier (default)"))
- } else {
- let prettier_dir = prettier.prettier_dir();
- let worktree_path = prettier
- .worktree_id()
- .map(WorktreeId::from_usize)
- .and_then(|id| project.worktree_for_id(id, cx))
- .map(|worktree| worktree.read(cx).abs_path());
- match worktree_path {
- Some(worktree_path) => {
- if worktree_path.as_ref() == prettier_dir {
- LanguageServerName(Arc::from(format!(
- "prettier ({})",
- prettier_dir
- .file_name()
- .and_then(|name| name.to_str())
- .unwrap_or_default()
- )))
- } else {
- let dir_to_display = match prettier_dir
- .strip_prefix(&worktree_path)
- .ok()
- {
- Some(relative_path) => relative_path,
- None => prettier_dir,
- };
- LanguageServerName(Arc::from(format!(
- "prettier ({})",
- dir_to_display.display(),
- )))
- }
- }
- None => LanguageServerName(Arc::from(format!(
- "prettier ({})",
- prettier_dir.display(),
- ))),
+ Ok(None) => {
+ let started_default_prettier =
+ project.update(&mut cx, |project, _| {
+ project
+ .prettiers_per_worktree
+ .entry(worktree_id)
+ .or_default()
+ .insert(None);
+ project.default_prettier.as_ref().and_then(
+ |default_prettier| default_prettier.instance.clone(),
+ )
+ });
+ match started_default_prettier {
+ Some(old_task) => return Some((None, old_task)),
+ None => {
+ let new_default_prettier = project
+ .update(&mut cx, |_, cx| {
+ start_default_prettier(node, Some(worktree_id), cx)
+ })
+ .await;
+ return Some((None, new_default_prettier));
}
- };
+ }
+ }
+ Ok(Some(prettier_dir)) => {
+ project.update(&mut cx, |project, _| {
+ project
+ .prettiers_per_worktree
+ .entry(worktree_id)
+ .or_default()
+ .insert(Some(prettier_dir.clone()))
+ });
+ if let Some(existing_prettier) =
+ project.update(&mut cx, |project, _| {
+ project.prettier_instances.get(&prettier_dir).cloned()
+ })
+ {
+ log::debug!(
+ "Found already started prettier in {prettier_dir:?}"
+ );
+ return Some((Some(prettier_dir), existing_prettier));
+ }
- project
- .supplementary_language_servers
- .insert(new_server_id, (name, Arc::clone(prettier_server)));
- cx.emit(Event::LanguageServerAdded(new_server_id));
- });
+ log::info!("Found prettier in {prettier_dir:?}, starting.");
+ let new_prettier_task = project.update(&mut cx, |project, cx| {
+ let new_prettier_task = start_prettier(
+ node,
+ prettier_dir.clone(),
+ Some(worktree_id),
+ cx,
+ );
+ project
+ .prettier_instances
+ .insert(prettier_dir.clone(), new_prettier_task.clone());
+ new_prettier_task
+ });
+ Some((Some(prettier_dir), new_prettier_task))
+ }
+ Err(e) => {
+ return Some((
+ None,
+ Task::ready(Err(Arc::new(
+ e.context("determining prettier path"),
+ )))
+ .shared(),
+ ));
+ }
}
- Ok(Arc::new(prettier)).map_err(Arc::new)
- })
- .shared();
- this.update(&mut cx, |project, _| {
- project
- .prettier_instances
- .insert((worktree_id, prettier_dir), new_prettier_task.clone());
- });
- Some(new_prettier_task)
- })
+ });
+ }
+ None => {
+ let started_default_prettier = self
+ .default_prettier
+ .as_ref()
+ .and_then(|default_prettier| default_prettier.instance.clone());
+ match started_default_prettier {
+ Some(old_task) => return Task::ready(Some((None, old_task))),
+ None => {
+ let new_task = start_default_prettier(node, None, cx);
+ return cx.spawn(|_, _| async move { Some((None, new_task.await)) });
+ }
+ }
+ }
+ }
} else if self.remote_id().is_some() {
return Task::ready(None);
} else {
- Task::ready(Some(
+ Task::ready(Some((
+ None,
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
- ))
+ )))
}
}
@@ -8514,8 +8543,7 @@ impl Project {
_new_language: &Language,
_language_settings: &LanguageSettings,
_cx: &mut ModelContext<Self>,
- ) -> Task<anyhow::Result<()>> {
- return Task::ready(Ok(()));
+ ) {
}
#[cfg(not(any(test, feature = "test-support")))]
@@ -8525,19 +8553,19 @@ impl Project {
new_language: &Language,
language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>,
- ) -> Task<anyhow::Result<()>> {
+ ) {
match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {}
- Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
+ Formatter::LanguageServer | Formatter::External { .. } => return,
};
let Some(node) = self.node.as_ref().cloned() else {
- return Task::ready(Ok(()));
+ return;
};
let mut prettier_plugins = None;
if new_language.prettier_parser_name().is_some() {
prettier_plugins
- .get_or_insert_with(|| HashSet::default())
+ .get_or_insert_with(|| HashSet::<&'static str>::default())
.extend(
new_language
.lsp_adapters()
@@ -8546,114 +8574,270 @@ impl Project {
)
}
let Some(prettier_plugins) = prettier_plugins else {
- return Task::ready(Ok(()));
+ return;
};
+ let fs = Arc::clone(&self.fs);
+ let locate_prettier_installation = match worktree.and_then(|worktree_id| {
+ self.worktree_for_id(worktree_id, cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ }) {
+ Some(locate_from) => {
+ let installed_prettiers = self.prettier_instances.keys().cloned().collect();
+ cx.background().spawn(async move {
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &installed_prettiers,
+ locate_from.as_ref(),
+ )
+ .await
+ })
+ }
+ None => Task::ready(Ok(None)),
+ };
let mut plugins_to_install = prettier_plugins;
- let (mut install_success_tx, mut install_success_rx) =
- futures::channel::mpsc::channel::<HashSet<&'static str>>(1);
- let new_installation_process = cx
- .spawn(|this, mut cx| async move {
- if let Some(installed_plugins) = install_success_rx.next().await {
- this.update(&mut cx, |this, _| {
- let default_prettier =
- this.default_prettier
- .get_or_insert_with(|| DefaultPrettier {
- installation_process: None,
- installed_plugins: HashSet::default(),
- });
- if !installed_plugins.is_empty() {
- log::info!("Installed new prettier plugins: {installed_plugins:?}");
- default_prettier.installed_plugins.extend(installed_plugins);
- }
- })
- }
- })
- .shared();
let previous_installation_process =
if let Some(default_prettier) = &mut self.default_prettier {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
if plugins_to_install.is_empty() {
- return Task::ready(Ok(()));
+ return;
}
- std::mem::replace(
- &mut default_prettier.installation_process,
- Some(new_installation_process.clone()),
- )
+ default_prettier.installation_process.clone()
} else {
None
};
-
- let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path();
- let already_running_prettier = self
- .prettier_instances
- .get(&(worktree, default_prettier_dir.to_path_buf()))
- .cloned();
let fs = Arc::clone(&self.fs);
- cx.spawn(|this, mut cx| async move {
- if let Some(previous_installation_process) = previous_installation_process {
- previous_installation_process.await;
- }
- let mut everything_was_installed = false;
- this.update(&mut cx, |this, _| {
- match &mut this.default_prettier {
- Some(default_prettier) => {
- plugins_to_install
- .retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
- everything_was_installed = plugins_to_install.is_empty();
- },
- None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }),
+ let default_prettier = self
+ .default_prettier
+ .get_or_insert_with(|| DefaultPrettier {
+ instance: None,
+ installation_process: None,
+ installed_plugins: HashSet::default(),
+ });
+ default_prettier.installation_process = Some(
+ cx.spawn(|this, mut cx| async move {
+ match locate_prettier_installation
+ .await
+ .context("locate prettier installation")
+ .map_err(Arc::new)?
+ {
+ Some(_non_default_prettier) => return Ok(()),
+ None => {
+ let mut needs_install = match previous_installation_process {
+ Some(previous_installation_process) => {
+ previous_installation_process.await.is_err()
+ }
+ None => true,
+ };
+ this.update(&mut cx, |this, _| {
+ if let Some(default_prettier) = &mut this.default_prettier {
+ plugins_to_install.retain(|plugin| {
+ !default_prettier.installed_plugins.contains(plugin)
+ });
+ needs_install |= !plugins_to_install.is_empty();
+ }
+ });
+ if needs_install {
+ let installed_plugins = plugins_to_install.clone();
+ cx.background()
+ .spawn(async move {
+ install_default_prettier(plugins_to_install, node, fs).await
+ })
+ .await
+ .context("prettier & plugins install")
+ .map_err(Arc::new)?;
+ this.update(&mut cx, |this, _| {
+ let default_prettier =
+ this.default_prettier
+ .get_or_insert_with(|| DefaultPrettier {
+ instance: None,
+ installation_process: Some(
+ Task::ready(Ok(())).shared(),
+ ),
+ installed_plugins: HashSet::default(),
+ });
+ default_prettier.instance = None;
+ default_prettier.installed_plugins.extend(installed_plugins);
+ });
+ }
+ }
}
+ Ok(())
+ })
+ .shared(),
+ );
+ }
+}
+
+fn start_default_prettier(
+ node: Arc<dyn NodeRuntime>,
+ worktree_id: Option<WorktreeId>,
+ cx: &mut ModelContext<'_, Project>,
+) -> Task<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>> {
+ cx.spawn(|project, mut cx| async move {
+ loop {
+ let default_prettier_installing = project.update(&mut cx, |project, _| {
+ project
+ .default_prettier
+ .as_ref()
+ .and_then(|default_prettier| default_prettier.installation_process.clone())
});
- if everything_was_installed {
- return Ok(());
+ match default_prettier_installing {
+ Some(installation_task) => {
+ if installation_task.await.is_ok() {
+ break;
+ }
+ }
+ None => break,
}
+ }
- cx.background()
- .spawn(async move {
- let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE);
- // method creates parent directory if it doesn't exist
- fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await
- .with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?;
+ project.update(&mut cx, |project, cx| {
+ match project
+ .default_prettier
+ .as_mut()
+ .and_then(|default_prettier| default_prettier.instance.as_mut())
+ {
+ Some(default_prettier) => default_prettier.clone(),
+ None => {
+ let new_default_prettier =
+ start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
+ project
+ .default_prettier
+ .get_or_insert_with(|| DefaultPrettier {
+ instance: None,
+ installation_process: None,
+ #[cfg(not(any(test, feature = "test-support")))]
+ installed_plugins: HashSet::default(),
+ })
+ .instance = Some(new_default_prettier.clone());
+ new_default_prettier
+ }
+ }
+ })
+ })
+}
- let packages_to_versions = future::try_join_all(
- plugins_to_install
- .iter()
- .chain(Some(&"prettier"))
- .map(|package_name| async {
- let returned_package_name = package_name.to_string();
- let latest_version = node.npm_package_latest_version(package_name)
- .await
- .with_context(|| {
- format!("fetching latest npm version for package {returned_package_name}")
- })?;
- anyhow::Ok((returned_package_name, latest_version))
- }),
- )
- .await
- .context("fetching latest npm versions")?;
-
- log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
- let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
- (package.as_str(), version.as_str())
- }).collect::<Vec<_>>();
- node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;
- let installed_packages = !plugins_to_install.is_empty();
- install_success_tx.try_send(plugins_to_install).ok();
-
- if !installed_packages {
- if let Some(prettier) = already_running_prettier {
- prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
+fn start_prettier(
+ node: Arc<dyn NodeRuntime>,
+ prettier_dir: PathBuf,
+ worktree_id: Option<WorktreeId>,
+ cx: &mut ModelContext<'_, Project>,
+) -> Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>> {
+ cx.spawn(|project, mut cx| async move {
+ let new_server_id = project.update(&mut cx, |project, _| {
+ project.languages.next_language_server_id()
+ });
+ let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
+ .await
+ .context("default prettier spawn")
+ .map(Arc::new)
+ .map_err(Arc::new)?;
+ register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
+ Ok(new_prettier)
+ })
+ .shared()
+}
+
+fn register_new_prettier(
+ project: &ModelHandle<Project>,
+ prettier: &Prettier,
+ worktree_id: Option<WorktreeId>,
+ new_server_id: LanguageServerId,
+ cx: &mut AsyncAppContext,
+) {
+ let prettier_dir = prettier.prettier_dir();
+ let is_default = prettier.is_default();
+ if is_default {
+ log::info!("Started default prettier in {prettier_dir:?}");
+ } else {
+ log::info!("Started prettier in {prettier_dir:?}");
+ }
+ if let Some(prettier_server) = prettier.server() {
+ project.update(cx, |project, cx| {
+ let name = if is_default {
+ LanguageServerName(Arc::from("prettier (default)"))
+ } else {
+ let worktree_path = worktree_id
+ .and_then(|id| project.worktree_for_id(id, cx))
+ .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
+ let name = match worktree_path {
+ Some(worktree_path) => {
+ if prettier_dir == worktree_path.as_ref() {
+ let name = prettier_dir
+ .file_name()
+ .and_then(|name| name.to_str())
+ .unwrap_or_default();
+ format!("prettier ({name})")
+ } else {
+ let dir_to_display = prettier_dir
+ .strip_prefix(worktree_path.as_ref())
+ .ok()
+ .unwrap_or(prettier_dir);
+ format!("prettier ({})", dir_to_display.display())
}
}
-
- anyhow::Ok(())
- }).await
- })
+ None => format!("prettier ({})", prettier_dir.display()),
+ };
+ LanguageServerName(Arc::from(name))
+ };
+ project
+ .supplementary_language_servers
+ .insert(new_server_id, (name, Arc::clone(prettier_server)));
+ cx.emit(Event::LanguageServerAdded(new_server_id));
+ });
}
}
+#[cfg(not(any(test, feature = "test-support")))]
+async fn install_default_prettier(
+ plugins_to_install: HashSet<&'static str>,
+ node: Arc<dyn NodeRuntime>,
+ fs: Arc<dyn Fs>,
+) -> anyhow::Result<()> {
+ let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
+ // method creates parent directory if it doesn't exist
+ fs.save(
+ &prettier_wrapper_path,
+ &text::Rope::from(prettier::PRETTIER_SERVER_JS),
+ text::LineEnding::Unix,
+ )
+ .await
+ .with_context(|| {
+ format!(
+ "writing {} file at {prettier_wrapper_path:?}",
+ prettier::PRETTIER_SERVER_FILE
+ )
+ })?;
+
+ let packages_to_versions =
+ future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
+ |package_name| async {
+ let returned_package_name = package_name.to_string();
+ let latest_version = node
+ .npm_package_latest_version(package_name)
+ .await
+ .with_context(|| {
+ format!("fetching latest npm version for package {returned_package_name}")
+ })?;
+ anyhow::Ok((returned_package_name, latest_version))
+ },
+ ))
+ .await
+ .context("fetching latest npm versions")?;
+
+ log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
+ let borrowed_packages = packages_to_versions
+ .iter()
+ .map(|(package, version)| (package.as_str(), version.as_str()))
+ .collect::<Vec<_>>();
+ node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
+ .await
+ .context("fetching formatter packages")?;
+ anyhow::Ok(())
+}
+
fn subscribe_for_copilot_events(
copilot: &ModelHandle<Copilot>,
cx: &mut ModelContext<'_, Project>,
@@ -1,4 +1,4 @@
-use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
+use crate::{worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext};
@@ -13,7 +13,7 @@ use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
-use util::{assert_set_eq, test::temp_tree};
+use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
#[cfg(test)]
#[ctor::ctor]
@@ -1,7 +1,6 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result};
use client::proto;
-use globset::{Glob, GlobMatcher};
use itertools::Itertools;
use language::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder};
@@ -10,9 +9,10 @@ use std::{
borrow::Cow,
io::{BufRead, BufReader, Read},
ops::Range,
- path::{Path, PathBuf},
+ path::Path,
sync::Arc,
};
+use util::paths::PathMatcher;
#[derive(Clone, Debug)]
pub struct SearchInputs {
@@ -52,31 +52,6 @@ pub enum SearchQuery {
},
}
-#[derive(Clone, Debug)]
-pub struct PathMatcher {
- maybe_path: PathBuf,
- glob: GlobMatcher,
-}
-
-impl std::fmt::Display for PathMatcher {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- self.maybe_path.to_string_lossy().fmt(f)
- }
-}
-
-impl PathMatcher {
- pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
- Ok(PathMatcher {
- glob: Glob::new(&maybe_glob)?.compile_matcher(),
- maybe_path: PathBuf::from(maybe_glob),
- })
- }
-
- pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
- other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
- }
-}
-
impl SearchQuery {
pub fn text(
query: impl ToString,
@@ -20,7 +20,7 @@ test-support = [
]
[dependencies]
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
copilot = { package = "copilot2", path = "../copilot2" }
client = { package = "client2", path = "../client2" }
clock = { path = "../clock" }
@@ -29,7 +29,7 @@ db = { package = "db2", path = "../db2" }
fs = { package = "fs2", path = "../fs2" }
fsevent = { path = "../fsevent" }
fuzzy = { package = "fuzzy2", path = "../fuzzy2" }
-git = { path = "../git" }
+git = { package = "git3", path = "../git3" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { package = "language2", path = "../language2" }
lsp = { package = "lsp2", path = "../lsp2" }
@@ -54,7 +54,7 @@ use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use postage::watch;
-use prettier::{LocateStart, Prettier};
+use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
@@ -82,8 +82,11 @@ use std::{
use terminals::Terminals;
use text::Anchor;
use util::{
- debug_panic, defer, http::HttpClient, merge_json_value_into,
- paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
+ debug_panic, defer,
+ http::HttpClient,
+ merge_json_value_into,
+ paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
+ post_inc, ResultExt, TryFutureExt as _,
};
pub use fs::*;
@@ -162,17 +165,15 @@ pub struct Project {
copilot_log_subscription: Option<lsp::Subscription>,
current_lsp_settings: HashMap<Arc<str>, LspSettings>,
node: Option<Arc<dyn NodeRuntime>>,
- #[cfg(not(any(test, feature = "test-support")))]
default_prettier: Option<DefaultPrettier>,
- prettier_instances: HashMap<
- (Option<WorktreeId>, PathBuf),
- Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
- >,
+ prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
+ prettier_instances: HashMap<PathBuf, Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
}
-#[cfg(not(any(test, feature = "test-support")))]
struct DefaultPrettier {
- installation_process: Option<Shared<Task<()>>>,
+ instance: Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>,
+ installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
+ #[cfg(not(any(test, feature = "test-support")))]
installed_plugins: HashSet<&'static str>,
}
@@ -686,8 +687,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
node: Some(node),
- #[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
+ prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
}
})
@@ -789,8 +790,8 @@ impl Project {
copilot_log_subscription: None,
current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
node: None,
- #[cfg(not(any(test, feature = "test-support")))]
default_prettier: None,
+ prettiers_per_worktree: HashMap::default(),
prettier_instances: HashMap::default(),
};
for worktree in worktrees {
@@ -963,8 +964,7 @@ impl Project {
}
for (worktree, language, settings) in language_formatters_to_check {
- self.install_default_formatters(worktree, &language, &settings, cx)
- .detach_and_log_err(cx);
+ self.install_default_formatters(worktree, &language, &settings, cx);
}
// Start all the newly-enabled language servers.
@@ -2720,20 +2720,7 @@ impl Project {
let buffer_file = File::from_dyn(buffer_file.as_ref());
let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
- let task_buffer = buffer.clone();
- let prettier_installation_task =
- self.install_default_formatters(worktree, &new_language, &settings, cx);
- cx.spawn(move |project, mut cx| async move {
- prettier_installation_task.await?;
- let _ = project
- .update(&mut cx, |project, cx| {
- project.prettier_instance_for_buffer(&task_buffer, cx)
- })?
- .await;
- anyhow::Ok(())
- })
- .detach_and_log_err(cx);
-
+ self.install_default_formatters(worktree, &new_language, &settings, cx);
if let Some(file) = buffer_file {
let worktree = file.worktree.clone();
if let Some(tree) = worktree.read(cx).as_local() {
@@ -4096,7 +4083,7 @@ impl Project {
}
pub fn format(
- &self,
+ &mut self,
buffers: HashSet<Model<Buffer>>,
push_to_history: bool,
trigger: FormatTrigger,
@@ -4116,10 +4103,10 @@ impl Project {
})
.collect::<Vec<_>>();
- cx.spawn(move |this, mut cx| async move {
+ cx.spawn(move |project, mut cx| async move {
// Do not allow multiple concurrent formatting requests for the
// same buffer.
- this.update(&mut cx, |this, cx| {
+ project.update(&mut cx, |this, cx| {
buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
this.buffers_being_formatted
.insert(buffer.read(cx).remote_id())
@@ -4127,7 +4114,7 @@ impl Project {
})?;
let _cleanup = defer({
- let this = this.clone();
+ let this = project.clone();
let mut cx = cx.clone();
let buffers = &buffers_with_paths_and_servers;
move || {
@@ -4195,7 +4182,7 @@ impl Project {
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
- &this,
+ &project,
&buffer,
buffer_abs_path,
&language_server,
@@ -4230,7 +4217,7 @@ impl Project {
}
}
(Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
- if let Some(prettier_task) = this
+ if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
})?.await {
@@ -4247,16 +4234,35 @@ impl Project {
.context("formatting via prettier")?,
));
}
- Err(e) => anyhow::bail!(
- "Failed to create prettier instance for buffer during autoformatting: {e:#}"
- ),
+ Err(e) => {
+ project.update(&mut cx, |project, _| {
+ match &prettier_path {
+ Some(prettier_path) => {
+ project.prettier_instances.remove(prettier_path);
+ },
+ None => {
+ if let Some(default_prettier) = project.default_prettier.as_mut() {
+ default_prettier.instance = None;
+ }
+ },
+ }
+ })?;
+ match &prettier_path {
+ Some(prettier_path) => {
+ log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
+ },
+ None => {
+ log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
+ },
+ }
+ }
}
} else if let Some((language_server, buffer_abs_path)) =
language_server.as_ref().zip(buffer_abs_path.as_ref())
{
format_operation = Some(FormatOperation::Lsp(
Self::format_via_lsp(
- &this,
+ &project,
&buffer,
buffer_abs_path,
&language_server,
@@ -4269,7 +4275,7 @@ impl Project {
}
}
(Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
- if let Some(prettier_task) = this
+ if let Some((prettier_path, prettier_task)) = project
.update(&mut cx, |project, cx| {
project.prettier_instance_for_buffer(buffer, cx)
})?.await {
@@ -4286,9 +4292,28 @@ impl Project {
.context("formatting via prettier")?,
));
}
- Err(e) => anyhow::bail!(
- "Failed to create prettier instance for buffer during formatting: {e:#}"
- ),
+ Err(e) => {
+ project.update(&mut cx, |project, _| {
+ match &prettier_path {
+ Some(prettier_path) => {
+ project.prettier_instances.remove(prettier_path);
+ },
+ None => {
+ if let Some(default_prettier) = project.default_prettier.as_mut() {
+ default_prettier.instance = None;
+ }
+ },
+ }
+ })?;
+ match &prettier_path {
+ Some(prettier_path) => {
+ log::error!("Failed to create prettier instance from {prettier_path:?} for buffer during autoformatting: {e:#}");
+ },
+ None => {
+ log::error!("Failed to create default prettier instance for buffer during autoformatting: {e:#}");
+ },
+ }
+ }
}
}
}
@@ -6506,15 +6531,25 @@ impl Project {
"Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
);
let prettiers_to_reload = self
- .prettier_instances
+ .prettiers_per_worktree
+ .get(&current_worktree_id)
.iter()
- .filter_map(|((worktree_id, prettier_path), prettier_task)| {
- if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
- Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
- } else {
- None
- }
+ .flat_map(|prettier_paths| prettier_paths.iter())
+ .flatten()
+ .filter_map(|prettier_path| {
+ Some((
+ current_worktree_id,
+ Some(prettier_path.clone()),
+ self.prettier_instances.get(prettier_path)?.clone(),
+ ))
})
+ .chain(self.default_prettier.iter().filter_map(|default_prettier| {
+ Some((
+ current_worktree_id,
+ None,
+ default_prettier.instance.clone()?,
+ ))
+ }))
.collect::<Vec<_>>();
cx.background_executor()
@@ -6525,9 +6560,14 @@ impl Project {
.clear_cache()
.await
.with_context(|| {
- format!(
- "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
- )
+ match prettier_path {
+ Some(prettier_path) => format!(
+ "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
+ ),
+ None => format!(
+ "clearing default prettier cache for worktree {worktree_id:?} on prettier settings update"
+ ),
+ }
})
.map_err(Arc::new)
}
@@ -8411,7 +8451,12 @@ impl Project {
&mut self,
buffer: &Model<Buffer>,
cx: &mut ModelContext<Self>,
- ) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
+ ) -> Task<
+ Option<(
+ Option<PathBuf>,
+ Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
+ )>,
+ > {
let buffer = buffer.read(cx);
let buffer_file = buffer.file();
let Some(buffer_language) = buffer.language() else {
@@ -8421,142 +8466,142 @@ impl Project {
return Task::ready(None);
}
- let buffer_file = File::from_dyn(buffer_file);
- let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
- let worktree_path = buffer_file
- .as_ref()
- .and_then(|file| Some(file.worktree.read(cx).abs_path()));
- let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
- if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
+ if self.is_local() {
let Some(node) = self.node.as_ref().map(Arc::clone) else {
return Task::ready(None);
};
- let fs = self.fs.clone();
- cx.spawn(move |this, mut cx| async move {
- let prettier_dir = match cx
- .background_executor()
- .spawn(Prettier::locate(
- worktree_path.zip(buffer_path).map(
- |(worktree_root_path, starting_path)| LocateStart {
- worktree_root_path,
- starting_path,
- },
- ),
- fs,
- ))
- .await
- {
- Ok(path) => path,
- Err(e) => {
- return Some(
- Task::ready(Err(Arc::new(e.context(
- "determining prettier path for worktree {worktree_path:?}",
- ))))
- .shared(),
- );
- }
- };
-
- if let Some(existing_prettier) = this
- .update(&mut cx, |project, _| {
- project
- .prettier_instances
- .get(&(worktree_id, prettier_dir.clone()))
- .cloned()
- })
- .ok()
- .flatten()
- {
- return Some(existing_prettier);
- }
-
- log::info!("Found prettier in {prettier_dir:?}, starting.");
- let task_prettier_dir = prettier_dir.clone();
- let new_prettier_task = cx
- .spawn({
- let this = this.clone();
- move |mut cx| async move {
- let new_server_id = this.update(&mut cx, |this, _| {
- this.languages.next_language_server_id()
- })?;
- let prettier = Prettier::start(
- worktree_id.map(|id| id.to_usize()),
- new_server_id,
- task_prettier_dir,
- node,
- cx.clone(),
- )
+ match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
+ {
+ Some((worktree_id, buffer_path)) => {
+ let fs = Arc::clone(&self.fs);
+ let installed_prettiers = self.prettier_instances.keys().cloned().collect();
+ return cx.spawn(|project, mut cx| async move {
+ match cx
+ .background_executor()
+ .spawn(async move {
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &installed_prettiers,
+ &buffer_path,
+ )
+ .await
+ })
.await
- .context("prettier start")
- .map_err(Arc::new)?;
- log::info!("Started prettier in {:?}", prettier.prettier_dir());
-
- if let Some(prettier_server) = prettier.server() {
- this.update(&mut cx, |project, cx| {
- let name = if prettier.is_default() {
- LanguageServerName(Arc::from("prettier (default)"))
- } else {
- let prettier_dir = prettier.prettier_dir();
- let worktree_path = prettier
- .worktree_id()
- .map(WorktreeId::from_usize)
- .and_then(|id| project.worktree_for_id(id, cx))
- .map(|worktree| worktree.read(cx).abs_path());
- match worktree_path {
- Some(worktree_path) => {
- if worktree_path.as_ref() == prettier_dir {
- LanguageServerName(Arc::from(format!(
- "prettier ({})",
- prettier_dir
- .file_name()
- .and_then(|name| name.to_str())
- .unwrap_or_default()
- )))
- } else {
- let dir_to_display = match prettier_dir
- .strip_prefix(&worktree_path)
- .ok()
- {
- Some(relative_path) => relative_path,
- None => prettier_dir,
- };
- LanguageServerName(Arc::from(format!(
- "prettier ({})",
- dir_to_display.display(),
- )))
- }
+ {
+ Ok(None) => {
+ match project.update(&mut cx, |project, _| {
+ project
+ .prettiers_per_worktree
+ .entry(worktree_id)
+ .or_default()
+ .insert(None);
+ project.default_prettier.as_ref().and_then(
+ |default_prettier| default_prettier.instance.clone(),
+ )
+ }) {
+ Ok(Some(old_task)) => Some((None, old_task)),
+ Ok(None) => {
+ match project.update(&mut cx, |_, cx| {
+ start_default_prettier(node, Some(worktree_id), cx)
+ }) {
+ Ok(new_default_prettier) => {
+ return Some((None, new_default_prettier.await))
+ }
+ Err(e) => {
+ Some((
+ None,
+ Task::ready(Err(Arc::new(e.context("project is gone during default prettier startup"))))
+ .shared(),
+ ))
}
- None => LanguageServerName(Arc::from(format!(
- "prettier ({})",
- prettier_dir.display(),
- ))),
}
- };
-
+ }
+ Err(e) => Some((None, Task::ready(Err(Arc::new(e.context("project is gone during default prettier checks"))))
+ .shared())),
+ }
+ }
+ Ok(Some(prettier_dir)) => {
+ match project.update(&mut cx, |project, _| {
project
- .supplementary_language_servers
- .insert(new_server_id, (name, Arc::clone(prettier_server)));
- cx.emit(Event::LanguageServerAdded(new_server_id));
- })?;
+ .prettiers_per_worktree
+ .entry(worktree_id)
+ .or_default()
+ .insert(Some(prettier_dir.clone()));
+ project.prettier_instances.get(&prettier_dir).cloned()
+ }) {
+ Ok(Some(existing_prettier)) => {
+ log::debug!(
+ "Found already started prettier in {prettier_dir:?}"
+ );
+ return Some((Some(prettier_dir), existing_prettier));
+ }
+ Err(e) => {
+ return Some((
+ Some(prettier_dir),
+ Task::ready(Err(Arc::new(e.context("project is gone during custom prettier checks"))))
+ .shared(),
+ ))
+ }
+ _ => {},
+ }
+
+ log::info!("Found prettier in {prettier_dir:?}, starting.");
+ let new_prettier_task =
+ match project.update(&mut cx, |project, cx| {
+ let new_prettier_task = start_prettier(
+ node,
+ prettier_dir.clone(),
+ Some(worktree_id),
+ cx,
+ );
+ project.prettier_instances.insert(
+ prettier_dir.clone(),
+ new_prettier_task.clone(),
+ );
+ new_prettier_task
+ }) {
+ Ok(task) => task,
+ Err(e) => return Some((
+ Some(prettier_dir),
+ Task::ready(Err(Arc::new(e.context("project is gone during custom prettier startup"))))
+ .shared()
+ )),
+ };
+ Some((Some(prettier_dir), new_prettier_task))
+ }
+ Err(e) => {
+ return Some((
+ None,
+ Task::ready(Err(Arc::new(
+ e.context("determining prettier path"),
+ )))
+ .shared(),
+ ));
}
- Ok(Arc::new(prettier)).map_err(Arc::new)
}
- })
- .shared();
- this.update(&mut cx, |project, _| {
- project
- .prettier_instances
- .insert((worktree_id, prettier_dir), new_prettier_task.clone());
- })
- .ok();
- Some(new_prettier_task)
- })
+ });
+ }
+ None => {
+ let started_default_prettier = self
+ .default_prettier
+ .as_ref()
+ .and_then(|default_prettier| default_prettier.instance.clone());
+ match started_default_prettier {
+ Some(old_task) => return Task::ready(Some((None, old_task))),
+ None => {
+ let new_task = start_default_prettier(node, None, cx);
+ return cx.spawn(|_, _| async move { Some((None, new_task.await)) });
+ }
+ }
+ }
+ }
} else if self.remote_id().is_some() {
return Task::ready(None);
} else {
- Task::ready(Some(
+ Task::ready(Some((
+ None,
Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
- ))
+ )))
}
}
@@ -8567,8 +8612,7 @@ impl Project {
_: &Language,
_: &LanguageSettings,
_: &mut ModelContext<Self>,
- ) -> Task<anyhow::Result<()>> {
- Task::ready(Ok(()))
+ ) {
}
#[cfg(not(any(test, feature = "test-support")))]
@@ -8578,19 +8622,19 @@ impl Project {
new_language: &Language,
language_settings: &LanguageSettings,
cx: &mut ModelContext<Self>,
- ) -> Task<anyhow::Result<()>> {
+ ) {
match &language_settings.formatter {
Formatter::Prettier { .. } | Formatter::Auto => {}
- Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
+ Formatter::LanguageServer | Formatter::External { .. } => return,
};
let Some(node) = self.node.as_ref().cloned() else {
- return Task::ready(Ok(()));
+ return;
};
let mut prettier_plugins = None;
if new_language.prettier_parser_name().is_some() {
prettier_plugins
- .get_or_insert_with(|| HashSet::default())
+ .get_or_insert_with(|| HashSet::<&'static str>::default())
.extend(
new_language
.lsp_adapters()
@@ -8599,114 +8643,287 @@ impl Project {
)
}
let Some(prettier_plugins) = prettier_plugins else {
- return Task::ready(Ok(()));
+ return;
};
+ let fs = Arc::clone(&self.fs);
+ let locate_prettier_installation = match worktree.and_then(|worktree_id| {
+ self.worktree_for_id(worktree_id, cx)
+ .map(|worktree| worktree.read(cx).abs_path())
+ }) {
+ Some(locate_from) => {
+ let installed_prettiers = self.prettier_instances.keys().cloned().collect();
+ cx.background_executor().spawn(async move {
+ Prettier::locate_prettier_installation(
+ fs.as_ref(),
+ &installed_prettiers,
+ locate_from.as_ref(),
+ )
+ .await
+ })
+ }
+ None => Task::ready(Ok(None)),
+ };
let mut plugins_to_install = prettier_plugins;
- let (mut install_success_tx, mut install_success_rx) =
- futures::channel::mpsc::channel::<HashSet<&'static str>>(1);
- let new_installation_process = cx
- .spawn(|this, mut cx| async move {
- if let Some(installed_plugins) = install_success_rx.next().await {
- this.update(&mut cx, |this, _| {
- let default_prettier =
- this.default_prettier
- .get_or_insert_with(|| DefaultPrettier {
- installation_process: None,
- installed_plugins: HashSet::default(),
- });
- if !installed_plugins.is_empty() {
- log::info!("Installed new prettier plugins: {installed_plugins:?}");
- default_prettier.installed_plugins.extend(installed_plugins);
- }
- })
- .ok();
- }
- })
- .shared();
let previous_installation_process =
if let Some(default_prettier) = &mut self.default_prettier {
plugins_to_install
.retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
if plugins_to_install.is_empty() {
- return Task::ready(Ok(()));
+ return;
}
- std::mem::replace(
- &mut default_prettier.installation_process,
- Some(new_installation_process.clone()),
- )
+ default_prettier.installation_process.clone()
} else {
None
};
- let default_prettier_dir = util::paths::DEFAULT_PRETTIER_DIR.as_path();
- let already_running_prettier = self
- .prettier_instances
- .get(&(worktree, default_prettier_dir.to_path_buf()))
- .cloned();
let fs = Arc::clone(&self.fs);
- cx.spawn(move |this, mut cx| async move {
- if let Some(previous_installation_process) = previous_installation_process {
- previous_installation_process.await;
- }
- let mut everything_was_installed = false;
- this.update(&mut cx, |this, _| {
- match &mut this.default_prettier {
- Some(default_prettier) => {
- plugins_to_install
- .retain(|plugin| !default_prettier.installed_plugins.contains(plugin));
- everything_was_installed = plugins_to_install.is_empty();
- },
- None => this.default_prettier = Some(DefaultPrettier { installation_process: Some(new_installation_process), installed_plugins: HashSet::default() }),
+ let default_prettier = self
+ .default_prettier
+ .get_or_insert_with(|| DefaultPrettier {
+ instance: None,
+ installation_process: None,
+ installed_plugins: HashSet::default(),
+ });
+ default_prettier.installation_process = Some(
+ cx.spawn(|this, mut cx| async move {
+ match locate_prettier_installation
+ .await
+ .context("locate prettier installation")
+ .map_err(Arc::new)?
+ {
+ Some(_non_default_prettier) => return Ok(()),
+ None => {
+ let mut needs_install = match previous_installation_process {
+ Some(previous_installation_process) => {
+ previous_installation_process.await.is_err()
+ }
+ None => true,
+ };
+ this.update(&mut cx, |this, _| {
+ if let Some(default_prettier) = &mut this.default_prettier {
+ plugins_to_install.retain(|plugin| {
+ !default_prettier.installed_plugins.contains(plugin)
+ });
+ needs_install |= !plugins_to_install.is_empty();
+ }
+ })?;
+ if needs_install {
+ let installed_plugins = plugins_to_install.clone();
+ cx.background_executor()
+ .spawn(async move {
+ install_default_prettier(plugins_to_install, node, fs).await
+ })
+ .await
+ .context("prettier & plugins install")
+ .map_err(Arc::new)?;
+ this.update(&mut cx, |this, _| {
+ let default_prettier =
+ this.default_prettier
+ .get_or_insert_with(|| DefaultPrettier {
+ instance: None,
+ installation_process: Some(
+ Task::ready(Ok(())).shared(),
+ ),
+ installed_plugins: HashSet::default(),
+ });
+ default_prettier.instance = None;
+ default_prettier.installed_plugins.extend(installed_plugins);
+ })?;
+ }
+ }
}
- })?;
- if everything_was_installed {
- return Ok(());
- }
-
- cx.spawn(move |_| async move {
- let prettier_wrapper_path = default_prettier_dir.join(prettier::PRETTIER_SERVER_FILE);
- // method creates parent directory if it doesn't exist
- fs.save(&prettier_wrapper_path, &text::Rope::from(prettier::PRETTIER_SERVER_JS), text::LineEnding::Unix).await
- .with_context(|| format!("writing {} file at {prettier_wrapper_path:?}", prettier::PRETTIER_SERVER_FILE))?;
+ Ok(())
+ })
+ .shared(),
+ );
+ }
+}
- let packages_to_versions = future::try_join_all(
- plugins_to_install
- .iter()
- .chain(Some(&"prettier"))
- .map(|package_name| async {
- let returned_package_name = package_name.to_string();
- let latest_version = node.npm_package_latest_version(package_name)
- .await
- .with_context(|| {
- format!("fetching latest npm version for package {returned_package_name}")
- })?;
- anyhow::Ok((returned_package_name, latest_version))
- }),
- )
- .await
- .context("fetching latest npm versions")?;
-
- log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
- let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
- (package.as_str(), version.as_str())
- }).collect::<Vec<_>>();
- node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;
- let installed_packages = !plugins_to_install.is_empty();
- install_success_tx.try_send(plugins_to_install).ok();
-
- if !installed_packages {
- if let Some(prettier) = already_running_prettier {
- prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
+fn start_default_prettier(
+ node: Arc<dyn NodeRuntime>,
+ worktree_id: Option<WorktreeId>,
+ cx: &mut ModelContext<'_, Project>,
+) -> Task<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>> {
+ cx.spawn(|project, mut cx| async move {
+ loop {
+ let default_prettier_installing = match project.update(&mut cx, |project, _| {
+ project
+ .default_prettier
+ .as_ref()
+ .and_then(|default_prettier| default_prettier.installation_process.clone())
+ }) {
+ Ok(installation) => installation,
+ Err(e) => {
+ return Task::ready(Err(Arc::new(
+ e.context("project is gone during default prettier installation"),
+ )))
+ .shared()
+ }
+ };
+ match default_prettier_installing {
+ Some(installation_task) => {
+ if installation_task.await.is_ok() {
+ break;
}
}
+ None => break,
+ }
+ }
+
+ match project.update(&mut cx, |project, cx| {
+ match project
+ .default_prettier
+ .as_mut()
+ .and_then(|default_prettier| default_prettier.instance.as_mut())
+ {
+ Some(default_prettier) => default_prettier.clone(),
+ None => {
+ let new_default_prettier =
+ start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
+ project
+ .default_prettier
+ .get_or_insert_with(|| DefaultPrettier {
+ instance: None,
+ installation_process: None,
+ #[cfg(not(any(test, feature = "test-support")))]
+ installed_plugins: HashSet::default(),
+ })
+ .instance = Some(new_default_prettier.clone());
+ new_default_prettier
+ }
+ }
+ }) {
+ Ok(task) => task,
+ Err(e) => Task::ready(Err(Arc::new(
+ e.context("project is gone during default prettier startup"),
+ )))
+ .shared(),
+ }
+ })
+}
- anyhow::Ok(())
- }).await
- })
+fn start_prettier(
+ node: Arc<dyn NodeRuntime>,
+ prettier_dir: PathBuf,
+ worktree_id: Option<WorktreeId>,
+ cx: &mut ModelContext<'_, Project>,
+) -> Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>> {
+ cx.spawn(|project, mut cx| async move {
+ let new_server_id = project.update(&mut cx, |project, _| {
+ project.languages.next_language_server_id()
+ })?;
+ let new_prettier = Prettier::start(new_server_id, prettier_dir, node, cx.clone())
+ .await
+ .context("default prettier spawn")
+ .map(Arc::new)
+ .map_err(Arc::new)?;
+ register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
+ Ok(new_prettier)
+ })
+ .shared()
+}
+
+fn register_new_prettier(
+ project: &WeakModel<Project>,
+ prettier: &Prettier,
+ worktree_id: Option<WorktreeId>,
+ new_server_id: LanguageServerId,
+ cx: &mut AsyncAppContext,
+) {
+ let prettier_dir = prettier.prettier_dir();
+ let is_default = prettier.is_default();
+ if is_default {
+ log::info!("Started default prettier in {prettier_dir:?}");
+ } else {
+ log::info!("Started prettier in {prettier_dir:?}");
+ }
+ if let Some(prettier_server) = prettier.server() {
+ project
+ .update(cx, |project, cx| {
+ let name = if is_default {
+ LanguageServerName(Arc::from("prettier (default)"))
+ } else {
+ let worktree_path = worktree_id
+ .and_then(|id| project.worktree_for_id(id, cx))
+ .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
+ let name = match worktree_path {
+ Some(worktree_path) => {
+ if prettier_dir == worktree_path.as_ref() {
+ let name = prettier_dir
+ .file_name()
+ .and_then(|name| name.to_str())
+ .unwrap_or_default();
+ format!("prettier ({name})")
+ } else {
+ let dir_to_display = prettier_dir
+ .strip_prefix(worktree_path.as_ref())
+ .ok()
+ .unwrap_or(prettier_dir);
+ format!("prettier ({})", dir_to_display.display())
+ }
+ }
+ None => format!("prettier ({})", prettier_dir.display()),
+ };
+ LanguageServerName(Arc::from(name))
+ };
+ project
+ .supplementary_language_servers
+ .insert(new_server_id, (name, Arc::clone(prettier_server)));
+ cx.emit(Event::LanguageServerAdded(new_server_id));
+ })
+ .ok();
}
}
+#[cfg(not(any(test, feature = "test-support")))]
+async fn install_default_prettier(
+ plugins_to_install: HashSet<&'static str>,
+ node: Arc<dyn NodeRuntime>,
+ fs: Arc<dyn Fs>,
+) -> anyhow::Result<()> {
+ let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
+ // method creates parent directory if it doesn't exist
+ fs.save(
+ &prettier_wrapper_path,
+ &text::Rope::from(prettier::PRETTIER_SERVER_JS),
+ text::LineEnding::Unix,
+ )
+ .await
+ .with_context(|| {
+ format!(
+ "writing {} file at {prettier_wrapper_path:?}",
+ prettier::PRETTIER_SERVER_FILE
+ )
+ })?;
+
+ let packages_to_versions =
+ future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
+ |package_name| async {
+ let returned_package_name = package_name.to_string();
+ let latest_version = node
+ .npm_package_latest_version(package_name)
+ .await
+ .with_context(|| {
+ format!("fetching latest npm version for package {returned_package_name}")
+ })?;
+ anyhow::Ok((returned_package_name, latest_version))
+ },
+ ))
+ .await
+ .context("fetching latest npm versions")?;
+
+ log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
+ let borrowed_packages = packages_to_versions
+ .iter()
+ .map(|(package, version)| (package.as_str(), version.as_str()))
+ .collect::<Vec<_>>();
+ node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
+ .await
+ .context("fetching formatter packages")?;
+ anyhow::Ok(())
+}
+
fn subscribe_for_copilot_events(
copilot: &Model<Copilot>,
cx: &mut ModelContext<'_, Project>,
@@ -1,4 +1,4 @@
-use crate::{search::PathMatcher, Event, *};
+use crate::{Event, *};
use fs::FakeFs;
use futures::{future, StreamExt};
use gpui::AppContext;
@@ -13,7 +13,7 @@ use pretty_assertions::assert_eq;
use serde_json::json;
use std::{os, task::Poll};
use unindent::Unindent as _;
-use util::{assert_set_eq, test::temp_tree};
+use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
#[gpui::test]
async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
@@ -1,7 +1,6 @@
use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
use anyhow::{Context, Result};
use client::proto;
-use globset::{Glob, GlobMatcher};
use itertools::Itertools;
use language::{char_kind, BufferSnapshot};
use regex::{Regex, RegexBuilder};
@@ -10,9 +9,10 @@ use std::{
borrow::Cow,
io::{BufRead, BufReader, Read},
ops::Range,
- path::{Path, PathBuf},
+ path::Path,
sync::Arc,
};
+use util::paths::PathMatcher;
#[derive(Clone, Debug)]
pub struct SearchInputs {
@@ -52,31 +52,6 @@ pub enum SearchQuery {
},
}
-#[derive(Clone, Debug)]
-pub struct PathMatcher {
- maybe_path: PathBuf,
- glob: GlobMatcher,
-}
-
-impl std::fmt::Display for PathMatcher {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- self.maybe_path.to_string_lossy().fmt(f)
- }
-}
-
-impl PathMatcher {
- pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
- Ok(PathMatcher {
- glob: Glob::new(&maybe_glob)?.compile_matcher(),
- maybe_path: PathBuf::from(maybe_glob),
- })
- }
-
- pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
- other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
- }
-}
-
impl SearchQuery {
pub fn text(
query: impl ToString,
@@ -0,0 +1,29 @@
+[package]
+name = "rich_text2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/rich_text.rs"
+doctest = false
+
+[features]
+test-support = [
+ "gpui/test-support",
+ "util/test-support",
+]
+
+[dependencies]
+collections = { path = "../collections" }
+gpui = { package = "gpui2", path = "../gpui2" }
+sum_tree = { path = "../sum_tree" }
+theme = { package = "theme2", path = "../theme2" }
+language = { package = "language2", path = "../language2" }
+util = { path = "../util" }
+anyhow.workspace = true
+futures.workspace = true
+lazy_static.workspace = true
+pulldown-cmark = { version = "0.9.2", default-features = false }
+smallvec.workspace = true
+smol.workspace = true
@@ -0,0 +1,373 @@
+use std::{ops::Range, sync::Arc};
+
+use anyhow::bail;
+use futures::FutureExt;
+use gpui::{AnyElement, FontStyle, FontWeight, HighlightStyle, UnderlineStyle};
+use language::{HighlightId, Language, LanguageRegistry};
+use util::RangeExt;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Highlight {
+ Id(HighlightId),
+ Highlight(HighlightStyle),
+ Mention,
+ SelfMention,
+}
+
+impl From<HighlightStyle> for Highlight {
+ fn from(style: HighlightStyle) -> Self {
+ Self::Highlight(style)
+ }
+}
+
+impl From<HighlightId> for Highlight {
+ fn from(style: HighlightId) -> Self {
+ Self::Id(style)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct RichText {
+ pub text: String,
+ pub highlights: Vec<(Range<usize>, Highlight)>,
+ pub region_ranges: Vec<Range<usize>>,
+ pub regions: Vec<RenderedRegion>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum BackgroundKind {
+ Code,
+ /// A mention background for non-self user.
+ Mention,
+ SelfMention,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct RenderedRegion {
+ pub background_kind: Option<BackgroundKind>,
+ pub link_url: Option<String>,
+}
+
+/// Allows one to specify extra links to the rendered markdown, which can be used
+/// for e.g. mentions.
+pub struct Mention {
+ pub range: Range<usize>,
+ pub is_self_mention: bool,
+}
+
+impl RichText {
+ pub fn element<V: 'static>(
+ &self,
+ // syntax: Arc<SyntaxTheme>,
+ // style: RichTextStyle,
+ // cx: &mut ViewContext<V>,
+ ) -> AnyElement<V> {
+ todo!();
+
+ // let mut region_id = 0;
+ // let view_id = cx.view_id();
+
+ // let regions = self.regions.clone();
+
+ // enum Markdown {}
+ // Text::new(self.text.clone(), style.text.clone())
+ // .with_highlights(
+ // self.highlights
+ // .iter()
+ // .filter_map(|(range, highlight)| {
+ // let style = match highlight {
+ // Highlight::Id(id) => id.style(&syntax)?,
+ // Highlight::Highlight(style) => style.clone(),
+ // Highlight::Mention => style.mention_highlight,
+ // Highlight::SelfMention => style.self_mention_highlight,
+ // };
+ // Some((range.clone(), style))
+ // })
+ // .collect::<Vec<_>>(),
+ // )
+ // .with_custom_runs(self.region_ranges.clone(), move |ix, bounds, cx| {
+ // region_id += 1;
+ // let region = regions[ix].clone();
+ // if let Some(url) = region.link_url {
+ // cx.scene().push_cursor_region(CursorRegion {
+ // bounds,
+ // style: CursorStyle::PointingHand,
+ // });
+ // cx.scene().push_mouse_region(
+ // MouseRegion::new::<Markdown>(view_id, region_id, bounds)
+ // .on_click::<V, _>(MouseButton::Left, move |_, _, cx| {
+ // cx.platform().open_url(&url)
+ // }),
+ // );
+ // }
+ // if let Some(region_kind) = ®ion.background_kind {
+ // let background = match region_kind {
+ // BackgroundKind::Code => style.code_background,
+ // BackgroundKind::Mention => style.mention_background,
+ // BackgroundKind::SelfMention => style.self_mention_background,
+ // };
+ // if background.is_some() {
+ // cx.scene().push_quad(gpui::Quad {
+ // bounds,
+ // background,
+ // border: Default::default(),
+ // corner_radii: (2.0).into(),
+ // });
+ // }
+ // }
+ // })
+ // .with_soft_wrap(true)
+ // .into_any()
+ }
+
+ pub fn add_mention(
+ &mut self,
+ range: Range<usize>,
+ is_current_user: bool,
+ mention_style: HighlightStyle,
+ ) -> anyhow::Result<()> {
+ if range.end > self.text.len() {
+ bail!(
+ "Mention in range {range:?} is outside of bounds for a message of length {}",
+ self.text.len()
+ );
+ }
+
+ if is_current_user {
+ self.region_ranges.push(range.clone());
+ self.regions.push(RenderedRegion {
+ background_kind: Some(BackgroundKind::Mention),
+ link_url: None,
+ });
+ }
+ self.highlights
+ .push((range, Highlight::Highlight(mention_style)));
+ Ok(())
+ }
+}
+
+pub fn render_markdown_mut(
+ block: &str,
+ mut mentions: &[Mention],
+ language_registry: &Arc<LanguageRegistry>,
+ language: Option<&Arc<Language>>,
+ data: &mut RichText,
+) {
+ use pulldown_cmark::{CodeBlockKind, Event, Options, Parser, Tag};
+
+ let mut bold_depth = 0;
+ let mut italic_depth = 0;
+ let mut link_url = None;
+ let mut current_language = None;
+ let mut list_stack = Vec::new();
+
+ let options = Options::all();
+ for (event, source_range) in Parser::new_ext(&block, options).into_offset_iter() {
+ let prev_len = data.text.len();
+ match event {
+ Event::Text(t) => {
+ if let Some(language) = ¤t_language {
+ render_code(&mut data.text, &mut data.highlights, t.as_ref(), language);
+ } else {
+ if let Some(mention) = mentions.first() {
+ if source_range.contains_inclusive(&mention.range) {
+ mentions = &mentions[1..];
+ let range = (prev_len + mention.range.start - source_range.start)
+ ..(prev_len + mention.range.end - source_range.start);
+ data.highlights.push((
+ range.clone(),
+ if mention.is_self_mention {
+ Highlight::SelfMention
+ } else {
+ Highlight::Mention
+ },
+ ));
+ data.region_ranges.push(range);
+ data.regions.push(RenderedRegion {
+ background_kind: Some(if mention.is_self_mention {
+ BackgroundKind::SelfMention
+ } else {
+ BackgroundKind::Mention
+ }),
+ link_url: None,
+ });
+ }
+ }
+
+ data.text.push_str(t.as_ref());
+ let mut style = HighlightStyle::default();
+ if bold_depth > 0 {
+ style.font_weight = Some(FontWeight::BOLD);
+ }
+ if italic_depth > 0 {
+ style.font_style = Some(FontStyle::Italic);
+ }
+ if let Some(link_url) = link_url.clone() {
+ data.region_ranges.push(prev_len..data.text.len());
+ data.regions.push(RenderedRegion {
+ link_url: Some(link_url),
+ background_kind: None,
+ });
+ style.underline = Some(UnderlineStyle {
+ thickness: 1.0.into(),
+ ..Default::default()
+ });
+ }
+
+ if style != HighlightStyle::default() {
+ let mut new_highlight = true;
+ if let Some((last_range, last_style)) = data.highlights.last_mut() {
+ if last_range.end == prev_len
+ && last_style == &Highlight::Highlight(style)
+ {
+ last_range.end = data.text.len();
+ new_highlight = false;
+ }
+ }
+ if new_highlight {
+ data.highlights
+ .push((prev_len..data.text.len(), Highlight::Highlight(style)));
+ }
+ }
+ }
+ }
+ Event::Code(t) => {
+ data.text.push_str(t.as_ref());
+ data.region_ranges.push(prev_len..data.text.len());
+ if link_url.is_some() {
+ data.highlights.push((
+ prev_len..data.text.len(),
+ Highlight::Highlight(HighlightStyle {
+ underline: Some(UnderlineStyle {
+ thickness: 1.0.into(),
+ ..Default::default()
+ }),
+ ..Default::default()
+ }),
+ ));
+ }
+ data.regions.push(RenderedRegion {
+ background_kind: Some(BackgroundKind::Code),
+ link_url: link_url.clone(),
+ });
+ }
+ Event::Start(tag) => match tag {
+ Tag::Paragraph => new_paragraph(&mut data.text, &mut list_stack),
+ Tag::Heading(_, _, _) => {
+ new_paragraph(&mut data.text, &mut list_stack);
+ bold_depth += 1;
+ }
+ Tag::CodeBlock(kind) => {
+ new_paragraph(&mut data.text, &mut list_stack);
+ current_language = if let CodeBlockKind::Fenced(language) = kind {
+ language_registry
+ .language_for_name(language.as_ref())
+ .now_or_never()
+ .and_then(Result::ok)
+ } else {
+ language.cloned()
+ }
+ }
+ Tag::Emphasis => italic_depth += 1,
+ Tag::Strong => bold_depth += 1,
+ Tag::Link(_, url, _) => link_url = Some(url.to_string()),
+ Tag::List(number) => {
+ list_stack.push((number, false));
+ }
+ Tag::Item => {
+ let len = list_stack.len();
+ if let Some((list_number, has_content)) = list_stack.last_mut() {
+ *has_content = false;
+ if !data.text.is_empty() && !data.text.ends_with('\n') {
+ data.text.push('\n');
+ }
+ for _ in 0..len - 1 {
+ data.text.push_str(" ");
+ }
+ if let Some(number) = list_number {
+ data.text.push_str(&format!("{}. ", number));
+ *number += 1;
+ *has_content = false;
+ } else {
+ data.text.push_str("- ");
+ }
+ }
+ }
+ _ => {}
+ },
+ Event::End(tag) => match tag {
+ Tag::Heading(_, _, _) => bold_depth -= 1,
+ Tag::CodeBlock(_) => current_language = None,
+ Tag::Emphasis => italic_depth -= 1,
+ Tag::Strong => bold_depth -= 1,
+ Tag::Link(_, _, _) => link_url = None,
+ Tag::List(_) => drop(list_stack.pop()),
+ _ => {}
+ },
+ Event::HardBreak => data.text.push('\n'),
+ Event::SoftBreak => data.text.push(' '),
+ _ => {}
+ }
+ }
+}
+
+pub fn render_markdown(
+ block: String,
+ mentions: &[Mention],
+ language_registry: &Arc<LanguageRegistry>,
+ language: Option<&Arc<Language>>,
+) -> RichText {
+ let mut data = RichText {
+ text: Default::default(),
+ highlights: Default::default(),
+ region_ranges: Default::default(),
+ regions: Default::default(),
+ };
+
+ render_markdown_mut(&block, mentions, language_registry, language, &mut data);
+
+ data.text = data.text.trim().to_string();
+
+ data
+}
+
+pub fn render_code(
+ text: &mut String,
+ highlights: &mut Vec<(Range<usize>, Highlight)>,
+ content: &str,
+ language: &Arc<Language>,
+) {
+ let prev_len = text.len();
+ text.push_str(content);
+ for (range, highlight_id) in language.highlight_text(&content.into(), 0..content.len()) {
+ highlights.push((
+ prev_len + range.start..prev_len + range.end,
+ Highlight::Id(highlight_id),
+ ));
+ }
+}
+
+pub fn new_paragraph(text: &mut String, list_stack: &mut Vec<(Option<u64>, bool)>) {
+ let mut is_subsequent_paragraph_of_list = false;
+ if let Some((_, has_content)) = list_stack.last_mut() {
+ if *has_content {
+ is_subsequent_paragraph_of_list = true;
+ } else {
+ *has_content = true;
+ return;
+ }
+ }
+
+ if !text.is_empty() {
+ if !text.ends_with('\n') {
+ text.push('\n');
+ }
+ text.push('\n');
+ }
+ for _ in 0..list_stack.len().saturating_sub(1) {
+ text.push_str(" ");
+ }
+ if is_subsequent_paragraph_of_list {
+ text.push_str(" ");
+ }
+}
@@ -29,7 +29,6 @@ serde.workspace = true
serde_derive.workspace = true
smallvec.workspace = true
smol.workspace = true
-globset.workspace = true
serde_json.workspace = true
[dev-dependencies]
client = { path = "../client", features = ["test-support"] }
@@ -22,7 +22,7 @@ use gpui::{
};
use menu::Confirm;
use project::{
- search::{PathMatcher, SearchInputs, SearchQuery},
+ search::{SearchInputs, SearchQuery},
Entry, Project,
};
use semantic_index::{SemanticIndex, SemanticIndexStatus};
@@ -37,7 +37,7 @@ use std::{
sync::Arc,
time::{Duration, Instant},
};
-use util::ResultExt as _;
+use util::{paths::PathMatcher, ResultExt as _};
use workspace::{
item::{BreadcrumbText, Item, ItemEvent, ItemHandle},
searchable::{Direction, SearchableItem, SearchableItemHandle},
@@ -9,7 +9,7 @@ use futures::channel::oneshot;
use gpui::executor;
use ndarray::{Array1, Array2};
use ordered_float::OrderedFloat;
-use project::{search::PathMatcher, Fs};
+use project::Fs;
use rpc::proto::Timestamp;
use rusqlite::params;
use rusqlite::types::Value;
@@ -21,7 +21,7 @@ use std::{
sync::Arc,
time::SystemTime,
};
-use util::TryFutureExt;
+use util::{paths::PathMatcher, TryFutureExt};
pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> {
let mut indices = (0..data.len()).collect::<Vec<_>>();
@@ -21,7 +21,7 @@ use ordered_float::OrderedFloat;
use parking_lot::Mutex;
use parsing::{CodeContextRetriever, Span, SpanDigest, PARSEABLE_ENTIRE_FILE_TYPES};
use postage::watch;
-use project::{search::PathMatcher, Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
+use project::{Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
use smol::channel;
use std::{
cmp::Reverse,
@@ -33,6 +33,7 @@ use std::{
sync::{Arc, Weak},
time::{Duration, Instant, SystemTime},
};
+use util::paths::PathMatcher;
use util::{channel::RELEASE_CHANNEL_NAME, http::HttpClient, paths::EMBEDDINGS_DIR, ResultExt};
use workspace::WorkspaceCreated;
@@ -10,13 +10,13 @@ use gpui::{executor::Deterministic, Task, TestAppContext};
use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
-use project::{project_settings::ProjectSettings, search::PathMatcher, FakeFs, Fs, Project};
+use project::{project_settings::ProjectSettings, FakeFs, Fs, Project};
use rand::{rngs::StdRng, Rng};
use serde_json::json;
use settings::SettingsStore;
use std::{path::Path, sync::Arc, time::SystemTime};
use unindent::Unindent;
-use util::RandomCharIter;
+use util::{paths::PathMatcher, RandomCharIter};
#[ctor::ctor]
fn init_logger() {
@@ -289,12 +289,12 @@ async fn test_code_context_retrieval_rust() {
impl E {
// This is also a preceding comment
pub fn function_1() -> Option<()> {
- todo!();
+ unimplemented!();
}
// This is a preceding comment
fn function_2() -> Result<()> {
- todo!();
+ unimplemented!();
}
}
@@ -344,7 +344,7 @@ async fn test_code_context_retrieval_rust() {
"
// This is also a preceding comment
pub fn function_1() -> Option<()> {
- todo!();
+ unimplemented!();
}"
.unindent(),
text.find("pub fn function_1").unwrap(),
@@ -353,7 +353,7 @@ async fn test_code_context_retrieval_rust() {
"
// This is a preceding comment
fn function_2() -> Result<()> {
- todo!();
+ unimplemented!();
}"
.unindent(),
text.find("fn function_2").unwrap(),
@@ -17,7 +17,7 @@ doctest = false
[dependencies]
anyhow.workspace = true
-fs = { path = "../fs" }
+fs = { package = "fs2", path = "../fs2" }
gpui = { package = "gpui2", path = "../gpui2" }
indexmap = "1.6.2"
parking_lot.workspace = true
@@ -32,5 +32,5 @@ util = { path = "../util" }
[dev-dependencies]
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-fs = { path = "../fs", features = ["test-support"] }
+fs = { package = "fs2", path = "../fs2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
@@ -14,6 +14,7 @@ test-support = ["tempdir", "git2"]
[dependencies]
anyhow.workspace = true
backtrace = "0.3"
+globset.workspace = true
log.workspace = true
lazy_static.workspace = true
futures.workspace = true
@@ -1,5 +1,6 @@
use std::path::{Path, PathBuf};
+use globset::{Glob, GlobMatcher};
use serde::{Deserialize, Serialize};
lazy_static::lazy_static! {
@@ -189,6 +190,31 @@ impl<P> PathLikeWithPosition<P> {
}
}
+#[derive(Clone, Debug)]
+pub struct PathMatcher {
+ maybe_path: PathBuf,
+ glob: GlobMatcher,
+}
+
+impl std::fmt::Display for PathMatcher {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.maybe_path.to_string_lossy().fmt(f)
+ }
+}
+
+impl PathMatcher {
+ pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
+ Ok(PathMatcher {
+ glob: Glob::new(&maybe_glob)?.compile_matcher(),
+ maybe_path: PathBuf::from(maybe_glob),
+ })
+ }
+
+ pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
+ other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
+ }
+}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -14,7 +14,7 @@ test-support = [
"client2/test-support",
"project2/test-support",
"settings2/test-support",
- "gpui2/test-support",
+ "gpui/test-support",
"fs2/test-support"
]
@@ -25,7 +25,7 @@ client2 = { path = "../client2" }
collections = { path = "../collections" }
# context_menu = { path = "../context_menu" }
fs2 = { path = "../fs2" }
-gpui2 = { path = "../gpui2" }
+gpui = { package = "gpui2", path = "../gpui2" }
install_cli2 = { path = "../install_cli2" }
language2 = { path = "../language2" }
#menu = { path = "../menu" }
@@ -56,7 +56,7 @@ uuid.workspace = true
[dev-dependencies]
call2 = { path = "../call2", features = ["test-support"] }
client2 = { path = "../client2", features = ["test-support"] }
-gpui2 = { path = "../gpui2", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
project2 = { path = "../project2", features = ["test-support"] }
settings2 = { path = "../settings2", features = ["test-support"] }
fs2 = { path = "../fs2", features = ["test-support"] }
@@ -1,5 +1,5 @@
use crate::{status_bar::StatusItemView, Axis, Workspace};
-use gpui2::{
+use gpui::{
div, Action, AnyView, AppContext, Div, Entity, EntityId, EventEmitter, ParentElement, Render,
Subscription, View, ViewContext, WeakView, WindowContext,
};
@@ -226,9 +226,9 @@ impl Dock {
// })
}
- // pub fn active_panel_index(&self) -> usize {
- // self.active_panel_index
- // }
+ pub fn active_panel_index(&self) -> usize {
+ self.active_panel_index
+ }
pub(crate) fn set_open(&mut self, open: bool, cx: &mut ViewContext<Self>) {
if open != self.is_open {
@@ -241,84 +241,87 @@ impl Dock {
}
}
- // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
- // for entry in &mut self.panel_entries {
- // if entry.panel.as_any() == panel {
- // if zoomed != entry.panel.is_zoomed(cx) {
- // entry.panel.set_zoomed(zoomed, cx);
- // }
- // } else if entry.panel.is_zoomed(cx) {
- // entry.panel.set_zoomed(false, cx);
+ // todo!()
+ // pub fn set_panel_zoomed(&mut self, panel: &AnyView, zoomed: bool, cx: &mut ViewContext<Self>) {
+ // for entry in &mut self.panel_entries {
+ // if entry.panel.as_any() == panel {
+ // if zoomed != entry.panel.is_zoomed(cx) {
+ // entry.panel.set_zoomed(zoomed, cx);
// }
+ // } else if entry.panel.is_zoomed(cx) {
+ // entry.panel.set_zoomed(false, cx);
// }
-
- // cx.notify();
// }
- // pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
- // for entry in &mut self.panel_entries {
- // if entry.panel.is_zoomed(cx) {
- // entry.panel.set_zoomed(false, cx);
- // }
- // }
- // }
+ // cx.notify();
+ // }
- // pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
- // let subscriptions = [
- // cx.observe(&panel, |_, _, cx| cx.notify()),
- // cx.subscribe(&panel, |this, panel, event, cx| {
- // if T::should_activate_on_event(event) {
- // if let Some(ix) = this
- // .panel_entries
- // .iter()
- // .position(|entry| entry.panel.id() == panel.id())
- // {
- // this.set_open(true, cx);
- // this.activate_panel(ix, cx);
- // cx.focus(&panel);
- // }
- // } else if T::should_close_on_event(event)
- // && this.visible_panel().map_or(false, |p| p.id() == panel.id())
- // {
- // this.set_open(false, cx);
- // }
- // }),
- // ];
-
- // let dock_view_id = cx.view_id();
- // self.panel_entries.push(PanelEntry {
- // panel: Arc::new(panel),
- // // todo!()
- // // context_menu: cx.add_view(|cx| {
- // // let mut menu = ContextMenu::new(dock_view_id, cx);
- // // menu.set_position_mode(OverlayPositionMode::Local);
- // // menu
- // // }),
- // _subscriptions: subscriptions,
- // });
- // cx.notify()
- // }
+ pub fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
+ for entry in &mut self.panel_entries {
+ if entry.panel.is_zoomed(cx) {
+ entry.panel.set_zoomed(false, cx);
+ }
+ }
+ }
- // pub fn remove_panel<T: Panel>(&mut self, panel: &View<T>, cx: &mut ViewContext<Self>) {
- // if let Some(panel_ix) = self
- // .panel_entries
- // .iter()
- // .position(|entry| entry.panel.id() == panel.id())
- // {
- // if panel_ix == self.active_panel_index {
- // self.active_panel_index = 0;
- // self.set_open(false, cx);
- // } else if panel_ix < self.active_panel_index {
- // self.active_panel_index -= 1;
- // }
- // self.panel_entries.remove(panel_ix);
- // cx.notify();
- // }
- // }
+ pub(crate) fn add_panel<T: Panel>(&mut self, panel: View<T>, cx: &mut ViewContext<Self>) {
+ let subscriptions = [
+ cx.observe(&panel, |_, _, cx| cx.notify()),
+ cx.subscribe(&panel, |this, panel, event, cx| {
+ if T::should_activate_on_event(event) {
+ if let Some(ix) = this
+ .panel_entries
+ .iter()
+ .position(|entry| entry.panel.id() == panel.id())
+ {
+ this.set_open(true, cx);
+ this.activate_panel(ix, cx);
+ // todo!()
+ // cx.focus(&panel);
+ }
+ } else if T::should_close_on_event(event)
+ && this.visible_panel().map_or(false, |p| p.id() == panel.id())
+ {
+ this.set_open(false, cx);
+ }
+ }),
+ ];
- // pub fn panels_len(&self) -> usize {
- // self.panel_entries.len()
- // }
+ // todo!()
+ // let dock_view_id = cx.view_id();
+ self.panel_entries.push(PanelEntry {
+ panel: Arc::new(panel),
+ // todo!()
+ // context_menu: cx.add_view(|cx| {
+ // let mut menu = ContextMenu::new(dock_view_id, cx);
+ // menu.set_position_mode(OverlayPositionMode::Local);
+ // menu
+ // }),
+ _subscriptions: subscriptions,
+ });
+ cx.notify()
+ }
+
+ pub fn remove_panel<T: Panel>(&mut self, panel: &View<T>, cx: &mut ViewContext<Self>) {
+ if let Some(panel_ix) = self
+ .panel_entries
+ .iter()
+ .position(|entry| entry.panel.id() == panel.id())
+ {
+ if panel_ix == self.active_panel_index {
+ self.active_panel_index = 0;
+ self.set_open(false, cx);
+ } else if panel_ix < self.active_panel_index {
+ self.active_panel_index -= 1;
+ }
+ self.panel_entries.remove(panel_ix);
+ cx.notify();
+ }
+ }
+
+ pub fn panels_len(&self) -> usize {
+ self.panel_entries.len()
+ }
pub fn activate_panel(&mut self, panel_ix: usize, cx: &mut ViewContext<Self>) {
if panel_ix != self.active_panel_index {
@@ -352,38 +355,38 @@ impl Dock {
}
}
- // pub fn zoomed_panel(&self, cx: &WindowContext) -> Option<Arc<dyn PanelHandle>> {
- // let entry = self.visible_entry()?;
- // if entry.panel.is_zoomed(cx) {
- // Some(entry.panel.clone())
- // } else {
- // None
- // }
- // }
+ pub fn zoomed_panel(&self, cx: &WindowContext) -> Option<Arc<dyn PanelHandle>> {
+ let entry = self.visible_entry()?;
+ if entry.panel.is_zoomed(cx) {
+ Some(entry.panel.clone())
+ } else {
+ None
+ }
+ }
- // pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
- // self.panel_entries
- // .iter()
- // .find(|entry| entry.panel.id() == panel.id())
- // .map(|entry| entry.panel.size(cx))
- // }
+ pub fn panel_size(&self, panel: &dyn PanelHandle, cx: &WindowContext) -> Option<f32> {
+ self.panel_entries
+ .iter()
+ .find(|entry| entry.panel.id() == panel.id())
+ .map(|entry| entry.panel.size(cx))
+ }
- // pub fn active_panel_size(&self, cx: &WindowContext) -> Option<f32> {
- // if self.is_open {
- // self.panel_entries
- // .get(self.active_panel_index)
- // .map(|entry| entry.panel.size(cx))
- // } else {
- // None
- // }
- // }
+ pub fn active_panel_size(&self, cx: &WindowContext) -> Option<f32> {
+ if self.is_open {
+ self.panel_entries
+ .get(self.active_panel_index)
+ .map(|entry| entry.panel.size(cx))
+ } else {
+ None
+ }
+ }
- // pub fn resize_active_panel(&mut self, size: Option<f32>, cx: &mut ViewContext<Self>) {
- // if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) {
- // entry.panel.set_size(size, cx);
- // cx.notify();
- // }
- // }
+ pub fn resize_active_panel(&mut self, size: Option<f32>, cx: &mut ViewContext<Self>) {
+ if let Some(entry) = self.panel_entries.get_mut(self.active_panel_index) {
+ entry.panel.set_size(size, cx);
+ cx.notify();
+ }
+ }
// pub fn render_placeholder(&self, cx: &WindowContext) -> AnyElement<Workspace> {
// todo!()
@@ -629,7 +632,7 @@ impl StatusItemView for PanelButtons {
#[cfg(any(test, feature = "test-support"))]
pub mod test {
use super::*;
- use gpui2::{div, Div, ViewContext, WindowContext};
+ use gpui::{div, Div, ViewContext, WindowContext};
#[derive(Debug)]
pub enum TestPanelEvent {
@@ -678,7 +681,7 @@ pub mod test {
"TestPanel"
}
- fn position(&self, _: &gpui2::WindowContext) -> super::DockPosition {
+ fn position(&self, _: &gpui::WindowContext) -> super::DockPosition {
self.position
}
@@ -11,7 +11,7 @@ use client2::{
proto::{self, PeerId},
Client,
};
-use gpui2::{
+use gpui::{
AnyElement, AnyView, AppContext, Entity, EntityId, EventEmitter, HighlightStyle, Model, Pixels,
Point, Render, SharedString, Task, View, ViewContext, WeakView, WindowContext,
};
@@ -212,7 +212,7 @@ pub trait ItemHandle: 'static + Send {
&self,
cx: &mut WindowContext,
handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>,
- ) -> gpui2::Subscription;
+ ) -> gpui::Subscription;
fn tab_tooltip_text(&self, cx: &AppContext) -> Option<SharedString>;
fn tab_description(&self, detail: usize, cx: &AppContext) -> Option<SharedString>;
fn tab_content(&self, detail: Option<usize>, cx: &AppContext) -> AnyElement<Pane>;
@@ -256,7 +256,7 @@ pub trait ItemHandle: 'static + Send {
&mut self,
cx: &mut AppContext,
callback: Box<dyn FnOnce(&mut AppContext) + Send>,
- ) -> gpui2::Subscription;
+ ) -> gpui::Subscription;
fn to_searchable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn SearchableItemHandle>>;
fn breadcrumb_location(&self, cx: &AppContext) -> ToolbarItemLocation;
fn breadcrumbs(&self, theme: &ThemeVariant, cx: &AppContext) -> Option<Vec<BreadcrumbText>>;
@@ -286,7 +286,7 @@ impl<T: Item> ItemHandle for View<T> {
&self,
cx: &mut WindowContext,
handler: Box<dyn Fn(ItemEvent, &mut WindowContext) + Send>,
- ) -> gpui2::Subscription {
+ ) -> gpui::Subscription {
cx.subscribe(self, move |_, event, cx| {
for item_event in T::to_item_events(event) {
handler(item_event, cx)
@@ -573,7 +573,7 @@ impl<T: Item> ItemHandle for View<T> {
&mut self,
cx: &mut AppContext,
callback: Box<dyn FnOnce(&mut AppContext) + Send>,
- ) -> gpui2::Subscription {
+ ) -> gpui::Subscription {
cx.observe_release(self, move |_, cx| callback(cx))
}
@@ -747,7 +747,7 @@ impl<T: FollowableItem> FollowableItemHandle for View<T> {
// pub mod test {
// use super::{Item, ItemEvent};
// use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId};
-// use gpui2::{
+// use gpui::{
// elements::Empty, AnyElement, AppContext, Element, Entity, Model, Task, View,
// ViewContext, View, WeakViewHandle,
// };
@@ -1,6 +1,6 @@
use crate::{Toast, Workspace};
use collections::HashMap;
-use gpui2::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext};
+use gpui::{AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext};
use std::{any::TypeId, ops::DerefMut};
pub fn init(cx: &mut AppContext) {
@@ -160,7 +160,7 @@ impl Workspace {
pub mod simple_message_notification {
use super::Notification;
- use gpui2::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext};
+ use gpui::{AnyElement, AppContext, Div, EventEmitter, Render, TextStyle, ViewContext};
use serde::Deserialize;
use std::{borrow::Cow, sync::Arc};
@@ -220,36 +220,36 @@ pub mod simple_message_notification {
}
}
- // todo!()
- // pub fn new_element(
- // message: fn(TextStyle, &AppContext) -> AnyElement<MessageNotification>,
- // ) -> MessageNotification {
- // Self {
- // message: NotificationMessage::Element(message),
- // on_click: None,
- // click_message: None,
- // }
- // }
+ pub fn new_element(
+ message: fn(TextStyle, &AppContext) -> AnyElement<MessageNotification>,
+ ) -> MessageNotification {
+ Self {
+ message: NotificationMessage::Element(message),
+ on_click: None,
+ click_message: None,
+ }
+ }
- // pub fn with_click_message<S>(mut self, message: S) -> Self
- // where
- // S: Into<Cow<'static, str>>,
- // {
- // self.click_message = Some(message.into());
- // self
- // }
+ pub fn with_click_message<S>(mut self, message: S) -> Self
+ where
+ S: Into<Cow<'static, str>>,
+ {
+ self.click_message = Some(message.into());
+ self
+ }
- // pub fn on_click<F>(mut self, on_click: F) -> Self
- // where
- // F: 'static + Fn(&mut ViewContext<Self>),
- // {
- // self.on_click = Some(Arc::new(on_click));
- // self
- // }
+ pub fn on_click<F>(mut self, on_click: F) -> Self
+ where
+ F: 'static + Send + Sync + Fn(&mut ViewContext<Self>),
+ {
+ self.on_click = Some(Arc::new(on_click));
+ self
+ }
- // pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
- // cx.emit(MessageNotificationEvent::Dismiss);
- // }
+ // todo!()
+ // pub fn dismiss(&mut self, _: &CancelMessageNotification, cx: &mut ViewContext<Self>) {
+ // cx.emit(MessageNotificationEvent::Dismiss);
+ // }
}
impl Render for MessageNotification {
@@ -265,7 +265,7 @@ pub mod simple_message_notification {
// "MessageNotification"
// }
- // fn render(&mut self, cx: &mut gpui2::ViewContext<Self>) -> gpui::AnyElement<Self> {
+ // fn render(&mut self, cx: &mut gpui::ViewContext<Self>) -> gpui::AnyElement<Self> {
// let theme = theme2::current(cx).clone();
// let theme = &theme.simple_message_notification;
@@ -8,7 +8,7 @@ use crate::{
};
use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
-use gpui2::{
+use gpui::{
AppContext, AsyncWindowContext, Component, Div, EntityId, EventEmitter, Model, PromptLevel,
Render, Task, View, ViewContext, VisualContext, WeakView, WindowContext,
};
@@ -416,17 +416,17 @@ impl Pane {
}
}
- // pub(crate) fn workspace(&self) -> &WeakView<Workspace> {
- // &self.workspace
- // }
+ pub(crate) fn workspace(&self) -> &WeakView<Workspace> {
+ &self.workspace
+ }
pub fn has_focus(&self) -> bool {
self.has_focus
}
- // pub fn active_item_index(&self) -> usize {
- // self.active_item_index
- // }
+ pub fn active_item_index(&self) -> usize {
+ self.active_item_index
+ }
// pub fn on_can_drop<F>(&mut self, can_drop: F)
// where
@@ -1865,14 +1865,14 @@ impl Pane {
// .into_any()
// }
- // pub fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext<Self>) {
- // self.zoomed = zoomed;
- // cx.notify();
- // }
+ pub fn set_zoomed(&mut self, zoomed: bool, cx: &mut ViewContext<Self>) {
+ self.zoomed = zoomed;
+ cx.notify();
+ }
- // pub fn is_zoomed(&self) -> bool {
- // self.zoomed
- // }
+ pub fn is_zoomed(&self) -> bool {
+ self.zoomed
+ }
}
// impl Entity for Pane {
@@ -2907,6 +2907,6 @@ impl Render for DraggedTab {
type Element = Div<Self>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
- div().w_8().h_4().bg(gpui2::red())
+ div().w_8().h_4().bg(gpui::red())
}
}
@@ -1,6 +1,6 @@
use super::DraggedItem;
use crate::{Pane, SplitDirection, Workspace};
-use gpui2::{
+use gpui::{
color::Color,
elements::{Canvas, MouseEventHandler, ParentElement, Stack},
geometry::{rect::RectF, vector::Vector2F},
@@ -6,9 +6,7 @@ use db2::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
};
-use gpui2::{
- point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext,
-};
+use gpui::{point, size, AnyElement, AnyWeakView, Bounds, Model, Pixels, Point, View, ViewContext};
use parking_lot::Mutex;
use project2::Project;
use serde::Deserialize;
@@ -6,7 +6,7 @@ use std::path::Path;
use anyhow::{anyhow, bail, Context, Result};
use db2::{define_connection, query, sqlez::connection::Connection, sqlez_macros::sql};
-use gpui2::WindowBounds;
+use gpui::WindowBounds;
use util::{unzip_option, ResultExt};
use uuid::Uuid;
@@ -549,425 +549,425 @@ impl WorkspaceDb {
}
}
-// todo!()
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use db::open_test_db;
-
-// #[gpui::test]
-// async fn test_next_id_stability() {
-// env_logger::try_init().ok();
-
-// let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
-
-// db.write(|conn| {
-// conn.migrate(
-// "test_table",
-// &[sql!(
-// CREATE TABLE test_table(
-// text TEXT,
-// workspace_id INTEGER,
-// FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
-// ON DELETE CASCADE
-// ) STRICT;
-// )],
-// )
-// .unwrap();
-// })
-// .await;
-
-// let id = db.next_id().await.unwrap();
-// // Assert the empty row got inserted
-// assert_eq!(
-// Some(id),
-// db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!(
-// SELECT workspace_id FROM workspaces WHERE workspace_id = ?
-// ))
-// .unwrap()(id)
-// .unwrap()
-// );
-
-// db.write(move |conn| {
-// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
-// .unwrap()(("test-text-1", id))
-// .unwrap()
-// })
-// .await;
-
-// let test_text_1 = db
-// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
-// .unwrap()(1)
-// .unwrap()
-// .unwrap();
-// assert_eq!(test_text_1, "test-text-1");
-// }
-
-// #[gpui::test]
-// async fn test_workspace_id_stability() {
-// env_logger::try_init().ok();
-
-// let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
-
-// db.write(|conn| {
-// conn.migrate(
-// "test_table",
-// &[sql!(
-// CREATE TABLE test_table(
-// text TEXT,
-// workspace_id INTEGER,
-// FOREIGN KEY(workspace_id)
-// REFERENCES workspaces(workspace_id)
-// ON DELETE CASCADE
-// ) STRICT;)],
-// )
-// })
-// .await
-// .unwrap();
-
-// let mut workspace_1 = SerializedWorkspace {
-// id: 1,
-// location: (["/tmp", "/tmp2"]).into(),
-// center_group: Default::default(),
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// };
-
-// let workspace_2 = SerializedWorkspace {
-// id: 2,
-// location: (["/tmp"]).into(),
-// center_group: Default::default(),
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// };
-
-// db.save_workspace(workspace_1.clone()).await;
-
-// db.write(|conn| {
-// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
-// .unwrap()(("test-text-1", 1))
-// .unwrap();
-// })
-// .await;
-
-// db.save_workspace(workspace_2.clone()).await;
-
-// db.write(|conn| {
-// conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
-// .unwrap()(("test-text-2", 2))
-// .unwrap();
-// })
-// .await;
-
-// workspace_1.location = (["/tmp", "/tmp3"]).into();
-// db.save_workspace(workspace_1.clone()).await;
-// db.save_workspace(workspace_1).await;
-// db.save_workspace(workspace_2).await;
-
-// let test_text_2 = db
-// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
-// .unwrap()(2)
-// .unwrap()
-// .unwrap();
-// assert_eq!(test_text_2, "test-text-2");
-
-// let test_text_1 = db
-// .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
-// .unwrap()(1)
-// .unwrap()
-// .unwrap();
-// assert_eq!(test_text_1, "test-text-1");
-// }
-
-// fn group(axis: gpui::Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
-// SerializedPaneGroup::Group {
-// axis,
-// flexes: None,
-// children,
-// }
-// }
-
-// #[gpui::test]
-// async fn test_full_workspace_serialization() {
-// env_logger::try_init().ok();
-
-// let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
-
-// // -----------------
-// // | 1,2 | 5,6 |
-// // | - - - | |
-// // | 3,4 | |
-// // -----------------
-// let center_group = group(
-// gpui::Axis::Horizontal,
-// vec![
-// group(
-// gpui::Axis::Vertical,
-// vec![
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 5, false),
-// SerializedItem::new("Terminal", 6, true),
-// ],
-// false,
-// )),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 7, true),
-// SerializedItem::new("Terminal", 8, false),
-// ],
-// false,
-// )),
-// ],
-// ),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 9, false),
-// SerializedItem::new("Terminal", 10, true),
-// ],
-// false,
-// )),
-// ],
-// );
-
-// let workspace = SerializedWorkspace {
-// id: 5,
-// location: (["/tmp", "/tmp2"]).into(),
-// center_group,
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// };
-
-// db.save_workspace(workspace.clone()).await;
-// let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
-
-// assert_eq!(workspace, round_trip_workspace.unwrap());
-
-// // Test guaranteed duplicate IDs
-// db.save_workspace(workspace.clone()).await;
-// db.save_workspace(workspace.clone()).await;
-
-// let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
-// assert_eq!(workspace, round_trip_workspace.unwrap());
-// }
-
-// #[gpui::test]
-// async fn test_workspace_assignment() {
-// env_logger::try_init().ok();
-
-// let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
-
-// let workspace_1 = SerializedWorkspace {
-// id: 1,
-// location: (["/tmp", "/tmp2"]).into(),
-// center_group: Default::default(),
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// };
-
-// let mut workspace_2 = SerializedWorkspace {
-// id: 2,
-// location: (["/tmp"]).into(),
-// center_group: Default::default(),
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// };
-
-// db.save_workspace(workspace_1.clone()).await;
-// db.save_workspace(workspace_2.clone()).await;
-
-// // Test that paths are treated as a set
-// assert_eq!(
-// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
-// workspace_1
-// );
-// assert_eq!(
-// db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(),
-// workspace_1
-// );
-
-// // Make sure that other keys work
-// assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2);
-// assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);
-
-// // Test 'mutate' case of updating a pre-existing id
-// workspace_2.location = (["/tmp", "/tmp2"]).into();
-
-// db.save_workspace(workspace_2.clone()).await;
-// assert_eq!(
-// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
-// workspace_2
-// );
-
-// // Test other mechanism for mutating
-// let mut workspace_3 = SerializedWorkspace {
-// id: 3,
-// location: (&["/tmp", "/tmp2"]).into(),
-// center_group: Default::default(),
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// };
-
-// db.save_workspace(workspace_3.clone()).await;
-// assert_eq!(
-// db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
-// workspace_3
-// );
-
-// // Make sure that updating paths differently also works
-// workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into();
-// db.save_workspace(workspace_3.clone()).await;
-// assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
-// assert_eq!(
-// db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"])
-// .unwrap(),
-// workspace_3
-// );
-// }
-
-// use crate::persistence::model::SerializedWorkspace;
-// use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
-
-// fn default_workspace<P: AsRef<Path>>(
-// workspace_id: &[P],
-// center_group: &SerializedPaneGroup,
-// ) -> SerializedWorkspace {
-// SerializedWorkspace {
-// id: 4,
-// location: workspace_id.into(),
-// center_group: center_group.clone(),
-// bounds: Default::default(),
-// display: Default::default(),
-// docks: Default::default(),
-// }
-// }
-
-// #[gpui::test]
-// async fn test_simple_split() {
-// env_logger::try_init().ok();
-
-// let db = WorkspaceDb(open_test_db("simple_split").await);
-
-// // -----------------
-// // | 1,2 | 5,6 |
-// // | - - - | |
-// // | 3,4 | |
-// // -----------------
-// let center_pane = group(
-// gpui::Axis::Horizontal,
-// vec![
-// group(
-// gpui::Axis::Vertical,
-// vec![
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 1, false),
-// SerializedItem::new("Terminal", 2, true),
-// ],
-// false,
-// )),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 4, false),
-// SerializedItem::new("Terminal", 3, true),
-// ],
-// true,
-// )),
-// ],
-// ),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 5, true),
-// SerializedItem::new("Terminal", 6, false),
-// ],
-// false,
-// )),
-// ],
-// );
-
-// let workspace = default_workspace(&["/tmp"], &center_pane);
-
-// db.save_workspace(workspace.clone()).await;
-
-// let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
-
-// assert_eq!(workspace.center_group, new_workspace.center_group);
-// }
-
-// #[gpui::test]
-// async fn test_cleanup_panes() {
-// env_logger::try_init().ok();
-
-// let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
-
-// let center_pane = group(
-// gpui::Axis::Horizontal,
-// vec![
-// group(
-// gpui::Axis::Vertical,
-// vec![
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 1, false),
-// SerializedItem::new("Terminal", 2, true),
-// ],
-// false,
-// )),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 4, false),
-// SerializedItem::new("Terminal", 3, true),
-// ],
-// true,
-// )),
-// ],
-// ),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 5, false),
-// SerializedItem::new("Terminal", 6, true),
-// ],
-// false,
-// )),
-// ],
-// );
-
-// let id = &["/tmp"];
-
-// let mut workspace = default_workspace(id, &center_pane);
-
-// db.save_workspace(workspace.clone()).await;
-
-// workspace.center_group = group(
-// gpui::Axis::Vertical,
-// vec![
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 1, false),
-// SerializedItem::new("Terminal", 2, true),
-// ],
-// false,
-// )),
-// SerializedPaneGroup::Pane(SerializedPane::new(
-// vec![
-// SerializedItem::new("Terminal", 4, true),
-// SerializedItem::new("Terminal", 3, false),
-// ],
-// true,
-// )),
-// ],
-// );
-
-// db.save_workspace(workspace.clone()).await;
-
-// let new_workspace = db.workspace_for_roots(id).unwrap();
-
-// assert_eq!(workspace.center_group, new_workspace.center_group);
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use db2::open_test_db;
+ use gpui;
+
+ #[gpui::test]
+ async fn test_next_id_stability() {
+ env_logger::try_init().ok();
+
+ let db = WorkspaceDb(open_test_db("test_next_id_stability").await);
+
+ db.write(|conn| {
+ conn.migrate(
+ "test_table",
+ &[sql!(
+ CREATE TABLE test_table(
+ text TEXT,
+ workspace_id INTEGER,
+ FOREIGN KEY(workspace_id) REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ) STRICT;
+ )],
+ )
+ .unwrap();
+ })
+ .await;
+
+ let id = db.next_id().await.unwrap();
+ // Assert the empty row got inserted
+ assert_eq!(
+ Some(id),
+ db.select_row_bound::<WorkspaceId, WorkspaceId>(sql!(
+ SELECT workspace_id FROM workspaces WHERE workspace_id = ?
+ ))
+ .unwrap()(id)
+ .unwrap()
+ );
+
+ db.write(move |conn| {
+ conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
+ .unwrap()(("test-text-1", id))
+ .unwrap()
+ })
+ .await;
+
+ let test_text_1 = db
+ .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
+ .unwrap()(1)
+ .unwrap()
+ .unwrap();
+ assert_eq!(test_text_1, "test-text-1");
+ }
+
+ #[gpui::test]
+ async fn test_workspace_id_stability() {
+ env_logger::try_init().ok();
+
+ let db = WorkspaceDb(open_test_db("test_workspace_id_stability").await);
+
+ db.write(|conn| {
+ conn.migrate(
+ "test_table",
+ &[sql!(
+ CREATE TABLE test_table(
+ text TEXT,
+ workspace_id INTEGER,
+ FOREIGN KEY(workspace_id)
+ REFERENCES workspaces(workspace_id)
+ ON DELETE CASCADE
+ ) STRICT;)],
+ )
+ })
+ .await
+ .unwrap();
+
+ let mut workspace_1 = SerializedWorkspace {
+ id: 1,
+ location: (["/tmp", "/tmp2"]).into(),
+ center_group: Default::default(),
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ };
+
+ let workspace_2 = SerializedWorkspace {
+ id: 2,
+ location: (["/tmp"]).into(),
+ center_group: Default::default(),
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ };
+
+ db.save_workspace(workspace_1.clone()).await;
+
+ db.write(|conn| {
+ conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
+ .unwrap()(("test-text-1", 1))
+ .unwrap();
+ })
+ .await;
+
+ db.save_workspace(workspace_2.clone()).await;
+
+ db.write(|conn| {
+ conn.exec_bound(sql!(INSERT INTO test_table(text, workspace_id) VALUES (?, ?)))
+ .unwrap()(("test-text-2", 2))
+ .unwrap();
+ })
+ .await;
+
+ workspace_1.location = (["/tmp", "/tmp3"]).into();
+ db.save_workspace(workspace_1.clone()).await;
+ db.save_workspace(workspace_1).await;
+ db.save_workspace(workspace_2).await;
+
+ let test_text_2 = db
+ .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
+ .unwrap()(2)
+ .unwrap()
+ .unwrap();
+ assert_eq!(test_text_2, "test-text-2");
+
+ let test_text_1 = db
+ .select_row_bound::<_, String>(sql!(SELECT text FROM test_table WHERE workspace_id = ?))
+ .unwrap()(1)
+ .unwrap()
+ .unwrap();
+ assert_eq!(test_text_1, "test-text-1");
+ }
+
+ fn group(axis: Axis, children: Vec<SerializedPaneGroup>) -> SerializedPaneGroup {
+ SerializedPaneGroup::Group {
+ axis,
+ flexes: None,
+ children,
+ }
+ }
+
+ #[gpui::test]
+ async fn test_full_workspace_serialization() {
+ env_logger::try_init().ok();
+
+ let db = WorkspaceDb(open_test_db("test_full_workspace_serialization").await);
+
+ // -----------------
+ // | 1,2 | 5,6 |
+ // | - - - | |
+ // | 3,4 | |
+ // -----------------
+ let center_group = group(
+ Axis::Horizontal,
+ vec![
+ group(
+ Axis::Vertical,
+ vec![
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 5, false),
+ SerializedItem::new("Terminal", 6, true),
+ ],
+ false,
+ )),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 7, true),
+ SerializedItem::new("Terminal", 8, false),
+ ],
+ false,
+ )),
+ ],
+ ),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 9, false),
+ SerializedItem::new("Terminal", 10, true),
+ ],
+ false,
+ )),
+ ],
+ );
+
+ let workspace = SerializedWorkspace {
+ id: 5,
+ location: (["/tmp", "/tmp2"]).into(),
+ center_group,
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ };
+
+ db.save_workspace(workspace.clone()).await;
+ let round_trip_workspace = db.workspace_for_roots(&["/tmp2", "/tmp"]);
+
+ assert_eq!(workspace, round_trip_workspace.unwrap());
+
+ // Test guaranteed duplicate IDs
+ db.save_workspace(workspace.clone()).await;
+ db.save_workspace(workspace.clone()).await;
+
+ let round_trip_workspace = db.workspace_for_roots(&["/tmp", "/tmp2"]);
+ assert_eq!(workspace, round_trip_workspace.unwrap());
+ }
+
+ #[gpui::test]
+ async fn test_workspace_assignment() {
+ env_logger::try_init().ok();
+
+ let db = WorkspaceDb(open_test_db("test_basic_functionality").await);
+
+ let workspace_1 = SerializedWorkspace {
+ id: 1,
+ location: (["/tmp", "/tmp2"]).into(),
+ center_group: Default::default(),
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ };
+
+ let mut workspace_2 = SerializedWorkspace {
+ id: 2,
+ location: (["/tmp"]).into(),
+ center_group: Default::default(),
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ };
+
+ db.save_workspace(workspace_1.clone()).await;
+ db.save_workspace(workspace_2.clone()).await;
+
+ // Test that paths are treated as a set
+ assert_eq!(
+ db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
+ workspace_1
+ );
+ assert_eq!(
+ db.workspace_for_roots(&["/tmp2", "/tmp"]).unwrap(),
+ workspace_1
+ );
+
+ // Make sure that other keys work
+ assert_eq!(db.workspace_for_roots(&["/tmp"]).unwrap(), workspace_2);
+ assert_eq!(db.workspace_for_roots(&["/tmp3", "/tmp2", "/tmp4"]), None);
+
+ // Test 'mutate' case of updating a pre-existing id
+ workspace_2.location = (["/tmp", "/tmp2"]).into();
+
+ db.save_workspace(workspace_2.clone()).await;
+ assert_eq!(
+ db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
+ workspace_2
+ );
+
+ // Test other mechanism for mutating
+ let mut workspace_3 = SerializedWorkspace {
+ id: 3,
+ location: (&["/tmp", "/tmp2"]).into(),
+ center_group: Default::default(),
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ };
+
+ db.save_workspace(workspace_3.clone()).await;
+ assert_eq!(
+ db.workspace_for_roots(&["/tmp", "/tmp2"]).unwrap(),
+ workspace_3
+ );
+
+ // Make sure that updating paths differently also works
+ workspace_3.location = (["/tmp3", "/tmp4", "/tmp2"]).into();
+ db.save_workspace(workspace_3.clone()).await;
+ assert_eq!(db.workspace_for_roots(&["/tmp2", "tmp"]), None);
+ assert_eq!(
+ db.workspace_for_roots(&["/tmp2", "/tmp3", "/tmp4"])
+ .unwrap(),
+ workspace_3
+ );
+ }
+
+ use crate::persistence::model::SerializedWorkspace;
+ use crate::persistence::model::{SerializedItem, SerializedPane, SerializedPaneGroup};
+
+ fn default_workspace<P: AsRef<Path>>(
+ workspace_id: &[P],
+ center_group: &SerializedPaneGroup,
+ ) -> SerializedWorkspace {
+ SerializedWorkspace {
+ id: 4,
+ location: workspace_id.into(),
+ center_group: center_group.clone(),
+ bounds: Default::default(),
+ display: Default::default(),
+ docks: Default::default(),
+ }
+ }
+
+ #[gpui::test]
+ async fn test_simple_split() {
+ env_logger::try_init().ok();
+
+ let db = WorkspaceDb(open_test_db("simple_split").await);
+
+ // -----------------
+ // | 1,2 | 5,6 |
+ // | - - - | |
+ // | 3,4 | |
+ // -----------------
+ let center_pane = group(
+ Axis::Horizontal,
+ vec![
+ group(
+ Axis::Vertical,
+ vec![
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 1, false),
+ SerializedItem::new("Terminal", 2, true),
+ ],
+ false,
+ )),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 4, false),
+ SerializedItem::new("Terminal", 3, true),
+ ],
+ true,
+ )),
+ ],
+ ),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 5, true),
+ SerializedItem::new("Terminal", 6, false),
+ ],
+ false,
+ )),
+ ],
+ );
+
+ let workspace = default_workspace(&["/tmp"], &center_pane);
+
+ db.save_workspace(workspace.clone()).await;
+
+ let new_workspace = db.workspace_for_roots(&["/tmp"]).unwrap();
+
+ assert_eq!(workspace.center_group, new_workspace.center_group);
+ }
+
+ #[gpui::test]
+ async fn test_cleanup_panes() {
+ env_logger::try_init().ok();
+
+ let db = WorkspaceDb(open_test_db("test_cleanup_panes").await);
+
+ let center_pane = group(
+ Axis::Horizontal,
+ vec![
+ group(
+ Axis::Vertical,
+ vec![
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 1, false),
+ SerializedItem::new("Terminal", 2, true),
+ ],
+ false,
+ )),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 4, false),
+ SerializedItem::new("Terminal", 3, true),
+ ],
+ true,
+ )),
+ ],
+ ),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 5, false),
+ SerializedItem::new("Terminal", 6, true),
+ ],
+ false,
+ )),
+ ],
+ );
+
+ let id = &["/tmp"];
+
+ let mut workspace = default_workspace(id, &center_pane);
+
+ db.save_workspace(workspace.clone()).await;
+
+ workspace.center_group = group(
+ Axis::Vertical,
+ vec![
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 1, false),
+ SerializedItem::new("Terminal", 2, true),
+ ],
+ false,
+ )),
+ SerializedPaneGroup::Pane(SerializedPane::new(
+ vec![
+ SerializedItem::new("Terminal", 4, true),
+ SerializedItem::new("Terminal", 3, false),
+ ],
+ true,
+ )),
+ ],
+ );
+
+ db.save_workspace(workspace.clone()).await;
+
+ let new_workspace = db.workspace_for_roots(id).unwrap();
+
+ assert_eq!(workspace.center_group, new_workspace.center_group);
+ }
+}
@@ -7,7 +7,7 @@ use db2::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
};
-use gpui2::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds};
+use gpui::{AsyncWindowContext, Model, Task, View, WeakView, WindowBounds};
use project2::Project;
use std::{
path::{Path, PathBuf},
@@ -55,7 +55,7 @@ impl Column for WorkspaceLocation {
}
}
-#[derive(PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone)]
pub struct SerializedWorkspace {
pub id: WorkspaceId,
pub location: WorkspaceLocation,
@@ -127,7 +127,7 @@ impl Bind for DockData {
}
}
-#[derive(PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone)]
pub enum SerializedPaneGroup {
Group {
axis: Axis,
@@ -286,15 +286,15 @@ pub struct SerializedItem {
pub active: bool,
}
-// impl SerializedItem {
-// pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self {
-// Self {
-// kind: Arc::from(kind.as_ref()),
-// item_id,
-// active,
-// }
-// }
-// }
+impl SerializedItem {
+ pub fn new(kind: impl AsRef<str>, item_id: ItemId, active: bool) -> Self {
+ Self {
+ kind: Arc::from(kind.as_ref()),
+ item_id,
+ active,
+ }
+ }
+}
#[cfg(test)]
impl Default for SerializedItem {
@@ -1,6 +1,6 @@
use std::{any::Any, sync::Arc};
-use gpui2::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext};
+use gpui::{AnyView, AppContext, Subscription, Task, View, ViewContext, WindowContext};
use project2::search::SearchQuery;
use crate::{
@@ -1,7 +1,7 @@
use std::any::TypeId;
use crate::{ItemHandle, Pane};
-use gpui2::{
+use gpui::{
div, AnyView, Component, Div, ParentElement, Render, Styled, Subscription, View, ViewContext,
WindowContext,
};
@@ -1,5 +1,5 @@
use crate::ItemHandle;
-use gpui2::{
+use gpui::{
AnyView, AppContext, Entity, EntityId, EventEmitter, Render, View, ViewContext, WindowContext,
};
@@ -8,6 +8,7 @@ pub mod pane;
pub mod pane_group;
mod persistence;
pub mod searchable;
+// todo!()
// pub mod shared_screen;
mod status_bar;
mod toolbar;
@@ -23,14 +24,14 @@ use client2::{
proto::{self, PeerId},
Client, TypedEnvelope, UserStore,
};
-use collections::{HashMap, HashSet};
+use collections::{hash_map, HashMap, HashSet};
use dock::{Dock, DockPosition, PanelButtons};
use futures::{
channel::{mpsc, oneshot},
future::try_join_all,
Future, FutureExt, StreamExt,
};
-use gpui2::{
+use gpui::{
div, point, size, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext,
AsyncWindowContext, Bounds, Component, Div, EntityId, EventEmitter, GlobalPixels, Model,
ModelContext, ParentElement, Point, Render, Size, StatefulInteractive, Styled, Subscription,
@@ -38,6 +39,7 @@ use gpui2::{
WindowOptions,
};
use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem};
+use itertools::Itertools;
use language2::LanguageRegistry;
use lazy_static::lazy_static;
use node_runtime::NodeRuntime;
@@ -174,42 +176,42 @@ pub struct Toast {
on_click: Option<(Cow<'static, str>, Arc<dyn Fn(&mut WindowContext)>)>,
}
-// impl Toast {
-// pub fn new<I: Into<Cow<'static, str>>>(id: usize, msg: I) -> Self {
-// Toast {
-// id,
-// msg: msg.into(),
-// on_click: None,
-// }
-// }
+impl Toast {
+ pub fn new<I: Into<Cow<'static, str>>>(id: usize, msg: I) -> Self {
+ Toast {
+ id,
+ msg: msg.into(),
+ on_click: None,
+ }
+ }
-// pub fn on_click<F, M>(mut self, message: M, on_click: F) -> Self
-// where
-// M: Into<Cow<'static, str>>,
-// F: Fn(&mut WindowContext) + 'static,
-// {
-// self.on_click = Some((message.into(), Arc::new(on_click)));
-// self
-// }
-// }
+ pub fn on_click<F, M>(mut self, message: M, on_click: F) -> Self
+ where
+ M: Into<Cow<'static, str>>,
+ F: Fn(&mut WindowContext) + 'static,
+ {
+ self.on_click = Some((message.into(), Arc::new(on_click)));
+ self
+ }
+}
-// impl PartialEq for Toast {
-// fn eq(&self, other: &Self) -> bool {
-// self.id == other.id
-// && self.msg == other.msg
-// && self.on_click.is_some() == other.on_click.is_some()
-// }
-// }
+impl PartialEq for Toast {
+ fn eq(&self, other: &Self) -> bool {
+ self.id == other.id
+ && self.msg == other.msg
+ && self.on_click.is_some() == other.on_click.is_some()
+ }
+}
-// impl Clone for Toast {
-// fn clone(&self) -> Self {
-// Toast {
-// id: self.id,
-// msg: self.msg.to_owned(),
-// on_click: self.on_click.clone(),
-// }
-// }
-// }
+impl Clone for Toast {
+ fn clone(&self) -> Self {
+ Toast {
+ id: self.id,
+ msg: self.msg.to_owned(),
+ on_click: self.on_click.clone(),
+ }
+ }
+}
// #[derive(Clone, Deserialize, PartialEq)]
// pub struct OpenTerminal {
@@ -460,7 +462,7 @@ struct Follower {
impl AppState {
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut AppContext) -> Arc<Self> {
- use gpui2::Context;
+ use gpui::Context;
use node_runtime::FakeNodeRuntime;
use settings2::SettingsStore;
@@ -476,8 +478,7 @@ impl AppState {
let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
let workspace_store = cx.build_model(|cx| WorkspaceStore::new(client.clone(), cx));
- // todo!()
- // theme::init((), cx);
+ theme2::init(cx);
client2::init(&client, cx);
crate::init_settings(cx);
@@ -549,7 +550,7 @@ pub struct Workspace {
weak_self: WeakView<Self>,
// modal: Option<ActiveModal>,
zoomed: Option<AnyWeakView>,
- // zoomed_position: Option<DockPosition>,
+ zoomed_position: Option<DockPosition>,
center: PaneGroup,
left_dock: View<Dock>,
bottom_dock: View<Dock>,
@@ -626,7 +627,7 @@ impl Workspace {
}
project2::Event::Closed => {
- // cx.remove_window();
+ cx.remove_window();
}
project2::Event::DeletedEntry(entry_id) => {
@@ -768,7 +769,7 @@ impl Workspace {
weak_self: weak_handle.clone(),
// modal: None,
zoomed: None,
- // zoomed_position: None,
+ zoomed_position: None,
center: PaneGroup::new(center_pane.clone()),
panes: vec![center_pane.clone()],
panes_by_item: Default::default(),
@@ -1059,183 +1060,185 @@ impl Workspace {
&self.project
}
- // pub fn recent_navigation_history(
- // &self,
- // limit: Option<usize>,
- // cx: &AppContext,
- // ) -> Vec<(ProjectPath, Option<PathBuf>)> {
- // let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
- // let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
- // for pane in &self.panes {
- // let pane = pane.read(cx);
- // pane.nav_history()
- // .for_each_entry(cx, |entry, (project_path, fs_path)| {
- // if let Some(fs_path) = &fs_path {
- // abs_paths_opened
- // .entry(fs_path.clone())
- // .or_default()
- // .insert(project_path.clone());
- // }
- // let timestamp = entry.timestamp;
- // match history.entry(project_path) {
- // hash_map::Entry::Occupied(mut entry) => {
- // let (_, old_timestamp) = entry.get();
-// if &timestamp > old_timestamp {
- // entry.insert((fs_path, timestamp));
- // }
- // }
- // hash_map::Entry::Vacant(entry) => {
- // entry.insert((fs_path, timestamp));
- // }
- // }
- // });
- // }
-
- // history
- // .into_iter()
- // .sorted_by_key(|(_, (_, timestamp))| *timestamp)
- // .map(|(project_path, (fs_path, _))| (project_path, fs_path))
- // .rev()
- // .filter(|(history_path, abs_path)| {
- // let latest_project_path_opened = abs_path
- // .as_ref()
- // .and_then(|abs_path| abs_paths_opened.get(abs_path))
- // .and_then(|project_paths| {
- // project_paths
- // .iter()
- // .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
- // });
+ pub fn recent_navigation_history(
+ &self,
+ limit: Option<usize>,
+ cx: &AppContext,
+ ) -> Vec<(ProjectPath, Option<PathBuf>)> {
+ let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
+ let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
+ for pane in &self.panes {
+ let pane = pane.read(cx);
+ pane.nav_history()
+ .for_each_entry(cx, |entry, (project_path, fs_path)| {
+ if let Some(fs_path) = &fs_path {
+ abs_paths_opened
+ .entry(fs_path.clone())
+ .or_default()
+ .insert(project_path.clone());
+ }
+ let timestamp = entry.timestamp;
+ match history.entry(project_path) {
+ hash_map::Entry::Occupied(mut entry) => {
+ let (_, old_timestamp) = entry.get();
+ if &timestamp > old_timestamp {
+ entry.insert((fs_path, timestamp));
+ }
+ }
+ hash_map::Entry::Vacant(entry) => {
+ entry.insert((fs_path, timestamp));
+ }
+ }
+ });
+ }
- // match latest_project_path_opened {
- // Some(latest_project_path_opened) => latest_project_path_opened == history_path,
- // None => true,
- // }
- // })
- // .take(limit.unwrap_or(usize::MAX))
- // .collect()
- // }
+ history
+ .into_iter()
+ .sorted_by_key(|(_, (_, timestamp))| *timestamp)
+ .map(|(project_path, (fs_path, _))| (project_path, fs_path))
+ .rev()
+ .filter(|(history_path, abs_path)| {
+ let latest_project_path_opened = abs_path
+ .as_ref()
+ .and_then(|abs_path| abs_paths_opened.get(abs_path))
+ .and_then(|project_paths| {
+ project_paths
+ .iter()
+ .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
+ });
- // fn navigate_history(
- // &mut self,
- // pane: WeakView<Pane>,
- // mode: NavigationMode,
- // cx: &mut ViewContext<Workspace>,
- // ) -> Task<Result<()>> {
- // let to_load = if let Some(pane) = pane.upgrade(cx) {
- // cx.focus(&pane);
+ match latest_project_path_opened {
+ Some(latest_project_path_opened) => latest_project_path_opened == history_path,
+ None => true,
+ }
+ })
+ .take(limit.unwrap_or(usize::MAX))
+ .collect()
+ }
- // pane.update(cx, |pane, cx| {
- // loop {
- // // Retrieve the weak item handle from the history.
- // let entry = pane.nav_history_mut().pop(mode, cx)?;
+ fn navigate_history(
+ &mut self,
+ pane: WeakView<Pane>,
+ mode: NavigationMode,
+ cx: &mut ViewContext<Workspace>,
+ ) -> Task<Result<()>> {
+ let to_load = if let Some(pane) = pane.upgrade() {
+ // todo!("focus")
+ // cx.focus(&pane);
+
+ pane.update(cx, |pane, cx| {
+ loop {
+ // Retrieve the weak item handle from the history.
+ let entry = pane.nav_history_mut().pop(mode, cx)?;
+
+ // If the item is still present in this pane, then activate it.
+ if let Some(index) = entry
+ .item
+ .upgrade()
+ .and_then(|v| pane.index_for_item(v.as_ref()))
+ {
+ let prev_active_item_index = pane.active_item_index();
+ pane.nav_history_mut().set_mode(mode);
+ pane.activate_item(index, true, true, cx);
+ pane.nav_history_mut().set_mode(NavigationMode::Normal);
+
+ let mut navigated = prev_active_item_index != pane.active_item_index();
+ if let Some(data) = entry.data {
+ navigated |= pane.active_item()?.navigate(data, cx);
+ }
- // // If the item is still present in this pane, then activate it.
- // if let Some(index) = entry
- // .item
- // .upgrade(cx)
- // .and_then(|v| pane.index_for_item(v.as_ref()))
- // {
- // let prev_active_item_index = pane.active_item_index();
- // pane.nav_history_mut().set_mode(mode);
- // pane.activate_item(index, true, true, cx);
- // pane.nav_history_mut().set_mode(NavigationMode::Normal);
-
- // let mut navigated = prev_active_item_index != pane.active_item_index();
- // if let Some(data) = entry.data {
- // navigated |= pane.active_item()?.navigate(data, cx);
- // }
+ if navigated {
+ break None;
+ }
+ }
+ // If the item is no longer present in this pane, then retrieve its
+ // project path in order to reopen it.
+ else {
+ break pane
+ .nav_history()
+ .path_for_item(entry.item.id())
+ .map(|(project_path, _)| (project_path, entry));
+ }
+ }
+ })
+ } else {
+ None
+ };
- // if navigated {
- // break None;
- // }
- // }
- // // If the item is no longer present in this pane, then retrieve its
- // // project path in order to reopen it.
- // else {
- // break pane
- // .nav_history()
- // .path_for_item(entry.item.id())
- // .map(|(project_path, _)| (project_path, entry));
- // }
- // }
- // })
- // } else {
- // None
- // };
+ if let Some((project_path, entry)) = to_load {
+ // If the item was no longer present, then load it again from its previous path.
+ let task = self.load_path(project_path, cx);
+ cx.spawn(|workspace, mut cx| async move {
+ let task = task.await;
+ let mut navigated = false;
+ if let Some((project_entry_id, build_item)) = task.log_err() {
+ let prev_active_item_id = pane.update(&mut cx, |pane, _| {
+ pane.nav_history_mut().set_mode(mode);
+ pane.active_item().map(|p| p.id())
+ })?;
- // if let Some((project_path, entry)) = to_load {
- // // If the item was no longer present, then load it again from its previous path.
- // let task = self.load_path(project_path, cx);
- // cx.spawn(|workspace, mut cx| async move {
- // let task = task.await;
- // let mut navigated = false;
- // if let Some((project_entry_id, build_item)) = task.log_err() {
- // let prev_active_item_id = pane.update(&mut cx, |pane, _| {
- // pane.nav_history_mut().set_mode(mode);
- // pane.active_item().map(|p| p.id())
- // })?;
-
- // pane.update(&mut cx, |pane, cx| {
- // let item = pane.open_item(project_entry_id, true, cx, build_item);
- // navigated |= Some(item.id()) != prev_active_item_id;
- // pane.nav_history_mut().set_mode(NavigationMode::Normal);
- // if let Some(data) = entry.data {
- // navigated |= item.navigate(data, cx);
- // }
- // })?;
- // }
+ pane.update(&mut cx, |pane, cx| {
+ let item = pane.open_item(project_entry_id, true, cx, build_item);
+ navigated |= Some(item.id()) != prev_active_item_id;
+ pane.nav_history_mut().set_mode(NavigationMode::Normal);
+ if let Some(data) = entry.data {
+ navigated |= item.navigate(data, cx);
+ }
+ })?;
+ }
- // if !navigated {
- // workspace
- // .update(&mut cx, |workspace, cx| {
- // Self::navigate_history(workspace, pane, mode, cx)
- // })?
- // .await?;
- // }
+ if !navigated {
+ workspace
+ .update(&mut cx, |workspace, cx| {
+ Self::navigate_history(workspace, pane, mode, cx)
+ })?
+ .await?;
+ }
- // Ok(())
- // })
- // } else {
- // Task::ready(Ok(()))
- // }
- // }
+ Ok(())
+ })
+ } else {
+ Task::ready(Ok(()))
+ }
+ }
- // pub fn go_back(
- // &mut self,
- // pane: WeakView<Pane>,
- // cx: &mut ViewContext<Workspace>,
- // ) -> Task<Result<()>> {
- // self.navigate_history(pane, NavigationMode::GoingBack, cx)
- // }
+ pub fn go_back(
+ &mut self,
+ pane: WeakView<Pane>,
+ cx: &mut ViewContext<Workspace>,
+ ) -> Task<Result<()>> {
+ self.navigate_history(pane, NavigationMode::GoingBack, cx)
+ }
- // pub fn go_forward(
- // &mut self,
- // pane: WeakView<Pane>,
- // cx: &mut ViewContext<Workspace>,
- // ) -> Task<Result<()>> {
- // self.navigate_history(pane, NavigationMode::GoingForward, cx)
- // }
+ pub fn go_forward(
+ &mut self,
+ pane: WeakView<Pane>,
+ cx: &mut ViewContext<Workspace>,
+ ) -> Task<Result<()>> {
+ self.navigate_history(pane, NavigationMode::GoingForward, cx)
+ }
- // pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
- // self.navigate_history(
- // self.active_pane().downgrade(),
- // NavigationMode::ReopeningClosedItem,
- // cx,
- // )
- // }
+ pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
+ self.navigate_history(
+ self.active_pane().downgrade(),
+ NavigationMode::ReopeningClosedItem,
+ cx,
+ )
+ }
- // pub fn client(&self) -> &Client {
- // &self.app_state.client
- // }
+ pub fn client(&self) -> &Client {
+ &self.app_state.client
+ }
- // pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext<Self>) {
- // self.titlebar_item = Some(item);
- // cx.notify();
- // }
+ // todo!()
+ // pub fn set_titlebar_item(&mut self, item: AnyViewHandle, cx: &mut ViewContext<Self>) {
+ // self.titlebar_item = Some(item);
+ // cx.notify();
+ // }
- // pub fn titlebar_item(&self) -> Option<AnyViewHandle> {
- // self.titlebar_item.clone()
- // }
+ // pub fn titlebar_item(&self) -> Option<AnyViewHandle> {
+ // self.titlebar_item.clone()
+ // }
// /// Call the given callback with a workspace whose project is local.
// ///
@@ -1261,32 +1264,29 @@ impl Workspace {
// }
// }
- // pub fn worktrees<'a>(
- // &self,
- // cx: &'a AppContext,
- // ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
- // self.project.read(cx).worktrees(cx)
- // }
+ pub fn worktrees<'a>(&self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Model<Worktree>> {
+ self.project.read(cx).worktrees()
+ }
- // pub fn visible_worktrees<'a>(
- // &self,
- // cx: &'a AppContext,
- // ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
- // self.project.read(cx).visible_worktrees(cx)
- // }
+ pub fn visible_worktrees<'a>(
+ &self,
+ cx: &'a AppContext,
+ ) -> impl 'a + Iterator<Item = Model<Worktree>> {
+ self.project.read(cx).visible_worktrees(cx)
+ }
- // pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future<Output = ()> + 'static {
- // let futures = self
- // .worktrees(cx)
- // .filter_map(|worktree| worktree.read(cx).as_local())
- // .map(|worktree| worktree.scan_complete())
- // .collect::<Vec<_>>();
- // async move {
- // for future in futures {
- // future.await;
- // }
- // }
- // }
+ pub fn worktree_scans_complete(&self, cx: &AppContext) -> impl Future<Output = ()> + 'static {
+ let futures = self
+ .worktrees(cx)
+ .filter_map(|worktree| worktree.read(cx).as_local())
+ .map(|worktree| worktree.scan_complete())
+ .collect::<Vec<_>>();
+ async move {
+ for future in futures {
+ future.await;
+ }
+ }
+ }
// pub fn close_global(_: &CloseWindow, cx: &mut AppContext) {
// cx.spawn(|mut cx| async move {
@@ -1699,31 +1699,31 @@ impl Workspace {
self.active_pane().read(cx).active_item()
}
- // fn active_project_path(&self, cx: &ViewContext<Self>) -> Option<ProjectPath> {
- // self.active_item(cx).and_then(|item| item.project_path(cx))
- // }
+ fn active_project_path(&self, cx: &ViewContext<Self>) -> Option<ProjectPath> {
+ self.active_item(cx).and_then(|item| item.project_path(cx))
+ }
- // pub fn save_active_item(
- // &mut self,
- // save_intent: SaveIntent,
- // cx: &mut ViewContext<Self>,
- // ) -> Task<Result<()>> {
- // let project = self.project.clone();
- // let pane = self.active_pane();
- // let item_ix = pane.read(cx).active_item_index();
- // let item = pane.read(cx).active_item();
- // let pane = pane.downgrade();
+ pub fn save_active_item(
+ &mut self,
+ save_intent: SaveIntent,
+ cx: &mut ViewContext<Self>,
+ ) -> Task<Result<()>> {
+ let project = self.project.clone();
+ let pane = self.active_pane();
+ let item_ix = pane.read(cx).active_item_index();
+ let item = pane.read(cx).active_item();
+ let pane = pane.downgrade();
- // cx.spawn(|_, mut cx| async move {
- // if let Some(item) = item {
- // Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx)
- // .await
- // .map(|_| ())
- // } else {
- // Ok(())
- // }
- // })
- // }
+ cx.spawn(|_, mut cx| async move {
+ if let Some(item) = item {
+ Pane::save_item(project, &pane, item_ix, item.as_ref(), save_intent, &mut cx)
+ .await
+ .map(|_| ())
+ } else {
+ Ok(())
+ }
+ })
+ }
// pub fn close_inactive_items_and_panes(
// &mut self,
@@ -1825,19 +1825,20 @@ impl Workspace {
// self.serialize_workspace(cx);
// }
- // pub fn close_all_docks(&mut self, cx: &mut ViewContext<Self>) {
- // let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock];
+ pub fn close_all_docks(&mut self, cx: &mut ViewContext<Self>) {
+ let docks = [&self.left_dock, &self.bottom_dock, &self.right_dock];
- // for dock in docks {
- // dock.update(cx, |dock, cx| {
- // dock.set_open(false, cx);
- // });
- // }
+ for dock in docks {
+ dock.update(cx, |dock, cx| {
+ dock.set_open(false, cx);
+ });
+ }
- // cx.focus_self();
- // cx.notify();
- // self.serialize_workspace(cx);
- // }
+ // todo!("focus")
+ // cx.focus_self();
+ cx.notify();
+ self.serialize_workspace(cx);
+ }
// /// Transfer focus to the panel of the given type.
// pub fn focus_panel<T: Panel>(&mut self, cx: &mut ViewContext<Self>) -> Option<View<T>> {
@@ -1904,19 +1905,19 @@ impl Workspace {
// None
// }
- // fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
- // for pane in &self.panes {
- // pane.update(cx, |pane, cx| pane.set_zoomed(false, cx));
- // }
+ fn zoom_out(&mut self, cx: &mut ViewContext<Self>) {
+ for pane in &self.panes {
+ pane.update(cx, |pane, cx| pane.set_zoomed(false, cx));
+ }
- // self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx));
- // self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx));
- // self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx));
- // self.zoomed = None;
- // self.zoomed_position = None;
+ self.left_dock.update(cx, |dock, cx| dock.zoom_out(cx));
+ self.bottom_dock.update(cx, |dock, cx| dock.zoom_out(cx));
+ self.right_dock.update(cx, |dock, cx| dock.zoom_out(cx));
+ self.zoomed = None;
+ self.zoomed_position = None;
- // cx.notify();
- // }
+ cx.notify();
+ }
// #[cfg(any(test, feature = "test-support"))]
// pub fn zoomed_view(&self, cx: &AppContext) -> Option<AnyViewHandle> {
@@ -1962,22 +1963,21 @@ impl Workspace {
// cx.notify();
// }
- fn add_pane(&mut self, _cx: &mut ViewContext<Self>) -> View<Pane> {
- todo!()
- // let pane = cx.build_view(|cx| {
- // Pane::new(
- // self.weak_handle(),
- // self.project.clone(),
- // self.pane_history_timestamp.clone(),
- // cx,
- // )
- // });
- // cx.subscribe(&pane, Self::handle_pane_event).detach();
- // self.panes.push(pane.clone());
+ fn add_pane(&mut self, cx: &mut ViewContext<Self>) -> View<Pane> {
+ let pane = cx.build_view(|cx| {
+ Pane::new(
+ self.weak_handle(),
+ self.project.clone(),
+ self.pane_history_timestamp.clone(),
+ cx,
+ )
+ });
+ cx.subscribe(&pane, Self::handle_pane_event).detach();
+ self.panes.push(pane.clone());
// todo!()
// cx.focus(&pane);
- // cx.emit(Event::PaneAdded(pane.clone()));
- // pane
+ cx.emit(Event::PaneAdded(pane.clone()));
+ pane
}
// pub fn add_item_to_center(
@@ -3122,6 +3122,7 @@ impl Workspace {
None
}
+ // todo!()
// fn shared_screen_for_peer(
// &self,
// peer_id: PeerId,
@@ -3498,6 +3499,7 @@ impl Workspace {
})
}
+ // todo!()
// #[cfg(any(test, feature = "test-support"))]
// pub fn test_new(project: ModelHandle<Project>, cx: &mut ViewContext<Self>) -> Self {
// use node_runtime::FakeNodeRuntime;
@@ -3658,6 +3660,7 @@ fn open_items(
})
}
+// todo!()
// fn notify_of_new_dock(workspace: &WeakView<Workspace>, cx: &mut AsyncAppContext) {
// const NEW_PANEL_BLOG_POST: &str = "https://zed.dev/blog/new-panel-system";
// const NEW_DOCK_HINT_KEY: &str = "show_new_dock_key";
@@ -3738,23 +3741,22 @@ fn open_items(
// })
// .ok();
-fn notify_if_database_failed(_workspace: WindowHandle<Workspace>, _cx: &mut AsyncAppContext) {
+fn notify_if_database_failed(workspace: WindowHandle<Workspace>, cx: &mut AsyncAppContext) {
const REPORT_ISSUE_URL: &str ="https://github.com/zed-industries/community/issues/new?assignees=&labels=defect%2Ctriage&template=2_bug_report.yml";
- // todo!()
- // workspace
- // .update(cx, |workspace, cx| {
- // if (*db::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
- // workspace.show_notification_once(0, cx, |cx| {
- // cx.build_view(|_| {
- // MessageNotification::new("Failed to load the database file.")
- // .with_click_message("Click to let us know about this error")
- // .on_click(|cx| cx.platform().open_url(REPORT_ISSUE_URL))
- // })
- // });
- // }
- // })
- // .log_err();
+ workspace
+ .update(cx, |workspace, cx| {
+ if (*db2::ALL_FILE_DB_FAILED).load(std::sync::atomic::Ordering::Acquire) {
+ workspace.show_notification_once(0, cx, |cx| {
+ cx.build_view(|_| {
+ MessageNotification::new("Failed to load the database file.")
+ .with_click_message("Click to let us know about this error")
+ .on_click(|cx| cx.open_url(REPORT_ISSUE_URL))
+ })
+ });
+ }
+ })
+ .log_err();
}
impl EventEmitter for Workspace {
@@ -4176,36 +4178,32 @@ impl WorkspaceStore {
}
async fn handle_update_followers(
- _this: Model<Self>,
- _envelope: TypedEnvelope<proto::UpdateFollowers>,
+ this: Model<Self>,
+ envelope: TypedEnvelope<proto::UpdateFollowers>,
_: Arc<Client>,
- mut _cx: AsyncWindowContext,
+ mut cx: AsyncWindowContext,
) -> Result<()> {
- // let leader_id = envelope.original_sender_id()?;
- // let update = envelope.payload;
-
- // this.update(&mut cx, |this, cx| {
- // for workspace in &this.workspaces {
- // let Some(workspace) = workspace.upgrade() else {
- // continue;
- // };
- // workspace.update(cx, |workspace, cx| {
- // let project_id = workspace.project.read(cx).remote_id();
- // if update.project_id != project_id && update.project_id.is_some() {
- // return;
- // }
- // workspace.handle_update_followers(leader_id, update.clone(), cx);
- // });
- // }
- // Ok(())
- // })?
- todo!()
+ let leader_id = envelope.original_sender_id()?;
+ let update = envelope.payload;
+
+ this.update(&mut cx, |this, cx| {
+ for workspace in &this.workspaces {
+ // Skip workspaces that have been dropped instead of aborting the
+ // whole handler (matches the pre-port behavior).
+ let Some(workspace) = workspace.upgrade() else {
+ continue;
+ };
+ workspace.update(cx, |workspace, cx| {
+ let project_id = workspace.project.read(cx).remote_id();
+ if update.project_id != project_id && update.project_id.is_some() {
+ return;
+ }
+ workspace.handle_update_followers(leader_id, update.clone(), cx);
+ });
+ }
+ Ok(())
+ })?
}
}
-// impl Entity for WorkspaceStore {
-// type Event = ();
-// }
+impl EventEmitter for WorkspaceStore {
+ type Event = ();
+}
impl ViewId {
pub(crate) fn from_proto(message: proto::ViewId) -> Result<Self> {
@@ -49,7 +49,7 @@ impl Settings for WorkspaceSettings {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
- _: &mut gpui2::AppContext,
+ _: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
@@ -12,6 +12,7 @@ use cli::{
CliRequest, CliResponse, IpcHandshake, FORCE_CLI_MODE_ENV_VAR_NAME,
};
use client::UserStore;
+use collections::HashMap;
use db::kvp::KEY_VALUE_STORE;
use fs::RealFs;
use futures::{channel::mpsc, SinkExt, StreamExt};
@@ -42,11 +43,13 @@ use std::{
thread,
time::{SystemTime, UNIX_EPOCH},
};
+use text::Point;
use util::{
async_maybe,
channel::{parse_zed_link, ReleaseChannel, RELEASE_CHANNEL},
http::{self, HttpClient},
- paths, ResultExt,
+ paths::{self, PathLikeWithPosition},
+ ResultExt,
};
use uuid::Uuid;
use workspace2::{AppState, WorkspaceStore};
@@ -228,10 +231,8 @@ fn main() {
let mut _triggered_authentication = false;
match open_rx.try_next() {
- Ok(Some(OpenRequest::Paths { paths: _ })) => {
- // todo!("workspace")
- // cx.update(|cx| workspace::open_paths(&paths, &app_state, None, cx))
- // .detach();
+ Ok(Some(OpenRequest::Paths { paths })) => {
+ workspace2::open_paths(&paths, &app_state, None, cx).detach();
}
Ok(Some(OpenRequest::CliConnection { connection })) => {
let app_state = app_state.clone();
@@ -263,10 +264,10 @@ fn main() {
async move {
while let Some(request) = open_rx.next().await {
match request {
- OpenRequest::Paths { paths: _ } => {
- // todo!("workspace")
- // cx.update(|cx| workspace::open_paths(&paths, &app_state, None, cx))
- // .detach();
+ OpenRequest::Paths { paths } => {
+ cx.update(|cx| workspace2::open_paths(&paths, &app_state, None, cx))
+ .ok()
+ .map(|t| t.detach());
}
OpenRequest::CliConnection { connection } => {
let app_state = app_state.clone();
@@ -781,45 +782,45 @@ async fn handle_cli_connection(
) {
if let Some(request) = requests.next().await {
match request {
- CliRequest::Open { paths: _, wait: _ } => {
- // let mut caret_positions = HashMap::new();
+ CliRequest::Open { paths, wait } => {
+ let mut caret_positions = HashMap::default();
- // todo!("workspace")
- // let paths = if paths.is_empty() {
- // workspace::last_opened_workspace_paths()
- // .await
- // .map(|location| location.paths().to_vec())
- // .unwrap_or_default()
- // } else {
- // paths
- // .into_iter()
- // .filter_map(|path_with_position_string| {
- // let path_with_position = PathLikeWithPosition::parse_str(
- // &path_with_position_string,
- // |path_str| {
- // Ok::<_, std::convert::Infallible>(
- // Path::new(path_str).to_path_buf(),
- // )
- // },
- // )
- // .expect("Infallible");
- // let path = path_with_position.path_like;
- // if let Some(row) = path_with_position.row {
- // if path.is_file() {
- // let row = row.saturating_sub(1);
- // let col =
- // path_with_position.column.unwrap_or(0).saturating_sub(1);
- // caret_positions.insert(path.clone(), Point::new(row, col));
- // }
- // }
- // Some(path)
- // })
- // .collect()
- // };
+ let paths = if paths.is_empty() {
+ workspace2::last_opened_workspace_paths()
+ .await
+ .map(|location| location.paths().to_vec())
+ .unwrap_or_default()
+ } else {
+ paths
+ .into_iter()
+ .filter_map(|path_with_position_string| {
+ let path_with_position = PathLikeWithPosition::parse_str(
+ &path_with_position_string,
+ |path_str| {
+ Ok::<_, std::convert::Infallible>(
+ Path::new(path_str).to_path_buf(),
+ )
+ },
+ )
+ .expect("Infallible");
+ let path = path_with_position.path_like;
+ if let Some(row) = path_with_position.row {
+ if path.is_file() {
+ let row = row.saturating_sub(1);
+ let col =
+ path_with_position.column.unwrap_or(0).saturating_sub(1);
+ caret_positions.insert(path.clone(), Point::new(row, col));
+ }
+ }
+ Some(path)
+ })
+ .collect()
+ };
+ // todo!("editor")
// let mut errored = false;
// match cx
- // .update(|cx| workspace::open_paths(&paths, &app_state, None, cx))
+ // .update(|cx| workspace2::open_paths(&paths, &app_state, None, cx))
// .await
// {
// Ok((workspace, items)) => {
@@ -37,10 +37,9 @@ pub enum IsOnlyInstance {
}
pub fn ensure_only_instance() -> IsOnlyInstance {
- // todo!("zed_stateless")
- // if *db::ZED_STATELESS {
- // return IsOnlyInstance::Yes;
- // }
+ if *db::ZED_STATELESS {
+ return IsOnlyInstance::Yes;
+ }
if check_got_handshake() {
return IsOnlyInstance::No;
@@ -69,11 +69,10 @@ pub async fn handle_cli_connection(
let mut caret_positions = HashMap::default();
let paths = if paths.is_empty() {
- todo!()
- // workspace::last_opened_workspace_paths()
- // .await
- // .map(|location| location.paths().to_vec())
- // .unwrap_or_default()
+ workspace2::last_opened_workspace_paths()
+ .await
+ .map(|location| location.paths().to_vec())
+ .unwrap_or_default()
} else {
paths
.into_iter()
@@ -115,7 +114,7 @@ pub async fn handle_cli_connection(
match item {
Some(Ok(mut item)) => {
if let Some(point) = caret_positions.remove(path) {
- todo!()
+ todo!("editor")
// if let Some(active_editor) = item.downcast::<Editor>() {
// active_editor
// .downgrade()
@@ -260,33 +259,33 @@ pub fn initialize_workspace(
move |workspace, _, event, cx| {
if let workspace2::Event::PaneAdded(pane) = event {
pane.update(cx, |pane, cx| {
- // todo!()
- // pane.toolbar().update(cx, |toolbar, cx| {
- // let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace));
- // toolbar.add_item(breadcrumbs, cx);
- // let buffer_search_bar = cx.add_view(BufferSearchBar::new);
- // toolbar.add_item(buffer_search_bar.clone(), cx);
- // let quick_action_bar = cx.add_view(|_| {
- // QuickActionBar::new(buffer_search_bar, workspace)
- // });
- // toolbar.add_item(quick_action_bar, cx);
- // let diagnostic_editor_controls =
- // cx.add_view(|_| diagnostics2::ToolbarControls::new());
- // toolbar.add_item(diagnostic_editor_controls, cx);
- // let project_search_bar = cx.add_view(|_| ProjectSearchBar::new());
- // toolbar.add_item(project_search_bar, cx);
- // let submit_feedback_button =
- // cx.add_view(|_| SubmitFeedbackButton::new());
- // toolbar.add_item(submit_feedback_button, cx);
- // let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new());
- // toolbar.add_item(feedback_info_text, cx);
- // let lsp_log_item =
- // cx.add_view(|_| language_tools::LspLogToolbarItemView::new());
- // toolbar.add_item(lsp_log_item, cx);
- // let syntax_tree_item = cx
- // .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new());
- // toolbar.add_item(syntax_tree_item, cx);
- // })
+ pane.toolbar().update(cx, |toolbar, cx| {
+ // todo!()
+ // let breadcrumbs = cx.add_view(|_| Breadcrumbs::new(workspace));
+ // toolbar.add_item(breadcrumbs, cx);
+ // let buffer_search_bar = cx.add_view(BufferSearchBar::new);
+ // toolbar.add_item(buffer_search_bar.clone(), cx);
+ // let quick_action_bar = cx.add_view(|_| {
+ // QuickActionBar::new(buffer_search_bar, workspace)
+ // });
+ // toolbar.add_item(quick_action_bar, cx);
+ // let diagnostic_editor_controls =
+ // cx.add_view(|_| diagnostics2::ToolbarControls::new());
+ // toolbar.add_item(diagnostic_editor_controls, cx);
+ // let project_search_bar = cx.add_view(|_| ProjectSearchBar::new());
+ // toolbar.add_item(project_search_bar, cx);
+ // let submit_feedback_button =
+ // cx.add_view(|_| SubmitFeedbackButton::new());
+ // toolbar.add_item(submit_feedback_button, cx);
+ // let feedback_info_text = cx.add_view(|_| FeedbackInfoText::new());
+ // toolbar.add_item(feedback_info_text, cx);
+ // let lsp_log_item =
+ // cx.add_view(|_| language_tools::LspLogToolbarItemView::new());
+ // toolbar.add_item(lsp_log_item, cx);
+ // let syntax_tree_item = cx
+ // .add_view(|_| language_tools::SyntaxTreeToolbarItemView::new());
+ // toolbar.add_item(syntax_tree_item, cx);
+ })
});
}
}