Detailed changes
@@ -5927,6 +5927,61 @@ dependencies = [
"util",
]

+[[package]]
+name = "project2"
+version = "0.1.0"
+dependencies = [
+ "aho-corasick",
+ "anyhow",
+ "async-trait",
+ "backtrace",
+ "client2",
+ "clock",
+ "collections",
+ "copilot",
+ "ctor",
+ "db2",
+ "env_logger 0.9.3",
+ "fs",
+ "fsevent",
+ "futures 0.3.28",
+ "fuzzy2",
+ "git",
+ "git2",
+ "globset",
+ "gpui2",
+ "ignore",
+ "itertools 0.10.5",
+ "language2",
+ "lazy_static",
+ "log",
+ "lsp2",
+ "node_runtime",
+ "parking_lot 0.11.2",
+ "postage",
+ "prettier",
+ "pretty_assertions",
+ "rand 0.8.5",
+ "regex",
+ "rpc",
+ "schemars",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "settings2",
+ "sha2 0.10.7",
+ "similar",
+ "smol",
+ "sum_tree",
+ "tempdir",
+ "terminal",
+ "text",
+ "thiserror",
+ "toml 0.5.11",
+ "unindent",
+ "util",
+]
+
[[package]]
name = "project_panel"
version = "0.1.0"
@@ -61,6 +61,7 @@ members = [
"crates/plugin_runtime",
"crates/prettier",
"crates/project",
+ "crates/project2",
"crates/project_panel",
"crates/project_symbols",
"crates/recent_projects",
@@ -17,13 +17,14 @@ use crate::{
};
use anyhow::{anyhow, Result};
use collections::{HashMap, HashSet, VecDeque};
-use futures::Future;
+use futures::{future::BoxFuture, Future};
use parking_lot::{Mutex, RwLock};
use slotmap::SlotMap;
use std::{
any::{type_name, Any, TypeId},
mem,
sync::{atomic::Ordering::SeqCst, Arc, Weak},
+ time::Duration,
};
use util::http::{self, HttpClient};

@@ -89,6 +90,7 @@ impl App {
event_listeners: SubscriberSet::new(),
release_listeners: SubscriberSet::new(),
global_observers: SubscriberSet::new(),
+ quit_observers: SubscriberSet::new(),
layout_id_buffer: Default::default(),
propagate_event: true,
})
@@ -155,11 +157,12 @@ impl App {
}
}

+type ActionBuilder = fn(json: Option<serde_json::Value>) -> anyhow::Result<Box<dyn Action>>;
+type FrameCallback = Box<dyn FnOnce(&mut WindowContext) + Send>;
type Handler = Box<dyn Fn(&mut AppContext) -> bool + Send + Sync + 'static>;
type Listener = Box<dyn Fn(&dyn Any, &mut AppContext) -> bool + Send + Sync + 'static>;
+type QuitHandler = Box<dyn Fn(&mut AppContext) -> BoxFuture<'static, ()> + Send + Sync + 'static>;
type ReleaseListener = Box<dyn Fn(&mut dyn Any, &mut AppContext) + Send + Sync + 'static>;
-type FrameCallback = Box<dyn FnOnce(&mut WindowContext) + Send>;
-type ActionBuilder = fn(json: Option<serde_json::Value>) -> anyhow::Result<Box<dyn Action>>;

pub struct AppContext {
this: Weak<Mutex<AppContext>>,
@@ -188,11 +191,33 @@ pub struct AppContext {
pub(crate) event_listeners: SubscriberSet<EntityId, Listener>,
pub(crate) release_listeners: SubscriberSet<EntityId, ReleaseListener>,
pub(crate) global_observers: SubscriberSet<TypeId, Listener>,
+ pub(crate) quit_observers: SubscriberSet<(), QuitHandler>,
pub(crate) layout_id_buffer: Vec<LayoutId>, // We recycle this memory across layout requests.
pub(crate) propagate_event: bool,
}

impl AppContext {
+ pub fn quit(&mut self) {
+ let mut futures = Vec::new();
+
+ self.quit_observers.clone().retain(&(), |observer| {
+ futures.push(observer(self));
+ true
+ });
+
+ self.windows.clear();
+ self.flush_effects();
+
+ let futures = futures::future::join_all(futures);
+ if self
+ .executor
+ .block_with_timeout(Duration::from_millis(100), futures)
+ .is_err()
+ {
+ log::error!("timed out waiting on app_will_quit");
+ }
+ }
+
pub fn app_metadata(&self) -> AppMetadata {
self.app_metadata.clone()
}
@@ -3,6 +3,7 @@ use crate::{
Subscription, Task, WeakHandle,
};
use derive_more::{Deref, DerefMut};
+use futures::FutureExt;
use std::{future::Future, marker::PhantomData};

#[derive(Deref, DerefMut)]
@@ -84,6 +85,28 @@ impl<'a, T: Send + Sync + 'static> ModelContext<'a, T> {
)
}

+ pub fn on_app_quit<Fut>(
+ &mut self,
+ on_quit: impl Fn(&mut T, &mut AppContext) -> Fut + Send + Sync + 'static,
+ ) -> Subscription
+ where
+ Fut: 'static + Future<Output = ()> + Send,
+ {
+ let handle = self.handle();
+ self.app.quit_observers.insert(
+ (),
+ Box::new(move |cx| {
+ let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok();
+ async move {
+ if let Some(future) = future {
+ future.await;
+ }
+ }
+ .boxed()
+ }),
+ )
+ }
+
pub fn observe_release<E: Send + Sync + 'static>(
&mut self,
handle: &Handle<E>,
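Together with AppContext::quit above, this gives entities a shutdown hook: quit() collects one future per observer and blocks on the joined set for at most 100ms. A minimal usage sketch, assuming a model that flushes state on exit; the Database type and its fields are made up for illustration, and Subscription::detach is assumed to behave as in gpui:

    struct Database {
        dirty: Vec<String>,
    }

    impl Database {
        fn register_quit_hook(&mut self, cx: &mut ModelContext<Self>) {
            cx.on_app_quit(|this: &mut Database, _cx: &mut AppContext| {
                // Grab the pending work synchronously, while we hold &mut self...
                let dirty = std::mem::take(&mut this.dirty);
                // ...and do the slow part in the returned future, which
                // AppContext::quit awaits (within its 100ms budget).
                async move {
                    for _entry in dirty {
                        // persist each entry here
                    }
                }
            })
            .detach(); // assumed: Subscription::detach, as in gpui
        }
    }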
@@ -152,14 +152,11 @@ impl Executor {
}
}

- pub fn block_with_timeout<F, R>(
+ pub fn block_with_timeout<R>(
&self,
duration: Duration,
- future: F,
- ) -> Result<R, impl Future<Output = R>>
- where
- F: Future<Output = R> + Send + Sync + 'static,
- {
+ future: impl Future<Output = R>,
+ ) -> Result<R, impl Future<Output = R>> {
let mut future = Box::pin(future);
let timeout = {
let future = &mut future;
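The relaxed signature drops the Send + Sync + 'static bounds, which is what lets AppContext::quit above block on its boxed observer futures (they are Send but not Sync). The contract is unchanged: Ok with the value if the future finishes within the duration, otherwise Err handing the still-pending future back to the caller. The call pattern, sketched:

    // Sketch only: `executor` and `work` stand in for an Executor and any future.
    match executor.block_with_timeout(Duration::from_millis(100), work) {
        Ok(value) => {
            // Finished within budget; use `value`.
        }
        Err(unfinished) => {
            // Timed out. The future comes back un-dropped, so the caller can
            // keep polling it later, or drop it to cancel (as quit() does).
            log::error!("timed out");
            drop(unfinished);
        }
    }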
@@ -832,7 +832,8 @@ impl Buffer {
if parse_again {
this.reparse(cx);
}
- });
+ })
+ .ok();
})
.detach();
}
@@ -876,7 +877,8 @@ impl Buffer {
let indent_sizes = indent_sizes.await;
this.update(&mut cx, |this, cx| {
this.apply_autoindents(indent_sizes, cx);
- });
+ })
+ .ok();
}));
}
}
@@ -234,6 +234,7 @@ impl SyntaxMap {
self.snapshot.interpolate(text);
}

+ #[allow(dead_code)] // todo!()
#[cfg(test)]
pub fn reparse(&mut self, language: Arc<Language>, text: &BufferSnapshot) {
self.snapshot
@@ -785,6 +786,7 @@ impl SyntaxSnapshot {
)
}

+ #[allow(dead_code)] // todo!()
#[cfg(test)]
pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
self.layers_for_range(0..buffer.len(), buffer).collect()
@@ -0,0 +1,84 @@
+[package]
+name = "project2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/project2.rs"
+doctest = false
+
+[features]
+test-support = [
+ "client2/test-support",
+ "db2/test-support",
+ "language2/test-support",
+ "settings2/test-support",
+ "text/test-support",
+ "prettier/test-support",
+]
+
+[dependencies]
+text = { path = "../text" }
+copilot = { path = "../copilot" }
+client2 = { path = "../client2" }
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+db2 = { path = "../db2" }
+fs = { path = "../fs" }
+fsevent = { path = "../fsevent" }
+fuzzy2 = { path = "../fuzzy2" }
+git = { path = "../git" }
+gpui2 = { path = "../gpui2" }
+language2 = { path = "../language2" }
+lsp2 = { path = "../lsp2" }
+node_runtime = { path = "../node_runtime" }
+prettier = { path = "../prettier" }
+rpc = { path = "../rpc" }
+settings2 = { path = "../settings2" }
+sum_tree = { path = "../sum_tree" }
+terminal = { path = "../terminal" }
+util = { path = "../util" }
+
+aho-corasick = "1.1"
+anyhow.workspace = true
+async-trait.workspace = true
+backtrace = "0.3"
+futures.workspace = true
+globset.workspace = true
+ignore = "0.4"
+lazy_static.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+rand.workspace = true
+regex.workspace = true
+schemars.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+serde_json.workspace = true
+sha2 = "0.10"
+similar = "1.3"
+smol.workspace = true
+thiserror.workspace = true
+toml.workspace = true
+itertools = "0.10"
+
+[dev-dependencies]
+ctor.workspace = true
+env_logger.workspace = true
+pretty_assertions.workspace = true
+client2 = { path = "../client2", features = ["test-support"] }
+collections = { path = "../collections", features = ["test-support"] }
+db2 = { path = "../db2", features = ["test-support"] }
+fs = { path = "../fs", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+language2 = { path = "../language2", features = ["test-support"] }
+lsp2 = { path = "../lsp2", features = ["test-support"] }
+settings2 = { path = "../settings2", features = ["test-support"] }
+prettier = { path = "../prettier", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+rpc = { path = "../rpc", features = ["test-support"] }
+git2.workspace = true
+tempdir.workspace = true
+unindent.workspace = true
@@ -0,0 +1,57 @@
+use ignore::gitignore::Gitignore;
+use std::{ffi::OsStr, path::Path, sync::Arc};
+
+pub enum IgnoreStack {
+ None,
+ Some {
+ abs_base_path: Arc<Path>,
+ ignore: Arc<Gitignore>,
+ parent: Arc<IgnoreStack>,
+ },
+ All,
+}
+
+impl IgnoreStack {
+ pub fn none() -> Arc<Self> {
+ Arc::new(Self::None)
+ }
+
+ pub fn all() -> Arc<Self> {
+ Arc::new(Self::All)
+ }
+
+ pub fn is_all(&self) -> bool {
+ matches!(self, IgnoreStack::All)
+ }
+
+ pub fn append(self: Arc<Self>, abs_base_path: Arc<Path>, ignore: Arc<Gitignore>) -> Arc<Self> {
+ match self.as_ref() {
+ IgnoreStack::All => self,
+ _ => Arc::new(Self::Some {
+ abs_base_path,
+ ignore,
+ parent: self,
+ }),
+ }
+ }
+
+ pub fn is_abs_path_ignored(&self, abs_path: &Path, is_dir: bool) -> bool {
+ if is_dir && abs_path.file_name() == Some(OsStr::new(".git")) {
+ return true;
+ }
+
+ match self {
+ Self::None => false,
+ Self::All => true,
+ Self::Some {
+ abs_base_path,
+ ignore,
+ parent: prev,
+ } => match ignore.matched(abs_path.strip_prefix(abs_base_path).unwrap(), is_dir) {
+ ignore::Match::None => prev.is_abs_path_ignored(abs_path, is_dir),
+ ignore::Match::Ignore(_) => true,
+ ignore::Match::Whitelist(_) => false,
+ },
+ }
+ }
+}
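A small usage sketch of the stack semantics (the root path and ignore rule are made up; GitignoreBuilder comes from the same `ignore` crate):

    use ignore::gitignore::GitignoreBuilder;
    use std::{path::Path, sync::Arc};

    let root: Arc<Path> = Arc::from(Path::new("/repo"));
    let mut builder = GitignoreBuilder::new(&root);
    builder.add_line(None, "target/").unwrap();
    let gitignore = Arc::new(builder.build().unwrap());

    // Deeper .gitignore files push onto the stack; lookups walk parents on no-match.
    let stack = IgnoreStack::none().append(root, gitignore);
    assert!(stack.is_abs_path_ignored(Path::new("/repo/target"), true));
    assert!(!stack.is_abs_path_ignored(Path::new("/repo/src/main.rs"), false));
    assert!(stack.is_abs_path_ignored(Path::new("/repo/.git"), true)); // .git is always ignored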
@@ -0,0 +1,2352 @@
+use crate::{
+ DocumentHighlight, Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel,
+ InlayHintLabelPart, InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink,
+ MarkupContent, Project, ProjectTransaction, ResolveState,
+};
+use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
+use client2::proto::{self, PeerId};
+use futures::future;
+use gpui2::{AppContext, AsyncAppContext, Handle};
+use language2::{
+ language_settings::{language_settings, InlayHintKind},
+ point_from_lsp, point_to_lsp,
+ proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
+ range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CharKind,
+ CodeAction, Completion, OffsetRangeExt, PointUtf16, ToOffset, ToPointUtf16, Transaction,
+ Unclipped,
+};
+use lsp2::{
+ CompletionListItemDefaultsEditRange, DocumentHighlightKind, LanguageServer, LanguageServerId,
+ OneOf, ServerCapabilities,
+};
+use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
+use text::LineEnding;
+
+pub fn lsp_formatting_options(tab_size: u32) -> lsp2::FormattingOptions {
+ lsp2::FormattingOptions {
+ tab_size,
+ insert_spaces: true,
+ insert_final_newline: Some(true),
+ ..lsp2::FormattingOptions::default()
+ }
+}
+
+#[async_trait(?Send)]
+pub(crate) trait LspCommand: 'static + Sized {
+ type Response: 'static + Default + Send;
+ type LspRequest: 'static + Send + lsp2::request::Request;
+ type ProtoRequest: 'static + Send + proto::RequestMessage;
+
+ fn check_capabilities(&self, _: &lsp2::ServerCapabilities) -> bool {
+ true
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ buffer: &Buffer,
+ language_server: &Arc<LanguageServer>,
+ cx: &AppContext,
+ ) -> <Self::LspRequest as lsp2::request::Request>::Params;
+
+ async fn response_from_lsp(
+ self,
+ message: <Self::LspRequest as lsp2::request::Request>::Result,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Self::Response>;
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest;
+
+ async fn from_proto(
+ message: Self::ProtoRequest,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ cx: AsyncAppContext,
+ ) -> Result<Self>;
+
+ fn response_to_proto(
+ response: Self::Response,
+ project: &mut Project,
+ peer_id: PeerId,
+ buffer_version: &clock::Global,
+ cx: &mut AppContext,
+ ) -> <Self::ProtoRequest as proto::RequestMessage>::Response;
+
+ async fn response_from_proto(
+ self,
+ message: <Self::ProtoRequest as proto::RequestMessage>::Response,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ cx: AsyncAppContext,
+ ) -> Result<Self::Response>;
+
+ fn buffer_id_from_proto(message: &Self::ProtoRequest) -> u64;
+}
+
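Each request type below implements this trait twice over: a local path (to_lsp / response_from_lsp) and a collab proxy path (to_proto / from_proto / response_to_proto / response_from_proto). Roughly how the local path consumes it, as a sketch only; the real dispatch lives in Project, and server.capabilities() / server.request() are the lsp2 client APIs as assumed here. The capability check falling back to a default response is why Response: Default is part of the trait bound:

    async fn run_locally<C: LspCommand>(
        command: C,
        path: &Path,
        project: Handle<Project>,
        buffer: Handle<Buffer>,
        server: Arc<LanguageServer>,
        cx: AsyncAppContext,
    ) -> Result<C::Response> {
        // Skip servers that don't advertise support for this request.
        if !command.check_capabilities(server.capabilities()) {
            return Ok(C::Response::default());
        }
        let params = buffer.read_with(&cx, |buffer, cx| command.to_lsp(path, buffer, &server, cx));
        let result = server.request::<C::LspRequest>(params).await?;
        command
            .response_from_lsp(result, project, buffer, server.server_id(), cx)
            .await
    }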
+pub(crate) struct PrepareRename {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct PerformRename {
+ pub position: PointUtf16,
+ pub new_name: String,
+ pub push_to_history: bool,
+}
+
+pub(crate) struct GetDefinition {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct GetTypeDefinition {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct GetReferences {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct GetDocumentHighlights {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct GetHover {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct GetCompletions {
+ pub position: PointUtf16,
+}
+
+pub(crate) struct GetCodeActions {
+ pub range: Range<Anchor>,
+}
+
+pub(crate) struct OnTypeFormatting {
+ pub position: PointUtf16,
+ pub trigger: String,
+ pub options: FormattingOptions,
+ pub push_to_history: bool,
+}
+
+pub(crate) struct InlayHints {
+ pub range: Range<Anchor>,
+}
+
+pub(crate) struct FormattingOptions {
+ tab_size: u32,
+}
+
+impl From<lsp2::FormattingOptions> for FormattingOptions {
+ fn from(value: lsp2::FormattingOptions) -> Self {
+ Self {
+ tab_size: value.tab_size,
+ }
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for PrepareRename {
+ type Response = Option<Range<Anchor>>;
+ type LspRequest = lsp2::request::PrepareRenameRequest;
+ type ProtoRequest = proto::PrepareRename;
+
+ fn check_capabilities(&self, capabilities: &ServerCapabilities) -> bool {
+ if let Some(lsp2::OneOf::Right(rename)) = &capabilities.rename_provider {
+ rename.prepare_provider == Some(true)
+ } else {
+ false
+ }
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::TextDocumentPositionParams {
+ lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp2::PrepareRenameResponse>,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ _: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Option<Range<Anchor>>> {
+ buffer.read_with(&cx, |buffer, _| {
+ if let Some(
+ lsp2::PrepareRenameResponse::Range(range)
+ | lsp2::PrepareRenameResponse::RangeWithPlaceholder { range, .. },
+ ) = message
+ {
+ let Range { start, end } = range_from_lsp(range);
+ if buffer.clip_point_utf16(start, Bias::Left) == start.0
+ && buffer.clip_point_utf16(end, Bias::Left) == end.0
+ {
+ return Ok(Some(buffer.anchor_after(start)..buffer.anchor_before(end)));
+ }
+ }
+ Ok(None)
+ })
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PrepareRename {
+ proto::PrepareRename {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::PrepareRename,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ })
+ }
+
+ fn response_to_proto(
+ range: Option<Range<Anchor>>,
+ _: &mut Project,
+ _: PeerId,
+ buffer_version: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::PrepareRenameResponse {
+ proto::PrepareRenameResponse {
+ can_rename: range.is_some(),
+ start: range
+ .as_ref()
+ .map(|range| language2::proto::serialize_anchor(&range.start)),
+ end: range
+ .as_ref()
+ .map(|range| language2::proto::serialize_anchor(&range.end)),
+ version: serialize_version(buffer_version),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::PrepareRenameResponse,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Option<Range<Anchor>>> {
+ if message.can_rename {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ let start = message.start.and_then(deserialize_anchor);
+ let end = message.end.and_then(deserialize_anchor);
+ Ok(start.zip(end).map(|(start, end)| start..end))
+ } else {
+ Ok(None)
+ }
+ }
+
+ fn buffer_id_from_proto(message: &proto::PrepareRename) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for PerformRename {
+ type Response = ProjectTransaction;
+ type LspRequest = lsp2::request::Rename;
+ type ProtoRequest = proto::PerformRename;
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::RenameParams {
+ lsp2::RenameParams {
+ text_document_position: lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ },
+ new_name: self.new_name.clone(),
+ work_done_progress_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp2::WorkspaceEdit>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ mut cx: AsyncAppContext,
+ ) -> Result<ProjectTransaction> {
+ if let Some(edit) = message {
+ let (lsp_adapter, lsp_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
+ Project::deserialize_workspace_edit(
+ project,
+ edit,
+ self.push_to_history,
+ lsp_adapter,
+ lsp_server,
+ &mut cx,
+ )
+ .await
+ } else {
+ Ok(ProjectTransaction::default())
+ }
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::PerformRename {
+ proto::PerformRename {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ new_name: self.new_name.clone(),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::PerformRename,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ new_name: message.new_name,
+ push_to_history: false,
+ })
+ }
+
+ fn response_to_proto(
+ response: ProjectTransaction,
+ project: &mut Project,
+ peer_id: PeerId,
+ _: &clock::Global,
+ cx: &mut AppContext,
+ ) -> proto::PerformRenameResponse {
+ let transaction = project.serialize_project_transaction_for_peer(response, peer_id, cx);
+ proto::PerformRenameResponse {
+ transaction: Some(transaction),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::PerformRenameResponse,
+ project: Handle<Project>,
+ _: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<ProjectTransaction> {
+ let message = message
+ .transaction
+ .ok_or_else(|| anyhow!("missing transaction"))?;
+ project
+ .update(&mut cx, |project, cx| {
+ project.deserialize_project_transaction(message, self.push_to_history, cx)
+ })
+ .await
+ }
+
+ fn buffer_id_from_proto(message: &proto::PerformRename) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetDefinition {
+ type Response = Vec<LocationLink>;
+ type LspRequest = lsp2::request::GotoDefinition;
+ type ProtoRequest = proto::GetDefinition;
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::GotoDefinitionParams {
+ lsp2::GotoDefinitionParams {
+ text_document_position_params: lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ },
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp2::GotoDefinitionResponse>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Vec<LocationLink>> {
+ location_links_from_lsp(message, project, buffer, server_id, cx).await
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDefinition {
+ proto::GetDefinition {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::GetDefinition,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ })
+ }
+
+ fn response_to_proto(
+ response: Vec<LocationLink>,
+ project: &mut Project,
+ peer_id: PeerId,
+ _: &clock::Global,
+ cx: &mut AppContext,
+ ) -> proto::GetDefinitionResponse {
+ let links = location_links_to_proto(response, project, peer_id, cx);
+ proto::GetDefinitionResponse { links }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetDefinitionResponse,
+ project: Handle<Project>,
+ _: Handle<Buffer>,
+ cx: AsyncAppContext,
+ ) -> Result<Vec<LocationLink>> {
+ location_links_from_proto(message.links, project, cx).await
+ }
+
+ fn buffer_id_from_proto(message: &proto::GetDefinition) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetTypeDefinition {
+ type Response = Vec<LocationLink>;
+ type LspRequest = lsp2::request::GotoTypeDefinition;
+ type ProtoRequest = proto::GetTypeDefinition;
+
+ fn check_capabilities(&self, capabilities: &ServerCapabilities) -> bool {
+ match &capabilities.type_definition_provider {
+ None => false,
+ Some(lsp2::TypeDefinitionProviderCapability::Simple(false)) => false,
+ _ => true,
+ }
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::GotoTypeDefinitionParams {
+ lsp2::GotoTypeDefinitionParams {
+ text_document_position_params: lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ },
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp2::GotoTypeDefinitionResponse>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Vec<LocationLink>> {
+ location_links_from_lsp(message, project, buffer, server_id, cx).await
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetTypeDefinition {
+ proto::GetTypeDefinition {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::GetTypeDefinition,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ })
+ }
+
+ fn response_to_proto(
+ response: Vec<LocationLink>,
+ project: &mut Project,
+ peer_id: PeerId,
+ _: &clock::Global,
+ cx: &mut AppContext,
+ ) -> proto::GetTypeDefinitionResponse {
+ let links = location_links_to_proto(response, project, peer_id, cx);
+ proto::GetTypeDefinitionResponse { links }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetTypeDefinitionResponse,
+ project: Handle<Project>,
+ _: Handle<Buffer>,
+ cx: AsyncAppContext,
+ ) -> Result<Vec<LocationLink>> {
+ location_links_from_proto(message.links, project, cx).await
+ }
+
+ fn buffer_id_from_proto(message: &proto::GetTypeDefinition) -> u64 {
+ message.buffer_id
+ }
+}
+
+fn language_server_for_buffer(
+ project: &Handle<Project>,
+ buffer: &Handle<Buffer>,
+ server_id: LanguageServerId,
+ cx: &mut AsyncAppContext,
+) -> Result<(Arc<CachedLspAdapter>, Arc<LanguageServer>)> {
+ project
+ .read_with(cx, |project, cx| {
+ project
+ .language_server_for_buffer(buffer.read(cx), server_id, cx)
+ .map(|(adapter, server)| (adapter.clone(), server.clone()))
+ })
+ .ok_or_else(|| anyhow!("no language server found for buffer"))
+}
+
+async fn location_links_from_proto(
+ proto_links: Vec<proto::LocationLink>,
+ project: Handle<Project>,
+ mut cx: AsyncAppContext,
+) -> Result<Vec<LocationLink>> {
+ let mut links = Vec::new();
+
+ for link in proto_links {
+ let origin = match link.origin {
+ Some(origin) => {
+ let buffer = project
+ .update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(origin.buffer_id, cx)
+ })
+ .await?;
+ let start = origin
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing origin start"))?;
+ let end = origin
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing origin end"))?;
+ buffer
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+ .await?;
+ Some(Location {
+ buffer,
+ range: start..end,
+ })
+ }
+ None => None,
+ };
+
+ let target = link.target.ok_or_else(|| anyhow!("missing target"))?;
+ let buffer = project
+ .update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(target.buffer_id, cx)
+ })
+ .await?;
+ let start = target
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target start"))?;
+ let end = target
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target end"))?;
+ buffer
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+ .await?;
+ let target = Location {
+ buffer,
+ range: start..end,
+ };
+
+ links.push(LocationLink { origin, target })
+ }
+
+ Ok(links)
+}
+
+async fn location_links_from_lsp(
+ message: Option<lsp2::GotoDefinitionResponse>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ mut cx: AsyncAppContext,
+) -> Result<Vec<LocationLink>> {
+ let message = match message {
+ Some(message) => message,
+ None => return Ok(Vec::new()),
+ };
+
+ let mut unresolved_links = Vec::new();
+ match message {
+ lsp2::GotoDefinitionResponse::Scalar(loc) => {
+ unresolved_links.push((None, loc.uri, loc.range));
+ }
+
+ lsp2::GotoDefinitionResponse::Array(locs) => {
+ unresolved_links.extend(locs.into_iter().map(|l| (None, l.uri, l.range)));
+ }
+
+ lsp2::GotoDefinitionResponse::Link(links) => {
+ unresolved_links.extend(links.into_iter().map(|l| {
+ (
+ l.origin_selection_range,
+ l.target_uri,
+ l.target_selection_range,
+ )
+ }));
+ }
+ }
+
+ let (lsp_adapter, language_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
+ let mut definitions = Vec::new();
+ for (origin_range, target_uri, target_range) in unresolved_links {
+ let target_buffer_handle = project
+ .update(&mut cx, |this, cx| {
+ this.open_local_buffer_via_lsp(
+ target_uri,
+ language_server.server_id(),
+ lsp_adapter.name.clone(),
+ cx,
+ )
+ })
+ .await?;
+
+ cx.read(|cx| {
+ let origin_location = origin_range.map(|origin_range| {
+ let origin_buffer = buffer.read(cx);
+ let origin_start =
+ origin_buffer.clip_point_utf16(point_from_lsp(origin_range.start), Bias::Left);
+ let origin_end =
+ origin_buffer.clip_point_utf16(point_from_lsp(origin_range.end), Bias::Left);
+ Location {
+ buffer: buffer.clone(),
+ range: origin_buffer.anchor_after(origin_start)
+ ..origin_buffer.anchor_before(origin_end),
+ }
+ });
+
+ let target_buffer = target_buffer_handle.read(cx);
+ let target_start =
+ target_buffer.clip_point_utf16(point_from_lsp(target_range.start), Bias::Left);
+ let target_end =
+ target_buffer.clip_point_utf16(point_from_lsp(target_range.end), Bias::Left);
+ let target_location = Location {
+ buffer: target_buffer_handle,
+ range: target_buffer.anchor_after(target_start)
+ ..target_buffer.anchor_before(target_end),
+ };
+
+ definitions.push(LocationLink {
+ origin: origin_location,
+ target: target_location,
+ })
+ });
+ }
+ Ok(definitions)
+}
+
+fn location_links_to_proto(
+ links: Vec<LocationLink>,
+ project: &mut Project,
+ peer_id: PeerId,
+ cx: &mut AppContext,
+) -> Vec<proto::LocationLink> {
+ links
+ .into_iter()
+ .map(|definition| {
+ let origin = definition.origin.map(|origin| {
+ let buffer_id = project.create_buffer_for_peer(&origin.buffer, peer_id, cx);
+ proto::Location {
+ start: Some(serialize_anchor(&origin.range.start)),
+ end: Some(serialize_anchor(&origin.range.end)),
+ buffer_id,
+ }
+ });
+
+ let buffer_id = project.create_buffer_for_peer(&definition.target.buffer, peer_id, cx);
+ let target = proto::Location {
+ start: Some(serialize_anchor(&definition.target.range.start)),
+ end: Some(serialize_anchor(&definition.target.range.end)),
+ buffer_id,
+ };
+
+ proto::LocationLink {
+ origin,
+ target: Some(target),
+ }
+ })
+ .collect()
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetReferences {
+ type Response = Vec<Location>;
+ type LspRequest = lsp2::request::References;
+ type ProtoRequest = proto::GetReferences;
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::ReferenceParams {
+ lsp2::ReferenceParams {
+ text_document_position: lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ },
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ context: lsp2::ReferenceContext {
+ include_declaration: true,
+ },
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ locations: Option<Vec<lsp2::Location>>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ mut cx: AsyncAppContext,
+ ) -> Result<Vec<Location>> {
+ let mut references = Vec::new();
+ let (lsp_adapter, language_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
+
+ if let Some(locations) = locations {
+ for lsp_location in locations {
+ let target_buffer_handle = project
+ .update(&mut cx, |this, cx| {
+ this.open_local_buffer_via_lsp(
+ lsp_location.uri,
+ language_server.server_id(),
+ lsp_adapter.name.clone(),
+ cx,
+ )
+ })
+ .await?;
+
+ cx.read(|cx| {
+ let target_buffer = target_buffer_handle.read(cx);
+ let target_start = target_buffer
+ .clip_point_utf16(point_from_lsp(lsp_location.range.start), Bias::Left);
+ let target_end = target_buffer
+ .clip_point_utf16(point_from_lsp(lsp_location.range.end), Bias::Left);
+ references.push(Location {
+ buffer: target_buffer_handle,
+ range: target_buffer.anchor_after(target_start)
+ ..target_buffer.anchor_before(target_end),
+ });
+ });
+ }
+ }
+
+ Ok(references)
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetReferences {
+ proto::GetReferences {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::GetReferences,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ })
+ }
+
+ fn response_to_proto(
+ response: Vec<Location>,
+ project: &mut Project,
+ peer_id: PeerId,
+ _: &clock::Global,
+ cx: &mut AppContext,
+ ) -> proto::GetReferencesResponse {
+ let locations = response
+ .into_iter()
+ .map(|definition| {
+ let buffer_id = project.create_buffer_for_peer(&definition.buffer, peer_id, cx);
+ proto::Location {
+ start: Some(serialize_anchor(&definition.range.start)),
+ end: Some(serialize_anchor(&definition.range.end)),
+ buffer_id,
+ }
+ })
+ .collect();
+ proto::GetReferencesResponse { locations }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetReferencesResponse,
+ project: Handle<Project>,
+ _: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Vec<Location>> {
+ let mut locations = Vec::new();
+ for location in message.locations {
+ let target_buffer = project
+ .update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(location.buffer_id, cx)
+ })
+ .await?;
+ let start = location
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target start"))?;
+ let end = location
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target end"))?;
+ target_buffer
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+ .await?;
+ locations.push(Location {
+ buffer: target_buffer,
+ range: start..end,
+ })
+ }
+ Ok(locations)
+ }
+
+ fn buffer_id_from_proto(message: &proto::GetReferences) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetDocumentHighlights {
+ type Response = Vec<DocumentHighlight>;
+ type LspRequest = lsp2::request::DocumentHighlightRequest;
+ type ProtoRequest = proto::GetDocumentHighlights;
+
+ fn check_capabilities(&self, capabilities: &ServerCapabilities) -> bool {
+ capabilities.document_highlight_provider.is_some()
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::DocumentHighlightParams {
+ lsp2::DocumentHighlightParams {
+ text_document_position_params: lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ },
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ lsp_highlights: Option<Vec<lsp2::DocumentHighlight>>,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ _: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Vec<DocumentHighlight>> {
+ buffer.read_with(&cx, |buffer, _| {
+ let mut lsp_highlights = lsp_highlights.unwrap_or_default();
+ lsp_highlights.sort_unstable_by_key(|h| (h.range.start, Reverse(h.range.end)));
+ Ok(lsp_highlights
+ .into_iter()
+ .map(|lsp_highlight| {
+ let start = buffer
+ .clip_point_utf16(point_from_lsp(lsp_highlight.range.start), Bias::Left);
+ let end = buffer
+ .clip_point_utf16(point_from_lsp(lsp_highlight.range.end), Bias::Left);
+ DocumentHighlight {
+ range: buffer.anchor_after(start)..buffer.anchor_before(end),
+ kind: lsp_highlight
+ .kind
+ .unwrap_or(lsp2::DocumentHighlightKind::READ),
+ }
+ })
+ .collect())
+ })
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetDocumentHighlights {
+ proto::GetDocumentHighlights {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::GetDocumentHighlights,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ })
+ }
+
+ fn response_to_proto(
+ response: Vec<DocumentHighlight>,
+ _: &mut Project,
+ _: PeerId,
+ _: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::GetDocumentHighlightsResponse {
+ let highlights = response
+ .into_iter()
+ .map(|highlight| proto::DocumentHighlight {
+ start: Some(serialize_anchor(&highlight.range.start)),
+ end: Some(serialize_anchor(&highlight.range.end)),
+ kind: match highlight.kind {
+ DocumentHighlightKind::TEXT => proto::document_highlight::Kind::Text.into(),
+ DocumentHighlightKind::WRITE => proto::document_highlight::Kind::Write.into(),
+ DocumentHighlightKind::READ => proto::document_highlight::Kind::Read.into(),
+ _ => proto::document_highlight::Kind::Text.into(),
+ },
+ })
+ .collect();
+ proto::GetDocumentHighlightsResponse { highlights }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetDocumentHighlightsResponse,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Vec<DocumentHighlight>> {
+ let mut highlights = Vec::new();
+ for highlight in message.highlights {
+ let start = highlight
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target start"))?;
+ let end = highlight
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target end"))?;
+ buffer
+ .update(&mut cx, |buffer, _| buffer.wait_for_anchors([start, end]))
+ .await?;
+ let kind = match proto::document_highlight::Kind::from_i32(highlight.kind) {
+ Some(proto::document_highlight::Kind::Text) => DocumentHighlightKind::TEXT,
+ Some(proto::document_highlight::Kind::Read) => DocumentHighlightKind::READ,
+ Some(proto::document_highlight::Kind::Write) => DocumentHighlightKind::WRITE,
+ None => DocumentHighlightKind::TEXT,
+ };
+ highlights.push(DocumentHighlight {
+ range: start..end,
+ kind,
+ });
+ }
+ Ok(highlights)
+ }
+
+ fn buffer_id_from_proto(message: &proto::GetDocumentHighlights) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetHover {
+ type Response = Option<Hover>;
+ type LspRequest = lsp2::request::HoverRequest;
+ type ProtoRequest = proto::GetHover;
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::HoverParams {
+ lsp2::HoverParams {
+ text_document_position_params: lsp2::TextDocumentPositionParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ position: point_to_lsp(self.position),
+ },
+ work_done_progress_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<lsp2::Hover>,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ _: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Self::Response> {
+ Ok(message.and_then(|hover| {
+ let (language, range) = cx.read(|cx| {
+ let buffer = buffer.read(cx);
+ (
+ buffer.language().cloned(),
+ hover.range.map(|range| {
+ let token_start =
+ buffer.clip_point_utf16(point_from_lsp(range.start), Bias::Left);
+ let token_end =
+ buffer.clip_point_utf16(point_from_lsp(range.end), Bias::Left);
+ buffer.anchor_after(token_start)..buffer.anchor_before(token_end)
+ }),
+ )
+ });
+
+ fn hover_blocks_from_marked_string(
+ marked_string: lsp2::MarkedString,
+ ) -> Option<HoverBlock> {
+ let block = match marked_string {
+ lsp2::MarkedString::String(content) => HoverBlock {
+ text: content,
+ kind: HoverBlockKind::Markdown,
+ },
+ lsp2::MarkedString::LanguageString(lsp2::LanguageString {
+ language,
+ value,
+ }) => HoverBlock {
+ text: value,
+ kind: HoverBlockKind::Code { language },
+ },
+ };
+ if block.text.is_empty() {
+ None
+ } else {
+ Some(block)
+ }
+ }
+
+ let contents = cx.read(|_| match hover.contents {
+ lsp2::HoverContents::Scalar(marked_string) => {
+ hover_blocks_from_marked_string(marked_string)
+ .into_iter()
+ .collect()
+ }
+ lsp2::HoverContents::Array(marked_strings) => marked_strings
+ .into_iter()
+ .filter_map(hover_blocks_from_marked_string)
+ .collect(),
+ lsp2::HoverContents::Markup(markup_content) => vec![HoverBlock {
+ text: markup_content.value,
+ kind: if markup_content.kind == lsp2::MarkupKind::Markdown {
+ HoverBlockKind::Markdown
+ } else {
+ HoverBlockKind::PlainText
+ },
+ }],
+ });
+
+ Some(Hover {
+ contents,
+ range,
+ language,
+ })
+ }))
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> Self::ProtoRequest {
+ proto::GetHover {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: Self::ProtoRequest,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ })
+ }
+
+ fn response_to_proto(
+ response: Self::Response,
+ _: &mut Project,
+ _: PeerId,
+ _: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::GetHoverResponse {
+ if let Some(response) = response {
+ let (start, end) = if let Some(range) = response.range {
+ (
+ Some(language2::proto::serialize_anchor(&range.start)),
+ Some(language2::proto::serialize_anchor(&range.end)),
+ )
+ } else {
+ (None, None)
+ };
+
+ let contents = response
+ .contents
+ .into_iter()
+ .map(|block| proto::HoverBlock {
+ text: block.text,
+ is_markdown: block.kind == HoverBlockKind::Markdown,
+ language: if let HoverBlockKind::Code { language } = block.kind {
+ Some(language)
+ } else {
+ None
+ },
+ })
+ .collect();
+
+ proto::GetHoverResponse {
+ start,
+ end,
+ contents,
+ }
+ } else {
+ proto::GetHoverResponse {
+ start: None,
+ end: None,
+ contents: Vec::new(),
+ }
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetHoverResponse,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ cx: AsyncAppContext,
+ ) -> Result<Self::Response> {
+ let contents: Vec<_> = message
+ .contents
+ .into_iter()
+ .map(|block| HoverBlock {
+ text: block.text,
+ kind: if let Some(language) = block.language {
+ HoverBlockKind::Code { language }
+ } else if block.is_markdown {
+ HoverBlockKind::Markdown
+ } else {
+ HoverBlockKind::PlainText
+ },
+ })
+ .collect();
+ if contents.is_empty() {
+ return Ok(None);
+ }
+
+ let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
+ let range = if let (Some(start), Some(end)) = (message.start, message.end) {
+ language2::proto::deserialize_anchor(start)
+ .and_then(|start| language2::proto::deserialize_anchor(end).map(|end| start..end))
+ } else {
+ None
+ };
+
+ Ok(Some(Hover {
+ contents,
+ range,
+ language,
+ }))
+ }
+
+ fn buffer_id_from_proto(message: &Self::ProtoRequest) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for GetCompletions {
+ type Response = Vec<Completion>;
+ type LspRequest = lsp2::request::Completion;
+ type ProtoRequest = proto::GetCompletions;
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::CompletionParams {
+ lsp2::CompletionParams {
+ text_document_position: lsp2::TextDocumentPositionParams::new(
+ lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path(path).unwrap()),
+ point_to_lsp(self.position),
+ ),
+ context: Default::default(),
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ completions: Option<lsp2::CompletionResponse>,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ cx: AsyncAppContext,
+ ) -> Result<Vec<Completion>> {
+ let mut response_list = None;
+ let completions = if let Some(completions) = completions {
+ match completions {
+ lsp2::CompletionResponse::Array(completions) => completions,
+
+ lsp2::CompletionResponse::List(mut list) => {
+ let items = std::mem::take(&mut list.items);
+ response_list = Some(list);
+ items
+ }
+ }
+ } else {
+ Default::default()
+ };
+
+ let completions = buffer.read_with(&cx, |buffer, _| {
+ let language = buffer.language().cloned();
+ let snapshot = buffer.snapshot();
+ let clipped_position = buffer.clip_point_utf16(Unclipped(self.position), Bias::Left);
+
+ let mut range_for_token = None;
+ completions
+ .into_iter()
+ .filter_map(move |mut lsp_completion| {
+ let (old_range, mut new_text) = match lsp_completion.text_edit.as_ref() {
+ // If the language server provides a range to overwrite, then
+ // check that the range is valid.
+ Some(lsp2::CompletionTextEdit::Edit(edit)) => {
+ let range = range_from_lsp(edit.range);
+ let start = snapshot.clip_point_utf16(range.start, Bias::Left);
+ let end = snapshot.clip_point_utf16(range.end, Bias::Left);
+ if start != range.start.0 || end != range.end.0 {
+ log::info!("completion out of expected range");
+ return None;
+ }
+ (
+ snapshot.anchor_before(start)..snapshot.anchor_after(end),
+ edit.new_text.clone(),
+ )
+ }
+
+ // If the language server does not provide a range, then infer
+ // the range based on the syntax tree.
+ None => {
+ if self.position != clipped_position {
+ log::info!("completion out of expected range");
+ return None;
+ }
+
+ let default_edit_range = response_list
+ .as_ref()
+ .and_then(|list| list.item_defaults.as_ref())
+ .and_then(|defaults| defaults.edit_range.as_ref())
+ .and_then(|range| match range {
+ CompletionListItemDefaultsEditRange::Range(r) => Some(r),
+ _ => None,
+ });
+
+ let range = if let Some(range) = default_edit_range {
+ let range = range_from_lsp(range.clone());
+ let start = snapshot.clip_point_utf16(range.start, Bias::Left);
+ let end = snapshot.clip_point_utf16(range.end, Bias::Left);
+ if start != range.start.0 || end != range.end.0 {
+ log::info!("completion out of expected range");
+ return None;
+ }
+
+ snapshot.anchor_before(start)..snapshot.anchor_after(end)
+ } else {
+ range_for_token
+ .get_or_insert_with(|| {
+ let offset = self.position.to_offset(&snapshot);
+ let (range, kind) = snapshot.surrounding_word(offset);
+ let range = if kind == Some(CharKind::Word) {
+ range
+ } else {
+ offset..offset
+ };
+
+ snapshot.anchor_before(range.start)
+ ..snapshot.anchor_after(range.end)
+ })
+ .clone()
+ };
+
+ let text = lsp_completion
+ .insert_text
+ .as_ref()
+ .unwrap_or(&lsp_completion.label)
+ .clone();
+ (range, text)
+ }
+
+ Some(lsp2::CompletionTextEdit::InsertAndReplace(_)) => {
+ log::info!("unsupported insert/replace completion");
+ return None;
+ }
+ };
+
+ let language = language.clone();
+ LineEnding::normalize(&mut new_text);
+ Some(async move {
+ let mut label = None;
+ if let Some(language) = language {
+ language.process_completion(&mut lsp_completion).await;
+ label = language.label_for_completion(&lsp_completion).await;
+ }
+ Completion {
+ old_range,
+ new_text,
+ label: label.unwrap_or_else(|| {
+ language2::CodeLabel::plain(
+ lsp_completion.label.clone(),
+ lsp_completion.filter_text.as_deref(),
+ )
+ }),
+ server_id,
+ lsp_completion,
+ }
+ })
+ })
+ });
+
+ Ok(future::join_all(completions).await)
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCompletions {
+ let anchor = buffer.anchor_after(self.position);
+ proto::GetCompletions {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(&anchor)),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::GetCompletions,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let version = deserialize_version(&message.version);
+ buffer
+ .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
+ .await?;
+ let position = message
+ .position
+ .and_then(language2::proto::deserialize_anchor)
+ .map(|p| {
+ buffer.read_with(&cx, |buffer, _| {
+ buffer.clip_point_utf16(Unclipped(p.to_point_utf16(buffer)), Bias::Left)
+ })
+ })
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ Ok(Self { position })
+ }
+
+ fn response_to_proto(
+ completions: Vec<Completion>,
+ _: &mut Project,
+ _: PeerId,
+ buffer_version: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::GetCompletionsResponse {
+ proto::GetCompletionsResponse {
+ completions: completions
+ .iter()
+ .map(language2::proto::serialize_completion)
+ .collect(),
+ version: serialize_version(&buffer_version),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetCompletionsResponse,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Vec<Completion>> {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ let language = buffer.read_with(&cx, |buffer, _| buffer.language().cloned());
+ let completions = message.completions.into_iter().map(|completion| {
+ language2::proto::deserialize_completion(completion, language.clone())
+ });
+ future::try_join_all(completions).await
+ }
+
+ fn buffer_id_from_proto(message: &proto::GetCompletions) -> u64 {
+ message.buffer_id
+ }
+}
+
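The subtle part of GetCompletions::response_from_lsp above is choosing the range that a completion replaces. A condensed restatement of that precedence as a hypothetical helper (the real logic is inlined above and also validates each range against the snapshot before trusting it):

    // Precedence: per-item textEdit, then the LSP 3.17 list-wide
    // itemDefaults.editRange, then the word (or empty range) at the cursor.
    fn replace_range(
        item_edit: Option<Range<usize>>,
        list_default: Option<Range<usize>>,
        word_at_cursor: Range<usize>,
        is_valid: impl Fn(&Range<usize>) -> bool,
    ) -> Option<Range<usize>> {
        match (item_edit, list_default) {
            // A stale explicit range drops the completion entirely.
            (Some(range), _) | (None, Some(range)) => is_valid(&range).then_some(range),
            (None, None) => Some(word_at_cursor),
        }
    }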
+#[async_trait(?Send)]
+impl LspCommand for GetCodeActions {
+ type Response = Vec<CodeAction>;
+ type LspRequest = lsp2::request::CodeActionRequest;
+ type ProtoRequest = proto::GetCodeActions;
+
+ fn check_capabilities(&self, capabilities: &ServerCapabilities) -> bool {
+ match &capabilities.code_action_provider {
+ None => false,
+ Some(lsp2::CodeActionProviderCapability::Simple(false)) => false,
+ _ => true,
+ }
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ buffer: &Buffer,
+ language_server: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::CodeActionParams {
+ let relevant_diagnostics = buffer
+ .snapshot()
+ .diagnostics_in_range::<_, usize>(self.range.clone(), false)
+ .map(|entry| entry.to_lsp_diagnostic_stub())
+ .collect();
+ lsp2::CodeActionParams {
+ text_document: lsp2::TextDocumentIdentifier::new(
+ lsp2::Url::from_file_path(path).unwrap(),
+ ),
+ range: range_to_lsp(self.range.to_point_utf16(buffer)),
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ context: lsp2::CodeActionContext {
+ diagnostics: relevant_diagnostics,
+ only: language_server.code_action_kinds(),
+ ..lsp2::CodeActionContext::default()
+ },
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ actions: Option<lsp2::CodeActionResponse>,
+ _: Handle<Project>,
+ _: Handle<Buffer>,
+ server_id: LanguageServerId,
+ _: AsyncAppContext,
+ ) -> Result<Vec<CodeAction>> {
+ Ok(actions
+ .unwrap_or_default()
+ .into_iter()
+ .filter_map(|entry| {
+ if let lsp2::CodeActionOrCommand::CodeAction(lsp_action) = entry {
+ Some(CodeAction {
+ server_id,
+ range: self.range.clone(),
+ lsp_action,
+ })
+ } else {
+ None
+ }
+ })
+ .collect())
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::GetCodeActions {
+ proto::GetCodeActions {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ start: Some(language2::proto::serialize_anchor(&self.range.start)),
+ end: Some(language2::proto::serialize_anchor(&self.range.end)),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::GetCodeActions,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let start = message
+ .start
+ .and_then(language2::proto::deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid start"))?;
+ let end = message
+ .end
+ .and_then(language2::proto::deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid end"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ Ok(Self { range: start..end })
+ }
+
+ fn response_to_proto(
+ code_actions: Vec<CodeAction>,
+ _: &mut Project,
+ _: PeerId,
+ buffer_version: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::GetCodeActionsResponse {
+ proto::GetCodeActionsResponse {
+ actions: code_actions
+ .iter()
+ .map(language2::proto::serialize_code_action)
+ .collect(),
+ version: serialize_version(&buffer_version),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::GetCodeActionsResponse,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Vec<CodeAction>> {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+ message
+ .actions
+ .into_iter()
+ .map(language2::proto::deserialize_code_action)
+ .collect()
+ }
+
+ fn buffer_id_from_proto(message: &proto::GetCodeActions) -> u64 {
+ message.buffer_id
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for OnTypeFormatting {
+ type Response = Option<Transaction>;
+ type LspRequest = lsp2::request::OnTypeFormatting;
+ type ProtoRequest = proto::OnTypeFormatting;
+
+ fn check_capabilities(&self, server_capabilities: &lsp2::ServerCapabilities) -> bool {
+ let Some(on_type_formatting_options) =
+ &server_capabilities.document_on_type_formatting_provider
+ else {
+ return false;
+ };
+ on_type_formatting_options
+ .first_trigger_character
+ .contains(&self.trigger)
+ || on_type_formatting_options
+ .more_trigger_character
+ .iter()
+ .flatten()
+ .any(|chars| chars.contains(&self.trigger))
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ _: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::DocumentOnTypeFormattingParams {
+ lsp2::DocumentOnTypeFormattingParams {
+ text_document_position: lsp2::TextDocumentPositionParams::new(
+ lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path(path).unwrap()),
+ point_to_lsp(self.position),
+ ),
+ ch: self.trigger.clone(),
+ options: lsp_formatting_options(self.options.tab_size),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<Vec<lsp2::TextEdit>>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ mut cx: AsyncAppContext,
+ ) -> Result<Option<Transaction>> {
+ if let Some(edits) = message {
+ let (lsp_adapter, lsp_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
+ Project::deserialize_edits(
+ project,
+ buffer,
+ edits,
+ self.push_to_history,
+ lsp_adapter,
+ lsp_server,
+ &mut cx,
+ )
+ .await
+ } else {
+ Ok(None)
+ }
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::OnTypeFormatting {
+ proto::OnTypeFormatting {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ position: Some(language2::proto::serialize_anchor(
+ &buffer.anchor_before(self.position),
+ )),
+ trigger: self.trigger.clone(),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::OnTypeFormatting,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let position = message
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ let tab_size = buffer.read_with(&cx, |buffer, cx| {
+ language_settings(buffer.language(), buffer.file(), cx).tab_size
+ });
+
+ Ok(Self {
+ position: buffer.read_with(&cx, |buffer, _| position.to_point_utf16(buffer)),
+ trigger: message.trigger.clone(),
+ options: lsp_formatting_options(tab_size.get()).into(),
+ push_to_history: false,
+ })
+ }
+
+ fn response_to_proto(
+ response: Option<Transaction>,
+ _: &mut Project,
+ _: PeerId,
+ _: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::OnTypeFormattingResponse {
+ proto::OnTypeFormattingResponse {
+ transaction: response
+ .map(|transaction| language2::proto::serialize_transaction(&transaction)),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::OnTypeFormattingResponse,
+ _: Handle<Project>,
+ _: Handle<Buffer>,
+ _: AsyncAppContext,
+ ) -> Result<Option<Transaction>> {
+ let Some(transaction) = message.transaction else {
+ return Ok(None);
+ };
+ Ok(Some(language2::proto::deserialize_transaction(
+ transaction,
+ )?))
+ }
+
+ fn buffer_id_from_proto(message: &proto::OnTypeFormatting) -> u64 {
+ message.buffer_id
+ }
+}
+
+impl InlayHints {
+ pub async fn lsp2_to_project_hint(
+ lsp_hint: lsp2::InlayHint,
+ buffer_handle: &Handle<Buffer>,
+ server_id: LanguageServerId,
+ resolve_state: ResolveState,
+ force_no_type_left_padding: bool,
+ cx: &mut AsyncAppContext,
+ ) -> anyhow::Result<InlayHint> {
+ let kind = lsp_hint.kind.and_then(|kind| match kind {
+ lsp2::InlayHintKind::TYPE => Some(InlayHintKind::Type),
+ lsp2::InlayHintKind::PARAMETER => Some(InlayHintKind::Parameter),
+ _ => None,
+ });
+
+ let position = cx.update(|cx| {
+ let buffer = buffer_handle.read(cx);
+ let position = buffer.clip_point_utf16(point_from_lsp(lsp_hint.position), Bias::Left);
+ if kind == Some(InlayHintKind::Parameter) {
+ buffer.anchor_before(position)
+ } else {
+ buffer.anchor_after(position)
+ }
+ });
+ let label = Self::lsp_inlay_label_to_project(lsp_hint.label, server_id)
+ .await
+ .context("lsp to project inlay hint conversion")?;
+ let padding_left = if force_no_type_left_padding && kind == Some(InlayHintKind::Type) {
+ false
+ } else {
+ lsp_hint.padding_left.unwrap_or(false)
+ };
+
+ Ok(InlayHint {
+ position,
+ padding_left,
+ padding_right: lsp_hint.padding_right.unwrap_or(false),
+ label,
+ kind,
+ tooltip: lsp_hint.tooltip.map(|tooltip| match tooltip {
+ lsp2::InlayHintTooltip::String(s) => InlayHintTooltip::String(s),
+ lsp2::InlayHintTooltip::MarkupContent(markup_content) => {
+ InlayHintTooltip::MarkupContent(MarkupContent {
+ kind: match markup_content.kind {
+ lsp2::MarkupKind::PlainText => HoverBlockKind::PlainText,
+ lsp2::MarkupKind::Markdown => HoverBlockKind::Markdown,
+ },
+ value: markup_content.value,
+ })
+ }
+ }),
+ resolve_state,
+ })
+ }
+
+    async fn lsp_inlay_label_to_project(
+ lsp_label: lsp2::InlayHintLabel,
+ server_id: LanguageServerId,
+ ) -> anyhow::Result<InlayHintLabel> {
+ let label = match lsp_label {
+ lsp2::InlayHintLabel::String(s) => InlayHintLabel::String(s),
+ lsp2::InlayHintLabel::LabelParts(lsp_parts) => {
+ let mut parts = Vec::with_capacity(lsp_parts.len());
+ for lsp_part in lsp_parts {
+ parts.push(InlayHintLabelPart {
+ value: lsp_part.value,
+ tooltip: lsp_part.tooltip.map(|tooltip| match tooltip {
+ lsp2::InlayHintLabelPartTooltip::String(s) => {
+ InlayHintLabelPartTooltip::String(s)
+ }
+ lsp2::InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
+ InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
+ kind: match markup_content.kind {
+ lsp2::MarkupKind::PlainText => HoverBlockKind::PlainText,
+ lsp2::MarkupKind::Markdown => HoverBlockKind::Markdown,
+ },
+ value: markup_content.value,
+ })
+ }
+ }),
+ location: Some(server_id).zip(lsp_part.location),
+ });
+ }
+ InlayHintLabel::LabelParts(parts)
+ }
+ };
+
+ Ok(label)
+ }
+
+ pub fn project_to_proto_hint(response_hint: InlayHint) -> proto::InlayHint {
+ let (state, lsp_resolve_state) = match response_hint.resolve_state {
+ ResolveState::Resolved => (0, None),
+ ResolveState::CanResolve(server_id, resolve_data) => (
+ 1,
+ resolve_data
+ .map(|json_data| {
+ serde_json::to_string(&json_data)
+ .expect("failed to serialize resolve json data")
+ })
+ .map(|value| proto::resolve_state::LspResolveState {
+ server_id: server_id.0 as u64,
+ value,
+ }),
+ ),
+ ResolveState::Resolving => (2, None),
+ };
+ let resolve_state = Some(proto::ResolveState {
+ state,
+ lsp_resolve_state,
+ });
+ proto::InlayHint {
+ position: Some(language2::proto::serialize_anchor(&response_hint.position)),
+ padding_left: response_hint.padding_left,
+ padding_right: response_hint.padding_right,
+ label: Some(proto::InlayHintLabel {
+ label: Some(match response_hint.label {
+ InlayHintLabel::String(s) => proto::inlay_hint_label::Label::Value(s),
+ InlayHintLabel::LabelParts(label_parts) => {
+ proto::inlay_hint_label::Label::LabelParts(proto::InlayHintLabelParts {
+                            parts: label_parts
+                                .into_iter()
+                                .map(|label_part| {
+                                    let location_url = label_part
+                                        .location
+                                        .as_ref()
+                                        .map(|(_, location)| location.uri.to_string());
+                                    let location_range_start = label_part
+                                        .location
+                                        .as_ref()
+                                        .map(|(_, location)| point_from_lsp(location.range.start).0)
+                                        .map(|point| proto::PointUtf16 {
+                                            row: point.row,
+                                            column: point.column,
+                                        });
+                                    let location_range_end = label_part
+                                        .location
+                                        .as_ref()
+                                        .map(|(_, location)| point_from_lsp(location.range.end).0)
+                                        .map(|point| proto::PointUtf16 {
+                                            row: point.row,
+                                            column: point.column,
+                                        });
+                                    let language_server_id = label_part
+                                        .location
+                                        .as_ref()
+                                        .map(|(server_id, _)| server_id.0 as u64);
+                                    proto::InlayHintLabelPart {
+                                        value: label_part.value,
+                                        tooltip: label_part.tooltip.map(|tooltip| {
+                                            let proto_tooltip = match tooltip {
+                                                InlayHintLabelPartTooltip::String(s) => {
+                                                    proto::inlay_hint_label_part_tooltip::Content::Value(s)
+                                                }
+                                                InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
+                                                    proto::inlay_hint_label_part_tooltip::Content::MarkupContent(
+                                                        proto::MarkupContent {
+                                                            is_markdown: markup_content.kind
+                                                                == HoverBlockKind::Markdown,
+                                                            value: markup_content.value,
+                                                        },
+                                                    )
+                                                }
+                                            };
+                                            proto::InlayHintLabelPartTooltip {
+                                                content: Some(proto_tooltip),
+                                            }
+                                        }),
+                                        location_url,
+                                        location_range_start,
+                                        location_range_end,
+                                        language_server_id,
+                                    }
+                                })
+                                .collect(),
+ })
+ }
+ }),
+ }),
+ kind: response_hint.kind.map(|kind| kind.name().to_string()),
+ tooltip: response_hint.tooltip.map(|response_tooltip| {
+ let proto_tooltip = match response_tooltip {
+ InlayHintTooltip::String(s) => proto::inlay_hint_tooltip::Content::Value(s),
+ InlayHintTooltip::MarkupContent(markup_content) => {
+ proto::inlay_hint_tooltip::Content::MarkupContent(proto::MarkupContent {
+ is_markdown: markup_content.kind == HoverBlockKind::Markdown,
+ value: markup_content.value,
+ })
+ }
+ };
+ proto::InlayHintTooltip {
+ content: Some(proto_tooltip),
+ }
+ }),
+ resolve_state,
+ }
+ }
+
+ pub fn proto_to_project_hint(message_hint: proto::InlayHint) -> anyhow::Result<InlayHint> {
+        let resolve_state = message_hint.resolve_state.as_ref().with_context(|| {
+            format!("incorrect proto inlay hint message: no resolve state in hint {message_hint:?}")
+        })?;
+ let resolve_state_data = resolve_state
+ .lsp_resolve_state.as_ref()
+ .map(|lsp_resolve_state| {
+ serde_json::from_str::<Option<lsp2::LSPAny>>(&lsp_resolve_state.value)
+ .with_context(|| format!("incorrect proto inlay hint message: non-json resolve state {lsp_resolve_state:?}"))
+ .map(|state| (LanguageServerId(lsp_resolve_state.server_id as usize), state))
+ })
+ .transpose()?;
+ let resolve_state = match resolve_state.state {
+ 0 => ResolveState::Resolved,
+ 1 => {
+ let (server_id, lsp_resolve_state) = resolve_state_data.with_context(|| {
+ format!(
+ "No lsp resolve data for the hint that can be resolved: {message_hint:?}"
+ )
+ })?;
+ ResolveState::CanResolve(server_id, lsp_resolve_state)
+ }
+ 2 => ResolveState::Resolving,
+ invalid => {
+ anyhow::bail!("Unexpected resolve state {invalid} for hint {message_hint:?}")
+ }
+ };
+ Ok(InlayHint {
+ position: message_hint
+ .position
+ .and_then(language2::proto::deserialize_anchor)
+ .context("invalid position")?,
+ label: match message_hint
+ .label
+ .and_then(|label| label.label)
+ .context("missing label")?
+ {
+ proto::inlay_hint_label::Label::Value(s) => InlayHintLabel::String(s),
+ proto::inlay_hint_label::Label::LabelParts(parts) => {
+ let mut label_parts = Vec::new();
+ for part in parts.parts {
+ label_parts.push(InlayHintLabelPart {
+ value: part.value,
+ tooltip: part.tooltip.map(|tooltip| match tooltip.content {
+ Some(proto::inlay_hint_label_part_tooltip::Content::Value(s)) => {
+ InlayHintLabelPartTooltip::String(s)
+ }
+ Some(
+ proto::inlay_hint_label_part_tooltip::Content::MarkupContent(
+ markup_content,
+ ),
+ ) => InlayHintLabelPartTooltip::MarkupContent(MarkupContent {
+ kind: if markup_content.is_markdown {
+ HoverBlockKind::Markdown
+ } else {
+ HoverBlockKind::PlainText
+ },
+ value: markup_content.value,
+ }),
+ None => InlayHintLabelPartTooltip::String(String::new()),
+ }),
+ location: {
+ match part
+ .location_url
+ .zip(
+ part.location_range_start.and_then(|start| {
+ Some(start..part.location_range_end?)
+ }),
+ )
+ .zip(part.language_server_id)
+ {
+ Some(((uri, range), server_id)) => Some((
+ LanguageServerId(server_id as usize),
+ lsp2::Location {
+                                        uri: lsp2::Url::parse(&uri)
+                                            .with_context(|| format!("invalid uri `{uri}` in hint part"))?,
+ range: lsp2::Range::new(
+ point_to_lsp(PointUtf16::new(
+ range.start.row,
+ range.start.column,
+ )),
+ point_to_lsp(PointUtf16::new(
+ range.end.row,
+ range.end.column,
+ )),
+ ),
+ },
+ )),
+ None => None,
+ }
+ },
+ });
+ }
+
+ InlayHintLabel::LabelParts(label_parts)
+ }
+ },
+ padding_left: message_hint.padding_left,
+ padding_right: message_hint.padding_right,
+ kind: message_hint
+ .kind
+ .as_deref()
+ .and_then(InlayHintKind::from_name),
+ tooltip: message_hint.tooltip.and_then(|tooltip| {
+ Some(match tooltip.content? {
+ proto::inlay_hint_tooltip::Content::Value(s) => InlayHintTooltip::String(s),
+ proto::inlay_hint_tooltip::Content::MarkupContent(markup_content) => {
+ InlayHintTooltip::MarkupContent(MarkupContent {
+ kind: if markup_content.is_markdown {
+ HoverBlockKind::Markdown
+ } else {
+ HoverBlockKind::PlainText
+ },
+ value: markup_content.value,
+ })
+ }
+ })
+ }),
+ resolve_state,
+ })
+ }
+
+ pub fn project_to_lsp2_hint(hint: InlayHint, snapshot: &BufferSnapshot) -> lsp2::InlayHint {
+ lsp2::InlayHint {
+ position: point_to_lsp(hint.position.to_point_utf16(snapshot)),
+ kind: hint.kind.map(|kind| match kind {
+ InlayHintKind::Type => lsp2::InlayHintKind::TYPE,
+ InlayHintKind::Parameter => lsp2::InlayHintKind::PARAMETER,
+ }),
+ text_edits: None,
+ tooltip: hint.tooltip.and_then(|tooltip| {
+ Some(match tooltip {
+ InlayHintTooltip::String(s) => lsp2::InlayHintTooltip::String(s),
+ InlayHintTooltip::MarkupContent(markup_content) => {
+ lsp2::InlayHintTooltip::MarkupContent(lsp2::MarkupContent {
+ kind: match markup_content.kind {
+ HoverBlockKind::PlainText => lsp2::MarkupKind::PlainText,
+ HoverBlockKind::Markdown => lsp2::MarkupKind::Markdown,
+ HoverBlockKind::Code { .. } => return None,
+ },
+ value: markup_content.value,
+ })
+ }
+ })
+ }),
+ label: match hint.label {
+ InlayHintLabel::String(s) => lsp2::InlayHintLabel::String(s),
+ InlayHintLabel::LabelParts(label_parts) => lsp2::InlayHintLabel::LabelParts(
+ label_parts
+ .into_iter()
+ .map(|part| lsp2::InlayHintLabelPart {
+ value: part.value,
+ tooltip: part.tooltip.and_then(|tooltip| {
+ Some(match tooltip {
+ InlayHintLabelPartTooltip::String(s) => {
+ lsp2::InlayHintLabelPartTooltip::String(s)
+ }
+ InlayHintLabelPartTooltip::MarkupContent(markup_content) => {
+ lsp2::InlayHintLabelPartTooltip::MarkupContent(
+ lsp2::MarkupContent {
+ kind: match markup_content.kind {
+ HoverBlockKind::PlainText => {
+ lsp2::MarkupKind::PlainText
+ }
+ HoverBlockKind::Markdown => {
+ lsp2::MarkupKind::Markdown
+ }
+ HoverBlockKind::Code { .. } => return None,
+ },
+ value: markup_content.value,
+ },
+ )
+ }
+ })
+ }),
+ location: part.location.map(|(_, location)| location),
+ command: None,
+ })
+ .collect(),
+ ),
+ },
+ padding_left: Some(hint.padding_left),
+ padding_right: Some(hint.padding_right),
+ data: match hint.resolve_state {
+ ResolveState::CanResolve(_, data) => data,
+ ResolveState::Resolving | ResolveState::Resolved => None,
+ },
+ }
+ }
+
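+    /// Whether the server supports lazily resolving inlay hints via
+    /// `inlayHint/resolve`. A bare `OneOf::Left(bool)` only advertises inlay hint
+    /// support itself, so it is treated as not resolvable.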
+ pub fn can_resolve_inlays(capabilities: &ServerCapabilities) -> bool {
+ capabilities
+ .inlay_hint_provider
+ .as_ref()
+ .and_then(|options| match options {
+ OneOf::Left(_is_supported) => None,
+ OneOf::Right(capabilities) => match capabilities {
+ lsp2::InlayHintServerCapabilities::Options(o) => o.resolve_provider,
+ lsp2::InlayHintServerCapabilities::RegistrationOptions(o) => {
+ o.inlay_hint_options.resolve_provider
+ }
+ },
+ })
+ .unwrap_or(false)
+ }
+}
+
+#[async_trait(?Send)]
+impl LspCommand for InlayHints {
+ type Response = Vec<InlayHint>;
+ type LspRequest = lsp2::InlayHintRequest;
+ type ProtoRequest = proto::InlayHints;
+
+ fn check_capabilities(&self, server_capabilities: &lsp2::ServerCapabilities) -> bool {
+ let Some(inlay_hint_provider) = &server_capabilities.inlay_hint_provider else {
+ return false;
+ };
+ match inlay_hint_provider {
+ lsp2::OneOf::Left(enabled) => *enabled,
+ lsp2::OneOf::Right(inlay_hint_capabilities) => match inlay_hint_capabilities {
+ lsp2::InlayHintServerCapabilities::Options(_) => true,
+ lsp2::InlayHintServerCapabilities::RegistrationOptions(_) => false,
+ },
+ }
+ }
+
+ fn to_lsp(
+ &self,
+ path: &Path,
+ buffer: &Buffer,
+ _: &Arc<LanguageServer>,
+ _: &AppContext,
+ ) -> lsp2::InlayHintParams {
+ lsp2::InlayHintParams {
+ text_document: lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(path).unwrap(),
+ },
+ range: range_to_lsp(self.range.to_point_utf16(buffer)),
+ work_done_progress_params: Default::default(),
+ }
+ }
+
+ async fn response_from_lsp(
+ self,
+ message: Option<Vec<lsp2::InlayHint>>,
+ project: Handle<Project>,
+ buffer: Handle<Buffer>,
+ server_id: LanguageServerId,
+ mut cx: AsyncAppContext,
+ ) -> anyhow::Result<Vec<InlayHint>> {
+ let (lsp_adapter, lsp_server) =
+ language_server_for_buffer(&project, &buffer, server_id, &mut cx)?;
+        // `typescript-language-server` adds padding to the left for type hints, turning
+        // `const foo: boolean` into `const foo : boolean`, which looks odd.
+        // `rust-analyzer` does not add this padding, and we have to accommodate both.
+        //
+        // We could trim the whole string, but some hints may legitimately contain
+        // whitespace at either end that we need to display, so for now we only apply
+        // a heuristic for this one known-awkward case and look out for more.
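+        //
+        // E.g. a type hint may arrive from typescript-language-server as
+        // { label: ": boolean", padding_left: true }; forcing `padding_left` to
+        // false makes the editor render `foo: boolean` instead of `foo : boolean`.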
+ let force_no_type_left_padding =
+ lsp_adapter.name.0.as_ref() == "typescript-language-server";
+
+ let hints = message.unwrap_or_default().into_iter().map(|lsp_hint| {
+ let resolve_state = if InlayHints::can_resolve_inlays(lsp_server.capabilities()) {
+ ResolveState::CanResolve(lsp_server.server_id(), lsp_hint.data.clone())
+ } else {
+ ResolveState::Resolved
+ };
+
+ let buffer = buffer.clone();
+ cx.spawn(|mut cx| async move {
+ InlayHints::lsp_to_project_hint(
+ lsp_hint,
+ &buffer,
+ server_id,
+ resolve_state,
+ force_no_type_left_padding,
+ &mut cx,
+ )
+ .await
+ })
+ });
+ future::join_all(hints)
+ .await
+ .into_iter()
+ .collect::<anyhow::Result<_>>()
+ .context("lsp to project inlay hints conversion")
+ }
+
+ fn to_proto(&self, project_id: u64, buffer: &Buffer) -> proto::InlayHints {
+ proto::InlayHints {
+ project_id,
+ buffer_id: buffer.remote_id(),
+ start: Some(language2::proto::serialize_anchor(&self.range.start)),
+ end: Some(language2::proto::serialize_anchor(&self.range.end)),
+ version: serialize_version(&buffer.version()),
+ }
+ }
+
+ async fn from_proto(
+ message: proto::InlayHints,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Self> {
+ let start = message
+ .start
+ .and_then(language2::proto::deserialize_anchor)
+ .context("invalid start")?;
+ let end = message
+ .end
+ .and_then(language2::proto::deserialize_anchor)
+ .context("invalid end")?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ Ok(Self { range: start..end })
+ }
+
+ fn response_to_proto(
+ response: Vec<InlayHint>,
+ _: &mut Project,
+ _: PeerId,
+ buffer_version: &clock::Global,
+ _: &mut AppContext,
+ ) -> proto::InlayHintsResponse {
+ proto::InlayHintsResponse {
+ hints: response
+ .into_iter()
+                .map(InlayHints::project_to_proto_hint)
+ .collect(),
+ version: serialize_version(buffer_version),
+ }
+ }
+
+ async fn response_from_proto(
+ self,
+ message: proto::InlayHintsResponse,
+ _: Handle<Project>,
+ buffer: Handle<Buffer>,
+ mut cx: AsyncAppContext,
+ ) -> anyhow::Result<Vec<InlayHint>> {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&message.version))
+ })
+ .await?;
+
+ let mut hints = Vec::new();
+ for message_hint in message.hints {
+ hints.push(InlayHints::proto_to_project_hint(message_hint)?);
+ }
+
+ Ok(hints)
+ }
+
+ fn buffer_id_from_proto(message: &proto::InlayHints) -> u64 {
+ message.buffer_id
+ }
+}
@@ -0,0 +1,8846 @@
+mod ignore;
+mod lsp_command;
+pub mod project_settings;
+pub mod search;
+pub mod terminals;
+pub mod worktree;
+
+#[cfg(test)]
+mod project_tests;
+#[cfg(test)]
+mod worktree_tests;
+
+use anyhow::{anyhow, Context, Result};
+use client2::{proto, Client, Collaborator, TypedEnvelope, UserStore};
+use clock::ReplicaId;
+use collections::{hash_map, BTreeMap, HashMap, HashSet};
+use copilot::Copilot;
+use futures::{
+ channel::{
+ mpsc::{self, UnboundedReceiver},
+ oneshot,
+ },
+ future::{self, try_join_all, Shared},
+ stream::FuturesUnordered,
+ AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
+};
+use globset::{Glob, GlobSet, GlobSetBuilder};
+use gpui2::{
+ AnyHandle, AppContext, AsyncAppContext, EventEmitter, Handle, ModelContext, Task, WeakHandle,
+};
+use itertools::Itertools;
+use language2::{
+ language_settings::{
+ language_settings, FormatOnSave, Formatter, InlayHintKind, LanguageSettings,
+ },
+ point_to_lsp,
+ proto::{
+ deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
+ serialize_anchor, serialize_version, split_operations,
+ },
+ range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, BundledFormatter, CachedLspAdapter,
+ CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
+ Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
+ LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16,
+ TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
+};
+use log::error;
+use lsp2::{
+ DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
+ DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
+};
+use lsp_command::*;
+use node_runtime::NodeRuntime;
+use postage::watch;
+use prettier::{LocateStart, Prettier, PRETTIER_SERVER_FILE, PRETTIER_SERVER_JS};
+use project_settings::{LspSettings, ProjectSettings};
+use rand::prelude::*;
+use search::SearchQuery;
+use serde::Serialize;
+use settings2::SettingsStore;
+use sha2::{Digest, Sha256};
+use similar::{ChangeTag, TextDiff};
+use smol::channel::{Receiver, Sender};
+use std::{
+ cmp::{self, Ordering},
+ convert::TryInto,
+ hash::Hash,
+ mem,
+ num::NonZeroU32,
+ ops::Range,
+ path::{self, Component, Path, PathBuf},
+ process::Stdio,
+ str,
+ sync::{
+ atomic::{AtomicUsize, Ordering::SeqCst},
+ Arc,
+ },
+ time::{Duration, Instant},
+};
+use terminals::Terminals;
+use text::{Anchor, LineEnding, Rope};
+use util::{
+ debug_panic, defer,
+ http::HttpClient,
+ merge_json_value_into,
+ paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
+ post_inc, ResultExt, TryFutureExt as _,
+};
+
+pub use fs::*;
+pub use worktree::*;
+
+pub trait Item {
+ fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
+ fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
+}
+
+// Language server state is stored across 3 collections:
+//     language_servers =>
+//         a mapping from unique server id to LanguageServerState, which can either be a task for a
+//         server in the process of starting, or a running server with adapter and language server arcs
+//     language_server_ids => a mapping from WorktreeId and server name to the unique server id
+//     language_server_statuses => a mapping from unique server id to the current server status
+//
+// Multiple worktrees can map to the same language server, for example when you jump to the definition
+// of a file in the standard library. So language_server_ids is used to look up which server is active
+// for a given worktree and language server name.
+//
+// When starting a language server, the id map is checked first to make sure a server isn't already
+// available for that worktree. If there is one, we finish early. Otherwise, a new id is allocated and
+// the Starting variant of LanguageServerState is stored in the language_servers map.
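+//
+// As a purely hypothetical illustration (the ids are invented), two worktrees
+// sharing one rust-analyzer instance would look like:
+//
+//     language_server_ids: {
+//         (WorktreeId(1), "rust-analyzer") => LanguageServerId(7),
+//         (WorktreeId(2), "rust-analyzer") => LanguageServerId(7),
+//     }
+//     language_servers: { LanguageServerId(7) => LanguageServerState::Running { .. } }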
+pub struct Project {
+ worktrees: Vec<WorktreeHandle>,
+ active_entry: Option<ProjectEntryId>,
+ buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
+ languages: Arc<LanguageRegistry>,
+ supplementary_language_servers:
+ HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
+ language_servers: HashMap<LanguageServerId, LanguageServerState>,
+ language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
+ language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
+ last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
+ client: Arc<client2::Client>,
+ next_entry_id: Arc<AtomicUsize>,
+ join_project_response_message_id: u32,
+ next_diagnostic_group_id: usize,
+ user_store: Handle<UserStore>,
+ fs: Arc<dyn Fs>,
+ client_state: Option<ProjectClientState>,
+ collaborators: HashMap<proto::PeerId, Collaborator>,
+ client_subscriptions: Vec<client2::Subscription>,
+ _subscriptions: Vec<gpui2::Subscription>,
+ next_buffer_id: u64,
+ opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
+ shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
+ #[allow(clippy::type_complexity)]
+ loading_buffers_by_path: HashMap<
+ ProjectPath,
+ postage::watch::Receiver<Option<Result<Handle<Buffer>, Arc<anyhow::Error>>>>,
+ >,
+ #[allow(clippy::type_complexity)]
+ loading_local_worktrees:
+ HashMap<Arc<Path>, Shared<Task<Result<Handle<Worktree>, Arc<anyhow::Error>>>>>,
+ opened_buffers: HashMap<u64, OpenBuffer>,
+ local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
+ local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
+    /// A buffer ID mapped to `None` means we've started loading the buffer for that ID but haven't finished yet.
+    /// Used for re-issuing buffer requests when peers temporarily disconnect.
+ incomplete_remote_buffers: HashMap<u64, Option<Handle<Buffer>>>,
+ buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
+ buffers_being_formatted: HashSet<u64>,
+ buffers_needing_diff: HashSet<WeakHandle<Buffer>>,
+ git_diff_debouncer: DelayedDebounced,
+ nonce: u128,
+ _maintain_buffer_languages: Task<()>,
+ _maintain_workspace_config: Task<()>,
+ terminals: Terminals,
+ copilot_lsp_subscription: Option<gpui2::Subscription>,
+ copilot_log_subscription: Option<lsp2::Subscription>,
+ current_lsp_settings: HashMap<Arc<str>, LspSettings>,
+ node: Option<Arc<dyn NodeRuntime>>,
+ prettier_instances: HashMap<
+ (Option<WorktreeId>, PathBuf),
+ Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
+ >,
+}
+
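+/// Debounces a unit of work: each call to `fire_new` cancels the previously
+/// scheduled (but not yet started) run, waits for any in-flight run to finish,
+/// and only invokes the callback once `delay` has elapsed.
+///
+/// A minimal usage sketch; `recalculate_buffer_diffs` is a hypothetical callback
+/// shown only to illustrate the shape of the API:
+///
+/// ```ignore
+/// self.git_diff_debouncer
+///     .fire_new(Duration::from_millis(50), cx, |project, cx| {
+///         project.recalculate_buffer_diffs(cx)
+///     });
+/// ```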
+struct DelayedDebounced {
+ task: Option<Task<()>>,
+ cancel_channel: Option<oneshot::Sender<()>>,
+}
+
+enum LanguageServerToQuery {
+ Primary,
+ Other(LanguageServerId),
+}
+
+impl DelayedDebounced {
+ fn new() -> DelayedDebounced {
+ DelayedDebounced {
+ task: None,
+ cancel_channel: None,
+ }
+ }
+
+ fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
+ where
+ F: 'static + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
+ {
+ if let Some(channel) = self.cancel_channel.take() {
+ _ = channel.send(());
+ }
+
+ let (sender, mut receiver) = oneshot::channel::<()>();
+ self.cancel_channel = Some(sender);
+
+ let previous_task = self.task.take();
+        self.task = Some(cx.spawn(|project, mut cx| async move {
+ let mut timer = cx.background().timer(delay).fuse();
+ if let Some(previous_task) = previous_task {
+ previous_task.await;
+ }
+
+ futures::select_biased! {
+ _ = receiver => return,
+ _ = timer => {}
+ }
+
+            project
+                .update(&mut cx, |project, cx| (func)(project, cx))
+ .await;
+ }));
+ }
+}
+
+struct LspBufferSnapshot {
+ version: i32,
+ snapshot: TextBufferSnapshot,
+}
+
+/// Message ordered with respect to buffer operations
+enum BufferOrderedMessage {
+ Operation {
+ buffer_id: u64,
+ operation: proto::Operation,
+ },
+ LanguageServerUpdate {
+ language_server_id: LanguageServerId,
+ message: proto::update_language_server::Variant,
+ },
+ Resync,
+}
+
+enum LocalProjectUpdate {
+ WorktreesChanged,
+ CreateBufferForPeer {
+ peer_id: proto::PeerId,
+ buffer_id: u64,
+ },
+}
+
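+/// A buffer that is open in this project. `Strong` keeps the buffer alive (e.g.
+/// while the project is shared), `Weak` lets it drop once the last editor closes,
+/// and `Operations` queues edits received for a buffer whose state hasn't arrived yet.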
+enum OpenBuffer {
+ Strong(Handle<Buffer>),
+ Weak(WeakHandle<Buffer>),
+ Operations(Vec<Operation>),
+}
+
+#[derive(Clone)]
+enum WorktreeHandle {
+ Strong(Handle<Worktree>),
+ Weak(WeakHandle<Worktree>),
+}
+
+enum ProjectClientState {
+ Local {
+ remote_id: u64,
+ updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
+ _send_updates: Task<()>,
+ },
+ Remote {
+ sharing_has_stopped: bool,
+ remote_id: u64,
+ replica_id: ReplicaId,
+ },
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub enum Event {
+ LanguageServerAdded(LanguageServerId),
+ LanguageServerRemoved(LanguageServerId),
+ LanguageServerLog(LanguageServerId, String),
+ Notification(String),
+ ActiveEntryChanged(Option<ProjectEntryId>),
+ ActivateProjectPanel,
+ WorktreeAdded,
+ WorktreeRemoved(WorktreeId),
+ WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
+ DiskBasedDiagnosticsStarted {
+ language_server_id: LanguageServerId,
+ },
+ DiskBasedDiagnosticsFinished {
+ language_server_id: LanguageServerId,
+ },
+ DiagnosticsUpdated {
+ path: ProjectPath,
+ language_server_id: LanguageServerId,
+ },
+ RemoteIdChanged(Option<u64>),
+ DisconnectedFromHost,
+ Closed,
+ DeletedEntry(ProjectEntryId),
+ CollaboratorUpdated {
+ old_peer_id: proto::PeerId,
+ new_peer_id: proto::PeerId,
+ },
+ CollaboratorJoined(proto::PeerId),
+ CollaboratorLeft(proto::PeerId),
+ RefreshInlayHints,
+}
+
+pub enum LanguageServerState {
+ Starting(Task<Option<Arc<LanguageServer>>>),
+
+ Running {
+ language: Arc<Language>,
+ adapter: Arc<CachedLspAdapter>,
+ server: Arc<LanguageServer>,
+ watched_paths: HashMap<WorktreeId, GlobSet>,
+ simulate_disk_based_diagnostics_completion: Option<Task<()>>,
+ },
+}
+
+#[derive(Serialize)]
+pub struct LanguageServerStatus {
+ pub name: String,
+ pub pending_work: BTreeMap<String, LanguageServerProgress>,
+ pub has_pending_diagnostic_updates: bool,
+ progress_tokens: HashSet<String>,
+}
+
+#[derive(Clone, Debug, Serialize)]
+pub struct LanguageServerProgress {
+ pub message: Option<String>,
+ pub percentage: Option<usize>,
+ #[serde(skip_serializing)]
+ pub last_update_at: Instant,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
+pub struct ProjectPath {
+ pub worktree_id: WorktreeId,
+ pub path: Arc<Path>,
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
+pub struct DiagnosticSummary {
+ pub error_count: usize,
+ pub warning_count: usize,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Location {
+ pub buffer: Handle<Buffer>,
+ pub range: Range<language2::Anchor>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct InlayHint {
+ pub position: language2::Anchor,
+ pub label: InlayHintLabel,
+ pub kind: Option<InlayHintKind>,
+ pub padding_left: bool,
+ pub padding_right: bool,
+ pub tooltip: Option<InlayHintTooltip>,
+ pub resolve_state: ResolveState,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ResolveState {
+ Resolved,
+ CanResolve(LanguageServerId, Option<lsp2::LSPAny>),
+ Resolving,
+}
+
+impl InlayHint {
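+    /// The hint's full label text, with label parts concatenated in order.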
+ pub fn text(&self) -> String {
+ match &self.label {
+ InlayHintLabel::String(s) => s.to_owned(),
+ InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum InlayHintLabel {
+ String(String),
+ LabelParts(Vec<InlayHintLabelPart>),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct InlayHintLabelPart {
+ pub value: String,
+ pub tooltip: Option<InlayHintLabelPartTooltip>,
+ pub location: Option<(LanguageServerId, lsp2::Location)>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum InlayHintTooltip {
+ String(String),
+ MarkupContent(MarkupContent),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum InlayHintLabelPartTooltip {
+ String(String),
+ MarkupContent(MarkupContent),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct MarkupContent {
+ pub kind: HoverBlockKind,
+ pub value: String,
+}
+
+#[derive(Debug, Clone)]
+pub struct LocationLink {
+ pub origin: Option<Location>,
+ pub target: Location,
+}
+
+#[derive(Debug)]
+pub struct DocumentHighlight {
+ pub range: Range<language2::Anchor>,
+ pub kind: DocumentHighlightKind,
+}
+
+#[derive(Clone, Debug)]
+pub struct Symbol {
+ pub language_server_name: LanguageServerName,
+ pub source_worktree_id: WorktreeId,
+ pub path: ProjectPath,
+ pub label: CodeLabel,
+ pub name: String,
+ pub kind: lsp2::SymbolKind,
+ pub range: Range<Unclipped<PointUtf16>>,
+ pub signature: [u8; 32],
+}
+
+#[derive(Clone, Debug, PartialEq)]
+pub struct HoverBlock {
+ pub text: String,
+ pub kind: HoverBlockKind,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum HoverBlockKind {
+ PlainText,
+ Markdown,
+ Code { language: String },
+}
+
+#[derive(Debug)]
+pub struct Hover {
+ pub contents: Vec<HoverBlock>,
+ pub range: Option<Range<language2::Anchor>>,
+ pub language: Option<Arc<Language>>,
+}
+
+impl Hover {
+ pub fn is_empty(&self) -> bool {
+ self.contents.iter().all(|block| block.text.is_empty())
+ }
+}
+
+#[derive(Default)]
+pub struct ProjectTransaction(pub HashMap<Handle<Buffer>, language2::Transaction>);
+
+impl DiagnosticSummary {
+ fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
+ let mut this = Self {
+ error_count: 0,
+ warning_count: 0,
+ };
+
+ for entry in diagnostics {
+ if entry.diagnostic.is_primary {
+ match entry.diagnostic.severity {
+ DiagnosticSeverity::ERROR => this.error_count += 1,
+ DiagnosticSeverity::WARNING => this.warning_count += 1,
+ _ => {}
+ }
+ }
+ }
+
+ this
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.error_count == 0 && self.warning_count == 0
+ }
+
+ pub fn to_proto(
+ &self,
+ language_server_id: LanguageServerId,
+ path: &Path,
+ ) -> proto::DiagnosticSummary {
+ proto::DiagnosticSummary {
+ path: path.to_string_lossy().to_string(),
+ language_server_id: language_server_id.0 as u64,
+ error_count: self.error_count as u32,
+ warning_count: self.warning_count as u32,
+ }
+ }
+}
+
+#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct ProjectEntryId(usize);
+
+impl ProjectEntryId {
+ pub const MAX: Self = Self(usize::MAX);
+
+ pub fn new(counter: &AtomicUsize) -> Self {
+ Self(counter.fetch_add(1, SeqCst))
+ }
+
+ pub fn from_proto(id: u64) -> Self {
+ Self(id as usize)
+ }
+
+ pub fn to_proto(&self) -> u64 {
+ self.0 as u64
+ }
+
+ pub fn to_usize(&self) -> usize {
+ self.0
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum FormatTrigger {
+ Save,
+ Manual,
+}
+
+struct ProjectLspAdapterDelegate {
+ project: Handle<Project>,
+ http_client: Arc<dyn HttpClient>,
+}
+
+impl FormatTrigger {
+ fn from_proto(value: i32) -> FormatTrigger {
+ match value {
+ 0 => FormatTrigger::Save,
+ 1 => FormatTrigger::Manual,
+ _ => FormatTrigger::Save,
+ }
+ }
+}
+#[derive(Clone, Debug, PartialEq)]
+enum SearchMatchCandidate {
+ OpenBuffer {
+ buffer: Handle<Buffer>,
+ // This might be an unnamed file without representation on filesystem
+ path: Option<Arc<Path>>,
+ },
+ Path {
+ worktree_id: WorktreeId,
+ path: Arc<Path>,
+ },
+}
+
+type SearchMatchCandidateIndex = usize;
+impl SearchMatchCandidate {
+ fn path(&self) -> Option<Arc<Path>> {
+ match self {
+ SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
+ SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
+ }
+ }
+}
+
+impl Project {
+ pub fn init_settings(cx: &mut AppContext) {
+ settings2::register::<ProjectSettings>(cx);
+ }
+
+ pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
+ Self::init_settings(cx);
+
+ client.add_model_message_handler(Self::handle_add_collaborator);
+ client.add_model_message_handler(Self::handle_update_project_collaborator);
+ client.add_model_message_handler(Self::handle_remove_collaborator);
+ client.add_model_message_handler(Self::handle_buffer_reloaded);
+ client.add_model_message_handler(Self::handle_buffer_saved);
+ client.add_model_message_handler(Self::handle_start_language_server);
+ client.add_model_message_handler(Self::handle_update_language_server);
+ client.add_model_message_handler(Self::handle_update_project);
+ client.add_model_message_handler(Self::handle_unshare_project);
+ client.add_model_message_handler(Self::handle_create_buffer_for_peer);
+ client.add_model_message_handler(Self::handle_update_buffer_file);
+ client.add_model_request_handler(Self::handle_update_buffer);
+ client.add_model_message_handler(Self::handle_update_diagnostic_summary);
+ client.add_model_message_handler(Self::handle_update_worktree);
+ client.add_model_message_handler(Self::handle_update_worktree_settings);
+ client.add_model_request_handler(Self::handle_create_project_entry);
+ client.add_model_request_handler(Self::handle_rename_project_entry);
+ client.add_model_request_handler(Self::handle_copy_project_entry);
+ client.add_model_request_handler(Self::handle_delete_project_entry);
+ client.add_model_request_handler(Self::handle_expand_project_entry);
+ client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
+ client.add_model_request_handler(Self::handle_apply_code_action);
+ client.add_model_request_handler(Self::handle_on_type_formatting);
+ client.add_model_request_handler(Self::handle_inlay_hints);
+ client.add_model_request_handler(Self::handle_resolve_inlay_hint);
+ client.add_model_request_handler(Self::handle_refresh_inlay_hints);
+ client.add_model_request_handler(Self::handle_reload_buffers);
+ client.add_model_request_handler(Self::handle_synchronize_buffers);
+ client.add_model_request_handler(Self::handle_format_buffers);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
+ client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
+ client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
+ client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
+ client.add_model_request_handler(Self::handle_search_project);
+ client.add_model_request_handler(Self::handle_get_project_symbols);
+ client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
+ client.add_model_request_handler(Self::handle_open_buffer_by_id);
+ client.add_model_request_handler(Self::handle_open_buffer_by_path);
+ client.add_model_request_handler(Self::handle_save_buffer);
+ client.add_model_message_handler(Self::handle_update_diff_base);
+ }
+
+ pub fn local(
+ client: Arc<Client>,
+ node: Arc<dyn NodeRuntime>,
+ user_store: Handle<UserStore>,
+ languages: Arc<LanguageRegistry>,
+ fs: Arc<dyn Fs>,
+ cx: &mut AppContext,
+ ) -> Handle<Self> {
+ cx.add_model(|cx: &mut ModelContext<Self>| {
+ let (tx, rx) = mpsc::unbounded();
+ cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
+ .detach();
+ let copilot_lsp_subscription =
+ Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
+ Self {
+ worktrees: Default::default(),
+ buffer_ordered_messages_tx: tx,
+ collaborators: Default::default(),
+ next_buffer_id: 0,
+ opened_buffers: Default::default(),
+ shared_buffers: Default::default(),
+ incomplete_remote_buffers: Default::default(),
+ loading_buffers_by_path: Default::default(),
+ loading_local_worktrees: Default::default(),
+ local_buffer_ids_by_path: Default::default(),
+ local_buffer_ids_by_entry_id: Default::default(),
+ buffer_snapshots: Default::default(),
+ join_project_response_message_id: 0,
+ client_state: None,
+ opened_buffer: watch::channel(),
+ client_subscriptions: Vec::new(),
+ _subscriptions: vec![
+ cx.observe_global::<SettingsStore, _>(Self::on_settings_changed),
+ cx.on_release(Self::release),
+ ],
+ _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
+ _maintain_workspace_config: Self::maintain_workspace_config(cx),
+ active_entry: None,
+ languages,
+ client,
+ user_store,
+ fs,
+ next_entry_id: Default::default(),
+ next_diagnostic_group_id: Default::default(),
+ supplementary_language_servers: HashMap::default(),
+ language_servers: Default::default(),
+ language_server_ids: Default::default(),
+ language_server_statuses: Default::default(),
+ last_workspace_edits_by_language_server: Default::default(),
+ buffers_being_formatted: Default::default(),
+ buffers_needing_diff: Default::default(),
+ git_diff_debouncer: DelayedDebounced::new(),
+ nonce: StdRng::from_entropy().gen(),
+ terminals: Terminals {
+ local_handles: Vec::new(),
+ },
+ copilot_lsp_subscription,
+ copilot_log_subscription: None,
+ current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp2.clone(),
+ node: Some(node),
+ prettier_instances: HashMap::default(),
+ }
+ })
+ }
+
+ pub async fn remote(
+ remote_id: u64,
+ client: Arc<Client>,
+ user_store: Handle<UserStore>,
+ languages: Arc<LanguageRegistry>,
+ fs: Arc<dyn Fs>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Handle<Self>> {
+ client.authenticate_and_connect(true, &cx).await?;
+
+ let subscription = client.subscribe_to_entity(remote_id)?;
+ let response = client
+ .request_envelope(proto::JoinProject {
+ project_id: remote_id,
+ })
+ .await?;
+ let this = cx.add_model(|cx| {
+ let replica_id = response.payload.replica_id as ReplicaId;
+
+ let mut worktrees = Vec::new();
+ for worktree in response.payload.worktrees {
+ let worktree = cx.update(|cx| {
+ Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
+ });
+ worktrees.push(worktree);
+ }
+
+ let (tx, rx) = mpsc::unbounded();
+ cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
+ .detach();
+ let copilot_lsp_subscription =
+ Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
+ let mut this = Self {
+ worktrees: Vec::new(),
+ buffer_ordered_messages_tx: tx,
+ loading_buffers_by_path: Default::default(),
+ next_buffer_id: 0,
+ opened_buffer: watch::channel(),
+ shared_buffers: Default::default(),
+ incomplete_remote_buffers: Default::default(),
+ loading_local_worktrees: Default::default(),
+ local_buffer_ids_by_path: Default::default(),
+ local_buffer_ids_by_entry_id: Default::default(),
+ active_entry: None,
+ collaborators: Default::default(),
+ join_project_response_message_id: response.message_id,
+ _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
+ _maintain_workspace_config: Self::maintain_workspace_config(cx),
+ languages,
+ user_store: user_store.clone(),
+ fs,
+ next_entry_id: Default::default(),
+ next_diagnostic_group_id: Default::default(),
+ client_subscriptions: Default::default(),
+ _subscriptions: vec![cx.on_release(Self::release)],
+ client: client.clone(),
+ client_state: Some(ProjectClientState::Remote {
+ sharing_has_stopped: false,
+ remote_id,
+ replica_id,
+ }),
+ supplementary_language_servers: HashMap::default(),
+ language_servers: Default::default(),
+ language_server_ids: Default::default(),
+ language_server_statuses: response
+ .payload
+ .language_servers
+ .into_iter()
+ .map(|server| {
+ (
+ LanguageServerId(server.id as usize),
+ LanguageServerStatus {
+ name: server.name,
+ pending_work: Default::default(),
+ has_pending_diagnostic_updates: false,
+ progress_tokens: Default::default(),
+ },
+ )
+ })
+ .collect(),
+ last_workspace_edits_by_language_server: Default::default(),
+ opened_buffers: Default::default(),
+ buffers_being_formatted: Default::default(),
+ buffers_needing_diff: Default::default(),
+ git_diff_debouncer: DelayedDebounced::new(),
+ buffer_snapshots: Default::default(),
+ nonce: StdRng::from_entropy().gen(),
+ terminals: Terminals {
+ local_handles: Vec::new(),
+ },
+ copilot_lsp_subscription,
+ copilot_log_subscription: None,
+ current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp2.clone(),
+ node: None,
+ prettier_instances: HashMap::default(),
+ };
+ for worktree in worktrees {
+ let _ = this.add_worktree(&worktree, cx);
+ }
+ this
+ });
+ let subscription = subscription.set_model(&this, &mut cx);
+
+ let user_ids = response
+ .payload
+ .collaborators
+ .iter()
+ .map(|peer| peer.user_id)
+ .collect();
+ user_store
+ .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
+ .await?;
+
+ this.update(&mut cx, |this, cx| {
+ this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
+ this.client_subscriptions.push(subscription);
+ anyhow::Ok(())
+ })?;
+
+ Ok(this)
+ }
+
+ fn release(&mut self, cx: &mut AppContext) {
+ match &self.client_state {
+ Some(ProjectClientState::Local { .. }) => {
+ let _ = self.unshare_internal(cx);
+ }
+ Some(ProjectClientState::Remote { remote_id, .. }) => {
+ let _ = self.client.send(proto::LeaveProject {
+ project_id: *remote_id,
+ });
+ self.disconnected_from_host_internal(cx);
+ }
+ _ => {}
+ }
+ }
+
+ // #[cfg(any(test, feature = "test-support"))]
+ // pub async fn test(
+ // fs: Arc<dyn Fs>,
+ // root_paths: impl IntoIterator<Item = &Path>,
+ // cx: &mut gpui::TestAppContext,
+ // ) -> Handle<Project> {
+ // let mut languages = LanguageRegistry::test();
+ // languages.set_executor(cx.background());
+ // let http_client = util::http::FakeHttpClient::with_404_response();
+ // let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
+ // let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
+ // let project = cx.update(|cx| {
+ // Project::local(
+ // client,
+ // node_runtime::FakeNodeRuntime::new(),
+ // user_store,
+ // Arc::new(languages),
+ // fs,
+ // cx,
+ // )
+ // });
+ // for path in root_paths {
+ // let (tree, _) = project
+ // .update(cx, |project, cx| {
+ // project.find_or_create_local_worktree(path, true, cx)
+ // })
+ // .await
+ // .unwrap();
+ // tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ // .await;
+ // }
+ // project
+ // }
+
+    /// Enables a prettier mock that avoids interacting with the node runtime, the prettier LSP wrapper, or any real file changes.
+    /// Instead, it appends a fixed suffix to every input; that suffix is returned by this method.
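+    ///
+    /// A hypothetical test usage (the variable name is illustrative):
+    ///
+    /// ```ignore
+    /// let format_suffix = project.enable_test_prettier(&[]);
+    /// // every prettier-formatted buffer now ends with `format_suffix`
+    /// ```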
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn enable_test_prettier(&mut self, plugins: &[&'static str]) -> &'static str {
+ self.node = Some(node_runtime::FakeNodeRuntime::with_prettier_support(
+ plugins,
+ ));
+ Prettier::FORMAT_SUFFIX
+ }
+
+ fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
+ let mut language_servers_to_start = Vec::new();
+ let mut language_formatters_to_check = Vec::new();
+ for buffer in self.opened_buffers.values() {
+ if let Some(buffer) = buffer.upgrade() {
+ let buffer = buffer.read(cx);
+ let buffer_file = File::from_dyn(buffer.file());
+ let buffer_language = buffer.language();
+ let settings = language_settings(buffer_language, buffer.file(), cx);
+ if let Some(language) = buffer_language {
+ if settings.enable_language_server {
+ if let Some(file) = buffer_file {
+ language_servers_to_start
+ .push((file.worktree.clone(), Arc::clone(language)));
+ }
+ }
+ language_formatters_to_check.push((
+ buffer_file.map(|f| f.worktree_id(cx)),
+ Arc::clone(language),
+ settings.clone(),
+ ));
+ }
+ }
+ }
+
+ let mut language_servers_to_stop = Vec::new();
+ let mut language_servers_to_restart = Vec::new();
+ let languages = self.languages.to_vec();
+
+ let new_lsp_settings = settings2::get::<ProjectSettings>(cx).lsp2.clone();
+ let current_lsp_settings = &self.current_lsp_settings;
+ for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
+ let language = languages.iter().find_map(|l| {
+ let adapter = l
+ .lsp_adapters()
+ .iter()
+ .find(|adapter| &adapter.name == started_lsp_name)?;
+ Some((l, adapter))
+ });
+ if let Some((language, adapter)) = language {
+ let worktree = self.worktree_for_id(*worktree_id, cx);
+ let file = worktree.as_ref().and_then(|tree| {
+ tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
+ });
+ if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
+ language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
+ } else if let Some(worktree) = worktree {
+ let server_name = &adapter.name.0;
+ match (
+ current_lsp_settings.get(server_name),
+ new_lsp_settings.get(server_name),
+ ) {
+ (None, None) => {}
+ (Some(_), None) | (None, Some(_)) => {
+ language_servers_to_restart.push((worktree, Arc::clone(language)));
+ }
+ (Some(current_lsp_settings), Some(new_lsp_settings)) => {
+ if current_lsp_settings != new_lsp_settings {
+ language_servers_to_restart.push((worktree, Arc::clone(language)));
+ }
+ }
+ }
+ }
+ }
+ }
+ self.current_lsp_settings = new_lsp_settings;
+
+ // Stop all newly-disabled language servers.
+ for (worktree_id, adapter_name) in language_servers_to_stop {
+ self.stop_language_server(worktree_id, adapter_name, cx)
+ .detach();
+ }
+
+ for (worktree, language, settings) in language_formatters_to_check {
+ self.install_default_formatters(worktree, &language, &settings, cx)
+ .detach_and_log_err(cx);
+ }
+
+ // Start all the newly-enabled language servers.
+ for (worktree, language) in language_servers_to_start {
+ let worktree_path = worktree.read(cx).abs_path();
+ self.start_language_servers(&worktree, worktree_path, language, cx);
+ }
+
+ // Restart all language servers with changed initialization options.
+ for (worktree, language) in language_servers_to_restart {
+ self.restart_language_servers(worktree, language, cx);
+ }
+
+ if self.copilot_lsp_subscription.is_none() {
+ if let Some(copilot) = Copilot::global(cx) {
+ for buffer in self.opened_buffers.values() {
+ if let Some(buffer) = buffer.upgrade() {
+ self.register_buffer_with_copilot(&buffer, cx);
+ }
+ }
+ self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
+ }
+ }
+
+ cx.notify();
+ }
+
+ pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<Handle<Buffer>> {
+ self.opened_buffers
+ .get(&remote_id)
+ .and_then(|buffer| buffer.upgrade())
+ }
+
+ pub fn languages(&self) -> &Arc<LanguageRegistry> {
+ &self.languages
+ }
+
+ pub fn client(&self) -> Arc<Client> {
+ self.client.clone()
+ }
+
+ pub fn user_store(&self) -> Handle<UserStore> {
+ self.user_store.clone()
+ }
+
+ pub fn opened_buffers(&self, cx: &AppContext) -> Vec<Handle<Buffer>> {
+ self.opened_buffers
+ .values()
+ .filter_map(|b| b.upgrade())
+ .collect()
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
+ let path = path.into();
+ if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
+ self.opened_buffers.iter().any(|(_, buffer)| {
+ if let Some(buffer) = buffer.upgrade() {
+ if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
+ if file.worktree == worktree && file.path() == &path.path {
+ return true;
+ }
+ }
+ }
+ false
+ })
+ } else {
+ false
+ }
+ }
+
+ pub fn fs(&self) -> &Arc<dyn Fs> {
+ &self.fs
+ }
+
+ pub fn remote_id(&self) -> Option<u64> {
+ match self.client_state.as_ref()? {
+ ProjectClientState::Local { remote_id, .. }
+ | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
+ }
+ }
+
+ pub fn replica_id(&self) -> ReplicaId {
+ match &self.client_state {
+ Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
+ _ => 0,
+ }
+ }
+
+ fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
+ if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
+ updates_tx
+ .unbounded_send(LocalProjectUpdate::WorktreesChanged)
+ .ok();
+ }
+ cx.notify();
+ }
+
+ pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
+ &self.collaborators
+ }
+
+ pub fn host(&self) -> Option<&Collaborator> {
+ self.collaborators.values().find(|c| c.replica_id == 0)
+ }
+
+ /// Collect all worktrees, including ones that don't appear in the project panel
+ pub fn worktrees<'a>(
+ &'a self,
+ cx: &'a AppContext,
+ ) -> impl 'a + DoubleEndedIterator<Item = Handle<Worktree>> {
+ self.worktrees
+ .iter()
+ .filter_map(move |worktree| worktree.upgrade())
+ }
+
+ /// Collect all user-visible worktrees, the ones that appear in the project panel
+ pub fn visible_worktrees<'a>(
+ &'a self,
+ cx: &'a AppContext,
+ ) -> impl 'a + DoubleEndedIterator<Item = Handle<Worktree>> {
+ self.worktrees.iter().filter_map(|worktree| {
+ worktree.upgrade().and_then(|worktree| {
+ if worktree.read(cx).is_visible() {
+ Some(worktree)
+ } else {
+ None
+ }
+ })
+ })
+ }
+
+ pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
+ self.visible_worktrees(cx)
+ .map(|tree| tree.read(cx).root_name())
+ }
+
+ pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Handle<Worktree>> {
+ self.worktrees(cx)
+ .find(|worktree| worktree.read(cx).id() == id)
+ }
+
+ pub fn worktree_for_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ cx: &AppContext,
+ ) -> Option<Handle<Worktree>> {
+ self.worktrees(cx)
+ .find(|worktree| worktree.read(cx).contains_entry(entry_id))
+ }
+
+ pub fn worktree_id_for_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ cx: &AppContext,
+ ) -> Option<WorktreeId> {
+ self.worktree_for_entry(entry_id, cx)
+ .map(|worktree| worktree.read(cx).id())
+ }
+
+ pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
+ paths.iter().all(|path| self.contains_path(path, cx))
+ }
+
+ pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
+ for worktree in self.worktrees(cx) {
+ let worktree = worktree.read(cx).as_local();
+ if worktree.map_or(false, |w| w.contains_abs_path(path)) {
+ return true;
+ }
+ }
+ false
+ }
+
+ pub fn create_entry(
+ &mut self,
+ project_path: impl Into<ProjectPath>,
+ is_directory: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Task<Result<Entry>>> {
+ let project_path = project_path.into();
+ let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
+ if self.is_local() {
+ Some(worktree.update(cx, |worktree, cx| {
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .create_entry(project_path.path, is_directory, cx)
+ }))
+ } else {
+ let client = self.client.clone();
+ let project_id = self.remote_id().unwrap();
+ Some(cx.spawn_weak(|_, mut cx| async move {
+ let response = client
+ .request(proto::CreateProjectEntry {
+ worktree_id: project_path.worktree_id.to_proto(),
+ project_id,
+ path: project_path.path.to_string_lossy().into(),
+ is_directory,
+ })
+ .await?;
+ let entry = response
+ .entry
+ .ok_or_else(|| anyhow!("missing entry in response"))?;
+ worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ }))
+ }
+ }
+
+ pub fn copy_entry(
+ &mut self,
+ entry_id: ProjectEntryId,
+ new_path: impl Into<Arc<Path>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Task<Result<Entry>>> {
+ let worktree = self.worktree_for_entry(entry_id, cx)?;
+ let new_path = new_path.into();
+ if self.is_local() {
+ worktree.update(cx, |worktree, cx| {
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .copy_entry(entry_id, new_path, cx)
+ })
+ } else {
+ let client = self.client.clone();
+ let project_id = self.remote_id().unwrap();
+
+ Some(cx.spawn_weak(|_, mut cx| async move {
+ let response = client
+ .request(proto::CopyProjectEntry {
+ project_id,
+ entry_id: entry_id.to_proto(),
+ new_path: new_path.to_string_lossy().into(),
+ })
+ .await?;
+ let entry = response
+ .entry
+ .ok_or_else(|| anyhow!("missing entry in response"))?;
+ worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ }))
+ }
+ }
+
+ pub fn rename_entry(
+ &mut self,
+ entry_id: ProjectEntryId,
+ new_path: impl Into<Arc<Path>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Task<Result<Entry>>> {
+ let worktree = self.worktree_for_entry(entry_id, cx)?;
+ let new_path = new_path.into();
+ if self.is_local() {
+ worktree.update(cx, |worktree, cx| {
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .rename_entry(entry_id, new_path, cx)
+ })
+ } else {
+ let client = self.client.clone();
+ let project_id = self.remote_id().unwrap();
+
+ Some(cx.spawn_weak(|_, mut cx| async move {
+ let response = client
+ .request(proto::RenameProjectEntry {
+ project_id,
+ entry_id: entry_id.to_proto(),
+ new_path: new_path.to_string_lossy().into(),
+ })
+ .await?;
+ let entry = response
+ .entry
+ .ok_or_else(|| anyhow!("missing entry in response"))?;
+ worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ }))
+ }
+ }
+
+ pub fn delete_entry(
+ &mut self,
+ entry_id: ProjectEntryId,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Task<Result<()>>> {
+ let worktree = self.worktree_for_entry(entry_id, cx)?;
+
+ cx.emit(Event::DeletedEntry(entry_id));
+
+ if self.is_local() {
+ worktree.update(cx, |worktree, cx| {
+ worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
+ })
+ } else {
+ let client = self.client.clone();
+ let project_id = self.remote_id().unwrap();
+ Some(cx.spawn_weak(|_, mut cx| async move {
+ let response = client
+ .request(proto::DeleteProjectEntry {
+ project_id,
+ entry_id: entry_id.to_proto(),
+ })
+ .await?;
+ worktree
+ .update(&mut cx, move |worktree, cx| {
+ worktree.as_remote_mut().unwrap().delete_entry(
+ entry_id,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ }))
+ }
+ }
+
+ pub fn expand_entry(
+ &mut self,
+ worktree_id: WorktreeId,
+ entry_id: ProjectEntryId,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Task<Result<()>>> {
+ let worktree = self.worktree_for_id(worktree_id, cx)?;
+ if self.is_local() {
+ worktree.update(cx, |worktree, cx| {
+ worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
+ })
+ } else {
+ let worktree = worktree.downgrade();
+ let request = self.client.request(proto::ExpandProjectEntry {
+ project_id: self.remote_id().unwrap(),
+ entry_id: entry_id.to_proto(),
+ });
+ Some(cx.spawn_weak(|_, mut cx| async move {
+ let response = request.await?;
+ if let Some(worktree) = worktree.upgrade() {
+ worktree
+ .update(&mut cx, |worktree, _| {
+ worktree
+ .as_remote_mut()
+ .unwrap()
+ .wait_for_snapshot(response.worktree_scan_id as usize)
+ })
+ .await?;
+ }
+ Ok(())
+ }))
+ }
+ }
+
+ pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
+ if self.client_state.is_some() {
+ return Err(anyhow!("project was already shared"));
+ }
+ self.client_subscriptions.push(
+ self.client
+ .subscribe_to_entity(project_id)?
+ .set_model(&cx.handle(), &mut cx.to_async()),
+ );
+
+ for open_buffer in self.opened_buffers.values_mut() {
+ match open_buffer {
+ OpenBuffer::Strong(_) => {}
+ OpenBuffer::Weak(buffer) => {
+ if let Some(buffer) = buffer.upgrade() {
+ *open_buffer = OpenBuffer::Strong(buffer);
+ }
+ }
+ OpenBuffer::Operations(_) => unreachable!(),
+ }
+ }
+
+ for worktree_handle in self.worktrees.iter_mut() {
+ match worktree_handle {
+ WorktreeHandle::Strong(_) => {}
+ WorktreeHandle::Weak(worktree) => {
+ if let Some(worktree) = worktree.upgrade() {
+ *worktree_handle = WorktreeHandle::Strong(worktree);
+ }
+ }
+ }
+ }
+
+ for (server_id, status) in &self.language_server_statuses {
+ self.client
+ .send(proto::StartLanguageServer {
+ project_id,
+ server: Some(proto::LanguageServer {
+ id: server_id.0 as u64,
+ name: status.name.clone(),
+ }),
+ })
+ .log_err();
+ }
+
+ let store = cx.global::<SettingsStore>();
+ for worktree in self.worktrees(cx) {
+ let worktree_id = worktree.read(cx).id().to_proto();
+ for (path, content) in store.local_settings(worktree.id()) {
+ self.client
+ .send(proto::UpdateWorktreeSettings {
+ project_id,
+ worktree_id,
+ path: path.to_string_lossy().into(),
+ content: Some(content),
+ })
+ .log_err();
+ }
+ }
+
+ let (updates_tx, mut updates_rx) = mpsc::unbounded();
+ let client = self.client.clone();
+ self.client_state = Some(ProjectClientState::Local {
+ remote_id: project_id,
+ updates_tx,
+ _send_updates: cx.spawn_weak(move |this, mut cx| async move {
+ while let Some(update) = updates_rx.next().await {
+ let Some(this) = this.upgrade(&cx) else { break };
+
+ match update {
+ LocalProjectUpdate::WorktreesChanged => {
+ let worktrees = this
+ .read_with(&cx, |this, cx| this.worktrees(cx).collect::<Vec<_>>());
+ let update_project = this
+ .read_with(&cx, |this, cx| {
+ this.client.request(proto::UpdateProject {
+ project_id,
+ worktrees: this.worktree_metadata_protos(cx),
+ })
+ })
+ .await;
+ if update_project.is_ok() {
+ for worktree in worktrees {
+ worktree.update(&mut cx, |worktree, cx| {
+ let worktree = worktree.as_local_mut().unwrap();
+ worktree.share(project_id, cx).detach_and_log_err(cx)
+ });
+ }
+ }
+ }
+ LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
+ let buffer = this.update(&mut cx, |this, _| {
+ let buffer = this.opened_buffers.get(&buffer_id).unwrap();
+ let shared_buffers =
+ this.shared_buffers.entry(peer_id).or_default();
+ if shared_buffers.insert(buffer_id) {
+ if let OpenBuffer::Strong(buffer) = buffer {
+ Some(buffer.clone())
+ } else {
+ None
+ }
+ } else {
+ None
+ }
+ });
+
+ let Some(buffer) = buffer else { continue };
+ let operations =
+ buffer.read_with(&cx, |b, cx| b.serialize_ops(None, cx));
+ let operations = operations.await;
+ let state = buffer.read_with(&cx, |buffer, _| buffer.to_proto());
+
+ let initial_state = proto::CreateBufferForPeer {
+ project_id,
+ peer_id: Some(peer_id),
+ variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
+ };
+ if client.send(initial_state).log_err().is_some() {
+ let client = client.clone();
+ cx.background()
+ .spawn(async move {
+ let mut chunks = split_operations(operations).peekable();
+ while let Some(chunk) = chunks.next() {
+ let is_last = chunks.peek().is_none();
+ client.send(proto::CreateBufferForPeer {
+ project_id,
+ peer_id: Some(peer_id),
+ variant: Some(
+ proto::create_buffer_for_peer::Variant::Chunk(
+ proto::BufferChunk {
+ buffer_id,
+ operations: chunk,
+ is_last,
+ },
+ ),
+ ),
+ })?;
+ }
+ anyhow::Ok(())
+ })
+ .await
+ .log_err();
+ }
+ }
+ }
+ }
+ }),
+ });
+
+ self.metadata_changed(cx);
+ cx.emit(Event::RemoteIdChanged(Some(project_id)));
+ cx.notify();
+ Ok(())
+ }
+
+ pub fn reshared(
+ &mut self,
+ message: proto::ResharedProject,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ self.shared_buffers.clear();
+ self.set_collaborators_from_proto(message.collaborators, cx)?;
+ self.metadata_changed(cx);
+ Ok(())
+ }
+
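+ // Restores replicated state after rejoining a remote project: stale local
+ // settings are cleared, worktrees and collaborators are rebuilt from the
+ // proto message, and a buffer resync with the host is requested.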
+ pub fn rejoined(
+ &mut self,
+ message: proto::RejoinedProject,
+ message_id: u32,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ for worktree in &self.worktrees {
+ store
+ .clear_local_settings(worktree.handle_id(), cx)
+ .log_err();
+ }
+ });
+
+ self.join_project_response_message_id = message_id;
+ self.set_worktrees_from_proto(message.worktrees, cx)?;
+ self.set_collaborators_from_proto(message.collaborators, cx)?;
+ self.language_server_statuses = message
+ .language_servers
+ .into_iter()
+ .map(|server| {
+ (
+ LanguageServerId(server.id as usize),
+ LanguageServerStatus {
+ name: server.name,
+ pending_work: Default::default(),
+ has_pending_diagnostic_updates: false,
+ progress_tokens: Default::default(),
+ },
+ )
+ })
+ .collect();
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::Resync)
+ .unwrap();
+ cx.notify();
+ Ok(())
+ }
+
+ pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
+ self.unshare_internal(cx)?;
+ self.metadata_changed(cx);
+ cx.notify();
+ Ok(())
+ }
+
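+ // Tears down the shared state: invisible worktrees and all open buffers are
+ // demoted back to weak handles, tasks waiting on peers' edits are woken so
+ // they can fail, and the server is notified via `UnshareProject`.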
+ fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
+ if self.is_remote() {
+ return Err(anyhow!("attempted to unshare a remote project"));
+ }
+
+ if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
+ self.collaborators.clear();
+ self.shared_buffers.clear();
+ self.client_subscriptions.clear();
+
+ for worktree_handle in self.worktrees.iter_mut() {
+ if let WorktreeHandle::Strong(worktree) = worktree_handle {
+ let is_visible = worktree.update(cx, |worktree, _| {
+ worktree.as_local_mut().unwrap().unshare();
+ worktree.is_visible()
+ });
+ if !is_visible {
+ *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
+ }
+ }
+ }
+
+ for open_buffer in self.opened_buffers.values_mut() {
+ // Wake up any tasks waiting for peers' edits to this buffer.
+ if let Some(buffer) = open_buffer.upgrade() {
+ buffer.update(cx, |buffer, _| buffer.give_up_waiting());
+ }
+
+ if let OpenBuffer::Strong(buffer) = open_buffer {
+ *open_buffer = OpenBuffer::Weak(buffer.downgrade());
+ }
+ }
+
+ self.client.send(proto::UnshareProject {
+ project_id: remote_id,
+ })?;
+
+ Ok(())
+ } else {
+ Err(anyhow!("attempted to unshare an unshared project"))
+ }
+ }
+
+ pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
+ self.disconnected_from_host_internal(cx);
+ cx.emit(Event::DisconnectedFromHost);
+ cx.notify();
+ }
+
+ fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
+ if let Some(ProjectClientState::Remote {
+ sharing_has_stopped,
+ ..
+ }) = &mut self.client_state
+ {
+ *sharing_has_stopped = true;
+
+ self.collaborators.clear();
+
+ for worktree in &self.worktrees {
+ if let Some(worktree) = worktree.upgrade() {
+ worktree.update(cx, |worktree, _| {
+ if let Some(worktree) = worktree.as_remote_mut() {
+ worktree.disconnected_from_host();
+ }
+ });
+ }
+ }
+
+ for open_buffer in self.opened_buffers.values_mut() {
+ // Wake up any tasks waiting for peers' edits to this buffer.
+ if let Some(buffer) = open_buffer.upgrade() {
+ buffer.update(cx, |buffer, _| buffer.give_up_waiting());
+ }
+
+ if let OpenBuffer::Strong(buffer) = open_buffer {
+ *open_buffer = OpenBuffer::Weak(buffer.downgrade());
+ }
+ }
+
+ // Wake up all futures currently waiting on a buffer to get opened,
+ // to give them a chance to fail now that we've disconnected.
+ *self.opened_buffer.0.borrow_mut() = ();
+ }
+ }
+
+ pub fn close(&mut self, cx: &mut ModelContext<Self>) {
+ cx.emit(Event::Closed);
+ }
+
+ pub fn is_read_only(&self) -> bool {
+ match &self.client_state {
+ Some(ProjectClientState::Remote {
+ sharing_has_stopped,
+ ..
+ }) => *sharing_has_stopped,
+ _ => false,
+ }
+ }
+
+ pub fn is_local(&self) -> bool {
+ match &self.client_state {
+ Some(ProjectClientState::Remote { .. }) => false,
+ _ => true,
+ }
+ }
+
+ pub fn is_remote(&self) -> bool {
+ !self.is_local()
+ }
+
+ pub fn create_buffer(
+ &mut self,
+ text: &str,
+ language: Option<Arc<Language>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<Handle<Buffer>> {
+ if self.is_remote() {
+ return Err(anyhow!("creating buffers as a guest is not supported yet"));
+ }
+ let id = post_inc(&mut self.next_buffer_id);
+ let buffer = cx.add_model(|cx| {
+ Buffer::new(self.replica_id(), id, text).with_language(
+ language.unwrap_or_else(|| language2::PLAIN_TEXT.clone()),
+ cx,
+ )
+ });
+ self.register_buffer(&buffer, cx)?;
+ Ok(buffer)
+ }
+
+ pub fn open_path(
+ &mut self,
+ path: impl Into<ProjectPath>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<(ProjectEntryId, AnyHandle)>> {
+ let task = self.open_buffer(path, cx);
+ cx.spawn_weak(|_, cx| async move {
+ let buffer = task.await?;
+ let project_entry_id = buffer
+ .read_with(&cx, |buffer, cx| {
+ File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
+ })
+ .ok_or_else(|| anyhow!("no project entry"))?;
+
+ let buffer: &AnyHandle = &buffer;
+ Ok((project_entry_id, buffer.clone()))
+ })
+ }
+
+ pub fn open_local_buffer(
+ &mut self,
+ abs_path: impl AsRef<Path>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
+ self.open_buffer((worktree.read(cx).id(), relative_path), cx)
+ } else {
+ Task::ready(Err(anyhow!("no such path")))
+ }
+ }
+
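+ // Opens the buffer at the given project path, deduplicating concurrent
+ // requests: an already-open buffer is returned immediately, an in-flight
+ // load is awaited through its shared watch channel, and only otherwise is a
+ // new local or remote load started.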
+ pub fn open_buffer(
+ &mut self,
+ path: impl Into<ProjectPath>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ let project_path = path.into();
+ let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
+ worktree
+ } else {
+ return Task::ready(Err(anyhow!("no such worktree")));
+ };
+
+ // If there is already a buffer for the given path, then return it.
+ let existing_buffer = self.get_open_buffer(&project_path, cx);
+ if let Some(existing_buffer) = existing_buffer {
+ return Task::ready(Ok(existing_buffer));
+ }
+
+ let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
+ // If the given path is already being loaded, then wait for that existing
+ // task to complete and return the same buffer.
+ hash_map::Entry::Occupied(e) => e.get().clone(),
+
+ // Otherwise, record the fact that this path is now being loaded.
+ hash_map::Entry::Vacant(entry) => {
+ let (mut tx, rx) = postage::watch::channel();
+ entry.insert(rx.clone());
+
+ let load_buffer = if worktree.read(cx).is_local() {
+ self.open_local_buffer_internal(&project_path.path, &worktree, cx)
+ } else {
+ self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
+ };
+
+ cx.spawn(move |this, mut cx| async move {
+ let load_result = load_buffer.await;
+ *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
+ // Record the fact that the buffer is no longer loading.
+ this.loading_buffers_by_path.remove(&project_path);
+ let buffer = load_result.map_err(Arc::new)?;
+ Ok(buffer)
+ }));
+ })
+ .detach();
+ rx
+ }
+ };
+
+ cx.foreground().spawn(async move {
+ wait_for_loading_buffer(loading_watch)
+ .await
+ .map_err(|error| anyhow!("{}", error))
+ })
+ }
+
+ fn open_local_buffer_internal(
+ &mut self,
+ path: &Arc<Path>,
+ worktree: &Handle<Worktree>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ let buffer_id = post_inc(&mut self.next_buffer_id);
+ let load_buffer = worktree.update(cx, |worktree, cx| {
+ let worktree = worktree.as_local_mut().unwrap();
+ worktree.load_buffer(buffer_id, path, cx)
+ });
+ cx.spawn(|this, mut cx| async move {
+ let buffer = load_buffer.await?;
+ this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
+ Ok(buffer)
+ })
+ }
+
+ fn open_remote_buffer_internal(
+ &mut self,
+ path: &Arc<Path>,
+ worktree: &Handle<Worktree>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ let rpc = self.client.clone();
+ let project_id = self.remote_id().unwrap();
+ let remote_worktree_id = worktree.read(cx).id();
+ let path = path.clone();
+ let path_string = path.to_string_lossy().to_string();
+ cx.spawn(|this, mut cx| async move {
+ let response = rpc
+ .request(proto::OpenBufferByPath {
+ project_id,
+ worktree_id: remote_worktree_id.to_proto(),
+ path: path_string,
+ })
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(response.buffer_id, cx)
+ })
+ .await
+ })
+ }
+
+ /// LanguageServerName is owned because it is inserted into a map
+ pub fn open_local_buffer_via_lsp(
+ &mut self,
+ abs_path: lsp2::Url,
+ language_server_id: LanguageServerId,
+ language_server_name: LanguageServerName,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ cx.spawn(|this, mut cx| async move {
+ let abs_path = abs_path
+ .to_file_path()
+ .map_err(|_| anyhow!("can't convert URI to path"))?;
+ let (worktree, relative_path) = if let Some(result) =
+ this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
+ {
+ result
+ } else {
+ let worktree = this
+ .update(&mut cx, |this, cx| {
+ this.create_local_worktree(&abs_path, false, cx)
+ })
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ this.language_server_ids.insert(
+ (worktree.read(cx).id(), language_server_name),
+ language_server_id,
+ );
+ });
+ (worktree, PathBuf::new())
+ };
+
+ let project_path = ProjectPath {
+ worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
+ path: relative_path.into(),
+ };
+ this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
+ .await
+ })
+ }
+
+ pub fn open_buffer_by_id(
+ &mut self,
+ id: u64,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ if let Some(buffer) = self.buffer_for_id(id, cx) {
+ Task::ready(Ok(buffer))
+ } else if self.is_local() {
+ Task::ready(Err(anyhow!("buffer {} does not exist", id)))
+ } else if let Some(project_id) = self.remote_id() {
+ let request = self
+ .client
+ .request(proto::OpenBufferById { project_id, id });
+ cx.spawn(|this, mut cx| async move {
+ let buffer_id = request.await?.buffer_id;
+ this.update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(buffer_id, cx)
+ })
+ .await
+ })
+ } else {
+ Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
+ }
+ }
+
+ pub fn save_buffers(
+ &self,
+ buffers: HashSet<Handle<Buffer>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ cx.spawn(|this, mut cx| async move {
+ let save_tasks = buffers
+ .into_iter()
+ .map(|buffer| this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx)));
+ try_join_all(save_tasks).await?;
+ Ok(())
+ })
+ }
+
+ pub fn save_buffer(
+ &self,
+ buffer: Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
+ return Task::ready(Err(anyhow!("buffer doesn't have a file")));
+ };
+ let worktree = file.worktree.clone();
+ let path = file.path.clone();
+ worktree.update(cx, |worktree, cx| match worktree {
+ Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
+ Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
+ })
+ }
+
+ pub fn save_buffer_as(
+ &mut self,
+ buffer: Handle<Buffer>,
+ abs_path: PathBuf,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
+ let old_file = File::from_dyn(buffer.read(cx).file())
+ .filter(|f| f.is_local())
+ .cloned();
+ cx.spawn(|this, mut cx| async move {
+ if let Some(old_file) = &old_file {
+ this.update(&mut cx, |this, cx| {
+ this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
+ });
+ }
+ let (worktree, path) = worktree_task.await?;
+ worktree
+ .update(&mut cx, |worktree, cx| match worktree {
+ Worktree::Local(worktree) => {
+ worktree.save_buffer(buffer.clone(), path.into(), true, cx)
+ }
+ Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
+ })
+ .await?;
+
+ this.update(&mut cx, |this, cx| {
+ this.detect_language_for_buffer(&buffer, cx);
+ this.register_buffer_with_language_servers(&buffer, cx);
+ });
+ Ok(())
+ })
+ }
+
+ pub fn get_open_buffer(
+ &mut self,
+ path: &ProjectPath,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Handle<Buffer>> {
+ let worktree = self.worktree_for_id(path.worktree_id, cx)?;
+ self.opened_buffers.values().find_map(|buffer| {
+ let buffer = buffer.upgrade()?;
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ if file.worktree == worktree && file.path() == &path.path {
+ Some(buffer)
+ } else {
+ None
+ }
+ })
+ }
+
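+ // Registers a newly opened buffer with the project: a strong handle is kept
+ // while the project is remote or shared (weak otherwise), any operations
+ // received before the buffer existed are applied, and the buffer is hooked
+ // up to language detection, language servers, and copilot.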
+ fn register_buffer(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ self.request_buffer_diff_recalculation(buffer, cx);
+ buffer.update(cx, |buffer, _| {
+ buffer.set_language_registry(self.languages.clone())
+ });
+
+ let remote_id = buffer.read(cx).remote_id();
+ let is_remote = self.is_remote();
+ let open_buffer = if is_remote || self.is_shared() {
+ OpenBuffer::Strong(buffer.clone())
+ } else {
+ OpenBuffer::Weak(buffer.downgrade())
+ };
+
+ match self.opened_buffers.entry(remote_id) {
+ hash_map::Entry::Vacant(entry) => {
+ entry.insert(open_buffer);
+ }
+ hash_map::Entry::Occupied(mut entry) => {
+ if let OpenBuffer::Operations(operations) = entry.get_mut() {
+ buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
+ } else if entry.get().upgrade().is_some() {
+ if is_remote {
+ return Ok(());
+ } else {
+ debug_panic!("buffer {} was already registered", remote_id);
+ Err(anyhow!("buffer {} was already registered", remote_id))?;
+ }
+ }
+ entry.insert(open_buffer);
+ }
+ }
+ cx.subscribe(buffer, |this, buffer, event, cx| {
+ this.on_buffer_event(buffer, event, cx);
+ })
+ .detach();
+
+ if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
+ if file.is_local {
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path.clone(),
+ },
+ remote_id,
+ );
+
+ self.local_buffer_ids_by_entry_id
+ .insert(file.entry_id, remote_id);
+ }
+ }
+
+ self.detect_language_for_buffer(buffer, cx);
+ self.register_buffer_with_language_servers(buffer, cx);
+ self.register_buffer_with_copilot(buffer, cx);
+ cx.observe_release(buffer, |this, buffer, cx| {
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ if file.is_local() {
+ let uri = lsp2::Url::from_file_path(file.abs_path(cx)).unwrap();
+ for server in this.language_servers_for_buffer(buffer, cx) {
+ server
+ .1
+ .notify::<lsp2::notification::DidCloseTextDocument>(
+ lsp2::DidCloseTextDocumentParams {
+ text_document: lsp2::TextDocumentIdentifier::new(uri.clone()),
+ },
+ )
+ .log_err();
+ }
+ }
+ }
+ })
+ .detach();
+
+ *self.opened_buffer.0.borrow_mut() = ();
+ Ok(())
+ }
+
+ fn register_buffer_with_language_servers(
+ &mut self,
+ buffer_handle: &Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let buffer = buffer_handle.read(cx);
+ let buffer_id = buffer.remote_id();
+
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ if !file.is_local() {
+ return;
+ }
+
+ let abs_path = file.abs_path(cx);
+ let uri = lsp2::Url::from_file_path(&abs_path)
+ .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
+ let initial_snapshot = buffer.text_snapshot();
+ let language = buffer.language().cloned();
+ let worktree_id = file.worktree_id(cx);
+
+ if let Some(local_worktree) = file.worktree.read(cx).as_local() {
+ for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
+ self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
+ .log_err();
+ }
+ }
+
+ if let Some(language) = language {
+ for adapter in language.lsp_adapters() {
+ let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
+ let server = self
+ .language_server_ids
+ .get(&(worktree_id, adapter.name.clone()))
+ .and_then(|id| self.language_servers.get(id))
+ .and_then(|server_state| {
+ if let LanguageServerState::Running { server, .. } = server_state {
+ Some(server.clone())
+ } else {
+ None
+ }
+ });
+ let server = match server {
+ Some(server) => server,
+ None => continue,
+ };
+
+ server
+ .notify::<lsp2::notification::DidOpenTextDocument>(
+ lsp2::DidOpenTextDocumentParams {
+ text_document: lsp2::TextDocumentItem::new(
+ uri.clone(),
+ language_id.unwrap_or_default(),
+ 0,
+ initial_snapshot.text(),
+ ),
+ },
+ )
+ .log_err();
+
+ buffer_handle.update(cx, |buffer, cx| {
+ buffer.set_completion_triggers(
+ server
+ .capabilities()
+ .completion_provider
+ .as_ref()
+ .and_then(|provider| provider.trigger_characters.clone())
+ .unwrap_or_default(),
+ cx,
+ );
+ });
+
+ let snapshot = LspBufferSnapshot {
+ version: 0,
+ snapshot: initial_snapshot.clone(),
+ };
+ self.buffer_snapshots
+ .entry(buffer_id)
+ .or_default()
+ .insert(server.server_id(), vec![snapshot]);
+ }
+ }
+ }
+ }
+
+ fn unregister_buffer_from_language_servers(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ old_file: &File,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let old_path = match old_file.as_local() {
+ Some(local) => local.abs_path(cx),
+ None => return,
+ };
+
+ buffer.update(cx, |buffer, cx| {
+ let worktree_id = old_file.worktree_id(cx);
+ let ids = &self.language_server_ids;
+
+ let language = buffer.language().cloned();
+ let adapters = language.iter().flat_map(|language| language.lsp_adapters());
+ for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
+ buffer.update_diagnostics(server_id, Default::default(), cx);
+ }
+
+ self.buffer_snapshots.remove(&buffer.remote_id());
+ let file_url = lsp2::Url::from_file_path(old_path).unwrap();
+ for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
+ language_server
+ .notify::<lsp2::notification::DidCloseTextDocument>(
+ lsp2::DidCloseTextDocumentParams {
+ text_document: lsp2::TextDocumentIdentifier::new(file_url.clone()),
+ },
+ )
+ .log_err();
+ }
+ });
+ }
+
+ fn register_buffer_with_copilot(
+ &self,
+ buffer_handle: &Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(copilot) = Copilot::global(cx) {
+ copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
+ }
+ }
+
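+ // Forwards buffer-ordered messages to the server in batches of up to
+ // MAX_BATCH_SIZE. Pending buffer operations are always flushed before a
+ // language-server update is sent, preserving their relative order; if an
+ // `UpdateBuffer` request fails on a guest, further operations are dropped
+ // until a resync with the host succeeds.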
+ async fn send_buffer_ordered_messages(
+ this: WeakHandle<Self>,
+ rx: UnboundedReceiver<BufferOrderedMessage>,
+ mut cx: AsyncAppContext,
+ ) -> Option<()> {
+ const MAX_BATCH_SIZE: usize = 128;
+
+ let mut operations_by_buffer_id = HashMap::default();
+ async fn flush_operations(
+ this: &Handle<Project>,
+ operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
+ needs_resync_with_host: &mut bool,
+ is_local: bool,
+ cx: &AsyncAppContext,
+ ) {
+ for (buffer_id, operations) in operations_by_buffer_id.drain() {
+ let request = this.read_with(cx, |this, _| {
+ let project_id = this.remote_id()?;
+ Some(this.client.request(proto::UpdateBuffer {
+ buffer_id,
+ project_id,
+ operations,
+ }))
+ });
+ if let Some(request) = request {
+ if request.await.is_err() && !is_local {
+ *needs_resync_with_host = true;
+ break;
+ }
+ }
+ }
+ }
+
+ let mut needs_resync_with_host = false;
+ let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
+
+ while let Some(changes) = changes.next().await {
+ let this = this.upgrade()?;
+ let is_local = this.read_with(&cx, |this, _| this.is_local());
+
+ for change in changes {
+ match change {
+ BufferOrderedMessage::Operation {
+ buffer_id,
+ operation,
+ } => {
+ if needs_resync_with_host {
+ continue;
+ }
+
+ operations_by_buffer_id
+ .entry(buffer_id)
+ .or_default()
+ .push(operation);
+ }
+
+ BufferOrderedMessage::Resync => {
+ operations_by_buffer_id.clear();
+ if this
+ .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
+ .await
+ .is_ok()
+ {
+ needs_resync_with_host = false;
+ }
+ }
+
+ BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message,
+ } => {
+ flush_operations(
+ &this,
+ &mut operations_by_buffer_id,
+ &mut needs_resync_with_host,
+ is_local,
+ &cx,
+ )
+ .await;
+
+ this.read_with(&cx, |this, _| {
+ if let Some(project_id) = this.remote_id() {
+ this.client
+ .send(proto::UpdateLanguageServer {
+ project_id,
+ language_server_id: language_server_id.0 as u64,
+ variant: Some(message),
+ })
+ .log_err();
+ }
+ });
+ }
+ }
+ }
+
+ flush_operations(
+ &this,
+ &mut operations_by_buffer_id,
+ &mut needs_resync_with_host,
+ is_local,
+ &cx,
+ )
+ .await;
+ }
+
+ None
+ }
+
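+ // Translates buffer events into LSP traffic: edits become
+ // `DidChangeTextDocument` notifications (full or incremental, per each
+ // server's declared sync capability) against a versioned snapshot history,
+ // and saves become `DidSaveTextDocument` notifications, optionally simulating
+ // disk-based diagnostics completion for servers without a progress token.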
+ fn on_buffer_event(
+ &mut self,
+ buffer: Handle<Buffer>,
+ event: &BufferEvent,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<()> {
+ if matches!(
+ event,
+ BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
+ ) {
+ self.request_buffer_diff_recalculation(&buffer, cx);
+ }
+
+ match event {
+ BufferEvent::Operation(operation) => {
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::Operation {
+ buffer_id: buffer.read(cx).remote_id(),
+ operation: language2::proto::serialize_operation(operation),
+ })
+ .ok();
+ }
+
+ BufferEvent::Edited { .. } => {
+ let buffer = buffer.read(cx);
+ let file = File::from_dyn(buffer.file())?;
+ let abs_path = file.as_local()?.abs_path(cx);
+ let uri = lsp2::Url::from_file_path(abs_path).unwrap();
+ let next_snapshot = buffer.text_snapshot();
+
+ let language_servers: Vec<_> = self
+ .language_servers_for_buffer(buffer, cx)
+ .map(|i| i.1.clone())
+ .collect();
+
+ for language_server in language_servers {
+ let language_server = language_server.clone();
+
+ let buffer_snapshots = self
+ .buffer_snapshots
+ .get_mut(&buffer.remote_id())
+ .and_then(|m| m.get_mut(&language_server.server_id()))?;
+ let previous_snapshot = buffer_snapshots.last()?;
+
+ let build_incremental_change = || {
+ buffer
+ .edits_since::<(PointUtf16, usize)>(
+ previous_snapshot.snapshot.version(),
+ )
+ .map(|edit| {
+ let edit_start = edit.new.start.0;
+ let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
+ let new_text = next_snapshot
+ .text_for_range(edit.new.start.1..edit.new.end.1)
+ .collect();
+ lsp2::TextDocumentContentChangeEvent {
+ range: Some(lsp2::Range::new(
+ point_to_lsp(edit_start),
+ point_to_lsp(edit_end),
+ )),
+ range_length: None,
+ text: new_text,
+ }
+ })
+ .collect()
+ };
+
+ let document_sync_kind = language_server
+ .capabilities()
+ .text_document_sync
+ .as_ref()
+ .and_then(|sync| match sync {
+ lsp2::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
+ lsp2::TextDocumentSyncCapability::Options(options) => options.change,
+ });
+
+ let content_changes: Vec<_> = match document_sync_kind {
+ Some(lsp2::TextDocumentSyncKind::FULL) => {
+ vec![lsp2::TextDocumentContentChangeEvent {
+ range: None,
+ range_length: None,
+ text: next_snapshot.text(),
+ }]
+ }
+ Some(lsp2::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
+ _ => {
+ #[cfg(any(test, feature = "test-support"))]
+ {
+ build_incremental_change()
+ }
+
+ #[cfg(not(any(test, feature = "test-support")))]
+ {
+ continue;
+ }
+ }
+ };
+
+ let next_version = previous_snapshot.version + 1;
+
+ buffer_snapshots.push(LspBufferSnapshot {
+ version: next_version,
+ snapshot: next_snapshot.clone(),
+ });
+
+ language_server
+ .notify::<lsp2::notification::DidChangeTextDocument>(
+ lsp2::DidChangeTextDocumentParams {
+ text_document: lsp2::VersionedTextDocumentIdentifier::new(
+ uri.clone(),
+ next_version,
+ ),
+ content_changes,
+ },
+ )
+ .log_err();
+ }
+ }
+
+ BufferEvent::Saved => {
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ let worktree_id = file.worktree_id(cx);
+ let abs_path = file.as_local()?.abs_path(cx);
+ let text_document = lsp2::TextDocumentIdentifier {
+ uri: lsp2::Url::from_file_path(abs_path).unwrap(),
+ };
+
+ for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
+ let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
+
+ server
+ .notify::<lsp2::notification::DidSaveTextDocument>(
+ lsp2::DidSaveTextDocumentParams {
+ text_document: text_document.clone(),
+ text,
+ },
+ )
+ .log_err();
+ }
+
+ let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
+ for language_server_id in language_server_ids {
+ if let Some(LanguageServerState::Running {
+ adapter,
+ simulate_disk_based_diagnostics_completion,
+ ..
+ }) = self.language_servers.get_mut(&language_server_id)
+ {
+ // After saving a buffer using a language server that doesn't provide
+ // a disk-based progress token, kick off a timer that will reset every
+ // time the buffer is saved. If the timer eventually fires, simulate
+ // disk-based diagnostics being finished so that other pieces of UI
+ // (e.g., project diagnostics view, diagnostic status bar) can update.
+ // We don't emit an event right away because the language server might take
+ // some time to publish diagnostics.
+ if adapter.disk_based_diagnostics_progress_token.is_none() {
+ const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
+ Duration::from_secs(1);
+
+ let task = cx.spawn_weak(|this, mut cx| async move {
+ cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.disk_based_diagnostics_finished(
+ language_server_id,
+ cx,
+ );
+ this.buffer_ordered_messages_tx
+ .unbounded_send(
+ BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
+ },
+ )
+ .ok();
+ });
+ }
+ });
+ *simulate_disk_based_diagnostics_completion = Some(task);
+ }
+ }
+ }
+ }
+ BufferEvent::FileHandleChanged => {
+ let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
+ return None;
+ };
+
+ match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
+ Some(_) => {
+ return None;
+ }
+ None => {
+ let remote_id = buffer.read(cx).remote_id();
+ self.local_buffer_ids_by_entry_id
+ .insert(file.entry_id, remote_id);
+
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path.clone(),
+ },
+ remote_id,
+ );
+ }
+ }
+ }
+ _ => {}
+ }
+
+ None
+ }
+
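+ // Schedules a git diff recalculation for the buffer. Without a configured
+ // `git.gutter_debounce`, the first insertion defers an immediate
+ // recalculation; with one, recalculation is debounced by at least MIN_DELAY
+ // milliseconds. A hypothetical settings snippet that enables debouncing
+ // (assuming the JSON keys mirror the `ProjectSettings` fields):
+ //     { "git": { "gutter_debounce": 100 } }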
+ fn request_buffer_diff_recalculation(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ self.buffers_needing_diff.insert(buffer.downgrade());
+ let first_insertion = self.buffers_needing_diff.len() == 1;
+
+ let settings = settings2::get::<ProjectSettings>(cx);
+ let delay = if let Some(delay) = settings.git.gutter_debounce {
+ delay
+ } else {
+ if first_insertion {
+ let this = cx.weak_handle();
+ cx.defer(move |cx| {
+ if let Some(this) = this.upgrade() {
+ this.update(cx, |this, cx| {
+ this.recalculate_buffer_diffs(cx).detach();
+ });
+ }
+ });
+ }
+ return;
+ };
+
+ const MIN_DELAY: u64 = 50;
+ let delay = delay.max(MIN_DELAY);
+ let duration = Duration::from_millis(delay);
+
+ self.git_diff_debouncer
+ .fire_new(duration, cx, move |this, cx| {
+ this.recalculate_buffer_diffs(cx)
+ });
+ }
+
+ fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
+ cx.spawn(|this, mut cx| async move {
+ let buffers: Vec<_> = this.update(&mut cx, |this, _| {
+ this.buffers_needing_diff.drain().collect()
+ });
+
+ let tasks: Vec<_> = this.update(&mut cx, |_, cx| {
+ buffers
+ .iter()
+ .filter_map(|buffer| {
+ let buffer = buffer.upgrade()?;
+ buffer.update(cx, |buffer, cx| buffer.git_diff_recalc(cx))
+ })
+ .collect()
+ });
+
+ futures::future::join_all(tasks).await;
+
+ this.update(&mut cx, |this, cx| {
+ if !this.buffers_needing_diff.is_empty() {
+ this.recalculate_buffer_diffs(cx).detach();
+ } else {
+ // TODO: Would a `ModelContext<Project>.notify()` suffice here?
+ for buffer in buffers {
+ if let Some(buffer) = buffer.upgrade() {
+ buffer.update(cx, |_, cx| cx.notify());
+ }
+ }
+ }
+ });
+ })
+ }
+
+ fn language_servers_for_worktree(
+ &self,
+ worktree_id: WorktreeId,
+ ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
+ self.language_server_ids
+ .iter()
+ .filter_map(move |((language_server_worktree_id, _), id)| {
+ if *language_server_worktree_id == worktree_id {
+ if let Some(LanguageServerState::Running {
+ adapter,
+ language,
+ server,
+ ..
+ }) = self.language_servers.get(id)
+ {
+ return Some((adapter, language, server));
+ }
+ }
+ None
+ })
+ }
+
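+ // Keeps open buffers' languages in sync with the language registry: when the
+ // registry reloads, every buffer's language is unset and re-detected, and
+ // buffers with unknown injections are reparsed as new languages become
+ // available.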
+ fn maintain_buffer_languages(
+ languages: Arc<LanguageRegistry>,
+ cx: &mut ModelContext<Project>,
+ ) -> Task<()> {
+ let mut subscription = languages.subscribe();
+ let mut prev_reload_count = languages.reload_count();
+ cx.spawn_weak(|project, mut cx| async move {
+ while let Some(()) = subscription.next().await {
+ if let Some(project) = project.upgrade(&cx) {
+ // If the language registry has been reloaded, then remove and
+ // re-assign the languages on all open buffers.
+ let reload_count = languages.reload_count();
+ if reload_count > prev_reload_count {
+ prev_reload_count = reload_count;
+ project.update(&mut cx, |this, cx| {
+ let buffers = this
+ .opened_buffers
+ .values()
+ .filter_map(|b| b.upgrade())
+ .collect::<Vec<_>>();
+ for buffer in buffers {
+ if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() {
+ this.unregister_buffer_from_language_servers(&buffer, &f, cx);
+ buffer.update(cx, |buffer, cx| buffer.set_language(None, cx));
+ }
+ }
+ });
+ }
+
+ project.update(&mut cx, |project, cx| {
+ let mut plain_text_buffers = Vec::new();
+ let mut buffers_with_unknown_injections = Vec::new();
+ for buffer in project.opened_buffers.values() {
+ if let Some(handle) = buffer.upgrade() {
+ let buffer = &handle.read(cx);
+ if buffer.language().is_none()
+ || buffer.language() == Some(&*language2::PLAIN_TEXT)
+ {
+ plain_text_buffers.push(handle);
+ } else if buffer.contains_unknown_injections() {
+ buffers_with_unknown_injections.push(handle);
+ }
+ }
+ }
+
+ for buffer in plain_text_buffers {
+ project.detect_language_for_buffer(&buffer, cx);
+ project.register_buffer_with_language_servers(&buffer, cx);
+ }
+
+ for buffer in buffers_with_unknown_injections {
+ buffer.update(cx, |buffer, cx| buffer.reparse(cx));
+ }
+ });
+ }
+ }
+ })
+ }
+
+ fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<()> {
+ let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
+ let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
+
+ let settings_observation = cx.observe_global::<SettingsStore, _>(move |_, _| {
+ *settings_changed_tx.borrow_mut() = ();
+ });
+
+ cx.spawn_weak(|this, mut cx| async move {
+ while let Some(_) = settings_changed_rx.next().await {
+ let Some(this) = this.upgrade(&cx) else {
+ break;
+ };
+
+ let servers: Vec<_> = this.read_with(&cx, |this, _| {
+ this.language_servers
+ .values()
+ .filter_map(|state| match state {
+ LanguageServerState::Starting(_) => None,
+ LanguageServerState::Running {
+ adapter, server, ..
+ } => Some((adapter.clone(), server.clone())),
+ })
+ .collect()
+ });
+
+ for (adapter, server) in servers {
+ let workspace_config =
+ cx.update(|cx| adapter.workspace_configuration(cx)).await;
+ server
+ .notify::<lsp2::notification::DidChangeConfiguration>(
+ lsp2::DidChangeConfigurationParams {
+ settings: workspace_config.clone(),
+ },
+ )
+ .ok();
+ }
+ }
+
+ drop(settings_observation);
+ })
+ }
+
+ fn detect_language_for_buffer(
+ &mut self,
+ buffer_handle: &Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<()> {
+ // If the buffer has a language, set it and start the language server if we haven't already.
+ let buffer = buffer_handle.read(cx);
+ let full_path = buffer.file()?.full_path(cx);
+ let content = buffer.as_rope();
+ let new_language = self
+ .languages
+ .language_for_file(&full_path, Some(content))
+ .now_or_never()?
+ .ok()?;
+ self.set_language_for_buffer(buffer_handle, new_language, cx);
+ None
+ }
+
+ pub fn set_language_for_buffer(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ new_language: Arc<Language>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ buffer.update(cx, |buffer, cx| {
+ if buffer.language().map_or(true, |old_language| {
+ !Arc::ptr_eq(old_language, &new_language)
+ }) {
+ buffer.set_language(Some(new_language.clone()), cx);
+ }
+ });
+
+ let buffer_file = buffer.read(cx).file().cloned();
+ let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
+ let buffer_file = File::from_dyn(buffer_file.as_ref());
+ let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
+
+ let task_buffer = buffer.clone();
+ let prettier_installation_task =
+ self.install_default_formatters(worktree, &new_language, &settings, cx);
+ cx.spawn(|project, mut cx| async move {
+ prettier_installation_task.await?;
+ let _ = project
+ .update(&mut cx, |project, cx| {
+ project.prettier_instance_for_buffer(&task_buffer, cx)
+ })
+ .await;
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+
+ if let Some(file) = buffer_file {
+ let worktree = file.worktree.clone();
+ if let Some(tree) = worktree.read(cx).as_local() {
+ self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
+ }
+ }
+ }
+
+ fn start_language_servers(
+ &mut self,
+ worktree: &Handle<Worktree>,
+ worktree_path: Arc<Path>,
+ language: Arc<Language>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
+ let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
+ if !settings.enable_language_server {
+ return;
+ }
+
+ let worktree_id = worktree.read(cx).id();
+ for adapter in language.lsp_adapters() {
+ self.start_language_server(
+ worktree_id,
+ worktree_path.clone(),
+ adapter.clone(),
+ language.clone(),
+ cx,
+ );
+ }
+ }
+
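+ // Starts a language server (at most once per worktree/adapter pair), merging
+ // any user-configured initialization options over the adapter's defaults. If
+ // startup fails, the adapter's installation test binary is used to decide
+ // whether the server should be reinstalled.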
+ fn start_language_server(
+ &mut self,
+ worktree_id: WorktreeId,
+ worktree_path: Arc<Path>,
+ adapter: Arc<CachedLspAdapter>,
+ language: Arc<Language>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let key = (worktree_id, adapter.name.clone());
+ if self.language_server_ids.contains_key(&key) {
+ return;
+ }
+
+ let pending_server = match self.languages.create_pending_language_server(
+ language.clone(),
+ adapter.clone(),
+ worktree_path,
+ ProjectLspAdapterDelegate::new(self, cx),
+ cx,
+ ) {
+ Some(pending_server) => pending_server,
+ None => return,
+ };
+
+ let project_settings = settings2::get::<ProjectSettings>(cx);
+ let lsp = project_settings.lsp2.get(&adapter.name.0);
+ let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
+
+ let mut initialization_options = adapter.initialization_options.clone();
+ match (&mut initialization_options, override_options) {
+ (Some(initialization_options), Some(override_options)) => {
+ merge_json_value_into(override_options, initialization_options);
+ }
+ (None, override_options) => initialization_options = override_options,
+ _ => {}
+ }
+
+ let server_id = pending_server.server_id;
+ let container_dir = pending_server.container_dir.clone();
+ let state = LanguageServerState::Starting({
+ let adapter = adapter.clone();
+ let server_name = adapter.name.0.clone();
+ let language = language.clone();
+ let key = key.clone();
+
+ cx.spawn_weak(|this, mut cx| async move {
+ let result = Self::setup_and_insert_language_server(
+ this,
+ initialization_options,
+ pending_server,
+ adapter.clone(),
+ language.clone(),
+ server_id,
+ key,
+ &mut cx,
+ )
+ .await;
+
+ match result {
+ Ok(server) => server,
+
+ Err(err) => {
+ log::error!("failed to start language server {:?}: {}", server_name, err);
+
+ if let Some(this) = this.upgrade() {
+ if let Some(container_dir) = container_dir {
+ let installation_test_binary = adapter
+ .installation_test_binary(container_dir.to_path_buf())
+ .await;
+
+ this.update(&mut cx, |_, cx| {
+ Self::check_errored_server(
+ language,
+ adapter,
+ server_id,
+ installation_test_binary,
+ cx,
+ )
+ });
+ }
+ }
+
+ None
+ }
+ }
+ })
+ });
+
+ self.language_servers.insert(server_id, state);
+ self.language_server_ids.insert(key, server_id);
+ }
+
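+ // Removes the errored server and all of its worktree key mappings, shuts it
+ // down, deletes its container directory, and then starts a fresh server for
+ // every remaining worktree.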
+ fn reinstall_language_server(
+ &mut self,
+ language: Arc<Language>,
+ adapter: Arc<CachedLspAdapter>,
+ server_id: LanguageServerId,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<Task<()>> {
+ log::info!("beginning to reinstall server");
+
+ let existing_server = match self.language_servers.remove(&server_id) {
+ Some(LanguageServerState::Running { server, .. }) => Some(server),
+ _ => None,
+ };
+
+ for worktree in &self.worktrees {
+ if let Some(worktree) = worktree.upgrade() {
+ let key = (worktree.read(cx).id(), adapter.name.clone());
+ self.language_server_ids.remove(&key);
+ }
+ }
+
+ Some(cx.spawn(move |this, mut cx| async move {
+ if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
+ log::info!("shutting down existing server");
+ task.await;
+ }
+
+ // TODO: This is race-safe with regard to preventing new instances from
+ // starting while deleting, but existing instances in other projects are going
+ // to be very confused and messed up
+ this.update(&mut cx, |this, cx| {
+ this.languages.delete_server_container(adapter.clone(), cx)
+ })
+ .await;
+
+ this.update(&mut cx, |this, mut cx| {
+ let worktrees = this.worktrees.clone();
+ for worktree in worktrees {
+ let worktree = match worktree.upgrade() {
+ Some(worktree) => worktree.read(cx),
+ None => continue,
+ };
+ let worktree_id = worktree.id();
+ let root_path = worktree.abs_path();
+
+ this.start_language_server(
+ worktree_id,
+ root_path,
+ adapter.clone(),
+ language.clone(),
+ &mut cx,
+ );
+ }
+ })
+ }))
+ }
+
+ async fn setup_and_insert_language_server(
+ this: WeakHandle<Self>,
+ initialization_options: Option<serde_json::Value>,
+ pending_server: PendingLanguageServer,
+ adapter: Arc<CachedLspAdapter>,
+ language: Arc<Language>,
+ server_id: LanguageServerId,
+ key: (WorktreeId, LanguageServerName),
+ cx: &mut AsyncAppContext,
+ ) -> Result<Option<Arc<LanguageServer>>> {
+ let setup = Self::setup_pending_language_server(
+ this,
+ initialization_options,
+ pending_server,
+ adapter.clone(),
+ server_id,
+ cx,
+ );
+
+ let language_server = match setup.await? {
+ Some(language_server) => language_server,
+ None => return Ok(None),
+ };
+ let this = match this.upgrade() {
+ Some(this) => this,
+ None => return Err(anyhow!("failed to upgrade project handle")),
+ };
+
+ this.update(cx, |this, cx| {
+ this.insert_newly_running_language_server(
+ language,
+ adapter,
+ language_server.clone(),
+ server_id,
+ key,
+ cx,
+ )
+ })?;
+
+ Ok(Some(language_server))
+ }
+
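+ // Wires up all notification and request handlers (diagnostics, workspace
+ // configuration, progress-token registration, file watchers, workspace
+ // edits, inlay hint refreshes) before initializing the server, then pushes
+ // the initial `DidChangeConfiguration` notification.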
+ async fn setup_pending_language_server(
+ this: WeakHandle<Self>,
+ initialization_options: Option<serde_json::Value>,
+ pending_server: PendingLanguageServer,
+ adapter: Arc<CachedLspAdapter>,
+ server_id: LanguageServerId,
+ cx: &mut AsyncAppContext,
+ ) -> Result<Option<Arc<LanguageServer>>> {
+ let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await;
+ let language_server = match pending_server.task.await? {
+ Some(server) => server,
+ None => return Ok(None),
+ };
+
+ language_server
+ .on_notification::<lsp2::notification::PublishDiagnostics, _>({
+ let adapter = adapter.clone();
+ move |mut params, mut cx| {
+ let this = this;
+ let adapter = adapter.clone();
+ adapter.process_diagnostics(&mut params);
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.update_diagnostics(
+ server_id,
+ params,
+ &adapter.disk_based_diagnostic_sources,
+ cx,
+ )
+ .log_err();
+ });
+ }
+ }
+ })
+ .detach();
+
+ language_server
+ .on_request::<lsp2::request::WorkspaceConfiguration, _, _>({
+ let adapter = adapter.clone();
+ move |params, mut cx| {
+ let adapter = adapter.clone();
+ async move {
+ let workspace_config =
+ cx.update(|cx| adapter.workspace_configuration(cx)).await;
+ Ok(params
+ .items
+ .into_iter()
+ .map(|item| {
+ if let Some(section) = &item.section {
+ workspace_config
+ .get(section)
+ .cloned()
+ .unwrap_or(serde_json::Value::Null)
+ } else {
+ workspace_config.clone()
+ }
+ })
+ .collect())
+ }
+ }
+ })
+ .detach();
+
+ // Even though we don't have handling for these requests, respond to them to
+ // avoid stalling any language server like `gopls` which waits for a response
+ // to these requests when initializing.
+ language_server
+ .on_request::<lsp2::request::WorkDoneProgressCreate, _, _>(
+ move |params, mut cx| async move {
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, _| {
+ if let Some(status) = this.language_server_statuses.get_mut(&server_id)
+ {
+ if let lsp2::NumberOrString::String(token) = params.token {
+ status.progress_tokens.insert(token);
+ }
+ }
+ });
+ }
+ Ok(())
+ },
+ )
+ .detach();
+ language_server
+ .on_request::<lsp2::request::RegisterCapability, _, _>({
+ move |params, mut cx| async move {
+ let this = this
+ .upgrade(&cx)
+ .ok_or_else(|| anyhow!("project dropped"))?;
+ for reg in params.registrations {
+ if reg.method == "workspace/didChangeWatchedFiles" {
+ if let Some(options) = reg.register_options {
+ let options = serde_json::from_value(options)?;
+ this.update(&mut cx, |this, cx| {
+ this.on_lsp_did_change_watched_files(server_id, options, cx);
+ });
+ }
+ }
+ }
+ Ok(())
+ }
+ })
+ .detach();
+
+ language_server
+ .on_request::<lsp2::request::ApplyWorkspaceEdit, _, _>({
+ let adapter = adapter.clone();
+ move |params, cx| {
+ Self::on_lsp_workspace_edit(this, params, server_id, adapter.clone(), cx)
+ }
+ })
+ .detach();
+
+ language_server
+ .on_request::<lsp2::request::InlayHintRefreshRequest, _, _>({
+ move |(), mut cx| async move {
+ let this = this
+ .upgrade(&cx)
+ .ok_or_else(|| anyhow!("project dropped"))?;
+ this.update(&mut cx, |project, cx| {
+ cx.emit(Event::RefreshInlayHints);
+ project.remote_id().map(|project_id| {
+ project.client.send(proto::RefreshInlayHints { project_id })
+ })
+ })
+ .transpose()?;
+ Ok(())
+ }
+ })
+ .detach();
+
+ let disk_based_diagnostics_progress_token =
+ adapter.disk_based_diagnostics_progress_token.clone();
+
+ language_server
+ .on_notification::<lsp2::notification::Progress, _>(move |params, mut cx| {
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.on_lsp_progress(
+ params,
+ server_id,
+ disk_based_diagnostics_progress_token.clone(),
+ cx,
+ );
+ });
+ }
+ })
+ .detach();
+
+ let language_server = language_server.initialize(initialization_options).await?;
+
+ language_server
+ .notify::<lsp2::notification::DidChangeConfiguration>(
+ lsp2::DidChangeConfigurationParams {
+ settings: workspace_config,
+ },
+ )
+ .ok();
+
+ Ok(Some(language_server))
+ }
+
+ fn insert_newly_running_language_server(
+ &mut self,
+ language: Arc<Language>,
+ adapter: Arc<CachedLspAdapter>,
+ language_server: Arc<LanguageServer>,
+ server_id: LanguageServerId,
+ key: (WorktreeId, LanguageServerName),
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ // If the language server for this key doesn't match the server id, don't store
+ // the server, which will cause it to be dropped and its process killed.
+ if self
+ .language_server_ids
+ .get(&key)
+ .map(|id| id != &server_id)
+ .unwrap_or(false)
+ {
+ return Ok(());
+ }
+
+ // Update the language_servers collection with the Running variant of
+ // LanguageServerState, indicating that the server is up and ready.
+ self.language_servers.insert(
+ server_id,
+ LanguageServerState::Running {
+ adapter: adapter.clone(),
+ language: language.clone(),
+ watched_paths: Default::default(),
+ server: language_server.clone(),
+ simulate_disk_based_diagnostics_completion: None,
+ },
+ );
+
+ self.language_server_statuses.insert(
+ server_id,
+ LanguageServerStatus {
+ name: language_server.name().to_string(),
+ pending_work: Default::default(),
+ has_pending_diagnostic_updates: false,
+ progress_tokens: Default::default(),
+ },
+ );
+
+ cx.emit(Event::LanguageServerAdded(server_id));
+
+ if let Some(project_id) = self.remote_id() {
+ self.client.send(proto::StartLanguageServer {
+ project_id,
+ server: Some(proto::LanguageServer {
+ id: server_id.0 as u64,
+ name: language_server.name().to_string(),
+ }),
+ })?;
+ }
+
+ // Tell the language server about every open buffer in the worktree that matches the language.
+ for buffer in self.opened_buffers.values() {
+ if let Some(buffer_handle) = buffer.upgrade() {
+ let buffer = buffer_handle.read(cx);
+ let file = match File::from_dyn(buffer.file()) {
+ Some(file) => file,
+ None => continue,
+ };
+ let language = match buffer.language() {
+ Some(language) => language,
+ None => continue,
+ };
+
+ if file.worktree.read(cx).id() != key.0
+ || !language.lsp_adapters().iter().any(|a| a.name == key.1)
+ {
+ continue;
+ }
+
+ let file = match file.as_local() {
+ Some(file) => file,
+ None => continue,
+ };
+
+ let versions = self
+ .buffer_snapshots
+ .entry(buffer.remote_id())
+ .or_default()
+ .entry(server_id)
+ .or_insert_with(|| {
+ vec![LspBufferSnapshot {
+ version: 0,
+ snapshot: buffer.text_snapshot(),
+ }]
+ });
+
+ let snapshot = versions.last().unwrap();
+ let version = snapshot.version;
+ let initial_snapshot = &snapshot.snapshot;
+ let uri = lsp2::Url::from_file_path(file.abs_path(cx)).unwrap();
+ language_server.notify::<lsp2::notification::DidOpenTextDocument>(
+ lsp2::DidOpenTextDocumentParams {
+ text_document: lsp2::TextDocumentItem::new(
+ uri,
+ adapter
+ .language_ids
+ .get(language.name().as_ref())
+ .cloned()
+ .unwrap_or_default(),
+ version,
+ initial_snapshot.text(),
+ ),
+ },
+ )?;
+
+ buffer_handle.update(cx, |buffer, cx| {
+ buffer.set_completion_triggers(
+ language_server
+ .capabilities()
+ .completion_provider
+ .as_ref()
+ .and_then(|provider| provider.trigger_characters.clone())
+ .unwrap_or_default(),
+ cx,
+ )
+ });
+ }
+ }
+
+ cx.notify();
+ Ok(())
+ }
+
+ // Returns a list of all the worktrees that no longer have a language server,
+ // along with the root path of the stopped server.
+ fn stop_language_server(
+ &mut self,
+ worktree_id: WorktreeId,
+ adapter_name: LanguageServerName,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
+ let key = (worktree_id, adapter_name);
+ if let Some(server_id) = self.language_server_ids.remove(&key) {
+ log::info!("stopping language server {}", key.1 .0);
+
+ // Remove other entries for this language server as well
+ let mut orphaned_worktrees = vec![worktree_id];
+ let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
+ for other_key in other_keys {
+ if self.language_server_ids.get(&other_key) == Some(&server_id) {
+ self.language_server_ids.remove(&other_key);
+ orphaned_worktrees.push(other_key.0);
+ }
+ }
+
+ for buffer in self.opened_buffers.values() {
+ if let Some(buffer) = buffer.upgrade() {
+ buffer.update(cx, |buffer, cx| {
+ buffer.update_diagnostics(server_id, Default::default(), cx);
+ });
+ }
+ }
+ for worktree in &self.worktrees {
+ if let Some(worktree) = worktree.upgrade() {
+ worktree.update(cx, |worktree, cx| {
+ if let Some(worktree) = worktree.as_local_mut() {
+ worktree.clear_diagnostics_for_language_server(server_id, cx);
+ }
+ });
+ }
+ }
+
+ self.language_server_statuses.remove(&server_id);
+ cx.notify();
+
+ let server_state = self.language_servers.remove(&server_id);
+ cx.emit(Event::LanguageServerRemoved(server_id));
+ cx.spawn_weak(|this, mut cx| async move {
+ let mut root_path = None;
+
+ let server = match server_state {
+ Some(LanguageServerState::Starting(task)) => task.await,
+ Some(LanguageServerState::Running { server, .. }) => Some(server),
+ None => None,
+ };
+
+ if let Some(server) = server {
+ root_path = Some(server.root_path().clone());
+ if let Some(shutdown) = server.shutdown() {
+ shutdown.await;
+ }
+ }
+
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.language_server_statuses.remove(&server_id);
+ cx.notify();
+ });
+ }
+
+ (root_path, orphaned_worktrees)
+ })
+ } else {
+ Task::ready((None, Vec::new()))
+ }
+ }
+
+ pub fn restart_language_servers_for_buffers(
+ &mut self,
+ buffers: impl IntoIterator<Item = Handle<Buffer>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<()> {
+ let language_server_lookup_info: HashSet<(Handle<Worktree>, Arc<Language>)> = buffers
+ .into_iter()
+ .filter_map(|buffer| {
+ let buffer = buffer.read(cx);
+ let file = File::from_dyn(buffer.file())?;
+ let full_path = file.full_path(cx);
+ let language = self
+ .languages
+ .language_for_file(&full_path, Some(buffer.as_rope()))
+ .now_or_never()?
+ .ok()?;
+ Some((file.worktree.clone(), language))
+ })
+ .collect();
+ for (worktree, language) in language_server_lookup_info {
+ self.restart_language_servers(worktree, language, cx);
+ }
+
+ None
+ }
+
+ // TODO This will break in the case where the adapter's root paths and worktrees are not equal
+ fn restart_language_servers(
+ &mut self,
+ worktree: Handle<Worktree>,
+ language: Arc<Language>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let worktree_id = worktree.read(cx).id();
+ let fallback_path = worktree.read(cx).abs_path();
+
+ let mut stops = Vec::new();
+ for adapter in language.lsp_adapters() {
+ stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
+ }
+
+ if stops.is_empty() {
+ return;
+ }
+ let mut stops = stops.into_iter();
+
+ cx.spawn_weak(|this, mut cx| async move {
+ let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
+ for stop in stops {
+ let (_, worktrees) = stop.await;
+ orphaned_worktrees.extend_from_slice(&worktrees);
+ }
+
+ let this = match this.upgrade(&cx) {
+ Some(this) => this,
+ None => return,
+ };
+
+ this.update(&mut cx, |this, cx| {
+ // Attempt to restart using the original server path, falling back to the
+ // passed-in path if we could not retrieve the root path.
+ let root_path = original_root_path
+ .map(|path_buf| Arc::from(path_buf.as_path()))
+ .unwrap_or(fallback_path);
+
+ this.start_language_servers(&worktree, root_path, language.clone(), cx);
+
+ // Lookup new server ids and set them for each of the orphaned worktrees
+ for adapter in language.lsp_adapters() {
+ if let Some(new_server_id) = this
+ .language_server_ids
+ .get(&(worktree_id, adapter.name.clone()))
+ .cloned()
+ {
+ for &orphaned_worktree in &orphaned_worktrees {
+ this.language_server_ids
+ .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
+ }
+ }
+ }
+ });
+ })
+ .detach();
+ }
+
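+ // Probes a failed server by running the adapter's installation test binary:
+ // a missing binary or a non-zero exit triggers a reinstall, while outliving
+ // the five-second timeout counts as success (a healthy server just keeps
+ // running).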
+ fn check_errored_server(
+ language: Arc<Language>,
+ adapter: Arc<CachedLspAdapter>,
+ server_id: LanguageServerId,
+ installation_test_binary: Option<LanguageServerBinary>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if !adapter.can_be_reinstalled() {
+ log::info!(
+ "Validation check requested for {:?} but it cannot be reinstalled",
+ adapter.name.0
+ );
+ return;
+ }
+
+ cx.spawn(|this, mut cx| async move {
+ log::info!("About to spawn test binary");
+
+ // A lack of a test binary counts as a failure
+ let process = installation_test_binary.and_then(|binary| {
+ smol::process::Command::new(&binary.path)
+ .current_dir(&binary.path)
+ .args(binary.arguments)
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::inherit())
+ .kill_on_drop(true)
+ .spawn()
+ .ok()
+ });
+
+ const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
+ let mut timeout = cx.background().timer(PROCESS_TIMEOUT).fuse();
+
+ let mut errored = false;
+ if let Some(mut process) = process {
+ futures::select! {
+ status = process.status().fuse() => match status {
+ Ok(status) => errored = !status.success(),
+ Err(_) => errored = true,
+ },
+
+ _ = timeout => {
+ log::info!("test binary time-ed out, this counts as a success");
+ _ = process.kill();
+ }
+ }
+ } else {
+ log::warn!("test binary failed to launch");
+ errored = true;
+ }
+
+ if errored {
+ log::warn!("test binary check failed");
+ let task = this.update(&mut cx, move |this, mut cx| {
+ this.reinstall_language_server(language, adapter, server_id, &mut cx)
+ });
+
+ if let Some(task) = task {
+ task.await;
+ }
+ }
+ })
+ .detach();
+ }
+
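+ // Handles `$/progress` notifications, honoring only string tokens that the
+ // server previously registered. Tokens matching the adapter's disk-based
+ // diagnostics token toggle the pending-diagnostics state; all other tokens
+ // are forwarded to collaborators as WorkStart/WorkProgress/WorkEnd updates.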
+ fn on_lsp_progress(
+ &mut self,
+ progress: lsp2::ProgressParams,
+ language_server_id: LanguageServerId,
+ disk_based_diagnostics_progress_token: Option<String>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let token = match progress.token {
+ lsp2::NumberOrString::String(token) => token,
+ lsp2::NumberOrString::Number(token) => {
+ log::info!("skipping numeric progress token {}", token);
+ return;
+ }
+ };
+ let lsp2::ProgressParamsValue::WorkDone(progress) = progress.value;
+ let language_server_status =
+ if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
+ status
+ } else {
+ return;
+ };
+
+ if !language_server_status.progress_tokens.contains(&token) {
+ return;
+ }
+
+ let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
+ .as_ref()
+ .map_or(false, |disk_based_token| {
+ token.starts_with(disk_based_token)
+ });
+
+ match progress {
+ lsp2::WorkDoneProgress::Begin(report) => {
+ if is_disk_based_diagnostics_progress {
+ language_server_status.has_pending_diagnostic_updates = true;
+ self.disk_based_diagnostics_started(language_server_id, cx);
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
+ })
+ .ok();
+ } else {
+ self.on_lsp_work_start(
+ language_server_id,
+ token.clone(),
+ LanguageServerProgress {
+ message: report.message.clone(),
+ percentage: report.percentage.map(|p| p as usize),
+ last_update_at: Instant::now(),
+ },
+ cx,
+ );
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message: proto::update_language_server::Variant::WorkStart(
+ proto::LspWorkStart {
+ token,
+ message: report.message,
+ percentage: report.percentage.map(|p| p as u32),
+ },
+ ),
+ })
+ .ok();
+ }
+ }
+ lsp2::WorkDoneProgress::Report(report) => {
+ if !is_disk_based_diagnostics_progress {
+ self.on_lsp_work_progress(
+ language_server_id,
+ token.clone(),
+ LanguageServerProgress {
+ message: report.message.clone(),
+ percentage: report.percentage.map(|p| p as usize),
+ last_update_at: Instant::now(),
+ },
+ cx,
+ );
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message: proto::update_language_server::Variant::WorkProgress(
+ proto::LspWorkProgress {
+ token,
+ message: report.message,
+ percentage: report.percentage.map(|p| p as u32),
+ },
+ ),
+ })
+ .ok();
+ }
+ }
+ lsp2::WorkDoneProgress::End(_) => {
+ language_server_status.progress_tokens.remove(&token);
+
+ if is_disk_based_diagnostics_progress {
+ language_server_status.has_pending_diagnostic_updates = false;
+ self.disk_based_diagnostics_finished(language_server_id, cx);
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message:
+ proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
+ Default::default(),
+ ),
+ })
+ .ok();
+ } else {
+ self.on_lsp_work_end(language_server_id, token.clone(), cx);
+ self.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
+ language_server_id,
+ message: proto::update_language_server::Variant::WorkEnd(
+ proto::LspWorkEnd { token },
+ ),
+ })
+ .ok();
+ }
+ }
+ }
+ }
+
+ fn on_lsp_work_start(
+ &mut self,
+ language_server_id: LanguageServerId,
+ token: String,
+ progress: LanguageServerProgress,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
+ status.pending_work.insert(token, progress);
+ cx.notify();
+ }
+ }
+
+ fn on_lsp_work_progress(
+ &mut self,
+ language_server_id: LanguageServerId,
+ token: String,
+ progress: LanguageServerProgress,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
+ let entry = status
+ .pending_work
+ .entry(token)
+ .or_insert(LanguageServerProgress {
+ message: Default::default(),
+ percentage: Default::default(),
+ last_update_at: progress.last_update_at,
+ });
+ if progress.message.is_some() {
+ entry.message = progress.message;
+ }
+ if progress.percentage.is_some() {
+ entry.percentage = progress.percentage;
+ }
+ entry.last_update_at = progress.last_update_at;
+ cx.notify();
+ }
+ }
+
+ fn on_lsp_work_end(
+ &mut self,
+ language_server_id: LanguageServerId,
+ token: String,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
+ cx.emit(Event::RefreshInlayHints);
+ status.pending_work.remove(&token);
+ cx.notify();
+ }
+ }
+
+ fn on_lsp_did_change_watched_files(
+ &mut self,
+ language_server_id: LanguageServerId,
+ params: DidChangeWatchedFilesRegistrationOptions,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(LanguageServerState::Running { watched_paths, .. }) =
+ self.language_servers.get_mut(&language_server_id)
+ {
+ let mut builders = HashMap::default();
+ for watcher in params.watchers {
+ for worktree in &self.worktrees {
+ if let Some(worktree) = worktree.upgrade() {
+ let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
+ if let Some(abs_path) = tree.abs_path().to_str() {
+ let relative_glob_pattern = match &watcher.glob_pattern {
+ lsp2::GlobPattern::String(s) => s
+ .strip_prefix(abs_path)
+ .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
+ lsp2::GlobPattern::Relative(rp) => {
+ let base_uri = match &rp.base_uri {
+ lsp2::OneOf::Left(workspace_folder) => {
+ &workspace_folder.uri
+ }
+ lsp2::OneOf::Right(base_uri) => base_uri,
+ };
+ base_uri.to_file_path().ok().and_then(|file_path| {
+ (file_path.to_str() == Some(abs_path))
+ .then_some(rp.pattern.as_str())
+ })
+ }
+ };
+ if let Some(relative_glob_pattern) = relative_glob_pattern {
+ let literal_prefix =
+ glob_literal_prefix(&relative_glob_pattern);
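+ // (Assuming glob_literal_prefix returns the longest glob-free prefix,
+ // e.g. "src" for "src/**/*.rs", scanning that prefix eagerly ensures the
+ // watched files are present in the worktree snapshot.)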
+ tree.as_local_mut()
+ .unwrap()
+ .add_path_prefix_to_scan(Path::new(literal_prefix).into());
+ if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
+ builders
+ .entry(tree.id())
+ .or_insert_with(|| GlobSetBuilder::new())
+ .add(glob);
+ }
+ return true;
+ }
+ }
+ false
+ });
+ if glob_is_inside_worktree {
+ break;
+ }
+ }
+ }
+ }
+
+ watched_paths.clear();
+ for (worktree_id, builder) in builders {
+ if let Ok(globset) = builder.build() {
+ watched_paths.insert(worktree_id, globset);
+ }
+ }
+
+ cx.notify();
+ }
+ }
+
+ async fn on_lsp_workspace_edit(
+ this: WeakHandle<Self>,
+ params: lsp2::ApplyWorkspaceEditParams,
+ server_id: LanguageServerId,
+ adapter: Arc<CachedLspAdapter>,
+ mut cx: AsyncAppContext,
+ ) -> Result<lsp2::ApplyWorkspaceEditResponse> {
+ let this = this
+ .upgrade(&cx)
+ .ok_or_else(|| anyhow!("project project closed"))?;
+ let language_server = this
+ .read_with(&cx, |this, _| this.language_server_for_id(server_id))
+ .ok_or_else(|| anyhow!("language server not found"))?;
+ let transaction = Self::deserialize_workspace_edit(
+ this.clone(),
+ params.edit,
+ true,
+ adapter.clone(),
+ language_server.clone(),
+ &mut cx,
+ )
+ .await
+ .log_err();
+ this.update(&mut cx, |this, _| {
+ if let Some(transaction) = transaction {
+ this.last_workspace_edits_by_language_server
+ .insert(server_id, transaction);
+ }
+ });
+ Ok(lsp2::ApplyWorkspaceEditResponse {
+ applied: true,
+ failed_change: None,
+ failure_reason: None,
+ })
+ }
+
+ pub fn language_server_statuses(
+ &self,
+ ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
+ self.language_server_statuses.values()
+ }
+
+ pub fn update_diagnostics(
+ &mut self,
+ language_server_id: LanguageServerId,
+ mut params: lsp2::PublishDiagnosticsParams,
+ disk_based_sources: &[String],
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ let abs_path = params
+ .uri
+ .to_file_path()
+ .map_err(|_| anyhow!("URI is not a file"))?;
+ let mut diagnostics = Vec::default();
+ let mut primary_diagnostic_group_ids = HashMap::default();
+ let mut sources_by_group_id = HashMap::default();
+ let mut supporting_diagnostics = HashMap::default();
+
+ // Ensure that primary diagnostics are always the most severe
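+ // (LSP severity values are ordered ERROR = 1, WARNING = 2, INFORMATION = 3,
+ // HINT = 4, so the ascending sort below puts errors ahead of warnings and hints.)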
+ params.diagnostics.sort_by_key(|item| item.severity);
+
+ for diagnostic in &params.diagnostics {
+ let source = diagnostic.source.as_ref();
+ let code = diagnostic.code.as_ref().map(|code| match code {
+ lsp2::NumberOrString::Number(code) => code.to_string(),
+ lsp2::NumberOrString::String(code) => code.clone(),
+ });
+ let range = range_from_lsp(diagnostic.range);
+ let is_supporting = diagnostic
+ .related_information
+ .as_ref()
+ .map_or(false, |infos| {
+ infos.iter().any(|info| {
+ primary_diagnostic_group_ids.contains_key(&(
+ source,
+ code.clone(),
+ range_from_lsp(info.location.range),
+ ))
+ })
+ });
+
+ let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
+ tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
+ });
+
+ if is_supporting {
+ supporting_diagnostics.insert(
+ (source, code.clone(), range),
+ (diagnostic.severity, is_unnecessary),
+ );
+ } else {
+ let group_id = post_inc(&mut self.next_diagnostic_group_id);
+ let is_disk_based =
+ source.map_or(false, |source| disk_based_sources.contains(source));
+
+ sources_by_group_id.insert(group_id, source);
+ primary_diagnostic_group_ids
+ .insert((source, code.clone(), range.clone()), group_id);
+
+ diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: Diagnostic {
+ source: diagnostic.source.clone(),
+ code: code.clone(),
+ severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
+ message: diagnostic.message.clone(),
+ group_id,
+ is_primary: true,
+ is_valid: true,
+ is_disk_based,
+ is_unnecessary,
+ },
+ });
+ if let Some(infos) = &diagnostic.related_information {
+ for info in infos {
+ if info.location.uri == params.uri && !info.message.is_empty() {
+ let range = range_from_lsp(info.location.range);
+ diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: Diagnostic {
+ source: diagnostic.source.clone(),
+ code: code.clone(),
+ severity: DiagnosticSeverity::INFORMATION,
+ message: info.message.clone(),
+ group_id,
+ is_primary: false,
+ is_valid: true,
+ is_disk_based,
+ is_unnecessary: false,
+ },
+ });
+ }
+ }
+ }
+ }
+ }
+
+ for entry in &mut diagnostics {
+ let diagnostic = &mut entry.diagnostic;
+ if !diagnostic.is_primary {
+ let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
+ if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
+ source,
+ diagnostic.code.clone(),
+ entry.range.clone(),
+ )) {
+ if let Some(severity) = severity {
+ diagnostic.severity = severity;
+ }
+ diagnostic.is_unnecessary = is_unnecessary;
+ }
+ }
+ }
+
+ self.update_diagnostic_entries(
+ language_server_id,
+ abs_path,
+ params.version,
+ diagnostics,
+ cx,
+ )?;
+ Ok(())
+ }
+
+ pub fn update_diagnostic_entries(
+ &mut self,
+ server_id: LanguageServerId,
+ abs_path: PathBuf,
+ version: Option<i32>,
+ diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ cx: &mut ModelContext<Project>,
+ ) -> Result<(), anyhow::Error> {
+ let (worktree, relative_path) = self
+ .find_local_worktree(&abs_path, cx)
+ .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
+
+ let project_path = ProjectPath {
+ worktree_id: worktree.read(cx).id(),
+ path: relative_path.into(),
+ };
+
+ if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
+ self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
+ }
+
+ let updated = worktree.update(cx, |worktree, cx| {
+ worktree
+ .as_local_mut()
+ .ok_or_else(|| anyhow!("not a local worktree"))?
+ .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
+ })?;
+ if updated {
+ cx.emit(Event::DiagnosticsUpdated {
+ language_server_id: server_id,
+ path: project_path,
+ });
+ }
+ Ok(())
+ }
+
+ fn update_buffer_diagnostics(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ server_id: LanguageServerId,
+ version: Option<i32>,
+ mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
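+ // Tie-breaker for diagnostics whose ranges are identical: primaries sort
+ // first, then by disk-based flag, severity, and message.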
+ fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
+ Ordering::Equal
+ .then_with(|| b.is_primary.cmp(&a.is_primary))
+ .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
+ .then_with(|| a.severity.cmp(&b.severity))
+ .then_with(|| a.message.cmp(&b.message))
+ }
+
+ let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
+
+ diagnostics.sort_unstable_by(|a, b| {
+ Ordering::Equal
+ .then_with(|| a.range.start.cmp(&b.range.start))
+ .then_with(|| b.range.end.cmp(&a.range.end))
+ .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
+ });
+
+ let mut sanitized_diagnostics = Vec::new();
+ let edits_since_save = Patch::new(
+ snapshot
+ .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
+ .collect(),
+ );
+ for entry in diagnostics {
+ let start;
+ let end;
+ if entry.diagnostic.is_disk_based {
+ // Some diagnostics are based on files on disk instead of buffers'
+ // current contents. Adjust these diagnostics' ranges to reflect
+ // any unsaved edits.
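+ // (For example, if two lines were inserted above the diagnostic since the
+ // last save, old_to_new shifts its range down by two rows.)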
+ start = edits_since_save.old_to_new(entry.range.start);
+ end = edits_since_save.old_to_new(entry.range.end);
+ } else {
+ start = entry.range.start;
+ end = entry.range.end;
+ }
+
+ let mut range = snapshot.clip_point_utf16(start, Bias::Left)
+ ..snapshot.clip_point_utf16(end, Bias::Right);
+
+ // Expand empty ranges by one codepoint
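+ // (e.g. an empty range at column 3 becomes 3..4; if that lands past the end
+ // of the line, we widen backwards to cover the preceding codepoint instead.)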
+ if range.start == range.end {
+ // This will go to the next boundary when clipped
+ range.end.column += 1;
+ range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
+ if range.start == range.end && range.end.column > 0 {
+ range.start.column -= 1;
+ range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
+ }
+ }
+
+ sanitized_diagnostics.push(DiagnosticEntry {
+ range,
+ diagnostic: entry.diagnostic,
+ });
+ }
+ drop(edits_since_save);
+
+ let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
+ buffer.update(cx, |buffer, cx| {
+ buffer.update_diagnostics(server_id, set, cx)
+ });
+ Ok(())
+ }
+
+ pub fn reload_buffers(
+ &self,
+ buffers: HashSet<Handle<Buffer>>,
+ push_to_history: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<ProjectTransaction>> {
+ let mut local_buffers = Vec::new();
+ let mut remote_buffers = None;
+ for buffer_handle in buffers {
+ let buffer = buffer_handle.read(cx);
+ if buffer.is_dirty() {
+ if let Some(file) = File::from_dyn(buffer.file()) {
+ if file.is_local() {
+ local_buffers.push(buffer_handle);
+ } else {
+ remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
+ }
+ }
+ }
+ }
+
+ let remote_buffers = self.remote_id().zip(remote_buffers);
+ let client = self.client.clone();
+
+ cx.spawn(|this, mut cx| async move {
+ let mut project_transaction = ProjectTransaction::default();
+
+ if let Some((project_id, remote_buffers)) = remote_buffers {
+ let response = client
+ .request(proto::ReloadBuffers {
+ project_id,
+ buffer_ids: remote_buffers
+ .iter()
+ .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
+ .collect(),
+ })
+ .await?
+ .transaction
+ .ok_or_else(|| anyhow!("missing transaction"))?;
+ project_transaction = this
+ .update(&mut cx, |this, cx| {
+ this.deserialize_project_transaction(response, push_to_history, cx)
+ })
+ .await?;
+ }
+
+ for buffer in local_buffers {
+ let transaction = buffer
+ .update(&mut cx, |buffer, cx| buffer.reload(cx))
+ .await?;
+ buffer.update(&mut cx, |buffer, cx| {
+ if let Some(transaction) = transaction {
+ if !push_to_history {
+ buffer.forget_transaction(transaction.id);
+ }
+ project_transaction.0.insert(cx.handle(), transaction);
+ }
+ });
+ }
+
+ Ok(project_transaction)
+ })
+ }
+
+ pub fn format(
+ &self,
+ buffers: HashSet<Handle<Buffer>>,
+ push_to_history: bool,
+ trigger: FormatTrigger,
+ cx: &mut ModelContext<Project>,
+ ) -> Task<anyhow::Result<ProjectTransaction>> {
+ if self.is_local() {
+ let mut buffers_with_paths_and_servers = buffers
+ .into_iter()
+ .filter_map(|buffer_handle| {
+ let buffer = buffer_handle.read(cx);
+ let file = File::from_dyn(buffer.file())?;
+ let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
+ let server = self
+ .primary_language_server_for_buffer(buffer, cx)
+ .map(|s| s.1.clone());
+ Some((buffer_handle, buffer_abs_path, server))
+ })
+ .collect::<Vec<_>>();
+
+ cx.spawn(|this, mut cx| async move {
+ // Do not allow multiple concurrent formatting requests for the
+ // same buffer.
+ this.update(&mut cx, |this, cx| {
+ buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
+ this.buffers_being_formatted
+ .insert(buffer.read(cx).remote_id())
+ });
+ });
+
+ let _cleanup = defer({
+ let this = this.clone();
+ let mut cx = cx.clone();
+ let buffers = &buffers_with_paths_and_servers;
+ move || {
+ this.update(&mut cx, |this, cx| {
+ for (buffer, _, _) in buffers {
+ this.buffers_being_formatted
+ .remove(&buffer.read(cx).remote_id());
+ }
+ });
+ }
+ });
+
+ let mut project_transaction = ProjectTransaction::default();
+ for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
+ let settings = buffer.read_with(&cx, |buffer, cx| {
+ language_settings(buffer.language(), buffer.file(), cx).clone()
+ });
+
+ let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
+ let ensure_final_newline = settings.ensure_final_newline_on_save;
+ let format_on_save = settings.format_on_save.clone();
+ let formatter = settings.formatter.clone();
+ let tab_size = settings.tab_size;
+
+ // First, format buffer's whitespace according to the settings.
+ let trailing_whitespace_diff = if remove_trailing_whitespace {
+ Some(
+ buffer
+ .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
+ .await,
+ )
+ } else {
+ None
+ };
+ let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
+ buffer.finalize_last_transaction();
+ buffer.start_transaction();
+ if let Some(diff) = trailing_whitespace_diff {
+ buffer.apply_diff(diff, cx);
+ }
+ if ensure_final_newline {
+ buffer.ensure_final_newline(cx);
+ }
+ buffer.end_transaction(cx)
+ });
+
+ // Currently, formatting operations are represented differently depending on
+ // whether they come from a language server or an external command.
+ enum FormatOperation {
+ Lsp(Vec<(Range<Anchor>, String)>),
+ External(Diff),
+ Prettier(Diff),
+ }
+
+ // Apply language-specific formatting using either a language server
+ // or external command.
+ let mut format_operation = None;
+ match (formatter, format_on_save) {
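+ // A FormatOnSave value that names a formatter overrides the configured
+ // formatter; FormatOnSave::Off only suppresses save-triggered formatting.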
+ (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
+
+ (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
+ | (_, FormatOnSave::LanguageServer) => {
+ if let Some((language_server, buffer_abs_path)) =
+ language_server.as_ref().zip(buffer_abs_path.as_ref())
+ {
+ format_operation = Some(FormatOperation::Lsp(
+ Self::format_via_lsp(
+ &this,
+ &buffer,
+ buffer_abs_path,
+ &language_server,
+ tab_size,
+ &mut cx,
+ )
+ .await
+ .context("failed to format via language server")?,
+ ));
+ }
+ }
+
+ (
+ Formatter::External { command, arguments },
+ FormatOnSave::On | FormatOnSave::Off,
+ )
+ | (_, FormatOnSave::External { command, arguments }) => {
+ if let Some(buffer_abs_path) = buffer_abs_path {
+ format_operation = Self::format_via_external_command(
+ buffer,
+ buffer_abs_path,
+ &command,
+ &arguments,
+ &mut cx,
+ )
+ .await
+ .context(format!(
+ "failed to format via external command {:?}",
+ command
+ ))?
+ .map(FormatOperation::External);
+ }
+ }
+ (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
+ if let Some(prettier_task) = this
+ .update(&mut cx, |project, cx| {
+ project.prettier_instance_for_buffer(buffer, cx)
+ }).await {
+ match prettier_task.await
+ {
+ Ok(prettier) => {
+ let buffer_path = buffer.read_with(&cx, |buffer, cx| {
+ File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
+ });
+ format_operation = Some(FormatOperation::Prettier(
+ prettier
+ .format(buffer, buffer_path, &cx)
+ .await
+ .context("formatting via prettier")?,
+ ));
+ }
+ Err(e) => anyhow::bail!(
+ "Failed to create prettier instance for buffer during autoformatting: {e:#}"
+ ),
+ }
+ } else if let Some((language_server, buffer_abs_path)) =
+ language_server.as_ref().zip(buffer_abs_path.as_ref())
+ {
+ format_operation = Some(FormatOperation::Lsp(
+ Self::format_via_lsp(
+ &this,
+ &buffer,
+ buffer_abs_path,
+ &language_server,
+ tab_size,
+ &mut cx,
+ )
+ .await
+ .context("failed to format via language server")?,
+ ));
+ }
+ }
+ (Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
+ if let Some(prettier_task) = this
+ .update(&mut cx, |project, cx| {
+ project.prettier_instance_for_buffer(buffer, cx)
+ }).await {
+ match prettier_task.await
+ {
+ Ok(prettier) => {
+ let buffer_path = buffer.read_with(&cx, |buffer, cx| {
+ File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
+ });
+ format_operation = Some(FormatOperation::Prettier(
+ prettier
+ .format(buffer, buffer_path, &cx)
+ .await
+ .context("formatting via prettier")?,
+ ));
+ }
+ Err(e) => anyhow::bail!(
+ "Failed to create prettier instance for buffer during formatting: {e:#}"
+ ),
+ }
+ }
+ }
+ };
+
+ buffer.update(&mut cx, |b, cx| {
+ // If the buffer had its whitespace formatted and was edited while the language-specific
+ // formatting was being computed, avoid applying the language-specific formatting, because
+ // it can't be grouped with the whitespace formatting in the undo history.
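+ // (peek_undo_stack returns the most recent undo entry; if its transaction is
+ // no longer the whitespace transaction, the buffer changed in the meantime.)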
+ if let Some(transaction_id) = whitespace_transaction_id {
+ if b.peek_undo_stack()
+ .map_or(true, |e| e.transaction_id() != transaction_id)
+ {
+ format_operation.take();
+ }
+ }
+
+ // Apply any language-specific formatting, and group the two formatting operations
+ // in the buffer's undo history.
+ if let Some(operation) = format_operation {
+ match operation {
+ FormatOperation::Lsp(edits) => {
+ b.edit(edits, None, cx);
+ }
+ FormatOperation::External(diff) => {
+ b.apply_diff(diff, cx);
+ }
+ FormatOperation::Prettier(diff) => {
+ b.apply_diff(diff, cx);
+ }
+ }
+
+ if let Some(transaction_id) = whitespace_transaction_id {
+ b.group_until_transaction(transaction_id);
+ }
+ }
+
+ if let Some(transaction) = b.finalize_last_transaction().cloned() {
+ if !push_to_history {
+ b.forget_transaction(transaction.id);
+ }
+ project_transaction.0.insert(buffer.clone(), transaction);
+ }
+ });
+ }
+
+ Ok(project_transaction)
+ })
+ } else {
+ let remote_id = self.remote_id();
+ let client = self.client.clone();
+ cx.spawn(|this, mut cx| async move {
+ let mut project_transaction = ProjectTransaction::default();
+ if let Some(project_id) = remote_id {
+ let response = client
+ .request(proto::FormatBuffers {
+ project_id,
+ trigger: trigger as i32,
+ buffer_ids: buffers
+ .iter()
+ .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
+ .collect(),
+ })
+ .await?
+ .transaction
+ .ok_or_else(|| anyhow!("missing transaction"))?;
+ project_transaction = this
+ .update(&mut cx, |this, cx| {
+ this.deserialize_project_transaction(response, push_to_history, cx)
+ })
+ .await?;
+ }
+ Ok(project_transaction)
+ })
+ }
+ }
+
+ async fn format_via_lsp(
+ this: &Handle<Self>,
+ buffer: &Handle<Buffer>,
+ abs_path: &Path,
+ language_server: &Arc<LanguageServer>,
+ tab_size: NonZeroU32,
+ cx: &mut AsyncAppContext,
+ ) -> Result<Vec<(Range<Anchor>, String)>> {
+ let uri = lsp2::Url::from_file_path(abs_path)
+ .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
+ let text_document = lsp2::TextDocumentIdentifier::new(uri);
+ let capabilities = &language_server.capabilities();
+
+ let formatting_provider = capabilities.document_formatting_provider.as_ref();
+ let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
+
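+ // A capability of OneOf::Left(false) means the server explicitly opted out,
+ // so treat it the same as an absent capability.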
+ let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
+ language_server
+ .request::<lsp2::request::Formatting>(lsp2::DocumentFormattingParams {
+ text_document,
+ options: lsp_command::lsp_formatting_options(tab_size.get()),
+ work_done_progress_params: Default::default(),
+ })
+ .await?
+ } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
+ let buffer_start = lsp2::Position::new(0, 0);
+ let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));
+
+ language_server
+ .request::<lsp2::request::RangeFormatting>(lsp2::DocumentRangeFormattingParams {
+ text_document,
+ range: lsp2::Range::new(buffer_start, buffer_end),
+ options: lsp_command::lsp_formatting_options(tab_size.get()),
+ work_done_progress_params: Default::default(),
+ })
+ .await?
+ } else {
+ None
+ };
+
+ if let Some(lsp_edits) = lsp_edits {
+ this.update(cx, |this, cx| {
+ this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
+ })
+ .await
+ } else {
+ Ok(Vec::new())
+ }
+ }
+
+ async fn format_via_external_command(
+ buffer: &Handle<Buffer>,
+ buffer_abs_path: &Path,
+ command: &str,
+ arguments: &[String],
+ cx: &mut AsyncAppContext,
+ ) -> Result<Option<Diff>> {
+ let working_dir_path = buffer.read_with(cx, |buffer, cx| {
+ let file = File::from_dyn(buffer.file())?;
+ let worktree = file.worktree.read(cx).as_local()?;
+ let mut worktree_path = worktree.abs_path().to_path_buf();
+ if worktree.root_entry()?.is_file() {
+ worktree_path.pop();
+ }
+ Some(worktree_path)
+ });
+
+ if let Some(working_dir_path) = working_dir_path {
+ let mut child =
+ smol::process::Command::new(command)
+ .args(arguments.iter().map(|arg| {
+ arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
+ }))
+ .current_dir(&working_dir_path)
+ .stdin(smol::process::Stdio::piped())
+ .stdout(smol::process::Stdio::piped())
+ .stderr(smol::process::Stdio::piped())
+ .spawn()?;
+ let stdin = child
+ .stdin
+ .as_mut()
+ .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
+ let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
+ for chunk in text.chunks() {
+ stdin.write_all(chunk.as_bytes()).await?;
+ }
+ stdin.flush().await?;
+
+ let output = child.output().await?;
+ if !output.status.success() {
+ return Err(anyhow!(
+ "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
+ output.status.code(),
+ String::from_utf8_lossy(&output.stdout),
+ String::from_utf8_lossy(&output.stderr),
+ ));
+ }
+
+ let stdout = String::from_utf8(output.stdout)?;
+ Ok(Some(
+ buffer
+ .read_with(cx, |buffer, cx| buffer.diff(stdout, cx))
+ .await,
+ ))
+ } else {
+ Ok(None)
+ }
+ }
+
+ pub fn definition<T: ToPointUtf16>(
+ &self,
+ buffer: &Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<LocationLink>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Primary,
+ GetDefinition { position },
+ cx,
+ )
+ }
+
+ pub fn type_definition<T: ToPointUtf16>(
+ &self,
+ buffer: &Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<LocationLink>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Primary,
+ GetTypeDefinition { position },
+ cx,
+ )
+ }
+
+ pub fn references<T: ToPointUtf16>(
+ &self,
+ buffer: &Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<Location>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Primary,
+ GetReferences { position },
+ cx,
+ )
+ }
+
+ pub fn document_highlights<T: ToPointUtf16>(
+ &self,
+ buffer: &Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<DocumentHighlight>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Primary,
+ GetDocumentHighlights { position },
+ cx,
+ )
+ }
+
+ pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
+ if self.is_local() {
+ let mut requests = Vec::new();
+ for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
+ let worktree_id = *worktree_id;
+ let worktree_handle = self.worktree_for_id(worktree_id, cx);
+ let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
+ Some(worktree) => worktree,
+ None => continue,
+ };
+ let worktree_abs_path = worktree.abs_path().clone();
+
+ let (adapter, language, server) = match self.language_servers.get(server_id) {
+ Some(LanguageServerState::Running {
+ adapter,
+ language,
+ server,
+ ..
+ }) => (adapter.clone(), language.clone(), server),
+
+ _ => continue,
+ };
+
+ requests.push(
+ server
+ .request::<lsp2::request::WorkspaceSymbolRequest>(
+ lsp2::WorkspaceSymbolParams {
+ query: query.to_string(),
+ ..Default::default()
+ },
+ )
+ .log_err()
+ .map(move |response| {
+ let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
+ lsp2::WorkspaceSymbolResponse::Flat(flat_responses) => {
+ flat_responses.into_iter().map(|lsp_symbol| {
+ (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
+ }).collect::<Vec<_>>()
+ }
+ lsp2::WorkspaceSymbolResponse::Nested(nested_responses) => {
+ nested_responses.into_iter().filter_map(|lsp_symbol| {
+ let location = match lsp_symbol.location {
+ OneOf::Left(location) => location,
+ OneOf::Right(_) => {
+ error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
+ return None
+ }
+ };
+ Some((lsp_symbol.name, lsp_symbol.kind, location))
+ }).collect::<Vec<_>>()
+ }
+ }).unwrap_or_default();
+
+ (
+ adapter,
+ language,
+ worktree_id,
+ worktree_abs_path,
+ lsp_symbols,
+ )
+ }),
+ );
+ }
+
+ cx.spawn_weak(|this, cx| async move {
+ let responses = futures::future::join_all(requests).await;
+ let this = match this.upgrade(&cx) {
+ Some(this) => this,
+ None => return Ok(Vec::new()),
+ };
+
+ let symbols = this.read_with(&cx, |this, cx| {
+ let mut symbols = Vec::new();
+ for (
+ adapter,
+ adapter_language,
+ source_worktree_id,
+ worktree_abs_path,
+ lsp_symbols,
+ ) in responses
+ {
+ symbols.extend(lsp_symbols.into_iter().filter_map(
+ |(symbol_name, symbol_kind, symbol_location)| {
+ let abs_path = symbol_location.uri.to_file_path().ok()?;
+ let mut worktree_id = source_worktree_id;
+ let path;
+ if let Some((worktree, rel_path)) =
+ this.find_local_worktree(&abs_path, cx)
+ {
+ worktree_id = worktree.read(cx).id();
+ path = rel_path;
+ } else {
+ path = relativize_path(&worktree_abs_path, &abs_path);
+ }
+
+ let project_path = ProjectPath {
+ worktree_id,
+ path: path.into(),
+ };
+ let signature = this.symbol_signature(&project_path);
+ let adapter_language = adapter_language.clone();
+ let language = this
+ .languages
+ .language_for_file(&project_path.path, None)
+ .unwrap_or_else(move |_| adapter_language);
+ let language_server_name = adapter.name.clone();
+ Some(async move {
+ let language = language.await;
+ let label =
+ language.label_for_symbol(&symbol_name, symbol_kind).await;
+
+ Symbol {
+ language_server_name,
+ source_worktree_id,
+ path: project_path,
+ label: label.unwrap_or_else(|| {
+ CodeLabel::plain(symbol_name.clone(), None)
+ }),
+ kind: symbol_kind,
+ name: symbol_name,
+ range: range_from_lsp(symbol_location.range),
+ signature,
+ }
+ })
+ },
+ ));
+ }
+
+ symbols
+ });
+
+ Ok(futures::future::join_all(symbols).await)
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let request = self.client.request(proto::GetProjectSymbols {
+ project_id,
+ query: query.to_string(),
+ });
+ cx.spawn_weak(|this, cx| async move {
+ let response = request.await?;
+ let mut symbols = Vec::new();
+ if let Some(this) = this.upgrade(&cx) {
+ let new_symbols = this.read_with(&cx, |this, _| {
+ response
+ .symbols
+ .into_iter()
+ .map(|symbol| this.deserialize_symbol(symbol))
+ .collect::<Vec<_>>()
+ });
+ symbols = futures::future::join_all(new_symbols)
+ .await
+ .into_iter()
+ .filter_map(|symbol| symbol.log_err())
+ .collect::<Vec<_>>();
+ }
+ Ok(symbols)
+ })
+ } else {
+ Task::ready(Ok(Default::default()))
+ }
+ }
+
+ pub fn open_buffer_for_symbol(
+ &mut self,
+ symbol: &Symbol,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ if self.is_local() {
+ let language_server_id = if let Some(id) = self.language_server_ids.get(&(
+ symbol.source_worktree_id,
+ symbol.language_server_name.clone(),
+ )) {
+ *id
+ } else {
+ return Task::ready(Err(anyhow!(
+ "language server for worktree and language not found"
+ )));
+ };
+
+ let worktree_abs_path = if let Some(worktree_abs_path) = self
+ .worktree_for_id(symbol.path.worktree_id, cx)
+ .and_then(|worktree| worktree.read(cx).as_local())
+ .map(|local_worktree| local_worktree.abs_path())
+ {
+ worktree_abs_path
+ } else {
+ return Task::ready(Err(anyhow!("worktree not found for symbol")));
+ };
+ let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
+ let symbol_uri = if let Ok(uri) = lsp2::Url::from_file_path(symbol_abs_path) {
+ uri
+ } else {
+ return Task::ready(Err(anyhow!("invalid symbol path")));
+ };
+
+ self.open_local_buffer_via_lsp(
+ symbol_uri,
+ language_server_id,
+ symbol.language_server_name.clone(),
+ cx,
+ )
+ } else if let Some(project_id) = self.remote_id() {
+ let request = self.client.request(proto::OpenBufferForSymbol {
+ project_id,
+ symbol: Some(serialize_symbol(symbol)),
+ });
+ cx.spawn(|this, mut cx| async move {
+ let response = request.await?;
+ this.update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(response.buffer_id, cx)
+ })
+ .await
+ })
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ pub fn hover<T: ToPointUtf16>(
+ &self,
+ buffer: &Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Hover>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Primary,
+ GetHover { position },
+ cx,
+ )
+ }
+
+ pub fn completions<T: ToOffset + ToPointUtf16>(
+ &self,
+ buffer: &Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<Completion>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ if self.is_local() {
+ let snapshot = buffer.read(cx).snapshot();
+ let offset = position.to_offset(&snapshot);
+ let scope = snapshot.language_scope_at(offset);
+
+ let server_ids: Vec<_> = self
+ .language_servers_for_buffer(buffer.read(cx), cx)
+ .filter(|(_, server)| server.capabilities().completion_provider.is_some())
+ .filter(|(adapter, _)| {
+ scope
+ .as_ref()
+ .map(|scope| scope.language_allowed(&adapter.name))
+ .unwrap_or(true)
+ })
+ .map(|(_, server)| server.server_id())
+ .collect();
+
+ let buffer = buffer.clone();
+ cx.spawn(|this, mut cx| async move {
+ let mut tasks = Vec::with_capacity(server_ids.len());
+ this.update(&mut cx, |this, cx| {
+ for server_id in server_ids {
+ tasks.push(this.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Other(server_id),
+ GetCompletions { position },
+ cx,
+ ));
+ }
+ });
+
+ let mut completions = Vec::new();
+ for task in tasks {
+ if let Ok(new_completions) = task.await {
+ completions.extend_from_slice(&new_completions);
+ }
+ }
+
+ Ok(completions)
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
+ } else {
+ Task::ready(Ok(Default::default()))
+ }
+ }
+
+ pub fn apply_additional_edits_for_completion(
+ &self,
+ buffer_handle: Handle<Buffer>,
+ completion: Completion,
+ push_to_history: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Transaction>>> {
+ let buffer = buffer_handle.read(cx);
+ let buffer_id = buffer.remote_id();
+
+ if self.is_local() {
+ let server_id = completion.server_id;
+ let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
+ Some((_, server)) => server.clone(),
+ _ => return Task::ready(Ok(Default::default())),
+ };
+
+ cx.spawn(|this, mut cx| async move {
+ let can_resolve = lang_server
+ .capabilities()
+ .completion_provider
+ .as_ref()
+ .and_then(|options| options.resolve_provider)
+ .unwrap_or(false);
+ let additional_text_edits = if can_resolve {
+ lang_server
+ .request::<lsp2::request::ResolveCompletionItem>(completion.lsp_completion)
+ .await?
+ .additional_text_edits
+ } else {
+ completion.lsp_completion.additional_text_edits
+ };
+ if let Some(edits) = additional_text_edits {
+ let edits = this
+ .update(&mut cx, |this, cx| {
+ this.edits_from_lsp(
+ &buffer_handle,
+ edits,
+ lang_server.server_id(),
+ None,
+ cx,
+ )
+ })
+ .await?;
+
+ buffer_handle.update(&mut cx, |buffer, cx| {
+ buffer.finalize_last_transaction();
+ buffer.start_transaction();
+
+ for (range, text) in edits {
+ let primary = &completion.old_range;
+ let start_within = primary.start.cmp(&range.start, buffer).is_le()
+ && primary.end.cmp(&range.start, buffer).is_ge();
+ let end_within = range.start.cmp(&primary.end, buffer).is_le()
+ && range.end.cmp(&primary.end, buffer).is_ge();
+
+ // Skip additional edits which overlap with the primary completion edit
+ // https://github.com/zed-industries/zed/pull/1871
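+ // (e.g. if the primary edit replaces 10..20, an additional edit at 15..25
+ // overlaps and is dropped, while one at 30..35 is still applied.)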
+ if !start_within && !end_within {
+ buffer.edit([(range, text)], None, cx);
+ }
+ }
+
+ let transaction = if buffer.end_transaction(cx).is_some() {
+ let transaction = buffer.finalize_last_transaction().unwrap().clone();
+ if !push_to_history {
+ buffer.forget_transaction(transaction.id);
+ }
+ Some(transaction)
+ } else {
+ None
+ };
+ Ok(transaction)
+ })
+ } else {
+ Ok(None)
+ }
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let client = self.client.clone();
+ cx.spawn(|_, mut cx| async move {
+ let response = client
+ .request(proto::ApplyCompletionAdditionalEdits {
+ project_id,
+ buffer_id,
+ completion: Some(language2::proto::serialize_completion(&completion)),
+ })
+ .await?;
+
+ if let Some(transaction) = response.transaction {
+ let transaction = language2::proto::deserialize_transaction(transaction)?;
+ buffer_handle
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_edits(transaction.edit_ids.iter().copied())
+ })
+ .await?;
+ if push_to_history {
+ buffer_handle.update(&mut cx, |buffer, _| {
+ buffer.push_transaction(transaction.clone(), Instant::now());
+ });
+ }
+ Ok(Some(transaction))
+ } else {
+ Ok(None)
+ }
+ })
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ pub fn code_actions<T: Clone + ToOffset>(
+ &self,
+ buffer_handle: &Handle<Buffer>,
+ range: Range<T>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<CodeAction>>> {
+ let buffer = buffer_handle.read(cx);
+ let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
+ self.request_lsp(
+ buffer_handle.clone(),
+ LanguageServerToQuery::Primary,
+ GetCodeActions { range },
+ cx,
+ )
+ }
+
+ pub fn apply_code_action(
+ &self,
+ buffer_handle: Handle<Buffer>,
+ mut action: CodeAction,
+ push_to_history: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<ProjectTransaction>> {
+ if self.is_local() {
+ let buffer = buffer_handle.read(cx);
+ let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
+ self.language_server_for_buffer(buffer, action.server_id, cx)
+ {
+ (adapter.clone(), server.clone())
+ } else {
+ return Task::ready(Ok(Default::default()));
+ };
+ let range = action.range.to_point_utf16(buffer);
+
+ cx.spawn(|this, mut cx| async move {
+ if let Some(lsp_range) = action
+ .lsp_action
+ .data
+ .as_mut()
+ .and_then(|d| d.get_mut("codeActionParams"))
+ .and_then(|d| d.get_mut("range"))
+ {
+ *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
+ action.lsp_action = lang_server
+ .request::<lsp2::request::CodeActionResolveRequest>(action.lsp_action)
+ .await?;
+ } else {
+ let actions = this
+ .update(&mut cx, |this, cx| {
+ this.code_actions(&buffer_handle, action.range, cx)
+ })
+ .await?;
+ action.lsp_action = actions
+ .into_iter()
+ .find(|a| a.lsp_action.title == action.lsp_action.title)
+ .ok_or_else(|| anyhow!("code action is outdated"))?
+ .lsp_action;
+ }
+
+ if let Some(edit) = action.lsp_action.edit {
+ if edit.changes.is_some() || edit.document_changes.is_some() {
+ return Self::deserialize_workspace_edit(
+ this,
+ edit,
+ push_to_history,
+ lsp_adapter.clone(),
+ lang_server.clone(),
+ &mut cx,
+ )
+ .await;
+ }
+ }
+
+ if let Some(command) = action.lsp_action.command {
+ this.update(&mut cx, |this, _| {
+ this.last_workspace_edits_by_language_server
+ .remove(&lang_server.server_id());
+ });
+
+ let result = lang_server
+ .request::<lsp2::request::ExecuteCommand>(lsp2::ExecuteCommandParams {
+ command: command.command,
+ arguments: command.arguments.unwrap_or_default(),
+ ..Default::default()
+ })
+ .await;
+
+ if let Err(err) = result {
+ // TODO: LSP ERROR
+ return Err(err);
+ }
+
+ return Ok(this.update(&mut cx, |this, _| {
+ this.last_workspace_edits_by_language_server
+ .remove(&lang_server.server_id())
+ .unwrap_or_default()
+ }));
+ }
+
+ Ok(ProjectTransaction::default())
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let client = self.client.clone();
+ let request = proto::ApplyCodeAction {
+ project_id,
+ buffer_id: buffer_handle.read(cx).remote_id(),
+ action: Some(language2::proto::serialize_code_action(&action)),
+ };
+ cx.spawn(|this, mut cx| async move {
+ let response = client
+ .request(request)
+ .await?
+ .transaction
+ .ok_or_else(|| anyhow!("missing transaction"))?;
+ this.update(&mut cx, |this, cx| {
+ this.deserialize_project_transaction(response, push_to_history, cx)
+ })
+ .await
+ })
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ fn apply_on_type_formatting(
+ &self,
+ buffer: Handle<Buffer>,
+ position: Anchor,
+ trigger: String,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Transaction>>> {
+ if self.is_local() {
+ cx.spawn(|this, mut cx| async move {
+ // Do not allow multiple concurrent formatting requests for the
+ // same buffer.
+ this.update(&mut cx, |this, cx| {
+ this.buffers_being_formatted
+ .insert(buffer.read(cx).remote_id())
+ });
+
+ let _cleanup = defer({
+ let this = this.clone();
+ let mut cx = cx.clone();
+ let closure_buffer = buffer.clone();
+ move || {
+ this.update(&mut cx, |this, cx| {
+ this.buffers_being_formatted
+ .remove(&closure_buffer.read(cx).remote_id());
+ });
+ }
+ });
+
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_edits(Some(position.timestamp))
+ })
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ let position = position.to_point_utf16(buffer.read(cx));
+ this.on_type_format(buffer, position, trigger, false, cx)
+ })
+ .await
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let client = self.client.clone();
+ let request = proto::OnTypeFormatting {
+ project_id,
+ buffer_id: buffer.read(cx).remote_id(),
+ position: Some(serialize_anchor(&position)),
+ trigger,
+ version: serialize_version(&buffer.read(cx).version()),
+ };
+ cx.spawn(|_, _| async move {
+ client
+ .request(request)
+ .await?
+ .transaction
+ .map(language2::proto::deserialize_transaction)
+ .transpose()
+ })
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ async fn deserialize_edits(
+ this: Handle<Self>,
+ buffer_to_edit: Handle<Buffer>,
+ edits: Vec<lsp2::TextEdit>,
+ push_to_history: bool,
+ _: Arc<CachedLspAdapter>,
+ language_server: Arc<LanguageServer>,
+ cx: &mut AsyncAppContext,
+ ) -> Result<Option<Transaction>> {
+ let edits = this
+ .update(cx, |this, cx| {
+ this.edits_from_lsp(
+ &buffer_to_edit,
+ edits,
+ language_server.server_id(),
+ None,
+ cx,
+ )
+ })
+ .await?;
+
+ let transaction = buffer_to_edit.update(cx, |buffer, cx| {
+ buffer.finalize_last_transaction();
+ buffer.start_transaction();
+ for (range, text) in edits {
+ buffer.edit([(range, text)], None, cx);
+ }
+
+ if buffer.end_transaction(cx).is_some() {
+ let transaction = buffer.finalize_last_transaction().unwrap().clone();
+ if !push_to_history {
+ buffer.forget_transaction(transaction.id);
+ }
+ Some(transaction)
+ } else {
+ None
+ }
+ });
+
+ Ok(transaction)
+ }
+
+ async fn deserialize_workspace_edit(
+ this: Handle<Self>,
+ edit: lsp2::WorkspaceEdit,
+ push_to_history: bool,
+ lsp_adapter: Arc<CachedLspAdapter>,
+ language_server: Arc<LanguageServer>,
+ cx: &mut AsyncAppContext,
+ ) -> Result<ProjectTransaction> {
+ let fs = this.read_with(cx, |this, _| this.fs.clone());
+ let mut operations = Vec::new();
+ if let Some(document_changes) = edit.document_changes {
+ match document_changes {
+ lsp2::DocumentChanges::Edits(edits) => {
+ operations.extend(edits.into_iter().map(lsp2::DocumentChangeOperation::Edit))
+ }
+ lsp2::DocumentChanges::Operations(ops) => operations = ops,
+ }
+ } else if let Some(changes) = edit.changes {
+ operations.extend(changes.into_iter().map(|(uri, edits)| {
+ lsp2::DocumentChangeOperation::Edit(lsp2::TextDocumentEdit {
+ text_document: lsp2::OptionalVersionedTextDocumentIdentifier {
+ uri,
+ version: None,
+ },
+ edits: edits.into_iter().map(OneOf::Left).collect(),
+ })
+ }));
+ }
+
+ let mut project_transaction = ProjectTransaction::default();
+ for operation in operations {
+ match operation {
+ lsp2::DocumentChangeOperation::Op(lsp2::ResourceOp::Create(op)) => {
+ let abs_path = op
+ .uri
+ .to_file_path()
+ .map_err(|_| anyhow!("can't convert URI to path"))?;
+
+ if let Some(parent_path) = abs_path.parent() {
+ fs.create_dir(parent_path).await?;
+ }
+ if abs_path.ends_with("/") {
+ fs.create_dir(&abs_path).await?;
+ } else {
+ fs.create_file(
+ &abs_path,
+ op.options
+ .map(|options| fs::CreateOptions {
+ overwrite: options.overwrite.unwrap_or(false),
+ ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
+ })
+ .unwrap_or_default(),
+ )
+ .await?;
+ }
+ }
+
+ lsp2::DocumentChangeOperation::Op(lsp2::ResourceOp::Rename(op)) => {
+ let source_abs_path = op
+ .old_uri
+ .to_file_path()
+ .map_err(|_| anyhow!("can't convert URI to path"))?;
+ let target_abs_path = op
+ .new_uri
+ .to_file_path()
+ .map_err(|_| anyhow!("can't convert URI to path"))?;
+ fs.rename(
+ &source_abs_path,
+ &target_abs_path,
+ op.options
+ .map(|options| fs::RenameOptions {
+ overwrite: options.overwrite.unwrap_or(false),
+ ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
+ })
+ .unwrap_or_default(),
+ )
+ .await?;
+ }
+
+ lsp2::DocumentChangeOperation::Op(lsp2::ResourceOp::Delete(op)) => {
+ let abs_path = op
+ .uri
+ .to_file_path()
+ .map_err(|_| anyhow!("can't convert URI to path"))?;
+ let options = op
+ .options
+ .map(|options| fs::RemoveOptions {
+ recursive: options.recursive.unwrap_or(false),
+ ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
+ })
+ .unwrap_or_default();
+ if abs_path.ends_with("/") {
+ fs.remove_dir(&abs_path, options).await?;
+ } else {
+ fs.remove_file(&abs_path, options).await?;
+ }
+ }
+
+ lsp2::DocumentChangeOperation::Edit(op) => {
+ let buffer_to_edit = this
+ .update(cx, |this, cx| {
+ this.open_local_buffer_via_lsp(
+ op.text_document.uri,
+ language_server.server_id(),
+ lsp_adapter.name.clone(),
+ cx,
+ )
+ })
+ .await?;
+
+ let edits = this
+ .update(cx, |this, cx| {
+ let edits = op.edits.into_iter().map(|edit| match edit {
+ OneOf::Left(edit) => edit,
+ OneOf::Right(edit) => edit.text_edit,
+ });
+ this.edits_from_lsp(
+ &buffer_to_edit,
+ edits,
+ language_server.server_id(),
+ op.text_document.version,
+ cx,
+ )
+ })
+ .await?;
+
+ let transaction = buffer_to_edit.update(cx, |buffer, cx| {
+ buffer.finalize_last_transaction();
+ buffer.start_transaction();
+ for (range, text) in edits {
+ buffer.edit([(range, text)], None, cx);
+ }
+ let transaction = if buffer.end_transaction(cx).is_some() {
+ let transaction = buffer.finalize_last_transaction().unwrap().clone();
+ if !push_to_history {
+ buffer.forget_transaction(transaction.id);
+ }
+ Some(transaction)
+ } else {
+ None
+ };
+
+ transaction
+ });
+ if let Some(transaction) = transaction {
+ project_transaction.0.insert(buffer_to_edit, transaction);
+ }
+ }
+ }
+ }
+
+ Ok(project_transaction)
+ }
+
+ pub fn prepare_rename<T: ToPointUtf16>(
+ &self,
+ buffer: Handle<Buffer>,
+ position: T,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Range<Anchor>>>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer,
+ LanguageServerToQuery::Primary,
+ PrepareRename { position },
+ cx,
+ )
+ }
+
+ pub fn perform_rename<T: ToPointUtf16>(
+ &self,
+ buffer: Handle<Buffer>,
+ position: T,
+ new_name: String,
+ push_to_history: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<ProjectTransaction>> {
+ let position = position.to_point_utf16(buffer.read(cx));
+ self.request_lsp(
+ buffer,
+ LanguageServerToQuery::Primary,
+ PerformRename {
+ position,
+ new_name,
+ push_to_history,
+ },
+ cx,
+ )
+ }
+
+ pub fn on_type_format<T: ToPointUtf16>(
+ &self,
+ buffer: Handle<Buffer>,
+ position: T,
+ trigger: String,
+ push_to_history: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Option<Transaction>>> {
+ let (position, tab_size) = buffer.read_with(cx, |buffer, cx| {
+ let position = position.to_point_utf16(buffer);
+ (
+ position,
+ language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
+ .tab_size,
+ )
+ });
+ self.request_lsp(
+ buffer.clone(),
+ LanguageServerToQuery::Primary,
+ OnTypeFormatting {
+ position,
+ trigger,
+ options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
+ push_to_history,
+ },
+ cx,
+ )
+ }
+
+ pub fn inlay_hints<T: ToOffset>(
+ &self,
+ buffer_handle: Handle<Buffer>,
+ range: Range<T>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<anyhow::Result<Vec<InlayHint>>> {
+ let buffer = buffer_handle.read(cx);
+ let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
+ let range_start = range.start;
+ let range_end = range.end;
+ let buffer_id = buffer.remote_id();
+ let buffer_version = buffer.version().clone();
+ let lsp_request = InlayHints { range };
+
+ if self.is_local() {
+ let lsp_request_task = self.request_lsp(
+ buffer_handle.clone(),
+ LanguageServerToQuery::Primary,
+ lsp_request,
+ cx,
+ );
+ cx.spawn(|_, mut cx| async move {
+ buffer_handle
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
+ })
+ .await
+ .context("waiting for inlay hint request range edits")?;
+ lsp_request_task.await.context("inlay hints LSP request")
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let client = self.client.clone();
+ let request = proto::InlayHints {
+ project_id,
+ buffer_id,
+ start: Some(serialize_anchor(&range_start)),
+ end: Some(serialize_anchor(&range_end)),
+ version: serialize_version(&buffer_version),
+ };
+ cx.spawn(|project, cx| async move {
+ let response = client
+ .request(request)
+ .await
+ .context("inlay hints proto request")?;
+ let hints_request_result = LspCommand::response_from_proto(
+ lsp_request,
+ response,
+ project,
+ buffer_handle.clone(),
+ cx,
+ )
+ .await;
+
+ hints_request_result.context("inlay hints proto response conversion")
+ })
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ pub fn resolve_inlay_hint(
+ &self,
+ hint: InlayHint,
+ buffer_handle: Handle<Buffer>,
+ server_id: LanguageServerId,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<anyhow::Result<InlayHint>> {
+ if self.is_local() {
+ let buffer = buffer_handle.read(cx);
+ let (_, lang_server) = if let Some((adapter, server)) =
+ self.language_server_for_buffer(buffer, server_id, cx)
+ {
+ (adapter.clone(), server.clone())
+ } else {
+ return Task::ready(Ok(hint));
+ };
+ if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
+ return Task::ready(Ok(hint));
+ }
+
+ let buffer_snapshot = buffer.snapshot();
+ cx.spawn(|_, mut cx| async move {
+ let resolve_task = lang_server.request::<lsp2::request::InlayHintResolveRequest>(
+ InlayHints::project_to_lsp2_hint(hint, &buffer_snapshot),
+ );
+ let resolved_hint = resolve_task
+ .await
+ .context("inlay hint resolve LSP request")?;
+ let resolved_hint = InlayHints::lsp_to_project_hint(
+ resolved_hint,
+ &buffer_handle,
+ server_id,
+ ResolveState::Resolved,
+ false,
+ &mut cx,
+ )
+ .await?;
+ Ok(resolved_hint)
+ })
+ } else if let Some(project_id) = self.remote_id() {
+ let client = self.client.clone();
+ let request = proto::ResolveInlayHint {
+ project_id,
+ buffer_id: buffer_handle.read(cx).remote_id(),
+ language_server_id: server_id.0 as u64,
+ hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
+ };
+ cx.spawn(|_, _| async move {
+ let response = client
+ .request(request)
+ .await
+ .context("inlay hints proto request")?;
+ match response.hint {
+ Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
+ .context("inlay hints proto resolve response conversion"),
+ None => Ok(hint),
+ }
+ })
+ } else {
+ Task::ready(Err(anyhow!("project does not have a remote id")))
+ }
+ }
+
+ #[allow(clippy::type_complexity)]
+ pub fn search(
+ &self,
+ query: SearchQuery,
+ cx: &mut ModelContext<Self>,
+ ) -> Receiver<(Handle<Buffer>, Vec<Range<Anchor>>)> {
+ if self.is_local() {
+ self.search_local(query, cx)
+ } else if let Some(project_id) = self.remote_id() {
+ let (tx, rx) = smol::channel::unbounded();
+ let request = self.client.request(query.to_proto(project_id));
+ cx.spawn(|this, mut cx| async move {
+ let response = request.await?;
+ let mut result = HashMap::default();
+ for location in response.locations {
+ let target_buffer = this
+ .update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(location.buffer_id, cx)
+ })
+ .await?;
+ let start = location
+ .start
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target start"))?;
+ let end = location
+ .end
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("missing target end"))?;
+ result
+ .entry(target_buffer)
+ .or_insert(Vec::new())
+ .push(start..end)
+ }
+ for (buffer, ranges) in result {
+ let _ = tx.send((buffer, ranges)).await;
+ }
+ Result::<(), anyhow::Error>::Ok(())
+ })
+ .detach_and_log_err(cx);
+ rx
+ } else {
+ unimplemented!();
+ }
+ }
+
+ pub fn search_local(
+ &self,
+ query: SearchQuery,
+ cx: &mut ModelContext<Self>,
+ ) -> Receiver<(Handle<Buffer>, Vec<Range<Anchor>>)> {
+ // Local search is split into several phases.
+ // TL;DR: we do two passes; an initial pass picks the files that contain at least one match,
+ // and a second pass finds the positions of all matches within those candidate files.
+ // The Receiver returned from this function yields matches sorted by buffer path; buffers without a path are reported first.
+ //
+ // It gets a bit hairy though, because we must account for files that do not have a persistent representation
+ // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
+ //
+ // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
+ // Then, we go through a worktree and check for files that do match a predicate. If the file had an opened version, we skip the scan
+ // of FS version for that file altogether - after all, what we have in memory is more up-to-date than what's in FS.
+ // 2. At this point, we have a list of all potentially matching buffers/files.
+ // We sort that list by buffer path - this list is retained for later use.
+ // We ensure that all buffers are now opened and available in project.
+ // 3. We run a scan over all the candidate buffers on multiple background threads.
+ // We cannot assume that there will even be a match - while at least one match
+ // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
+ // There is also an auxiliary background thread responsible for gathering results.
+ // This is where the sorted list of buffers comes into play: the thread accepts match
+ // notifications (buffer has/doesn't have matches) in any order, holds on to them,
+ // and reports matches in sorted order. As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
+ // entry - which might already be available thanks to out-of-order processing.
+ //
+ // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
+ // That, however, would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
+ // Sadly, this isn't as straightforward as running an insertion sort, and it would also have to maintain a stable match index
+ // in the face of a constantly updating list of sorted matches.
+ // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
+ let snapshots = self
+ .visible_worktrees(cx)
+ .filter_map(|tree| {
+ let tree = tree.read(cx).as_local()?;
+ Some(tree.snapshot())
+ })
+ .collect::<Vec<_>>();
+
+ let background = cx.background().clone();
+ let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
+ if path_count == 0 {
+ let (_, rx) = smol::channel::bounded(1024);
+ return rx;
+ }
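+ // Never spawn more workers than there are paths to scan.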
+ let workers = background.num_cpus().min(path_count);
+ let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
+ let mut unnamed_files = vec![];
+ let opened_buffers = self
+ .opened_buffers
+ .iter()
+ .filter_map(|(_, b)| {
+ let buffer = b.upgrade()?;
+ let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+ if let Some(path) = snapshot.file().map(|file| file.path()) {
+ Some((path.clone(), (buffer, snapshot)))
+ } else {
+ unnamed_files.push(buffer);
+ None
+ }
+ })
+ .collect();
+ cx.background()
+ .spawn(Self::background_search(
+ unnamed_files,
+ opened_buffers,
+ cx.background().clone(),
+ self.fs.clone(),
+ workers,
+ query.clone(),
+ path_count,
+ snapshots,
+ matching_paths_tx,
+ ))
+ .detach();
+
+ let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
+ let background = cx.background().clone();
+ let (result_tx, result_rx) = smol::channel::bounded(1024);
+ cx.background()
+ .spawn(async move {
+ let Ok(buffers) = buffers.await else {
+ return;
+ };
+
+ let buffers_len = buffers.len();
+ if buffers_len == 0 {
+ return;
+ }
+ let query = &query;
+ let (finished_tx, mut finished_rx) = smol::channel::unbounded();
+ background
+ .scoped(|scope| {
+ #[derive(Clone)]
+ struct FinishedStatus {
+ entry: Option<(Handle<Buffer>, Vec<Range<Anchor>>)>,
+ buffer_index: SearchMatchCandidateIndex,
+ }
+
+ for _ in 0..workers {
+ let finished_tx = finished_tx.clone();
+ let mut buffers_rx = buffers_rx.clone();
+ scope.spawn(async move {
+ while let Some((entry, buffer_index)) = buffers_rx.next().await {
+ let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
+ {
+ if query.file_matches(
+ snapshot.file().map(|file| file.path().as_ref()),
+ ) {
+ query
+ .search(&snapshot, None)
+ .await
+ .iter()
+ .map(|range| {
+ snapshot.anchor_before(range.start)
+ ..snapshot.anchor_after(range.end)
+ })
+ .collect()
+ } else {
+ Vec::new()
+ }
+ } else {
+ Vec::new()
+ };
+
+ let status = if !buffer_matches.is_empty() {
+ let entry = if let Some((buffer, _)) = entry.as_ref() {
+ Some((buffer.clone(), buffer_matches))
+ } else {
+ None
+ };
+ FinishedStatus {
+ entry,
+ buffer_index,
+ }
+ } else {
+ FinishedStatus {
+ entry: None,
+ buffer_index,
+ }
+ };
+ if finished_tx.send(status).await.is_err() {
+ break;
+ }
+ }
+ });
+ }
+ // Report sorted matches
+ scope.spawn(async move {
+ let mut current_index = 0;
+ let mut scratch = vec![None; buffers_len];
+ while let Some(status) = finished_rx.next().await {
+ debug_assert!(
+ scratch[status.buffer_index].is_none(),
+ "Got match status of position {} twice",
+ status.buffer_index
+ );
+ let index = status.buffer_index;
+ scratch[index] = Some(status);
+ while current_index < buffers_len {
+ let Some(current_entry) = scratch[current_index].take() else {
+ // We intentionally **do not** increment `current_index` here. When next element arrives
+ // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
+ // this time.
+ break;
+ };
+ if let Some(entry) = current_entry.entry {
+ result_tx.send(entry).await.log_err();
+ }
+ current_index += 1;
+ }
+ if current_index == buffers_len {
+ break;
+ }
+ }
+ });
+ })
+ .await;
+ })
+ .detach();
+ result_rx
+ }
+ /// Pick paths that might potentially contain a match of a given search query.
+ async fn background_search(
+ unnamed_buffers: Vec<Handle<Buffer>>,
+ opened_buffers: HashMap<Arc<Path>, (Handle<Buffer>, BufferSnapshot)>,
+ background: Arc<Background>,
+ fs: Arc<dyn Fs>,
+ workers: usize,
+ query: SearchQuery,
+ path_count: usize,
+ snapshots: Vec<LocalSnapshot>,
+ matching_paths_tx: Sender<SearchMatchCandidate>,
+ ) {
+ let fs = &fs;
+ let query = &query;
+ let matching_paths_tx = &matching_paths_tx;
+ let snapshots = &snapshots;
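+ // Ceiling division so that every path is assigned to exactly one worker.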
+ let paths_per_worker = (path_count + workers - 1) / workers;
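+ // Unnamed buffers exist only in memory, so they are always candidates.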
+ for buffer in unnamed_buffers {
+ matching_paths_tx
+ .send(SearchMatchCandidate::OpenBuffer {
+ buffer: buffer.clone(),
+ path: None,
+ })
+ .await
+ .log_err();
+ }
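+ // Open buffers with paths are candidates too; the disk scan below skips them.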
+ for (path, (buffer, _)) in opened_buffers.iter() {
+ matching_paths_tx
+ .send(SearchMatchCandidate::OpenBuffer {
+ buffer: buffer.clone(),
+ path: Some(path.clone()),
+ })
+ .await
+ .log_err();
+ }
+ background
+ .scoped(|scope| {
+ for worker_ix in 0..workers {
+ let worker_start_ix = worker_ix * paths_per_worker;
+ let worker_end_ix = worker_start_ix + paths_per_worker;
+ let opened_buffers = opened_buffers.clone();
+ scope.spawn(async move {
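+ // This worker owns the half-open range [worker_start_ix, worker_end_ix) of the global file index, sliced across the worktree snapshots.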
+ let mut snapshot_start_ix = 0;
+ let mut abs_path = PathBuf::new();
+ for snapshot in snapshots {
+ let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
+ if worker_end_ix <= snapshot_start_ix {
+ break;
+ } else if worker_start_ix > snapshot_end_ix {
+ snapshot_start_ix = snapshot_end_ix;
+ continue;
+ } else {
+ let start_in_snapshot =
+ worker_start_ix.saturating_sub(snapshot_start_ix);
+ let end_in_snapshot =
+ cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
+
+ for entry in snapshot
+ .files(false, start_in_snapshot)
+ .take(end_in_snapshot - start_in_snapshot)
+ {
+ if matching_paths_tx.is_closed() {
+ break;
+ }
+ if opened_buffers.contains_key(&entry.path) {
+ continue;
+ }
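+ // Filter by path first; only open and scan the file contents when the path can match.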
+ let matches = if query.file_matches(Some(&entry.path)) {
+ abs_path.clear();
+ abs_path.push(&snapshot.abs_path());
+ abs_path.push(&entry.path);
+ if let Some(file) = fs.open_sync(&abs_path).await.log_err()
+ {
+ query.detect(file).unwrap_or(false)
+ } else {
+ false
+ }
+ } else {
+ false
+ };
+
+ if matches {
+ let project_path = SearchMatchCandidate::Path {
+ worktree_id: snapshot.id(),
+ path: entry.path.clone(),
+ };
+ if matching_paths_tx.send(project_path).await.is_err() {
+ break;
+ }
+ }
+ }
+
+ snapshot_start_ix = snapshot_end_ix;
+ }
+ }
+ });
+ }
+ })
+ .await;
+ }
+
+ fn request_lsp<R: LspCommand>(
+ &self,
+ buffer_handle: Handle<Buffer>,
+ server: LanguageServerToQuery,
+ request: R,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<R::Response>>
+ where
+ <R::LspRequest as lsp2::request::Request>::Result: Send,
+ {
+ let buffer = buffer_handle.read(cx);
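+ // Local projects query the language server directly; remote projects forward the request over RPC below.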
+ if self.is_local() {
+ let language_server = match server {
+ LanguageServerToQuery::Primary => {
+ match self.primary_language_server_for_buffer(buffer, cx) {
+ Some((_, server)) => Some(Arc::clone(server)),
+ None => return Task::ready(Ok(Default::default())),
+ }
+ }
+ LanguageServerToQuery::Other(id) => self
+ .language_server_for_buffer(buffer, id, cx)
+ .map(|(_, server)| Arc::clone(server)),
+ };
+ let file = File::from_dyn(buffer.file()).and_then(File::as_local);
+ if let (Some(file), Some(language_server)) = (file, language_server) {
+ let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
+ return cx.spawn(|this, cx| async move {
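+ // Return a default response if the server does not advertise the capability for this request.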
+ if !request.check_capabilities(language_server.capabilities()) {
+ return Ok(Default::default());
+ }
+
+ let result = language_server.request::<R::LspRequest>(lsp_params).await;
+ let response = match result {
+ Ok(response) => response,
+
+ Err(err) => {
+ log::warn!(
+ "Generic lsp request to {} failed: {}",
+ language_server.name(),
+ err
+ );
+ return Err(err);
+ }
+ };
+
+ request
+ .response_from_lsp(
+ response,
+ this,
+ buffer_handle,
+ language_server.server_id(),
+ cx,
+ )
+ .await
+ });
+ }
+ } else if let Some(project_id) = self.remote_id() {
+ return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
+ }
+
+ Task::ready(Ok(Default::default()))
+ }
+
+ fn send_lsp_proto_request<R: LspCommand>(
+ &self,
+ buffer: Handle<Buffer>,
+ project_id: u64,
+ request: R,
+ cx: &mut ModelContext<'_, Project>,
+ ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
+ let rpc = self.client.clone();
+ let message = request.to_proto(project_id, buffer.read(cx));
+ cx.spawn_weak(|this, cx| async move {
+ // Ensure the project is still alive by the time the task
+ // is scheduled.
+ this.upgrade(&cx)
+ .ok_or_else(|| anyhow!("project dropped"))?;
+ let response = rpc.request(message).await?;
+ let this = this
+ .upgrade(&cx)
+ .ok_or_else(|| anyhow!("project dropped"))?;
+ if this.read_with(&cx, |this, _| this.is_read_only()) {
+ Err(anyhow!("disconnected before completing request"))
+ } else {
+ request
+ .response_from_proto(response, this, buffer, cx)
+ .await
+ }
+ })
+ }
+
+ fn sort_candidates_and_open_buffers(
+ mut matching_paths_rx: Receiver<SearchMatchCandidate>,
+ cx: &mut ModelContext<Self>,
+ ) -> (
+ futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
+ Receiver<(
+ Option<(Handle<Buffer>, BufferSnapshot)>,
+ SearchMatchCandidateIndex,
+ )>,
+ ) {
+ let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
+ let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
+ cx.spawn(|this, cx| async move {
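+ // Drain all candidates first so they can be sorted by path before any buffers are opened.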
+ let mut buffers = vec![];
+ while let Some(entry) = matching_paths_rx.next().await {
+ buffers.push(entry);
+ }
+ buffers.sort_by_key(|candidate| candidate.path());
+ let matching_paths = buffers.clone();
+ let _ = sorted_buffers_tx.send(buffers);
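+ // Open each candidate's buffer concurrently; the enumerated index preserves the sorted order downstream.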
+ for (index, candidate) in matching_paths.into_iter().enumerate() {
+ if buffers_tx.is_closed() {
+ break;
+ }
+ let this = this.clone();
+ let buffers_tx = buffers_tx.clone();
+ cx.spawn(|mut cx| async move {
+ let buffer = match candidate {
+ SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
+ SearchMatchCandidate::Path { worktree_id, path } => this
+ .update(&mut cx, |this, cx| {
+ this.open_buffer((worktree_id, path), cx)
+ })
+ .await
+ .log_err(),
+ };
+ if let Some(buffer) = buffer {
+ let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
+ buffers_tx
+ .send((Some((buffer, snapshot)), index))
+ .await
+ .log_err();
+ } else {
+ buffers_tx.send((None, index)).await.log_err();
+ }
+
+ Ok::<_, anyhow::Error>(())
+ })
+ .detach();
+ }
+ })
+ .detach();
+ (sorted_buffers_rx, buffers_rx)
+ }
+
+ pub fn find_or_create_local_worktree(
+ &mut self,
+ abs_path: impl AsRef<Path>,
+ visible: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<(Handle<Worktree>, PathBuf)>> {
+ let abs_path = abs_path.as_ref();
+ if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
+ Task::ready(Ok((tree, relative_path)))
+ } else {
+ let worktree = self.create_local_worktree(abs_path, visible, cx);
+ cx.foreground()
+ .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
+ }
+ }
+
+ pub fn find_local_worktree(
+ &self,
+ abs_path: &Path,
+ cx: &AppContext,
+ ) -> Option<(Handle<Worktree>, PathBuf)> {
+ for tree in &self.worktrees {
+ if let Some(tree) = tree.upgrade() {
+ if let Some(relative_path) = tree
+ .read(cx)
+ .as_local()
+ .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
+ {
+ return Some((tree.clone(), relative_path.into()));
+ }
+ }
+ }
+ None
+ }
+
+ pub fn is_shared(&self) -> bool {
+ matches!(self.client_state, Some(ProjectClientState::Local { .. }))
+ }
+
+ fn create_local_worktree(
+ &mut self,
+ abs_path: impl AsRef<Path>,
+ visible: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Worktree>>> {
+ let fs = self.fs.clone();
+ let client = self.client.clone();
+ let next_entry_id = self.next_entry_id.clone();
+ let path: Arc<Path> = abs_path.as_ref().into();
+ let task = self
+ .loading_local_worktrees
+ .entry(path.clone())
+ .or_insert_with(|| {
+ cx.spawn(|project, mut cx| {
+ async move {
+ let worktree = Worktree::local(
+ client.clone(),
+ path.clone(),
+ visible,
+ fs,
+ next_entry_id,
+ &mut cx,
+ )
+ .await;
+
+ project.update(&mut cx, |project, _| {
+ project.loading_local_worktrees.remove(&path);
+ });
+
+ let worktree = worktree?;
+ project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
+ Ok(worktree)
+ }
+ .map_err(Arc::new)
+ })
+ .shared()
+ })
+ .clone();
+ cx.foreground().spawn(async move {
+ match task.await {
+ Ok(worktree) => Ok(worktree),
+ Err(err) => Err(anyhow!("{}", err)),
+ }
+ })
+ }
+
+ pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
+ self.worktrees.retain(|worktree| {
+ if let Some(worktree) = worktree.upgrade() {
+ let id = worktree.read(cx).id();
+ if id == id_to_remove {
+ cx.emit(Event::WorktreeRemoved(id));
+ false
+ } else {
+ true
+ }
+ } else {
+ false
+ }
+ });
+ self.metadata_changed(cx);
+ }
+
+ fn add_worktree(&mut self, worktree: &Handle<Worktree>, cx: &mut ModelContext<Self>) {
+ cx.observe(worktree, |_, _, cx| cx.notify()).detach();
+ if worktree.read(cx).is_local() {
+ cx.subscribe(worktree, |this, worktree, event, cx| match event {
+ worktree::Event::UpdatedEntries(changes) => {
+ this.update_local_worktree_buffers(&worktree, changes, cx);
+ this.update_local_worktree_language_servers(&worktree, changes, cx);
+ this.update_local_worktree_settings(&worktree, changes, cx);
+ this.update_prettier_settings(&worktree, changes, cx);
+ cx.emit(Event::WorktreeUpdatedEntries(
+ worktree.read(cx).id(),
+ changes.clone(),
+ ));
+ }
+ worktree::Event::UpdatedGitRepositories(updated_repos) => {
+ this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
+ }
+ })
+ .detach();
+ }
+
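+ // Hold a strong handle if the project is shared or the worktree is visible or remote; otherwise hold it weakly so it can drop when unused.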
+ let push_strong_handle = {
+ let worktree = worktree.read(cx);
+ self.is_shared() || worktree.is_visible() || worktree.is_remote()
+ };
+ if push_strong_handle {
+ self.worktrees
+ .push(WorktreeHandle::Strong(worktree.clone()));
+ } else {
+ self.worktrees
+ .push(WorktreeHandle::Weak(worktree.downgrade()));
+ }
+
+ let handle_id = worktree.id();
+ cx.observe_release(worktree, move |this, worktree, cx| {
+ let _ = this.remove_worktree(worktree.id(), cx);
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.clear_local_settings(handle_id, cx).log_err()
+ });
+ })
+ .detach();
+
+ cx.emit(Event::WorktreeAdded);
+ self.metadata_changed(cx);
+ }
+
+ fn update_local_worktree_buffers(
+ &mut self,
+ worktree_handle: &Handle<Worktree>,
+ changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
+ cx: &mut ModelContext<Self>,
+ ) {
+ let snapshot = worktree_handle.read(cx).snapshot();
+
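+ // Track buffers whose absolute path changed so their language servers can be re-registered below.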
+ let mut renamed_buffers = Vec::new();
+ for (path, entry_id, _) in changes {
+ let worktree_id = worktree_handle.read(cx).id();
+ let project_path = ProjectPath {
+ worktree_id,
+ path: path.clone(),
+ };
+
+ let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
+ Some(&buffer_id) => buffer_id,
+ None => match self.local_buffer_ids_by_path.get(&project_path) {
+ Some(&buffer_id) => buffer_id,
+ None => {
+ continue;
+ }
+ },
+ };
+
+ let open_buffer = self.opened_buffers.get(&buffer_id);
+ let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
+ buffer
+ } else {
+ self.opened_buffers.remove(&buffer_id);
+ self.local_buffer_ids_by_path.remove(&project_path);
+ self.local_buffer_ids_by_entry_id.remove(entry_id);
+ continue;
+ };
+
+ buffer.update(cx, |buffer, cx| {
+ if let Some(old_file) = File::from_dyn(buffer.file()) {
+ if old_file.worktree != *worktree_handle {
+ return;
+ }
+
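+ // Resolve the buffer's new file state: by entry id first, then by path, otherwise mark it deleted.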
+ let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+ File {
+ is_local: true,
+ entry_id: entry.id,
+ mtime: entry.mtime,
+ path: entry.path.clone(),
+ worktree: worktree_handle.clone(),
+ is_deleted: false,
+ }
+ } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
+ File {
+ is_local: true,
+ entry_id: entry.id,
+ mtime: entry.mtime,
+ path: entry.path.clone(),
+ worktree: worktree_handle.clone(),
+ is_deleted: false,
+ }
+ } else {
+ File {
+ is_local: true,
+ entry_id: old_file.entry_id,
+ path: old_file.path().clone(),
+ mtime: old_file.mtime(),
+ worktree: worktree_handle.clone(),
+ is_deleted: true,
+ }
+ };
+
+ let old_path = old_file.abs_path(cx);
+ if new_file.abs_path(cx) != old_path {
+ renamed_buffers.push((cx.handle(), old_file.clone()));
+ self.local_buffer_ids_by_path.remove(&project_path);
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id,
+ path: path.clone(),
+ },
+ buffer_id,
+ );
+ }
+
+ if new_file.entry_id != *entry_id {
+ self.local_buffer_ids_by_entry_id.remove(entry_id);
+ self.local_buffer_ids_by_entry_id
+ .insert(new_file.entry_id, buffer_id);
+ }
+
+ if new_file != *old_file {
+ if let Some(project_id) = self.remote_id() {
+ self.client
+ .send(proto::UpdateBufferFile {
+ project_id,
+ buffer_id: buffer_id as u64,
+ file: Some(new_file.to_proto()),
+ })
+ .log_err();
+ }
+
+ buffer.file_updated(Arc::new(new_file), cx).detach();
+ }
+ }
+ });
+ }
+
+ for (buffer, old_file) in renamed_buffers {
+ self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
+ self.detect_language_for_buffer(&buffer, cx);
+ self.register_buffer_with_language_servers(&buffer, cx);
+ }
+ }
+
+ fn update_local_worktree_language_servers(
+ &mut self,
+ worktree_handle: &Handle<Worktree>,
+ changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
+ cx: &mut ModelContext<Self>,
+ ) {
+ if changes.is_empty() {
+ return;
+ }
+
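+ // Collect the language servers registered for this worktree, deduplicated by server id.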
+ let worktree_id = worktree_handle.read(cx).id();
+ let mut language_server_ids = self
+ .language_server_ids
+ .iter()
+ .filter_map(|((server_worktree_id, _), server_id)| {
+ (*server_worktree_id == worktree_id).then_some(*server_id)
+ })
+ .collect::<Vec<_>>();
+ language_server_ids.sort();
+ language_server_ids.dedup();
+
+ let abs_path = worktree_handle.read(cx).abs_path();
+ for server_id in &language_server_ids {
+ if let Some(LanguageServerState::Running {
+ server,
+ watched_paths,
+ ..
+ }) = self.language_servers.get(server_id)
+ {
+ if let Some(watched_paths) = watched_paths.get(&worktree_id) {
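+ // Translate the worktree changes into LSP file events, dropping paths the server does not watch.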
+ let params = lsp2::DidChangeWatchedFilesParams {
+ changes: changes
+ .iter()
+ .filter_map(|(path, _, change)| {
+ if !watched_paths.is_match(&path) {
+ return None;
+ }
+ let typ = match change {
+ PathChange::Loaded => return None,
+ PathChange::Added => lsp2::FileChangeType::CREATED,
+ PathChange::Removed => lsp2::FileChangeType::DELETED,
+ PathChange::Updated => lsp2::FileChangeType::CHANGED,
+ PathChange::AddedOrUpdated => lsp2::FileChangeType::CHANGED,
+ };
+ Some(lsp2::FileEvent {
+ uri: lsp2::Url::from_file_path(abs_path.join(path)).unwrap(),
+ typ,
+ })
+ })
+ .collect(),
+ };
+
+ if !params.changes.is_empty() {
+ server
+ .notify::<lsp2::notification::DidChangeWatchedFiles>(params)
+ .log_err();
+ }
+ }
+ }
+ }
+ }
+
+ fn update_local_worktree_buffers_git_repos(
+ &mut self,
+ worktree_handle: Handle<Worktree>,
+ changed_repos: &UpdatedGitRepositoriesSet,
+ cx: &mut ModelContext<Self>,
+ ) {
+ debug_assert!(worktree_handle.read(cx).is_local());
+
+ // Identify the loading buffers whose containing repository has changed.
+ let future_buffers = self
+ .loading_buffers_by_path
+ .iter()
+ .filter_map(|(project_path, receiver)| {
+ if project_path.worktree_id != worktree_handle.read(cx).id() {
+ return None;
+ }
+ let path = &project_path.path;
+ changed_repos
+ .iter()
+ .find(|(work_dir, _)| path.starts_with(work_dir))?;
+ let receiver = receiver.clone();
+ let path = path.clone();
+ Some(async move {
+ wait_for_loading_buffer(receiver)
+ .await
+ .ok()
+ .map(|buffer| (buffer, path))
+ })
+ })
+ .collect::<FuturesUnordered<_>>();
+
+ // Identify the current buffers whose containing repository has changed.
+ let current_buffers = self
+ .opened_buffers
+ .values()
+ .filter_map(|buffer| {
+ let buffer = buffer.upgrade()?;
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ if file.worktree != worktree_handle {
+ return None;
+ }
+ let path = file.path();
+ changed_repos
+ .iter()
+ .find(|(work_dir, _)| path.starts_with(work_dir))?;
+ Some((buffer, path.clone()))
+ })
+ .collect::<Vec<_>>();
+
+ if future_buffers.len() + current_buffers.len() == 0 {
+ return;
+ }
+
+ let remote_id = self.remote_id();
+ let client = self.client.clone();
+ cx.spawn_weak(move |_, mut cx| async move {
+ // Wait for all of the buffers to load.
+ let future_buffers = future_buffers.collect::<Vec<_>>().await;
+
+ // Reload the diff base for every buffer whose containing git repository has changed.
+ let snapshot =
+ worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
+ let diff_bases_by_buffer = cx
+ .background()
+ .spawn(async move {
+ future_buffers
+ .into_iter()
+ .flatten()
+ .chain(current_buffers)
+ .filter_map(|(buffer, path)| {
+ let (work_directory, repo) =
+ snapshot.repository_and_work_directory_for_path(&path)?;
+ let repo = snapshot.get_local_repo(&repo)?;
+ let relative_path = path.strip_prefix(&work_directory).ok()?;
+ let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
+ Some((buffer, base_text))
+ })
+ .collect::<Vec<_>>()
+ })
+ .await;
+
+ // Assign the new diff bases on all of the buffers.
+ for (buffer, diff_base) in diff_bases_by_buffer {
+ let buffer_id = buffer.update(&mut cx, |buffer, cx| {
+ buffer.set_diff_base(diff_base.clone(), cx);
+ buffer.remote_id()
+ });
+ if let Some(project_id) = remote_id {
+ client
+ .send(proto::UpdateDiffBase {
+ project_id,
+ buffer_id,
+ diff_base,
+ })
+ .log_err();
+ }
+ }
+ })
+ .detach();
+ }
+
+ fn update_local_worktree_settings(
+ &mut self,
+ worktree: &Handle<Worktree>,
+ changes: &UpdatedEntriesSet,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let project_id = self.remote_id();
+ let worktree_id = worktree.id();
+ let worktree = worktree.read(cx).as_local().unwrap();
+ let remote_worktree_id = worktree.id();
+
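+ // Schedule a load of each changed local settings file; removals are recorded as None.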
+ let mut settings_contents = Vec::new();
+ for (path, _, change) in changes.iter() {
+ if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
+ let settings_dir = Arc::from(
+ path.ancestors()
+ .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
+ .unwrap(),
+ );
+ let fs = self.fs.clone();
+ let removed = *change == PathChange::Removed;
+ let abs_path = worktree.absolutize(path);
+ settings_contents.push(async move {
+ (settings_dir, (!removed).then_some(fs.load(&abs_path).await))
+ });
+ }
+ }
+
+ if settings_contents.is_empty() {
+ return;
+ }
+
+ let client = self.client.clone();
+ cx.spawn_weak(move |_, mut cx| async move {
+ let settings_contents: Vec<(Arc<Path>, _)> =
+ futures::future::join_all(settings_contents).await;
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ for (directory, file_content) in settings_contents {
+ let file_content = file_content.and_then(|content| content.log_err());
+ store
+ .set_local_settings(
+ worktree_id,
+ directory.clone(),
+ file_content.as_ref().map(String::as_str),
+ cx,
+ )
+ .log_err();
+ if let Some(remote_id) = project_id {
+ client
+ .send(proto::UpdateWorktreeSettings {
+ project_id: remote_id,
+ worktree_id: remote_worktree_id.to_proto(),
+ path: directory.to_string_lossy().into_owned(),
+ content: file_content,
+ })
+ .log_err();
+ }
+ }
+ });
+ });
+ })
+ .detach();
+ }
+
+ fn update_prettier_settings(
+ &self,
+ worktree: &Handle<Worktree>,
+ changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
+ cx: &mut ModelContext<'_, Project>,
+ ) {
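+ // A change to any known prettier config file outside node_modules invalidates the cached prettier instances for this worktree.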
+ let prettier_config_files = Prettier::CONFIG_FILE_NAMES
+ .iter()
+ .map(Path::new)
+ .collect::<HashSet<_>>();
+
+ let prettier_config_file_changed = changes
+ .iter()
+ .filter(|(_, _, change)| !matches!(change, PathChange::Loaded))
+ .filter(|(path, _, _)| {
+ !path
+ .components()
+ .any(|component| component.as_os_str().to_string_lossy() == "node_modules")
+ })
+ .find(|(path, _, _)| prettier_config_files.contains(path.as_ref()));
+ let current_worktree_id = worktree.read(cx).id();
+ if let Some((config_path, _, _)) = prettier_config_file_changed {
+ log::info!(
+ "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
+ );
+ let prettiers_to_reload = self
+ .prettier_instances
+ .iter()
+ .filter_map(|((worktree_id, prettier_path), prettier_task)| {
+ if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
+ Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<_>>();
+
+ cx.background()
+ .spawn(async move {
+ for task_result in future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
+ async move {
+ prettier_task.await?
+ .clear_cache()
+ .await
+ .with_context(|| {
+ format!(
+ "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
+ )
+ })
+ .map_err(Arc::new)
+ }
+ }))
+ .await
+ {
+ if let Err(e) = task_result {
+ log::error!("Failed to clear cache for prettier: {e:#}");
+ }
+ }
+ })
+ .detach();
+ }
+ }
+
+ pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
+ let new_active_entry = entry.and_then(|project_path| {
+ let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
+ let entry = worktree.read(cx).entry_for_path(project_path.path)?;
+ Some(entry.id)
+ });
+ if new_active_entry != self.active_entry {
+ self.active_entry = new_active_entry;
+ cx.emit(Event::ActiveEntryChanged(new_active_entry));
+ }
+ }
+
+ pub fn language_servers_running_disk_based_diagnostics(
+ &self,
+ ) -> impl Iterator<Item = LanguageServerId> + '_ {
+ self.language_server_statuses
+ .iter()
+ .filter_map(|(id, status)| {
+ if status.has_pending_diagnostic_updates {
+ Some(*id)
+ } else {
+ None
+ }
+ })
+ }
+
+ pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
+ let mut summary = DiagnosticSummary::default();
+ for (_, _, path_summary) in self.diagnostic_summaries(cx) {
+ summary.error_count += path_summary.error_count;
+ summary.warning_count += path_summary.warning_count;
+ }
+ summary
+ }
+
+ pub fn diagnostic_summaries<'a>(
+ &'a self,
+ cx: &'a AppContext,
+ ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
+ self.visible_worktrees(cx).flat_map(move |worktree| {
+ let worktree = worktree.read(cx);
+ let worktree_id = worktree.id();
+ worktree
+ .diagnostic_summaries()
+ .map(move |(path, server_id, summary)| {
+ (ProjectPath { worktree_id, path }, server_id, summary)
+ })
+ })
+ }
+
+ pub fn disk_based_diagnostics_started(
+ &mut self,
+ language_server_id: LanguageServerId,
+ cx: &mut ModelContext<Self>,
+ ) {
+ cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
+ }
+
+ pub fn disk_based_diagnostics_finished(
+ &mut self,
+ language_server_id: LanguageServerId,
+ cx: &mut ModelContext<Self>,
+ ) {
+ cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
+ }
+
+ pub fn active_entry(&self) -> Option<ProjectEntryId> {
+ self.active_entry
+ }
+
+ pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
+ self.worktree_for_id(path.worktree_id, cx)?
+ .read(cx)
+ .entry_for_path(&path.path)
+ .cloned()
+ }
+
+ pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
+ let worktree = self.worktree_for_entry(entry_id, cx)?;
+ let worktree = worktree.read(cx);
+ let worktree_id = worktree.id();
+ let path = worktree.entry_for_id(entry_id)?.path.clone();
+ Some(ProjectPath { worktree_id, path })
+ }
+
+ pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
+ let workspace_root = self
+ .worktree_for_id(project_path.worktree_id, cx)?
+ .read(cx)
+ .abs_path();
+ let project_path = project_path.path.as_ref();
+
+ Some(if project_path == Path::new("") {
+ workspace_root.to_path_buf()
+ } else {
+ workspace_root.join(project_path)
+ })
+ }
+
+ // RPC message handlers
+
+ async fn handle_unshare_project(
+ this: Handle<Self>,
+ _: TypedEnvelope<proto::UnshareProject>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ if this.is_local() {
+ this.unshare(cx)?;
+ } else {
+ this.disconnected_from_host(cx);
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_add_collaborator(
+ this: Handle<Self>,
+ mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let collaborator = envelope
+ .payload
+ .collaborator
+ .take()
+ .ok_or_else(|| anyhow!("empty collaborator"))?;
+
+ let collaborator = Collaborator::from_proto(collaborator)?;
+ this.update(&mut cx, |this, cx| {
+ this.shared_buffers.remove(&collaborator.peer_id);
+ cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
+ this.collaborators
+ .insert(collaborator.peer_id, collaborator);
+ cx.notify();
+ });
+
+ Ok(())
+ }
+
+ async fn handle_update_project_collaborator(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let old_peer_id = envelope
+ .payload
+ .old_peer_id
+ .ok_or_else(|| anyhow!("missing old peer id"))?;
+ let new_peer_id = envelope
+ .payload
+ .new_peer_id
+ .ok_or_else(|| anyhow!("missing new peer id"))?;
+ this.update(&mut cx, |this, cx| {
+ let collaborator = this
+ .collaborators
+ .remove(&old_peer_id)
+ .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
+ let is_host = collaborator.replica_id == 0;
+ this.collaborators.insert(new_peer_id, collaborator);
+
+ let buffers = this.shared_buffers.remove(&old_peer_id);
+ log::info!(
+ "peer {} became {}. moving buffers {:?}",
+ old_peer_id,
+ new_peer_id,
+ &buffers
+ );
+ if let Some(buffers) = buffers {
+ this.shared_buffers.insert(new_peer_id, buffers);
+ }
+
+ if is_host {
+ this.opened_buffers
+ .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
+ this.buffer_ordered_messages_tx
+ .unbounded_send(BufferOrderedMessage::Resync)
+ .unwrap();
+ }
+
+ cx.emit(Event::CollaboratorUpdated {
+ old_peer_id,
+ new_peer_id,
+ });
+ cx.notify();
+ Ok(())
+ })
+ }
+
+ async fn handle_remove_collaborator(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let peer_id = envelope
+ .payload
+ .peer_id
+ .ok_or_else(|| anyhow!("invalid peer id"))?;
+ let replica_id = this
+ .collaborators
+ .remove(&peer_id)
+ .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
+ .replica_id;
+ for buffer in this.opened_buffers.values() {
+ if let Some(buffer) = buffer.upgrade() {
+ buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
+ }
+ }
+ this.shared_buffers.remove(&peer_id);
+
+ cx.emit(Event::CollaboratorLeft(peer_id));
+ cx.notify();
+ Ok(())
+ })
+ }
+
+ async fn handle_update_project(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateProject>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ // Ignore messages that were sent before the response to our request to join the project.
+ if envelope.message_id > this.join_project_response_message_id {
+ this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_update_worktree(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateWorktree>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
+ worktree.update(cx, |worktree, _| {
+ let worktree = worktree.as_remote_mut().unwrap();
+ worktree.update_from_remote(envelope.payload);
+ });
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_update_worktree_settings(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store
+ .set_local_settings(
+ worktree.id(),
+ PathBuf::from(&envelope.payload.path).into(),
+ envelope.payload.content.as_ref().map(String::as_str),
+ cx,
+ )
+ .log_err();
+ });
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_create_project_entry(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::CreateProjectEntry>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let worktree = this.update(&mut cx, |this, cx| {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ this.worktree_for_id(worktree_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })?;
+ let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+ let entry = worktree
+ .update(&mut cx, |worktree, cx| {
+ let worktree = worktree.as_local_mut().unwrap();
+ let path = PathBuf::from(envelope.payload.path);
+ worktree.create_entry(path, envelope.payload.is_directory, cx)
+ })
+ .await?;
+ Ok(proto::ProjectEntryResponse {
+ entry: Some((&entry).into()),
+ worktree_scan_id: worktree_scan_id as u64,
+ })
+ }
+
+ async fn handle_rename_project_entry(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::RenameProjectEntry>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+ let worktree = this.read_with(&cx, |this, cx| {
+ this.worktree_for_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })?;
+ let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+ let entry = worktree
+ .update(&mut cx, |worktree, cx| {
+ let new_path = PathBuf::from(envelope.payload.new_path);
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .rename_entry(entry_id, new_path, cx)
+ .ok_or_else(|| anyhow!("invalid entry"))
+ })?
+ .await?;
+ Ok(proto::ProjectEntryResponse {
+ entry: Some((&entry).into()),
+ worktree_scan_id: worktree_scan_id as u64,
+ })
+ }
+
+ async fn handle_copy_project_entry(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::CopyProjectEntry>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+ let worktree = this.read_with(&cx, |this, cx| {
+ this.worktree_for_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })?;
+ let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+ let entry = worktree
+ .update(&mut cx, |worktree, cx| {
+ let new_path = PathBuf::from(envelope.payload.new_path);
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .copy_entry(entry_id, new_path, cx)
+ .ok_or_else(|| anyhow!("invalid entry"))
+ })?
+ .await?;
+ Ok(proto::ProjectEntryResponse {
+ entry: Some((&entry).into()),
+ worktree_scan_id: worktree_scan_id as u64,
+ })
+ }
+
+ async fn handle_delete_project_entry(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::DeleteProjectEntry>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+
+ this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
+
+ let worktree = this.read_with(&cx, |this, cx| {
+ this.worktree_for_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("worktree not found"))
+ })?;
+ let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
+ worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .delete_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("invalid entry"))
+ })?
+ .await?;
+ Ok(proto::ProjectEntryResponse {
+ entry: None,
+ worktree_scan_id: worktree_scan_id as u64,
+ })
+ }
+
+ async fn handle_expand_project_entry(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::ExpandProjectEntry>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ExpandProjectEntryResponse> {
+ let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
+ let worktree = this
+ .read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
+ .ok_or_else(|| anyhow!("invalid request"))?;
+ worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree
+ .as_local_mut()
+ .unwrap()
+ .expand_entry(entry_id, cx)
+ .ok_or_else(|| anyhow!("invalid entry"))
+ })?
+ .await?;
+ let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
+ Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
+ }
+
+ async fn handle_update_diagnostic_summary(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
+ if let Some(summary) = envelope.payload.summary {
+ let project_path = ProjectPath {
+ worktree_id,
+ path: Path::new(&summary.path).into(),
+ };
+ worktree.update(cx, |worktree, _| {
+ worktree
+ .as_remote_mut()
+ .unwrap()
+ .update_diagnostic_summary(project_path.path.clone(), &summary);
+ });
+ cx.emit(Event::DiagnosticsUpdated {
+ language_server_id: LanguageServerId(summary.language_server_id as usize),
+ path: project_path,
+ });
+ }
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_start_language_server(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::StartLanguageServer>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let server = envelope
+ .payload
+ .server
+ .ok_or_else(|| anyhow!("invalid server"))?;
+ this.update(&mut cx, |this, cx| {
+ this.language_server_statuses.insert(
+ LanguageServerId(server.id as usize),
+ LanguageServerStatus {
+ name: server.name,
+ pending_work: Default::default(),
+ has_pending_diagnostic_updates: false,
+ progress_tokens: Default::default(),
+ },
+ );
+ cx.notify();
+ });
+ Ok(())
+ }
+
+ async fn handle_update_language_server(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateLanguageServer>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
+
+ match envelope
+ .payload
+ .variant
+ .ok_or_else(|| anyhow!("invalid variant"))?
+ {
+ proto::update_language_server::Variant::WorkStart(payload) => {
+ this.on_lsp_work_start(
+ language_server_id,
+ payload.token,
+ LanguageServerProgress {
+ message: payload.message,
+ percentage: payload.percentage.map(|p| p as usize),
+ last_update_at: Instant::now(),
+ },
+ cx,
+ );
+ }
+
+ proto::update_language_server::Variant::WorkProgress(payload) => {
+ this.on_lsp_work_progress(
+ language_server_id,
+ payload.token,
+ LanguageServerProgress {
+ message: payload.message,
+ percentage: payload.percentage.map(|p| p as usize),
+ last_update_at: Instant::now(),
+ },
+ cx,
+ );
+ }
+
+ proto::update_language_server::Variant::WorkEnd(payload) => {
+ this.on_lsp_work_end(language_server_id, payload.token, cx);
+ }
+
+ proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
+ this.disk_based_diagnostics_started(language_server_id, cx);
+ }
+
+ proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
+ this.disk_based_diagnostics_finished(language_server_id, cx)
+ }
+ }
+
+ Ok(())
+ })
+ }
+
+ async fn handle_update_buffer(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateBuffer>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::Ack> {
+ this.update(&mut cx, |this, cx| {
+ let payload = envelope.payload.clone();
+ let buffer_id = payload.buffer_id;
+ let ops = payload
+ .operations
+ .into_iter()
+ .map(language2::proto::deserialize_operation)
+ .collect::<Result<Vec<_>, _>>()?;
+ let is_remote = this.is_remote();
+ match this.opened_buffers.entry(buffer_id) {
+ hash_map::Entry::Occupied(mut e) => match e.get_mut() {
+ OpenBuffer::Strong(buffer) => {
+ buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
+ }
+ OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
+ OpenBuffer::Weak(_) => {}
+ },
+ hash_map::Entry::Vacant(e) => {
+ assert!(
+ is_remote,
+ "received buffer update from {:?}",
+ envelope.original_sender_id
+ );
+ e.insert(OpenBuffer::Operations(ops));
+ }
+ }
+ Ok(proto::Ack {})
+ })
+ }
+
+ async fn handle_create_buffer_for_peer(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::CreateBufferForPeer>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ match envelope
+ .payload
+ .variant
+ .ok_or_else(|| anyhow!("missing variant"))?
+ {
+ proto::create_buffer_for_peer::Variant::State(mut state) => {
+ let mut buffer_file = None;
+ if let Some(file) = state.file.take() {
+ let worktree_id = WorktreeId::from_proto(file.worktree_id);
+ let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
+ anyhow!("no worktree found for id {}", file.worktree_id)
+ })?;
+ buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
+ as Arc<dyn language2::File>);
+ }
+
+ let buffer_id = state.id;
+ let buffer = cx.add_model(|_| {
+ Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
+ });
+ this.incomplete_remote_buffers
+ .insert(buffer_id, Some(buffer));
+ }
+ proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
+ let buffer = this
+ .incomplete_remote_buffers
+ .get(&chunk.buffer_id)
+ .cloned()
+ .flatten()
+ .ok_or_else(|| {
+ anyhow!(
+ "received chunk for buffer {} without initial state",
+ chunk.buffer_id
+ )
+ })?;
+ let operations = chunk
+ .operations
+ .into_iter()
+ .map(language2::proto::deserialize_operation)
+ .collect::<Result<Vec<_>>>()?;
+ buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
+
+ if chunk.is_last {
+ this.incomplete_remote_buffers.remove(&chunk.buffer_id);
+ this.register_buffer(&buffer, cx)?;
+ }
+ }
+ }
+
+ Ok(())
+ })
+ }
+
+ async fn handle_update_diff_base(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateDiffBase>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let buffer_id = envelope.payload.buffer_id;
+ let diff_base = envelope.payload.diff_base;
+ if let Some(buffer) = this
+ .opened_buffers
+ .get_mut(&buffer_id)
+ .and_then(|b| b.upgrade())
+ .or_else(|| {
+ this.incomplete_remote_buffers
+ .get(&buffer_id)
+ .cloned()
+ .flatten()
+ })
+ {
+ buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_update_buffer_file(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::UpdateBufferFile>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let buffer_id = envelope.payload.buffer_id;
+
+ this.update(&mut cx, |this, cx| {
+ let payload = envelope.payload.clone();
+ if let Some(buffer) = this
+ .opened_buffers
+ .get(&buffer_id)
+ .and_then(|b| b.upgrade())
+ .or_else(|| {
+ this.incomplete_remote_buffers
+ .get(&buffer_id)
+ .cloned()
+ .flatten()
+ })
+ {
+ let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
+ let worktree = this
+ .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
+ .ok_or_else(|| anyhow!("no such worktree"))?;
+ let file = File::from_proto(file, worktree, cx)?;
+ buffer.update(cx, |buffer, cx| {
+ buffer.file_updated(Arc::new(file), cx).detach();
+ });
+ this.detect_language_for_buffer(&buffer, cx);
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_save_buffer(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::SaveBuffer>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::BufferSaved> {
+ let buffer_id = envelope.payload.buffer_id;
+ let (project_id, buffer) = this.update(&mut cx, |this, cx| {
+ let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
+ let buffer = this
+ .opened_buffers
+ .get(&buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
+ anyhow::Ok((project_id, buffer))
+ })?;
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(deserialize_version(&envelope.payload.version))
+ })
+ .await?;
+ let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
+
+ this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
+ .await?;
+ Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
+ project_id,
+ buffer_id,
+ version: serialize_version(buffer.saved_version()),
+ mtime: Some(buffer.saved_mtime().into()),
+ fingerprint: language2::proto::serialize_fingerprint(
+ buffer.saved_version_fingerprint(),
+ ),
+ }))
+ }
+
+ async fn handle_reload_buffers(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::ReloadBuffers>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ReloadBuffersResponse> {
+ let sender_id = envelope.original_sender_id()?;
+ let reload = this.update(&mut cx, |this, cx| {
+ let mut buffers = HashSet::default();
+ for buffer_id in &envelope.payload.buffer_ids {
+ buffers.insert(
+ this.opened_buffers
+ .get(buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
+ );
+ }
+ Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
+ })?;
+
+ let project_transaction = reload.await?;
+ let project_transaction = this.update(&mut cx, |this, cx| {
+ this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
+ });
+ Ok(proto::ReloadBuffersResponse {
+ transaction: Some(project_transaction),
+ })
+ }
+
+ async fn handle_synchronize_buffers(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::SynchronizeBuffers>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::SynchronizeBuffersResponse> {
+ let project_id = envelope.payload.project_id;
+ let mut response = proto::SynchronizeBuffersResponse {
+ buffers: Default::default(),
+ };
+
+ this.update(&mut cx, |this, cx| {
+ let Some(guest_id) = envelope.original_sender_id else {
+ error!("missing original_sender_id on SynchronizeBuffers request");
+ return;
+ };
+
+ this.shared_buffers.entry(guest_id).or_default().clear();
+ for buffer in envelope.payload.buffers {
+ let buffer_id = buffer.id;
+ let remote_version = language2::proto::deserialize_version(&buffer.version);
+ if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
+ this.shared_buffers
+ .entry(guest_id)
+ .or_default()
+ .insert(buffer_id);
+
+ let buffer = buffer.read(cx);
+ response.buffers.push(proto::BufferVersion {
+ id: buffer_id,
+ version: language2::proto::serialize_version(&buffer.version),
+ });
+
+ let operations = buffer.serialize_ops(Some(remote_version), cx);
+ let client = this.client.clone();
+ if let Some(file) = buffer.file() {
+ client
+ .send(proto::UpdateBufferFile {
+ project_id,
+ buffer_id: buffer_id as u64,
+ file: Some(file.to_proto()),
+ })
+ .log_err();
+ }
+
+ client
+ .send(proto::UpdateDiffBase {
+ project_id,
+ buffer_id: buffer_id as u64,
+ diff_base: buffer.diff_base().map(Into::into),
+ })
+ .log_err();
+
+ client
+ .send(proto::BufferReloaded {
+ project_id,
+ buffer_id,
+ version: language2::proto::serialize_version(buffer.saved_version()),
+ mtime: Some(buffer.saved_mtime().into()),
+ fingerprint: language2::proto::serialize_fingerprint(
+ buffer.saved_version_fingerprint(),
+ ),
+ line_ending: language2::proto::serialize_line_ending(
+ buffer.line_ending(),
+ ) as i32,
+ })
+ .log_err();
+
+ cx.background()
+ .spawn(
+ async move {
+ let operations = operations.await;
+ for chunk in split_operations(operations) {
+ client
+ .request(proto::UpdateBuffer {
+ project_id,
+ buffer_id,
+ operations: chunk,
+ })
+ .await?;
+ }
+ anyhow::Ok(())
+ }
+ .log_err(),
+ )
+ .detach();
+ }
+ }
+ });
+
+ Ok(response)
+ }
+
+ async fn handle_format_buffers(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::FormatBuffers>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::FormatBuffersResponse> {
+ let sender_id = envelope.original_sender_id()?;
+ let format = this.update(&mut cx, |this, cx| {
+ let mut buffers = HashSet::default();
+ for buffer_id in &envelope.payload.buffer_ids {
+ buffers.insert(
+ this.opened_buffers
+ .get(buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
+ );
+ }
+ let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
+ Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
+ })?;
+
+ let project_transaction = format.await?;
+ let project_transaction = this.update(&mut cx, |this, cx| {
+ this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
+ });
+ Ok(proto::FormatBuffersResponse {
+ transaction: Some(project_transaction),
+ })
+ }
+
+ async fn handle_apply_additional_edits_for_completion(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
+ let (buffer, completion) = this.update(&mut cx, |this, cx| {
+ let buffer = this
+ .opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+ let language = buffer.read(cx).language();
+ let completion = language2::proto::deserialize_completion(
+ envelope
+ .payload
+ .completion
+ .ok_or_else(|| anyhow!("invalid completion"))?,
+ language.cloned(),
+ );
+ Ok::<_, anyhow::Error>((buffer, completion))
+ })?;
+
+ let completion = completion.await?;
+
+ let apply_additional_edits = this.update(&mut cx, |this, cx| {
+ this.apply_additional_edits_for_completion(buffer, completion, false, cx)
+ });
+
+ Ok(proto::ApplyCompletionAdditionalEditsResponse {
+ transaction: apply_additional_edits
+ .await?
+ .as_ref()
+ .map(language2::proto::serialize_transaction),
+ })
+ }
+
+ async fn handle_apply_code_action(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::ApplyCodeAction>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ApplyCodeActionResponse> {
+ let sender_id = envelope.original_sender_id()?;
+ let action = language2::proto::deserialize_code_action(
+ envelope
+ .payload
+ .action
+ .ok_or_else(|| anyhow!("invalid action"))?,
+ )?;
+ let apply_code_action = this.update(&mut cx, |this, cx| {
+ let buffer = this
+ .opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+ Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
+ })?;
+
+ let project_transaction = apply_code_action.await?;
+ let project_transaction = this.update(&mut cx, |this, cx| {
+ this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
+ });
+ Ok(proto::ApplyCodeActionResponse {
+ transaction: Some(project_transaction),
+ })
+ }
+
+ async fn handle_on_type_formatting(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::OnTypeFormatting>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::OnTypeFormattingResponse> {
+ let on_type_formatting = this.update(&mut cx, |this, cx| {
+ let buffer = this
+ .opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
+ let position = envelope
+ .payload
+ .position
+ .and_then(deserialize_anchor)
+ .ok_or_else(|| anyhow!("invalid position"))?;
+ Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
+ buffer,
+ position,
+ envelope.payload.trigger.clone(),
+ cx,
+ ))
+ })?;
+
+ let transaction = on_type_formatting
+ .await?
+ .as_ref()
+ .map(language2::proto::serialize_transaction);
+ Ok(proto::OnTypeFormattingResponse { transaction })
+ }
+
+ async fn handle_inlay_hints(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::InlayHints>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::InlayHintsResponse> {
+ let sender_id = envelope.original_sender_id()?;
+ let buffer = this.update(&mut cx, |this, cx| {
+ this.opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
+ })?;
+ let buffer_version = deserialize_version(&envelope.payload.version);
+
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_version(buffer_version.clone())
+ })
+ .await
+ .with_context(|| {
+ format!(
+ "waiting for version {:?} for buffer {}",
+ buffer_version,
+ buffer.id()
+ )
+ })?;
+
+ let start = envelope
+ .payload
+ .start
+ .and_then(deserialize_anchor)
+ .context("missing range start")?;
+ let end = envelope
+ .payload
+ .end
+ .and_then(deserialize_anchor)
+ .context("missing range end")?;
+ let buffer_hints = this
+ .update(&mut cx, |project, cx| {
+ project.inlay_hints(buffer, start..end, cx)
+ })
+ .await
+ .context("inlay hints fetch")?;
+
+ Ok(this.update(&mut cx, |project, cx| {
+ InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
+ }))
+ }
+
+ async fn handle_resolve_inlay_hint(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::ResolveInlayHint>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::ResolveInlayHintResponse> {
+ let proto_hint = envelope
+ .payload
+ .hint
+ .ok_or_else(|| anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint"))?;
+ let hint = InlayHints::proto_to_project_hint(proto_hint)
+ .context("resolved proto inlay hint conversion")?;
+ let buffer = this.update(&mut cx, |this, cx| {
+ this.opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
+ })?;
+ let response_hint = this
+ .update(&mut cx, |project, cx| {
+ project.resolve_inlay_hint(
+ hint,
+ buffer,
+ LanguageServerId(envelope.payload.language_server_id as usize),
+ cx,
+ )
+ })
+ .await
+ .context("inlay hints fetch")?;
+ Ok(proto::ResolveInlayHintResponse {
+ hint: Some(InlayHints::project_to_proto_hint(response_hint)),
+ })
+ }
+
+ async fn handle_refresh_inlay_hints(
+ this: Handle<Self>,
+ _: TypedEnvelope<proto::RefreshInlayHints>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::Ack> {
+ this.update(&mut cx, |_, cx| {
+ cx.emit(Event::RefreshInlayHints);
+ });
+ Ok(proto::Ack {})
+ }
+
+ async fn handle_lsp_command<T: LspCommand>(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<T::ProtoRequest>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
+ where
+ <T::LspRequest as lsp2::request::Request>::Result: Send,
+ {
+ let sender_id = envelope.original_sender_id()?;
+ let buffer_id = T::buffer_id_from_proto(&envelope.payload);
+ let buffer_handle = this.read_with(&cx, |this, _| {
+ this.opened_buffers
+ .get(&buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
+ })?;
+ let request = T::from_proto(
+ envelope.payload,
+ this.clone(),
+ buffer_handle.clone(),
+ cx.clone(),
+ )
+ .await?;
+ let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
+ let response = this
+ .update(&mut cx, |this, cx| {
+ this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
+ })
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ Ok(T::response_to_proto(
+ response,
+ this,
+ sender_id,
+ &buffer_version,
+ cx,
+ ))
+ })
+ }
+
+ async fn handle_get_project_symbols(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::GetProjectSymbols>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::GetProjectSymbolsResponse> {
+ let symbols = this
+ .update(&mut cx, |this, cx| {
+ this.symbols(&envelope.payload.query, cx)
+ })
+ .await?;
+
+ Ok(proto::GetProjectSymbolsResponse {
+ symbols: symbols.iter().map(serialize_symbol).collect(),
+ })
+ }
+
+ async fn handle_search_project(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::SearchProject>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::SearchProjectResponse> {
+ let peer_id = envelope.original_sender_id()?;
+ let query = SearchQuery::from_proto(envelope.payload)?;
+ let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
+
+ cx.spawn(|mut cx| async move {
+ let mut locations = Vec::new();
+ while let Some((buffer, ranges)) = result.next().await {
+ for range in ranges {
+ let start = serialize_anchor(&range.start);
+ let end = serialize_anchor(&range.end);
+ let buffer_id = this.update(&mut cx, |this, cx| {
+ this.create_buffer_for_peer(&buffer, peer_id, cx)
+ });
+ locations.push(proto::Location {
+ buffer_id,
+ start: Some(start),
+ end: Some(end),
+ });
+ }
+ }
+ Ok(proto::SearchProjectResponse { locations })
+ })
+ .await
+ }
+
+ async fn handle_open_buffer_for_symbol(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::OpenBufferForSymbolResponse> {
+ let peer_id = envelope.original_sender_id()?;
+ let symbol = envelope
+ .payload
+ .symbol
+ .ok_or_else(|| anyhow!("invalid symbol"))?;
+ let symbol = this
+ .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
+ .await?;
+ let symbol = this.read_with(&cx, |this, _| {
+ let signature = this.symbol_signature(&symbol.path);
+ if signature == symbol.signature {
+ Ok(symbol)
+ } else {
+ Err(anyhow!("invalid symbol signature"))
+ }
+ })?;
+ let buffer = this
+ .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
+ .await?;
+
+ Ok(proto::OpenBufferForSymbolResponse {
+ buffer_id: this.update(&mut cx, |this, cx| {
+ this.create_buffer_for_peer(&buffer, peer_id, cx)
+ }),
+ })
+ }
+
+ fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
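+ // Sign a symbol's path with this project's nonce so the signature can be verified when a peer asks to open it.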
+ let mut hasher = Sha256::new();
+ hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
+ hasher.update(project_path.path.to_string_lossy().as_bytes());
+ hasher.update(self.nonce.to_be_bytes());
+ hasher.finalize().as_slice().try_into().unwrap()
+ }
+
+ async fn handle_open_buffer_by_id(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::OpenBufferById>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::OpenBufferResponse> {
+ let peer_id = envelope.original_sender_id()?;
+ let buffer = this
+ .update(&mut cx, |this, cx| {
+ this.open_buffer_by_id(envelope.payload.id, cx)
+ })
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ Ok(proto::OpenBufferResponse {
+ buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
+ })
+ })
+ }
+
+ async fn handle_open_buffer_by_path(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::OpenBufferByPath>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<proto::OpenBufferResponse> {
+ let peer_id = envelope.original_sender_id()?;
+ let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
+ let open_buffer = this.update(&mut cx, |this, cx| {
+ this.open_buffer(
+ ProjectPath {
+ worktree_id,
+ path: PathBuf::from(envelope.payload.path).into(),
+ },
+ cx,
+ )
+ });
+
+ let buffer = open_buffer.await?;
+ this.update(&mut cx, |this, cx| {
+ Ok(proto::OpenBufferResponse {
+ buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
+ })
+ })
+ }
+
+ fn serialize_project_transaction_for_peer(
+ &mut self,
+ project_transaction: ProjectTransaction,
+ peer_id: proto::PeerId,
+ cx: &mut AppContext,
+ ) -> proto::ProjectTransaction {
+ let mut serialized_transaction = proto::ProjectTransaction {
+ buffer_ids: Default::default(),
+ transactions: Default::default(),
+ };
+ for (buffer, transaction) in project_transaction.0 {
+ serialized_transaction
+ .buffer_ids
+ .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
+ serialized_transaction
+ .transactions
+ .push(language2::proto::serialize_transaction(&transaction));
+ }
+ serialized_transaction
+ }
+
+ fn deserialize_project_transaction(
+ &mut self,
+ message: proto::ProjectTransaction,
+ push_to_history: bool,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<ProjectTransaction>> {
+ cx.spawn(|this, mut cx| async move {
+ let mut project_transaction = ProjectTransaction::default();
+ for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
+ {
+ let buffer = this
+ .update(&mut cx, |this, cx| {
+ this.wait_for_remote_buffer(buffer_id, cx)
+ })
+ .await?;
+ let transaction = language2::proto::deserialize_transaction(transaction)?;
+ project_transaction.0.insert(buffer, transaction);
+ }
+
+ for (buffer, transaction) in &project_transaction.0 {
+ buffer
+ .update(&mut cx, |buffer, _| {
+ buffer.wait_for_edits(transaction.edit_ids.iter().copied())
+ })
+ .await?;
+
+ if push_to_history {
+ buffer.update(&mut cx, |buffer, _| {
+ buffer.push_transaction(transaction.clone(), Instant::now());
+ });
+ }
+ }
+
+ Ok(project_transaction)
+ })
+ }
+
+ fn create_buffer_for_peer(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ peer_id: proto::PeerId,
+ cx: &mut AppContext,
+ ) -> u64 {
+ let buffer_id = buffer.read(cx).remote_id();
+ if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
+ updates_tx
+ .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
+ .ok();
+ }
+ buffer_id
+ }
+
+ fn wait_for_remote_buffer(
+ &mut self,
+ id: u64,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ let mut opened_buffer_rx = self.opened_buffer.1.clone();
+
+ cx.spawn_weak(|this, mut cx| async move {
+ let buffer = loop {
+ let Some(this) = this.upgrade(&cx) else {
+ return Err(anyhow!("project dropped"));
+ };
+
+ let buffer = this.read_with(&cx, |this, cx| {
+ this.opened_buffers
+ .get(&id)
+ .and_then(|buffer| buffer.upgrade())
+ });
+
+ if let Some(buffer) = buffer {
+ break buffer;
+ } else if this.read_with(&cx, |this, _| this.is_read_only()) {
+ return Err(anyhow!("disconnected before buffer {} could be opened", id));
+ }
+
+ this.update(&mut cx, |this, _| {
+ this.incomplete_remote_buffers.entry(id).or_default();
+ });
+ drop(this);
+
+ opened_buffer_rx
+ .next()
+ .await
+ .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
+ };
+
+ Ok(buffer)
+ })
+ }
+
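`wait_for_remote_buffer` above follows a retry pattern used throughout this file: check shared state, and if the entry isn't there yet, park on a watch channel that fires whenever any buffer opens, then re-check. A stripped-down sketch of the pattern under assumed types (`state` standing in for `opened_buffers`, `notify` for the shared `opened_buffer` channel):

```rust
use std::{collections::HashMap, sync::Arc};

use futures::StreamExt;
use parking_lot::Mutex;

async fn wait_for_entry(
    state: Arc<Mutex<HashMap<u64, String>>>,
    mut notify: postage::watch::Receiver<()>,
    id: u64,
) -> anyhow::Result<String> {
    loop {
        // Re-check the map on every wakeup; another task may have inserted the entry.
        if let Some(value) = state.lock().get(&id).cloned() {
            return Ok(value);
        }
        // Park until the next notification; a closed channel means the producer is gone.
        notify
            .next()
            .await
            .ok_or_else(|| anyhow::anyhow!("channel closed while waiting"))?;
    }
}
```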
+ fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
+ let project_id = match self.client_state.as_ref() {
+ Some(ProjectClientState::Remote {
+ sharing_has_stopped,
+ remote_id,
+ ..
+ }) => {
+ if *sharing_has_stopped {
+ return Task::ready(Err(anyhow!(
+ "can't synchronize remote buffers on a readonly project"
+ )));
+ } else {
+ *remote_id
+ }
+ }
+ Some(ProjectClientState::Local { .. }) | None => {
+ return Task::ready(Err(anyhow!(
+ "can't synchronize remote buffers on a local project"
+ )))
+ }
+ };
+
+ let client = self.client.clone();
+ cx.spawn(|this, cx| async move {
+ let (buffers, incomplete_buffer_ids) = this.read_with(&cx, |this, cx| {
+ let buffers = this
+ .opened_buffers
+ .iter()
+ .filter_map(|(id, buffer)| {
+ let buffer = buffer.upgrade()?;
+ Some(proto::BufferVersion {
+ id: *id,
+ version: language2::proto::serialize_version(&buffer.read(cx).version),
+ })
+ })
+ .collect();
+ let incomplete_buffer_ids = this
+ .incomplete_remote_buffers
+ .keys()
+ .copied()
+ .collect::<Vec<_>>();
+
+ (buffers, incomplete_buffer_ids)
+ });
+ let response = client
+ .request(proto::SynchronizeBuffers {
+ project_id,
+ buffers,
+ })
+ .await?;
+
+ let send_updates_for_buffers = response.buffers.into_iter().map(|buffer| {
+ let client = client.clone();
+ let buffer_id = buffer.id;
+ let remote_version = language2::proto::deserialize_version(&buffer.version);
+ this.read_with(&cx, |this, cx| {
+ if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
+ let operations = buffer.read(cx).serialize_ops(Some(remote_version), cx);
+ cx.background().spawn(async move {
+ let operations = operations.await;
+ for chunk in split_operations(operations) {
+ client
+ .request(proto::UpdateBuffer {
+ project_id,
+ buffer_id,
+ operations: chunk,
+ })
+ .await?;
+ }
+ anyhow::Ok(())
+ })
+ } else {
+ Task::ready(Ok(()))
+ }
+ })
+ });
+
+        // Any incomplete buffers have open requests waiting. Request that the host
+        // re-creates these buffers for us so any waiting futures are unblocked.
+ for id in incomplete_buffer_ids {
+ cx.background()
+ .spawn(client.request(proto::OpenBufferById { project_id, id }))
+ .detach();
+ }
+
+ futures::future::join_all(send_updates_for_buffers)
+ .await
+ .into_iter()
+ .collect()
+ })
+ }
+
+ pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
+ self.worktrees(cx)
+ .map(|worktree| {
+ let worktree = worktree.read(cx);
+ proto::WorktreeMetadata {
+ id: worktree.id().to_proto(),
+ root_name: worktree.root_name().into(),
+ visible: worktree.is_visible(),
+ abs_path: worktree.abs_path().to_string_lossy().into(),
+ }
+ })
+ .collect()
+ }
+
+ fn set_worktrees_from_proto(
+ &mut self,
+ worktrees: Vec<proto::WorktreeMetadata>,
+ cx: &mut ModelContext<Project>,
+ ) -> Result<()> {
+ let replica_id = self.replica_id();
+ let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
+
+ let mut old_worktrees_by_id = self
+ .worktrees
+ .drain(..)
+ .filter_map(|worktree| {
+ let worktree = worktree.upgrade()?;
+ Some((worktree.read(cx).id(), worktree))
+ })
+ .collect::<HashMap<_, _>>();
+
+ for worktree in worktrees {
+ if let Some(old_worktree) =
+ old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
+ {
+ self.worktrees.push(WorktreeHandle::Strong(old_worktree));
+ } else {
+ let worktree =
+ Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
+ let _ = self.add_worktree(&worktree, cx);
+ }
+ }
+
+ self.metadata_changed(cx);
+ for id in old_worktrees_by_id.keys() {
+ cx.emit(Event::WorktreeRemoved(*id));
+ }
+
+ Ok(())
+ }
+
+ fn set_collaborators_from_proto(
+ &mut self,
+ messages: Vec<proto::Collaborator>,
+ cx: &mut ModelContext<Self>,
+ ) -> Result<()> {
+ let mut collaborators = HashMap::default();
+ for message in messages {
+ let collaborator = Collaborator::from_proto(message)?;
+ collaborators.insert(collaborator.peer_id, collaborator);
+ }
+ for old_peer_id in self.collaborators.keys() {
+ if !collaborators.contains_key(old_peer_id) {
+ cx.emit(Event::CollaboratorLeft(*old_peer_id));
+ }
+ }
+ self.collaborators = collaborators;
+ Ok(())
+ }
+
+ fn deserialize_symbol(
+ &self,
+ serialized_symbol: proto::Symbol,
+ ) -> impl Future<Output = Result<Symbol>> {
+ let languages = self.languages.clone();
+ async move {
+ let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
+ let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
+ let start = serialized_symbol
+ .start
+ .ok_or_else(|| anyhow!("invalid start"))?;
+ let end = serialized_symbol
+ .end
+ .ok_or_else(|| anyhow!("invalid end"))?;
+ let kind = unsafe { mem::transmute(serialized_symbol.kind) };
+ let path = ProjectPath {
+ worktree_id,
+ path: PathBuf::from(serialized_symbol.path).into(),
+ };
+ let language = languages
+ .language_for_file(&path.path, None)
+ .await
+ .log_err();
+ Ok(Symbol {
+ language_server_name: LanguageServerName(
+ serialized_symbol.language_server_name.into(),
+ ),
+ source_worktree_id,
+ path,
+ label: {
+ match language {
+ Some(language) => {
+ language
+ .label_for_symbol(&serialized_symbol.name, kind)
+ .await
+ }
+ None => None,
+ }
+ .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
+ },
+
+ name: serialized_symbol.name,
+ range: Unclipped(PointUtf16::new(start.row, start.column))
+ ..Unclipped(PointUtf16::new(end.row, end.column)),
+ kind,
+ signature: serialized_symbol
+ .signature
+ .try_into()
+ .map_err(|_| anyhow!("invalid signature"))?,
+ })
+ }
+ }
+
+ async fn handle_buffer_saved(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::BufferSaved>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
+ let version = deserialize_version(&envelope.payload.version);
+ let mtime = envelope
+ .payload
+ .mtime
+ .ok_or_else(|| anyhow!("missing mtime"))?
+ .into();
+
+ this.update(&mut cx, |this, cx| {
+ let buffer = this
+ .opened_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .or_else(|| {
+ this.incomplete_remote_buffers
+ .get(&envelope.payload.buffer_id)
+ .and_then(|b| b.clone())
+ });
+ if let Some(buffer) = buffer {
+ buffer.update(cx, |buffer, cx| {
+ buffer.did_save(version, fingerprint, mtime, cx);
+ });
+ }
+ Ok(())
+ })
+ }
+
+ async fn handle_buffer_reloaded(
+ this: Handle<Self>,
+ envelope: TypedEnvelope<proto::BufferReloaded>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ let payload = envelope.payload;
+ let version = deserialize_version(&payload.version);
+ let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
+ let line_ending = deserialize_line_ending(
+ proto::LineEnding::from_i32(payload.line_ending)
+ .ok_or_else(|| anyhow!("missing line ending"))?,
+ );
+ let mtime = payload
+ .mtime
+ .ok_or_else(|| anyhow!("missing mtime"))?
+ .into();
+ this.update(&mut cx, |this, cx| {
+ let buffer = this
+ .opened_buffers
+ .get(&payload.buffer_id)
+ .and_then(|buffer| buffer.upgrade())
+ .or_else(|| {
+ this.incomplete_remote_buffers
+ .get(&payload.buffer_id)
+ .cloned()
+ .flatten()
+ });
+ if let Some(buffer) = buffer {
+ buffer.update(cx, |buffer, cx| {
+ buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
+ });
+ }
+ Ok(())
+ })
+ }
+
+ #[allow(clippy::type_complexity)]
+ fn edits_from_lsp(
+ &mut self,
+ buffer: &Handle<Buffer>,
+        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp2::TextEdit>,
+ server_id: LanguageServerId,
+ version: Option<i32>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
+ let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
+ cx.background().spawn(async move {
+ let snapshot = snapshot?;
+ let mut lsp_edits = lsp_edits
+ .into_iter()
+ .map(|edit| (range_from_lsp(edit.range), edit.new_text))
+ .collect::<Vec<_>>();
+ lsp_edits.sort_by_key(|(range, _)| range.start);
+
+ let mut lsp_edits = lsp_edits.into_iter().peekable();
+ let mut edits = Vec::new();
+ while let Some((range, mut new_text)) = lsp_edits.next() {
+ // Clip invalid ranges provided by the language server.
+ let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
+ ..snapshot.clip_point_utf16(range.end, Bias::Left);
+
+ // Combine any LSP edits that are adjacent.
+ //
+ // Also, combine LSP edits that are separated from each other by only
+ // a newline. This is important because for some code actions,
+ // Rust-analyzer rewrites the entire buffer via a series of edits that
+ // are separated by unchanged newline characters.
+ //
+ // In order for the diffing logic below to work properly, any edits that
+ // cancel each other out must be combined into one.
+ while let Some((next_range, next_text)) = lsp_edits.peek() {
+ if next_range.start.0 > range.end {
+ if next_range.start.0.row > range.end.row + 1
+ || next_range.start.0.column > 0
+ || snapshot.clip_point_utf16(
+ Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
+ Bias::Left,
+ ) > range.end
+ {
+ break;
+ }
+ new_text.push('\n');
+ }
+ range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
+ new_text.push_str(next_text);
+ lsp_edits.next();
+ }
+
+ // For multiline edits, perform a diff of the old and new text so that
+ // we can identify the changes more precisely, preserving the locations
+ // of any anchors positioned in the unchanged regions.
+ if range.end.row > range.start.row {
+ let mut offset = range.start.to_offset(&snapshot);
+ let old_text = snapshot.text_for_range(range).collect::<String>();
+
+ let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
+ let mut moved_since_edit = true;
+ for change in diff.iter_all_changes() {
+ let tag = change.tag();
+ let value = change.value();
+ match tag {
+ ChangeTag::Equal => {
+ offset += value.len();
+ moved_since_edit = true;
+ }
+ ChangeTag::Delete => {
+ let start = snapshot.anchor_after(offset);
+ let end = snapshot.anchor_before(offset + value.len());
+ if moved_since_edit {
+ edits.push((start..end, String::new()));
+ } else {
+ edits.last_mut().unwrap().0.end = end;
+ }
+ offset += value.len();
+ moved_since_edit = false;
+ }
+ ChangeTag::Insert => {
+ if moved_since_edit {
+ let anchor = snapshot.anchor_after(offset);
+ edits.push((anchor..anchor, value.to_string()));
+ } else {
+ edits.last_mut().unwrap().1.push_str(value);
+ }
+ moved_since_edit = false;
+ }
+ }
+ }
+ } else if range.end == range.start {
+ let anchor = snapshot.anchor_after(range.start);
+ edits.push((anchor..anchor, new_text));
+ } else {
+ let edit_start = snapshot.anchor_after(range.start);
+ let edit_end = snapshot.anchor_before(range.end);
+ edits.push((edit_start..edit_end, new_text));
+ }
+ }
+
+ Ok(edits)
+ })
+ }
+
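The multiline branch of `edits_from_lsp` is what keeps anchors stable when a server (rust-analyzer in particular) rewrites a whole region: rather than replacing the full range, only the lines that actually changed become edits. A standalone sketch of that reduction over plain strings, using the same `similar` API (simplified: byte offsets instead of anchors, and no merging of adjacent runs the way `moved_since_edit` does above):

```rust
use std::ops::Range;

use similar::{ChangeTag, TextDiff};

// Reduce a whole-region replacement to minimal (old_byte_range, new_text)
// edits, all expressed in the old text's coordinates.
fn minimal_edits(old: &str, new: &str) -> Vec<(Range<usize>, String)> {
    let mut edits = Vec::new();
    let mut offset = 0;
    for change in TextDiff::from_lines(old, new).iter_all_changes() {
        let value = change.value();
        match change.tag() {
            // Unchanged lines just advance the cursor, leaving them untouched.
            ChangeTag::Equal => offset += value.len(),
            ChangeTag::Delete => {
                edits.push((offset..offset + value.len(), String::new()));
                offset += value.len();
            }
            // Insertions are anchored at the current old-text position.
            ChangeTag::Insert => edits.push((offset..offset, value.to_string())),
        }
    }
    edits
}
```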
+ fn buffer_snapshot_for_lsp_version(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ server_id: LanguageServerId,
+ version: Option<i32>,
+ cx: &AppContext,
+ ) -> Result<TextBufferSnapshot> {
+ const OLD_VERSIONS_TO_RETAIN: i32 = 10;
+
+ if let Some(version) = version {
+ let buffer_id = buffer.read(cx).remote_id();
+ let snapshots = self
+ .buffer_snapshots
+ .get_mut(&buffer_id)
+ .and_then(|m| m.get_mut(&server_id))
+ .ok_or_else(|| {
+ anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
+ })?;
+
+ let found_snapshot = snapshots
+ .binary_search_by_key(&version, |e| e.version)
+ .map(|ix| snapshots[ix].snapshot.clone())
+ .map_err(|_| {
+ anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
+ })?;
+
+ snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
+ Ok(found_snapshot)
+ } else {
+ Ok((buffer.read(cx)).text_snapshot())
+ }
+ }
+
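Each `DidChange` notification sent to a server carries a version number, and the snapshot that produced it is retained so a later reply from that server can be interpreted against the text it actually saw; only a small window of old versions is kept. A self-contained sketch of that lookup-and-prune pattern over a sorted list:

```rust
// Hypothetical stand-in for the per-server snapshot list; real entries pair
// an i32 LSP version with a full buffer snapshot.
struct VersionedText {
    version: i32,
    text: String,
}

const OLD_VERSIONS_TO_RETAIN: i32 = 10;

fn snapshot_for_version(snapshots: &mut Vec<VersionedText>, version: i32) -> Option<String> {
    // The list stays sorted by version, so a binary search suffices.
    let ix = snapshots
        .binary_search_by_key(&version, |e| e.version)
        .ok()?;
    let found = snapshots[ix].text.clone();
    // Prune versions too old for any in-flight server message to reference.
    snapshots.retain(|s| s.version + OLD_VERSIONS_TO_RETAIN >= version);
    Some(found)
}
```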
+ pub fn language_servers(
+ &self,
+ ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
+ self.language_server_ids
+ .iter()
+ .map(|((worktree_id, server_name), server_id)| {
+ (*server_id, server_name.clone(), *worktree_id)
+ })
+ }
+
+ pub fn supplementary_language_servers(
+ &self,
+ ) -> impl '_
+ + Iterator<
+ Item = (
+ &LanguageServerId,
+ &(LanguageServerName, Arc<LanguageServer>),
+ ),
+ > {
+ self.supplementary_language_servers.iter()
+ }
+
+ pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
+ if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
+ Some(server.clone())
+ } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
+ Some(Arc::clone(server))
+ } else {
+ None
+ }
+ }
+
+ pub fn language_servers_for_buffer(
+ &self,
+ buffer: &Buffer,
+ cx: &AppContext,
+ ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
+ self.language_server_ids_for_buffer(buffer, cx)
+ .into_iter()
+ .filter_map(|server_id| match self.language_servers.get(&server_id)? {
+ LanguageServerState::Running {
+ adapter, server, ..
+ } => Some((adapter, server)),
+ _ => None,
+ })
+ }
+
+ fn primary_language_server_for_buffer(
+ &self,
+ buffer: &Buffer,
+ cx: &AppContext,
+ ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
+ self.language_servers_for_buffer(buffer, cx).next()
+ }
+
+ pub fn language_server_for_buffer(
+ &self,
+ buffer: &Buffer,
+ server_id: LanguageServerId,
+ cx: &AppContext,
+ ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
+ self.language_servers_for_buffer(buffer, cx)
+ .find(|(_, s)| s.server_id() == server_id)
+ }
+
+ fn language_server_ids_for_buffer(
+ &self,
+ buffer: &Buffer,
+ cx: &AppContext,
+ ) -> Vec<LanguageServerId> {
+ if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
+ let worktree_id = file.worktree_id(cx);
+ language
+ .lsp_adapters()
+ .iter()
+ .flat_map(|adapter| {
+ let key = (worktree_id, adapter.name.clone());
+ self.language_server_ids.get(&key).copied()
+ })
+ .collect()
+ } else {
+ Vec::new()
+ }
+ }
+
+ fn prettier_instance_for_buffer(
+ &mut self,
+ buffer: &Handle<Buffer>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
+ let buffer = buffer.read(cx);
+ let buffer_file = buffer.file();
+ let Some(buffer_language) = buffer.language() else {
+ return Task::ready(None);
+ };
+ if !buffer_language
+ .lsp_adapters()
+ .iter()
+ .flat_map(|adapter| adapter.enabled_formatters())
+ .any(|formatter| matches!(formatter, BundledFormatter::Prettier { .. }))
+ {
+ return Task::ready(None);
+ }
+
+ let buffer_file = File::from_dyn(buffer_file);
+ let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
+        let worktree_path = buffer_file
+            .as_ref()
+            .map(|file| file.worktree.read(cx).abs_path());
+ let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
+ if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
+ let Some(node) = self.node.as_ref().map(Arc::clone) else {
+ return Task::ready(None);
+ };
+ cx.spawn(|this, mut cx| async move {
+ let fs = this.update(&mut cx, |project, _| Arc::clone(&project.fs));
+ let prettier_dir = match cx
+ .background()
+ .spawn(Prettier::locate(
+                        worktree_path.clone().zip(buffer_path).map(
+ |(worktree_root_path, starting_path)| LocateStart {
+ worktree_root_path,
+ starting_path,
+ },
+ ),
+ fs,
+ ))
+ .await
+ {
+ Ok(path) => path,
+ Err(e) => {
+ return Some(
+                            Task::ready(Err(Arc::new(e.context(format!(
+                                "determining prettier path for worktree {worktree_path:?}"
+                            )))))
+ .shared(),
+ );
+ }
+ };
+
+ if let Some(existing_prettier) = this.update(&mut cx, |project, _| {
+ project
+ .prettier_instances
+ .get(&(worktree_id, prettier_dir.clone()))
+ .cloned()
+ }) {
+ return Some(existing_prettier);
+ }
+
+ log::info!("Found prettier in {prettier_dir:?}, starting.");
+ let task_prettier_dir = prettier_dir.clone();
+ let weak_project = this.downgrade();
+ let new_server_id =
+ this.update(&mut cx, |this, _| this.languages.next_language_server_id());
+ let new_prettier_task = cx
+ .spawn(|mut cx| async move {
+ let prettier = Prettier::start(
+ worktree_id.map(|id| id.to_usize()),
+ new_server_id,
+ task_prettier_dir,
+ node,
+ cx.clone(),
+ )
+ .await
+ .context("prettier start")
+ .map_err(Arc::new)?;
+ log::info!("Started prettier in {:?}", prettier.prettier_dir());
+
+ if let Some((project, prettier_server)) =
+ weak_project.upgrade(&mut cx).zip(prettier.server())
+ {
+ project.update(&mut cx, |project, cx| {
+ let name = if prettier.is_default() {
+ LanguageServerName(Arc::from("prettier (default)"))
+ } else {
+ let prettier_dir = prettier.prettier_dir();
+ let worktree_path = prettier
+ .worktree_id()
+ .map(WorktreeId::from_usize)
+ .and_then(|id| project.worktree_for_id(id, cx))
+ .map(|worktree| worktree.read(cx).abs_path());
+ match worktree_path {
+ Some(worktree_path) => {
+ if worktree_path.as_ref() == prettier_dir {
+ LanguageServerName(Arc::from(format!(
+ "prettier ({})",
+ prettier_dir
+ .file_name()
+ .and_then(|name| name.to_str())
+ .unwrap_or_default()
+ )))
+ } else {
+ let dir_to_display = match prettier_dir
+ .strip_prefix(&worktree_path)
+ .ok()
+ {
+ Some(relative_path) => relative_path,
+ None => prettier_dir,
+ };
+ LanguageServerName(Arc::from(format!(
+ "prettier ({})",
+ dir_to_display.display(),
+ )))
+ }
+ }
+ None => LanguageServerName(Arc::from(format!(
+ "prettier ({})",
+ prettier_dir.display(),
+ ))),
+ }
+ };
+
+ project
+ .supplementary_language_servers
+ .insert(new_server_id, (name, Arc::clone(prettier_server)));
+ cx.emit(Event::LanguageServerAdded(new_server_id));
+ });
+ }
+ Ok(Arc::new(prettier)).map_err(Arc::new)
+ })
+ .shared();
+ this.update(&mut cx, |project, _| {
+ project
+ .prettier_instances
+ .insert((worktree_id, prettier_dir), new_prettier_task.clone());
+ });
+ Some(new_prettier_task)
+ })
+ } else if self.remote_id().is_some() {
+ return Task::ready(None);
+ } else {
+ Task::ready(Some(
+ Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
+ ))
+ }
+ }
+
+ fn install_default_formatters(
+ &self,
+ worktree: Option<WorktreeId>,
+ new_language: &Language,
+ language_settings: &LanguageSettings,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<anyhow::Result<()>> {
+ match &language_settings.formatter {
+ Formatter::Prettier { .. } | Formatter::Auto => {}
+ Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
+ };
+ let Some(node) = self.node.as_ref().cloned() else {
+ return Task::ready(Ok(()));
+ };
+
+ let mut prettier_plugins = None;
+ for formatter in new_language
+ .lsp_adapters()
+ .into_iter()
+ .flat_map(|adapter| adapter.enabled_formatters())
+ {
+ match formatter {
+ BundledFormatter::Prettier { plugin_names, .. } => prettier_plugins
+                    .get_or_insert_with(HashSet::default)
+ .extend(plugin_names),
+ }
+ }
+ let Some(prettier_plugins) = prettier_plugins else {
+ return Task::ready(Ok(()));
+ };
+
+ let default_prettier_dir = DEFAULT_PRETTIER_DIR.as_path();
+ let already_running_prettier = self
+ .prettier_instances
+ .get(&(worktree, default_prettier_dir.to_path_buf()))
+ .cloned();
+
+ let fs = Arc::clone(&self.fs);
+ cx.background()
+ .spawn(async move {
+ let prettier_wrapper_path = default_prettier_dir.join(PRETTIER_SERVER_FILE);
+            // `fs.save` creates the parent directory if it doesn't exist.
+ fs.save(&prettier_wrapper_path, &Rope::from(PRETTIER_SERVER_JS), LineEnding::Unix).await
+ .with_context(|| format!("writing {PRETTIER_SERVER_FILE} file at {prettier_wrapper_path:?}"))?;
+
+ let packages_to_versions = future::try_join_all(
+ prettier_plugins
+ .iter()
+ .chain(Some(&"prettier"))
+ .map(|package_name| async {
+ let returned_package_name = package_name.to_string();
+ let latest_version = node.npm_package_latest_version(package_name)
+ .await
+ .with_context(|| {
+ format!("fetching latest npm version for package {returned_package_name}")
+ })?;
+ anyhow::Ok((returned_package_name, latest_version))
+ }),
+ )
+ .await
+ .context("fetching latest npm versions")?;
+
+ log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
+ let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
+ (package.as_str(), version.as_str())
+ }).collect::<Vec<_>>();
+ node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;
+
+ if !prettier_plugins.is_empty() {
+ if let Some(prettier) = already_running_prettier {
+ prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
+ }
+ }
+
+ anyhow::Ok(())
+ })
+ }
+}
+
+fn subscribe_for_copilot_events(
+ copilot: &Handle<Copilot>,
+ cx: &mut ModelContext<'_, Project>,
+) -> gpui2::Subscription {
+ cx.subscribe(
+ copilot,
+ |project, copilot, copilot_event, cx| match copilot_event {
+ copilot::Event::CopilotLanguageServerStarted => {
+ match copilot.read(cx).language_server() {
+ Some((name, copilot_server)) => {
+                    // Another event may try to re-add a server that was already added and subscribed to; avoid doing it again.
+ if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
+ let new_server_id = copilot_server.server_id();
+ let weak_project = cx.weak_handle();
+ let copilot_log_subscription = copilot_server
+ .on_notification::<copilot::request::LogMessage, _>(
+ move |params, mut cx| {
+ if let Some(project) = weak_project.upgrade(&mut cx) {
+ project.update(&mut cx, |_, cx| {
+ cx.emit(Event::LanguageServerLog(
+ new_server_id,
+ params.message,
+ ));
+ })
+ }
+ },
+ );
+ project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
+ project.copilot_log_subscription = Some(copilot_log_subscription);
+ cx.emit(Event::LanguageServerAdded(new_server_id));
+ }
+ }
+ None => debug_panic!("Received Copilot language server started event, but no language server is running"),
+ }
+ }
+ },
+ )
+}
+
+fn glob_literal_prefix<'a>(glob: &'a str) -> &'a str {
+ let mut literal_end = 0;
+ for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
+ if part.contains(&['*', '?', '{', '}']) {
+ break;
+ } else {
+ if i > 0 {
+                // Account for the separator prior to this part.
+ literal_end += path::MAIN_SEPARATOR.len_utf8();
+ }
+ literal_end += part.len();
+ }
+ }
+ &glob[..literal_end]
+}
+
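`glob_literal_prefix` decides how much of a watch pattern can be matched with plain path comparison before glob machinery is needed. A few worked cases, assuming `/` as the platform separator:

```rust
#[test]
fn glob_literal_prefix_examples() {
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("src/*.rs"), "src");
    // A leading wildcard leaves no literal prefix at all.
    assert_eq!(glob_literal_prefix("**/*.rs"), "");
}
```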
+impl WorktreeHandle {
+ pub fn upgrade(&self) -> Option<Handle<Worktree>> {
+ match self {
+ WorktreeHandle::Strong(handle) => Some(handle.clone()),
+ WorktreeHandle::Weak(handle) => handle.upgrade(),
+ }
+ }
+
+ pub fn handle_id(&self) -> usize {
+ match self {
+ WorktreeHandle::Strong(handle) => handle.id(),
+ WorktreeHandle::Weak(handle) => handle.id(),
+ }
+ }
+}
+
+impl OpenBuffer {
+ pub fn upgrade(&self) -> Option<Handle<Buffer>> {
+ match self {
+ OpenBuffer::Strong(handle) => Some(handle.clone()),
+ OpenBuffer::Weak(handle) => handle.upgrade(),
+ OpenBuffer::Operations(_) => None,
+ }
+ }
+}
+
+pub struct PathMatchCandidateSet {
+ pub snapshot: Snapshot,
+ pub include_ignored: bool,
+ pub include_root_name: bool,
+}
+
+impl<'a> fuzzy2::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
+ type Candidates = PathMatchCandidateSetIter<'a>;
+
+ fn id(&self) -> usize {
+ self.snapshot.id().to_usize()
+ }
+
+ fn len(&self) -> usize {
+ if self.include_ignored {
+ self.snapshot.file_count()
+ } else {
+ self.snapshot.visible_file_count()
+ }
+ }
+
+ fn prefix(&self) -> Arc<str> {
+ if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
+ self.snapshot.root_name().into()
+ } else if self.include_root_name {
+ format!("{}/", self.snapshot.root_name()).into()
+ } else {
+ "".into()
+ }
+ }
+
+ fn candidates(&'a self, start: usize) -> Self::Candidates {
+ PathMatchCandidateSetIter {
+ traversal: self.snapshot.files(self.include_ignored, start),
+ }
+ }
+}
+
+pub struct PathMatchCandidateSetIter<'a> {
+ traversal: Traversal<'a>,
+}
+
+impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
+ type Item = fuzzy2::PathMatchCandidate<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.traversal.next().map(|entry| {
+ if let EntryKind::File(char_bag) = entry.kind {
+ fuzzy2::PathMatchCandidate {
+ path: &entry.path,
+ char_bag,
+ }
+ } else {
+ unreachable!()
+ }
+ })
+ }
+}
+
+impl EventEmitter for Project {
+ type Event = Event;
+}
+
+impl Entity for Project {
+ fn app_will_quit(
+ &mut self,
+ _: &mut AppContext,
+ ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
+ let shutdown_futures = self
+ .language_servers
+ .drain()
+ .map(|(_, server_state)| async {
+ use LanguageServerState::*;
+ match server_state {
+ Running { server, .. } => server.shutdown()?.await,
+ Starting(task) => task.await?.shutdown()?.await,
+ }
+ })
+ .collect::<Vec<_>>();
+
+ Some(
+ async move {
+ futures::future::join_all(shutdown_futures).await;
+ }
+ .boxed(),
+ )
+ }
+}
+
+impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
+ fn from((worktree_id, path): (WorktreeId, P)) -> Self {
+ Self {
+ worktree_id,
+ path: path.as_ref().into(),
+ }
+ }
+}
+
+impl ProjectLspAdapterDelegate {
+ fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
+ Arc::new(Self {
+ project: cx.handle(),
+ http_client: project.client.http_client(),
+ })
+ }
+}
+
+impl LspAdapterDelegate for ProjectLspAdapterDelegate {
+ fn show_notification(&self, message: &str, cx: &mut AppContext) {
+ self.project
+ .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
+ }
+
+ fn http_client(&self) -> Arc<dyn HttpClient> {
+ self.http_client.clone()
+ }
+}
+
+fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
+ proto::Symbol {
+ language_server_name: symbol.language_server_name.0.to_string(),
+ source_worktree_id: symbol.source_worktree_id.to_proto(),
+ worktree_id: symbol.path.worktree_id.to_proto(),
+ path: symbol.path.path.to_string_lossy().to_string(),
+ name: symbol.name.clone(),
+ kind: unsafe { mem::transmute(symbol.kind) },
+ start: Some(proto::PointUtf16 {
+ row: symbol.range.start.0.row,
+ column: symbol.range.start.0.column,
+ }),
+ end: Some(proto::PointUtf16 {
+ row: symbol.range.end.0.row,
+ column: symbol.range.end.0.column,
+ }),
+ signature: symbol.signature.to_vec(),
+ }
+}
+
+fn relativize_path(base: &Path, path: &Path) -> PathBuf {
+ let mut path_components = path.components();
+ let mut base_components = base.components();
+ let mut components: Vec<Component> = Vec::new();
+ loop {
+ match (path_components.next(), base_components.next()) {
+ (None, None) => break,
+ (Some(a), None) => {
+ components.push(a);
+ components.extend(path_components.by_ref());
+ break;
+ }
+ (None, _) => components.push(Component::ParentDir),
+ (Some(a), Some(b)) if components.is_empty() && a == b => (),
+ (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
+ (Some(a), Some(_)) => {
+ components.push(Component::ParentDir);
+ for _ in base_components {
+ components.push(Component::ParentDir);
+ }
+ components.push(a);
+ components.extend(path_components.by_ref());
+ break;
+ }
+ }
+ }
+ components.iter().map(|c| c.as_os_str()).collect()
+}
+
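A couple of worked cases for `relativize_path`: the shared prefix is consumed pairwise, then whatever remains of the base is backed out with `..` components:

```rust
#[test]
fn relativize_path_examples() {
    use std::path::{Path, PathBuf};

    // Path is inside the base: only the trailing components remain.
    assert_eq!(
        relativize_path(Path::new("/a/b"), Path::new("/a/b/c/d.rs")),
        PathBuf::from("c/d.rs")
    );
    // Path diverges from the base: back out of `b/c`, then descend to `d`.
    assert_eq!(
        relativize_path(Path::new("/a/b/c"), Path::new("/a/d")),
        PathBuf::from("../../d")
    );
}
```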
+impl Item for Buffer {
+ fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
+ File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
+ }
+
+ fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
+ File::from_dyn(self.file()).map(|file| ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path().clone(),
+ })
+ }
+}
+
+async fn wait_for_loading_buffer(
+ mut receiver: postage::watch::Receiver<Option<Result<Handle<Buffer>, Arc<anyhow::Error>>>>,
+) -> Result<Handle<Buffer>, Arc<anyhow::Error>> {
+ loop {
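+        // Check the current value first so a buffer that finished loading before we subscribed isn't missed.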
+ if let Some(result) = receiver.borrow().as_ref() {
+ match result {
+ Ok(buffer) => return Ok(buffer.to_owned()),
+ Err(e) => return Err(e.to_owned()),
+ }
+ }
+ receiver.next().await;
+ }
+}
+
+fn include_text(server: &lsp2::LanguageServer) -> bool {
+ server
+ .capabilities()
+ .text_document_sync
+ .as_ref()
+ .and_then(|sync| match sync {
+ lsp2::TextDocumentSyncCapability::Kind(_) => None,
+ lsp2::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
+ })
+ .and_then(|save_options| match save_options {
+ lsp2::TextDocumentSyncSaveOptions::Supported(_) => None,
+ lsp2::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
+ })
+ .unwrap_or(false)
+}
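The chain in `include_text` resolves to `true` only when the server explicitly advertises save options with `include_text: Some(true)`; a bare sync kind, a plain `Supported(_)`, or an unset flag all fall through to `false`. A stand-in sketch of the same resolution with local types (the real ones live in the lsp2 crate):

```rust
// Local stand-ins, shaped like the lsp2 capability types.
enum SyncCapability {
    Kind,
    Options { save: Option<SaveOptions> },
}

enum SaveOptions {
    Supported(bool),
    SaveOptions { include_text: Option<bool> },
}

fn include_text(sync: Option<&SyncCapability>) -> bool {
    sync.and_then(|sync| match sync {
        // A bare sync kind says nothing about save contents.
        SyncCapability::Kind => None,
        SyncCapability::Options { save } => save.as_ref(),
    })
    .and_then(|save| match save {
        SaveOptions::Supported(_) => None,
        SaveOptions::SaveOptions { include_text } => *include_text,
    })
    // Anything short of an explicit opt-in means "don't send the text".
    .unwrap_or(false)
}
```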
@@ -0,0 +1,47 @@
+use collections::HashMap;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use settings::Setting;
+use std::sync::Arc;
+
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
+pub struct ProjectSettings {
+ #[serde(default)]
+ pub lsp: HashMap<Arc<str>, LspSettings>,
+ #[serde(default)]
+ pub git: GitSettings,
+}
+
+#[derive(Copy, Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
+pub struct GitSettings {
+ pub git_gutter: Option<GitGutterSetting>,
+ pub gutter_debounce: Option<u64>,
+}
+
+#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum GitGutterSetting {
+ #[default]
+ TrackedFiles,
+ Hide,
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub struct LspSettings {
+ pub initialization_options: Option<serde_json::Value>,
+}
+
+impl Setting for ProjectSettings {
+ const KEY: Option<&'static str> = None;
+
+ type FileContent = Self;
+
+ fn load(
+ default_value: &Self::FileContent,
+ user_values: &[&Self::FileContent],
+ _: &gpui::AppContext,
+ ) -> anyhow::Result<Self> {
+ Self::load_via_json_merge(default_value, user_values)
+ }
+}
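Since `ProjectSettings` derives `Deserialize` and loads via JSON merge, a user settings fragment maps directly onto it. A hedged sketch of the shape (direct `from_value` shown for brevity instead of the merge path; `rust-analyzer` is just an illustrative server name):

```rust
use serde_json::json;

fn example() -> anyhow::Result<ProjectSettings> {
    // `lsp` keys are arbitrary language server names; `git` fields are
    // optional overrides that fall back to defaults when omitted.
    Ok(serde_json::from_value(json!({
        "lsp": {
            "rust-analyzer": {
                "initialization_options": { "checkOnSave": false }
            }
        },
        "git": { "gutter_debounce": 100 }
    }))?)
}
```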
@@ -0,0 +1,4077 @@
+// use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
+// use fs::{FakeFs, RealFs};
+// use futures::{future, StreamExt};
+// use gpui::{executor::Deterministic, test::subscribe, AppContext};
+// use language2::{
+// language_settings::{AllLanguageSettings, LanguageSettingsContent},
+// tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
+// LineEnding, OffsetRangeExt, Point, ToPoint,
+// };
+// use lsp2::Url;
+// use parking_lot::Mutex;
+// use pretty_assertions::assert_eq;
+// use serde_json::json;
+// use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
+// use unindent::Unindent as _;
+// use util::{assert_set_eq, test::temp_tree};
+
+// #[cfg(test)]
+// #[ctor::ctor]
+// fn init_logger() {
+// if std::env::var("RUST_LOG").is_ok() {
+// env_logger::init();
+// }
+// }
+
+// #[gpui::test]
+// async fn test_symlinks(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+// cx.foreground().allow_parking();
+
+// let dir = temp_tree(json!({
+// "root": {
+// "apple": "",
+// "banana": {
+// "carrot": {
+// "date": "",
+// "endive": "",
+// }
+// },
+// "fennel": {
+// "grape": "",
+// }
+// }
+// }));
+
+// let root_link_path = dir.path().join("root_link");
+// unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
+// unix::fs::symlink(
+// &dir.path().join("root/fennel"),
+// &dir.path().join("root/finnochio"),
+// )
+// .unwrap();
+
+// let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
+// project.read_with(cx, |project, cx| {
+// let tree = project.worktrees(cx).next().unwrap().read(cx);
+// assert_eq!(tree.file_count(), 5);
+// assert_eq!(
+// tree.inode_for_path("fennel/grape"),
+// tree.inode_for_path("finnochio/grape")
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_managing_project_specific_settings(
+// deterministic: Arc<Deterministic>,
+// cx: &mut gpui::TestAppContext,
+// ) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/the-root",
+// json!({
+// ".zed": {
+// "settings.json": r#"{ "tab_size": 8 }"#
+// },
+// "a": {
+// "a.rs": "fn a() {\n A\n}"
+// },
+// "b": {
+// ".zed": {
+// "settings.json": r#"{ "tab_size": 2 }"#
+// },
+// "b.rs": "fn b() {\n B\n}"
+// }
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+// let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
+
+// deterministic.run_until_parked();
+// cx.read(|cx| {
+// let tree = worktree.read(cx);
+
+// let settings_a = language_settings(
+// None,
+// Some(
+// &(File::for_entry(
+// tree.entry_for_path("a/a.rs").unwrap().clone(),
+// worktree.clone(),
+// ) as _),
+// ),
+// cx,
+// );
+// let settings_b = language_settings(
+// None,
+// Some(
+// &(File::for_entry(
+// tree.entry_for_path("b/b.rs").unwrap().clone(),
+// worktree.clone(),
+// ) as _),
+// ),
+// cx,
+// );
+
+// assert_eq!(settings_a.tab_size.get(), 8);
+// assert_eq!(settings_b.tab_size.get(), 2);
+// });
+// }
+
+// #[gpui::test]
+// async fn test_managing_language_servers(
+// deterministic: Arc<Deterministic>,
+// cx: &mut gpui::TestAppContext,
+// ) {
+// init_test(cx);
+
+// let mut rust_language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut json_language = Language::new(
+// LanguageConfig {
+// name: "JSON".into(),
+// path_suffixes: vec!["json".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_rust_servers = rust_language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// name: "the-rust-language-server",
+// capabilities: lsp::ServerCapabilities {
+// completion_provider: Some(lsp::CompletionOptions {
+// trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
+// ..Default::default()
+// }),
+// ..Default::default()
+// },
+// ..Default::default()
+// }))
+// .await;
+// let mut fake_json_servers = json_language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// name: "the-json-language-server",
+// capabilities: lsp::ServerCapabilities {
+// completion_provider: Some(lsp::CompletionOptions {
+// trigger_characters: Some(vec![":".to_string()]),
+// ..Default::default()
+// }),
+// ..Default::default()
+// },
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/the-root",
+// json!({
+// "test.rs": "const A: i32 = 1;",
+// "test2.rs": "",
+// "Cargo.toml": "a = 1",
+// "package.json": "{\"a\": 1}",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+
+// // Open a buffer without an associated language server.
+// let toml_buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/the-root/Cargo.toml", cx)
+// })
+// .await
+// .unwrap();
+
+// // Open a buffer with an associated language server before the language for it has been loaded.
+// let rust_buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/the-root/test.rs", cx)
+// })
+// .await
+// .unwrap();
+// rust_buffer.read_with(cx, |buffer, _| {
+// assert_eq!(buffer.language().map(|l| l.name()), None);
+// });
+
+// // Now we add the languages to the project, and ensure they get assigned to all
+// // the relevant open buffers.
+// project.update(cx, |project, _| {
+// project.languages.add(Arc::new(json_language));
+// project.languages.add(Arc::new(rust_language));
+// });
+// deterministic.run_until_parked();
+// rust_buffer.read_with(cx, |buffer, _| {
+// assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
+// });
+
+// // A server is started up, and it is notified about Rust files.
+// let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+// version: 0,
+// text: "const A: i32 = 1;".to_string(),
+// language_id: Default::default()
+// }
+// );
+
+// // The buffer is configured based on the language server's capabilities.
+// rust_buffer.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer.completion_triggers(),
+// &[".".to_string(), "::".to_string()]
+// );
+// });
+// toml_buffer.read_with(cx, |buffer, _| {
+// assert!(buffer.completion_triggers().is_empty());
+// });
+
+// // Edit a buffer. The changes are reported to the language server.
+// rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+// .await
+// .text_document,
+// lsp2::VersionedTextDocumentIdentifier::new(
+// lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+// 1
+// )
+// );
+
+// // Open a third buffer with a different associated language server.
+// let json_buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/the-root/package.json", cx)
+// })
+// .await
+// .unwrap();
+
+// // A json language server is started up and is only notified about the json buffer.
+// let mut fake_json_server = fake_json_servers.next().await.unwrap();
+// assert_eq!(
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+// version: 0,
+// text: "{\"a\": 1}".to_string(),
+// language_id: Default::default()
+// }
+// );
+
+// // This buffer is configured based on the second language server's
+// // capabilities.
+// json_buffer.read_with(cx, |buffer, _| {
+// assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
+// });
+
+// // When opening another buffer whose language server is already running,
+// // it is also configured based on the existing language server's capabilities.
+// let rust_buffer2 = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/the-root/test2.rs", cx)
+// })
+// .await
+// .unwrap();
+// rust_buffer2.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer.completion_triggers(),
+// &[".".to_string(), "::".to_string()]
+// );
+// });
+
+// // Changes are reported only to servers matching the buffer's language.
+// toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
+// rust_buffer2.update(cx, |buffer, cx| {
+// buffer.edit([(0..0, "let x = 1;")], None, cx)
+// });
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+// .await
+// .text_document,
+// lsp2::VersionedTextDocumentIdentifier::new(
+// lsp2::Url::from_file_path("/the-root/test2.rs").unwrap(),
+// 1
+// )
+// );
+
+// // Save notifications are reported to all servers.
+// project
+// .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidSaveTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentIdentifier::new(
+// lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
+// )
+// );
+// assert_eq!(
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidSaveTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentIdentifier::new(
+// lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
+// )
+// );
+
+// // Renames are reported only to servers matching the buffer's language.
+// fs.rename(
+// Path::new("/the-root/test2.rs"),
+// Path::new("/the-root/test3.rs"),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test2.rs").unwrap()),
+// );
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),
+// version: 0,
+// text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
+// language_id: Default::default()
+// },
+// );
+
+// rust_buffer2.update(cx, |buffer, cx| {
+// buffer.update_diagnostics(
+// LanguageServerId(0),
+// DiagnosticSet::from_sorted_entries(
+// vec![DiagnosticEntry {
+// diagnostic: Default::default(),
+// range: Anchor::MIN..Anchor::MAX,
+// }],
+// &buffer.snapshot(),
+// ),
+// cx,
+// );
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+// .count(),
+// 1
+// );
+// });
+
+// // When the rename changes the extension of the file, the buffer gets closed on the old
+// // language server and gets opened on the new one.
+// fs.rename(
+// Path::new("/the-root/test3.rs"),
+// Path::new("/the-root/test3.json"),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),),
+// );
+// assert_eq!(
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+// version: 0,
+// text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
+// language_id: Default::default()
+// },
+// );
+
+// // We clear the diagnostics, since the language has changed.
+// rust_buffer2.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+// .count(),
+// 0
+// );
+// });
+
+// // The renamed file's version resets after changing language server.
+// rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
+// assert_eq!(
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+// .await
+// .text_document,
+// lsp2::VersionedTextDocumentIdentifier::new(
+// lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+// 1
+// )
+// );
+
+// // Restart language servers
+// project.update(cx, |project, cx| {
+// project.restart_language_servers_for_buffers(
+// vec![rust_buffer.clone(), json_buffer.clone()],
+// cx,
+// );
+// });
+
+// let mut rust_shutdown_requests = fake_rust_server
+// .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
+// let mut json_shutdown_requests = fake_json_server
+// .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
+// futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
+
+// let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+// let mut fake_json_server = fake_json_servers.next().await.unwrap();
+
+// // Ensure rust document is reopened in new rust language server
+// assert_eq!(
+// fake_rust_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+// version: 0,
+// text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
+// language_id: Default::default()
+// }
+// );
+
+// // Ensure json documents are reopened in new json language server
+// assert_set_eq!(
+// [
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document,
+// ],
+// [
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+// version: 0,
+// text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
+// language_id: Default::default()
+// },
+// lsp2::TextDocumentItem {
+// uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+// version: 0,
+// text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
+// language_id: Default::default()
+// }
+// ]
+// );
+
+// // Close notifications are reported only to servers matching the buffer's language.
+// cx.update(|_| drop(json_buffer));
+// let close_message = lsp2::DidCloseTextDocumentParams {
+// text_document: lsp2::TextDocumentIdentifier::new(
+// lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+// ),
+// };
+// assert_eq!(
+// fake_json_server
+// .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+// .await,
+// close_message,
+// );
+// }
+
+// #[gpui::test]
+// async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// name: "the-language-server",
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/the-root",
+// json!({
+// ".gitignore": "target\n",
+// "src": {
+// "a.rs": "",
+// "b.rs": "",
+// },
+// "target": {
+// "x": {
+// "out": {
+// "x.rs": ""
+// }
+// },
+// "y": {
+// "out": {
+// "y.rs": "",
+// }
+// },
+// "z": {
+// "out": {
+// "z.rs": ""
+// }
+// }
+// }
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+// project.update(cx, |project, _| {
+// project.languages.add(Arc::new(language));
+// });
+// cx.foreground().run_until_parked();
+
+// // Start the language server by opening a buffer with a compatible file extension.
+// let _buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/the-root/src/a.rs", cx)
+// })
+// .await
+// .unwrap();
+
+// // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
+// project.read_with(cx, |project, cx| {
+// let worktree = project.worktrees(cx).next().unwrap();
+// assert_eq!(
+// worktree
+// .read(cx)
+// .snapshot()
+// .entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+// .collect::<Vec<_>>(),
+// &[
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("src"), false),
+// (Path::new("src/a.rs"), false),
+// (Path::new("src/b.rs"), false),
+// (Path::new("target"), true),
+// ]
+// );
+// });
+
+// let prev_read_dir_count = fs.read_dir_call_count();
+
+// // Keep track of the FS events reported to the language server.
+// let fake_server = fake_servers.next().await.unwrap();
+// let file_changes = Arc::new(Mutex::new(Vec::new()));
+// fake_server
+// .request::<lsp2::request::RegisterCapability>(lsp2::RegistrationParams {
+// registrations: vec![lsp2::Registration {
+// id: Default::default(),
+// method: "workspace/didChangeWatchedFiles".to_string(),
+// register_options: serde_json::to_value(
+// lsp::DidChangeWatchedFilesRegistrationOptions {
+// watchers: vec![
+// lsp2::FileSystemWatcher {
+// glob_pattern: lsp2::GlobPattern::String(
+// "/the-root/Cargo.toml".to_string(),
+// ),
+// kind: None,
+// },
+// lsp2::FileSystemWatcher {
+// glob_pattern: lsp2::GlobPattern::String(
+// "/the-root/src/*.{rs,c}".to_string(),
+// ),
+// kind: None,
+// },
+// lsp2::FileSystemWatcher {
+// glob_pattern: lsp2::GlobPattern::String(
+// "/the-root/target/y/**/*.rs".to_string(),
+// ),
+// kind: None,
+// },
+// ],
+// },
+// )
+// .ok(),
+// }],
+// })
+// .await
+// .unwrap();
+// fake_server.handle_notification::<lsp2::notification::DidChangeWatchedFiles, _>({
+// let file_changes = file_changes.clone();
+// move |params, _| {
+// let mut file_changes = file_changes.lock();
+// file_changes.extend(params.changes);
+// file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
+// }
+// });
+
+// cx.foreground().run_until_parked();
+// assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
+// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
+
+// // Now the language server has asked us to watch an ignored directory path,
+// // so we recursively load it.
+// project.read_with(cx, |project, cx| {
+// let worktree = project.worktrees(cx).next().unwrap();
+// assert_eq!(
+// worktree
+// .read(cx)
+// .snapshot()
+// .entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+// .collect::<Vec<_>>(),
+// &[
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("src"), false),
+// (Path::new("src/a.rs"), false),
+// (Path::new("src/b.rs"), false),
+// (Path::new("target"), true),
+// (Path::new("target/x"), true),
+// (Path::new("target/y"), true),
+// (Path::new("target/y/out"), true),
+// (Path::new("target/y/out/y.rs"), true),
+// (Path::new("target/z"), true),
+// ]
+// );
+// });
+
+// // Perform some file system mutations, two of which match the watched patterns,
+// // and one of which does not.
+// fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
+// .await
+// .unwrap();
+// fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
+// .await
+// .unwrap();
+// fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
+// .await
+// .unwrap();
+// fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
+// .await
+// .unwrap();
+// fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
+// .await
+// .unwrap();
+
+// // The language server receives events for the FS mutations that match its watch patterns.
+// cx.foreground().run_until_parked();
+// assert_eq!(
+// &*file_changes.lock(),
+// &[
+// lsp2::FileEvent {
+// uri: lsp2::Url::from_file_path("/the-root/src/b.rs").unwrap(),
+// typ: lsp2::FileChangeType::DELETED,
+// },
+// lsp2::FileEvent {
+// uri: lsp2::Url::from_file_path("/the-root/src/c.rs").unwrap(),
+// typ: lsp2::FileChangeType::CREATED,
+// },
+// lsp2::FileEvent {
+// uri: lsp2::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
+// typ: lsp2::FileChangeType::CREATED,
+// },
+// ]
+// );
+// }
+
+// #[gpui::test]
+// async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.rs": "let a = 1;",
+// "b.rs": "let b = 2;"
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
+
+// let buffer_a = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+// let buffer_b = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+// .await
+// .unwrap();
+
+// project.update(cx, |project, cx| {
+// project
+// .update_diagnostics(
+// LanguageServerId(0),
+// lsp::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/dir/a.rs").unwrap(),
+// version: None,
+// diagnostics: vec![lsp2::Diagnostic {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 4),
+// lsp2::Position::new(0, 5),
+// ),
+// severity: Some(lsp2::DiagnosticSeverity::ERROR),
+// message: "error 1".to_string(),
+// ..Default::default()
+// }],
+// },
+// &[],
+// cx,
+// )
+// .unwrap();
+// project
+// .update_diagnostics(
+// LanguageServerId(0),
+// lsp::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/dir/b.rs").unwrap(),
+// version: None,
+// diagnostics: vec![lsp2::Diagnostic {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 4),
+// lsp2::Position::new(0, 5),
+// ),
+// severity: Some(lsp2::DiagnosticSeverity::WARNING),
+// message: "error 2".to_string(),
+// ..Default::default()
+// }],
+// },
+// &[],
+// cx,
+// )
+// .unwrap();
+// });
+
+// buffer_a.read_with(cx, |buffer, _| {
+// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+// assert_eq!(
+// chunks
+// .iter()
+// .map(|(s, d)| (s.as_str(), *d))
+// .collect::<Vec<_>>(),
+// &[
+// ("let ", None),
+// ("a", Some(DiagnosticSeverity::ERROR)),
+// (" = 1;", None),
+// ]
+// );
+// });
+// buffer_b.read_with(cx, |buffer, _| {
+// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+// assert_eq!(
+// chunks
+// .iter()
+// .map(|(s, d)| (s.as_str(), *d))
+// .collect::<Vec<_>>(),
+// &[
+// ("let ", None),
+// ("b", Some(DiagnosticSeverity::WARNING)),
+// (" = 2;", None),
+// ]
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// "dir": {
+// "a.rs": "let a = 1;",
+// },
+// "other.rs": "let b = c;"
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
+
+// let (worktree, _) = project
+// .update(cx, |project, cx| {
+// project.find_or_create_local_worktree("/root/other.rs", false, cx)
+// })
+// .await
+// .unwrap();
+// let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
+
+// project.update(cx, |project, cx| {
+// project
+// .update_diagnostics(
+// LanguageServerId(0),
+// lsp::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/root/other.rs").unwrap(),
+// version: None,
+// diagnostics: vec![lsp2::Diagnostic {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 8),
+// lsp2::Position::new(0, 9),
+// ),
+// severity: Some(lsp2::DiagnosticSeverity::ERROR),
+// message: "unknown variable 'c'".to_string(),
+// ..Default::default()
+// }],
+// },
+// &[],
+// cx,
+// )
+// .unwrap();
+// });
+
+// let buffer = project
+// .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
+// .await
+// .unwrap();
+// buffer.read_with(cx, |buffer, _| {
+// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+// assert_eq!(
+// chunks
+// .iter()
+// .map(|(s, d)| (s.as_str(), *d))
+// .collect::<Vec<_>>(),
+// &[
+// ("let b = ", None),
+// ("c", Some(DiagnosticSeverity::ERROR)),
+// (";", None),
+// ]
+// );
+// });
+
+// project.read_with(cx, |project, cx| {
+// assert_eq!(project.diagnostic_summaries(cx).next(), None);
+// assert_eq!(project.diagnostic_summary(cx).error_count, 0);
+// });
+// }
+
+// #[gpui::test]
+// async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let progress_token = "the-progress-token";
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// disk_based_diagnostics_progress_token: Some(progress_token.into()),
+// disk_based_diagnostics_sources: vec!["disk".into()],
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.rs": "fn a() { A }",
+// "b.rs": "const y: i32 = 1",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+// let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
+
+// // Cause worktree to start the fake language server
+// let _buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+// .await
+// .unwrap();
+
+// let mut events = subscribe(&project, cx);
+
+// let fake_server = fake_servers.next().await.unwrap();
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::LanguageServerAdded(LanguageServerId(0)),
+// );
+
+// fake_server
+// .start_progress(format!("{}/0", progress_token))
+// .await;
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::DiskBasedDiagnosticsStarted {
+// language_server_id: LanguageServerId(0),
+// }
+// );
+
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/dir/a.rs").unwrap(),
+// version: None,
+// diagnostics: vec![lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+// severity: Some(lsp2::DiagnosticSeverity::ERROR),
+// message: "undefined variable 'A'".to_string(),
+// ..Default::default()
+// }],
+// });
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::DiagnosticsUpdated {
+// language_server_id: LanguageServerId(0),
+// path: (worktree_id, Path::new("a.rs")).into()
+// }
+// );
+
+// fake_server.end_progress(format!("{}/0", progress_token));
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::DiskBasedDiagnosticsFinished {
+// language_server_id: LanguageServerId(0)
+// }
+// );
+
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// buffer.read_with(cx, |buffer, _| {
+// let snapshot = buffer.snapshot();
+// let diagnostics = snapshot
+// .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+// .collect::<Vec<_>>();
+// assert_eq!(
+// diagnostics,
+// &[DiagnosticEntry {
+// range: Point::new(0, 9)..Point::new(0, 10),
+// diagnostic: Diagnostic {
+// severity: lsp2::DiagnosticSeverity::ERROR,
+// message: "undefined variable 'A'".to_string(),
+// group_id: 0,
+// is_primary: true,
+// ..Default::default()
+// }
+// }]
+// )
+// });
+
+// // Ensure publishing empty diagnostics twice only results in one update event.
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/dir/a.rs").unwrap(),
+// version: None,
+// diagnostics: Default::default(),
+// });
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::DiagnosticsUpdated {
+// language_server_id: LanguageServerId(0),
+// path: (worktree_id, Path::new("a.rs")).into()
+// }
+// );
+
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/dir/a.rs").unwrap(),
+// version: None,
+// diagnostics: Default::default(),
+// });
+// cx.foreground().run_until_parked();
+// assert_eq!(futures::poll!(events.next()), Poll::Pending);
+// }
+
+// #[gpui::test]
+// async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let progress_token = "the-progress-token";
+// let mut language = Language::new(
+// LanguageConfig {
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// disk_based_diagnostics_sources: vec!["disk".into()],
+// disk_based_diagnostics_progress_token: Some(progress_token.into()),
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// // Simulate diagnostics starting to update.
+// let fake_server = fake_servers.next().await.unwrap();
+// fake_server.start_progress(progress_token).await;
+
+// // Restart the server before the diagnostics finish updating.
+// project.update(cx, |project, cx| {
+// project.restart_language_servers_for_buffers([buffer], cx);
+// });
+// let mut events = subscribe(&project, cx);
+
+// // Simulate the newly started server sending more diagnostics.
+// let fake_server = fake_servers.next().await.unwrap();
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::LanguageServerAdded(LanguageServerId(1))
+// );
+// fake_server.start_progress(progress_token).await;
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::DiskBasedDiagnosticsStarted {
+// language_server_id: LanguageServerId(1)
+// }
+// );
+// project.read_with(cx, |project, _| {
+// assert_eq!(
+// project
+// .language_servers_running_disk_based_diagnostics()
+// .collect::<Vec<_>>(),
+// [LanguageServerId(1)]
+// );
+// });
+
+// // All diagnostics are considered done, despite the old server's diagnostic
+// // task never completing.
+// fake_server.end_progress(progress_token);
+// assert_eq!(
+// events.next().await.unwrap(),
+// Event::DiskBasedDiagnosticsFinished {
+// language_server_id: LanguageServerId(1)
+// }
+// );
+// project.read_with(cx, |project, _| {
+// assert_eq!(
+// project
+// .language_servers_running_disk_based_diagnostics()
+// .collect::<Vec<_>>(),
+// [LanguageServerId(0); 0]
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// // Publish diagnostics
+// let fake_server = fake_servers.next().await.unwrap();
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: Url::from_file_path("/dir/a.rs").unwrap(),
+// version: None,
+// diagnostics: vec![lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(0, 0), lsp2::Position::new(0, 0)),
+// severity: Some(lsp2::DiagnosticSeverity::ERROR),
+// message: "the message".to_string(),
+// ..Default::default()
+// }],
+// });
+
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, usize>(0..1, false)
+// .map(|entry| entry.diagnostic.message.clone())
+// .collect::<Vec<_>>(),
+// ["the message".to_string()]
+// );
+// });
+// project.read_with(cx, |project, cx| {
+// assert_eq!(
+// project.diagnostic_summary(cx),
+// DiagnosticSummary {
+// error_count: 1,
+// warning_count: 0,
+// }
+// );
+// });
+
+// project.update(cx, |project, cx| {
+// project.restart_language_servers_for_buffers([buffer.clone()], cx);
+// });
+
+// // The diagnostics are cleared.
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, usize>(0..1, false)
+// .map(|entry| entry.diagnostic.message.clone())
+// .collect::<Vec<_>>(),
+// Vec::<String>::new(),
+// );
+// });
+// project.read_with(cx, |project, cx| {
+// assert_eq!(
+// project.diagnostic_summary(cx),
+// DiagnosticSummary {
+// error_count: 0,
+// warning_count: 0,
+// }
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// name: "the-lsp",
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// // Before restarting the server, report diagnostics with an unknown buffer version.
+// let fake_server = fake_servers.next().await.unwrap();
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+// version: Some(10000),
+// diagnostics: Vec::new(),
+// });
+// cx.foreground().run_until_parked();
+
+// project.update(cx, |project, cx| {
+// project.restart_language_servers_for_buffers([buffer.clone()], cx);
+// });
+// let mut fake_server = fake_servers.next().await.unwrap();
+// let notification = fake_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document;
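+// // The restarted server re-opens the buffer starting at version 0, despite the
+// // bogus version reported before the restart.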
+// assert_eq!(notification.version, 0);
+// }
+
+// #[gpui::test]
+// async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut rust = Language::new(
+// LanguageConfig {
+// name: Arc::from("Rust"),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_rust_servers = rust
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// name: "rust-lsp",
+// ..Default::default()
+// }))
+// .await;
+// let mut js = Language::new(
+// LanguageConfig {
+// name: Arc::from("JavaScript"),
+// path_suffixes: vec!["js".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_js_servers = js
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// name: "js-lsp",
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| {
+// project.languages.add(Arc::new(rust));
+// project.languages.add(Arc::new(js));
+// });
+
+// let _rs_buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+// let _js_buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
+// .await
+// .unwrap();
+
+// let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
+// assert_eq!(
+// fake_rust_server_1
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document
+// .uri
+// .as_str(),
+// "file:///dir/a.rs"
+// );
+
+// let mut fake_js_server = fake_js_servers.next().await.unwrap();
+// assert_eq!(
+// fake_js_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document
+// .uri
+// .as_str(),
+// "file:///dir/b.js"
+// );
+
+// // Disable Rust language server, ensuring only that server gets stopped.
+// cx.update(|cx| {
+// cx.update_global(|settings: &mut SettingsStore, cx| {
+// settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+// settings.languages.insert(
+// Arc::from("Rust"),
+// LanguageSettingsContent {
+// enable_language_server: Some(false),
+// ..Default::default()
+// },
+// );
+// });
+// })
+// });
+// fake_rust_server_1
+// .receive_notification::<lsp2::notification::Exit>()
+// .await;
+
+// // Enable Rust and disable JavaScript language servers, ensuring that the
+// // former gets started again and that the latter stops.
+// cx.update(|cx| {
+// cx.update_global(|settings: &mut SettingsStore, cx| {
+// settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+// settings.languages.insert(
+// Arc::from("Rust"),
+// LanguageSettingsContent {
+// enable_language_server: Some(true),
+// ..Default::default()
+// },
+// );
+// settings.languages.insert(
+// Arc::from("JavaScript"),
+// LanguageSettingsContent {
+// enable_language_server: Some(false),
+// ..Default::default()
+// },
+// );
+// });
+// })
+// });
+// let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
+// assert_eq!(
+// fake_rust_server_2
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document
+// .uri
+// .as_str(),
+// "file:///dir/a.rs"
+// );
+// fake_js_server
+// .receive_notification::<lsp2::notification::Exit>()
+// .await;
+// }
+
+// #[gpui::test(iterations = 3)]
+// async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// disk_based_diagnostics_sources: vec!["disk".into()],
+// ..Default::default()
+// }))
+// .await;
+
+// let text = "
+// fn a() { A }
+// fn b() { BB }
+// fn c() { CCC }
+// "
+// .unindent();
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// let mut fake_server = fake_servers.next().await.unwrap();
+// let open_notification = fake_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await;
+
+// // Edit the buffer, moving the content down
+// buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
+// let change_notification_1 = fake_server
+// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+// .await;
+// assert!(change_notification_1.text_document.version > open_notification.text_document.version);
+
+// // Report some diagnostics for the initial version of the buffer
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+// version: Some(open_notification.text_document.version),
+// diagnostics: vec![
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+// severity: Some(DiagnosticSeverity::ERROR),
+// message: "undefined variable 'A'".to_string(),
+// source: Some("disk".to_string()),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
+// severity: Some(DiagnosticSeverity::ERROR),
+// message: "undefined variable 'BB'".to_string(),
+// source: Some("disk".to_string()),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(2, 9), lsp2::Position::new(2, 12)),
+// severity: Some(DiagnosticSeverity::ERROR),
+// source: Some("disk".to_string()),
+// message: "undefined variable 'CCC'".to_string(),
+// ..Default::default()
+// },
+// ],
+// });
+
+// // The diagnostics have moved down since they were created.
+// buffer.next_notification(cx).await;
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
+// .collect::<Vec<_>>(),
+// &[
+// DiagnosticEntry {
+// range: Point::new(3, 9)..Point::new(3, 11),
+// diagnostic: Diagnostic {
+// source: Some("disk".into()),
+// severity: DiagnosticSeverity::ERROR,
+// message: "undefined variable 'BB'".to_string(),
+// is_disk_based: true,
+// group_id: 1,
+// is_primary: true,
+// ..Default::default()
+// },
+// },
+// DiagnosticEntry {
+// range: Point::new(4, 9)..Point::new(4, 12),
+// diagnostic: Diagnostic {
+// source: Some("disk".into()),
+// severity: DiagnosticSeverity::ERROR,
+// message: "undefined variable 'CCC'".to_string(),
+// is_disk_based: true,
+// group_id: 2,
+// is_primary: true,
+// ..Default::default()
+// }
+// }
+// ]
+// );
+// assert_eq!(
+// chunks_with_diagnostics(buffer, 0..buffer.len()),
+// [
+// ("\n\nfn a() { ".to_string(), None),
+// ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+// (" }\nfn b() { ".to_string(), None),
+// ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
+// (" }\nfn c() { ".to_string(), None),
+// ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
+// (" }\n".to_string(), None),
+// ]
+// );
+// assert_eq!(
+// chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
+// [
+// ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
+// (" }\nfn c() { ".to_string(), None),
+// ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
+// ]
+// );
+// });
+
+// // Ensure overlapping diagnostics are highlighted correctly.
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+// version: Some(open_notification.text_document.version),
+// diagnostics: vec![
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+// severity: Some(DiagnosticSeverity::ERROR),
+// message: "undefined variable 'A'".to_string(),
+// source: Some("disk".to_string()),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 12)),
+// severity: Some(DiagnosticSeverity::WARNING),
+// message: "unreachable statement".to_string(),
+// source: Some("disk".to_string()),
+// ..Default::default()
+// },
+// ],
+// });
+
+// buffer.next_notification(cx).await;
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
+// .collect::<Vec<_>>(),
+// &[
+// DiagnosticEntry {
+// range: Point::new(2, 9)..Point::new(2, 12),
+// diagnostic: Diagnostic {
+// source: Some("disk".into()),
+// severity: DiagnosticSeverity::WARNING,
+// message: "unreachable statement".to_string(),
+// is_disk_based: true,
+// group_id: 4,
+// is_primary: true,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(2, 9)..Point::new(2, 10),
+// diagnostic: Diagnostic {
+// source: Some("disk".into()),
+// severity: DiagnosticSeverity::ERROR,
+// message: "undefined variable 'A'".to_string(),
+// is_disk_based: true,
+// group_id: 3,
+// is_primary: true,
+// ..Default::default()
+// },
+// }
+// ]
+// );
+// assert_eq!(
+// chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
+// [
+// ("fn a() { ".to_string(), None),
+// ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+// (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+// ("\n".to_string(), None),
+// ]
+// );
+// assert_eq!(
+// chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
+// [
+// (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+// ("\n".to_string(), None),
+// ]
+// );
+// });
+
+// // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
+// // changes since the last save.
+// buffer.update(cx, |buffer, cx| {
+// buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
+// buffer.edit(
+// [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
+// None,
+// cx,
+// );
+// buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
+// });
+// let change_notification_2 = fake_server
+// .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+// .await;
+// assert!(
+// change_notification_2.text_document.version > change_notification_1.text_document.version
+// );
+
+// // Handle out-of-order diagnostics
+// fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+// uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+// version: Some(change_notification_2.text_document.version),
+// diagnostics: vec![
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
+// severity: Some(DiagnosticSeverity::ERROR),
+// message: "undefined variable 'BB'".to_string(),
+// source: Some("disk".to_string()),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+// severity: Some(DiagnosticSeverity::WARNING),
+// message: "undefined variable 'A'".to_string(),
+// source: Some("disk".to_string()),
+// ..Default::default()
+// },
+// ],
+// });
+
+// buffer.next_notification(cx).await;
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, _| {
+// assert_eq!(
+// buffer
+// .snapshot()
+// .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+// .collect::<Vec<_>>(),
+// &[
+// DiagnosticEntry {
+// range: Point::new(2, 21)..Point::new(2, 22),
+// diagnostic: Diagnostic {
+// source: Some("disk".into()),
+// severity: DiagnosticSeverity::WARNING,
+// message: "undefined variable 'A'".to_string(),
+// is_disk_based: true,
+// group_id: 6,
+// is_primary: true,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(3, 9)..Point::new(3, 14),
+// diagnostic: Diagnostic {
+// source: Some("disk".into()),
+// severity: DiagnosticSeverity::ERROR,
+// message: "undefined variable 'BB'".to_string(),
+// is_disk_based: true,
+// group_id: 5,
+// is_primary: true,
+// ..Default::default()
+// },
+// }
+// ]
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let text = concat!(
+// "let one = ;\n", //
+// "let two = \n",
+// "let three = 3;\n",
+// );
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// project.update(cx, |project, cx| {
+// project
+// .update_buffer_diagnostics(
+// &buffer,
+// LanguageServerId(0),
+// None,
+// vec![
+// DiagnosticEntry {
+// range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::ERROR,
+// message: "syntax error 1".to_string(),
+// ..Default::default()
+// },
+// },
+// DiagnosticEntry {
+// range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::ERROR,
+// message: "syntax error 2".to_string(),
+// ..Default::default()
+// },
+// },
+// ],
+// cx,
+// )
+// .unwrap();
+// });
+
+// // An empty range is extended forward to include the following character.
+// // At the end of a line, an empty range is extended backward to include
+// // the preceding character.
+// buffer.read_with(cx, |buffer, _| {
+// let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+// assert_eq!(
+// chunks
+// .iter()
+// .map(|(s, d)| (s.as_str(), *d))
+// .collect::<Vec<_>>(),
+// &[
+// ("let one = ", None),
+// (";", Some(DiagnosticSeverity::ERROR)),
+// ("\nlet two =", None),
+// (" ", Some(DiagnosticSeverity::ERROR)),
+// ("\nlet three = 3;\n", None)
+// ]
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+
+// project.update(cx, |project, cx| {
+// project
+// .update_diagnostic_entries(
+// LanguageServerId(0),
+// Path::new("/dir/a.rs").to_owned(),
+// None,
+// vec![DiagnosticEntry {
+// range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::ERROR,
+// is_primary: true,
+// message: "syntax error a1".to_string(),
+// ..Default::default()
+// },
+// }],
+// cx,
+// )
+// .unwrap();
+// project
+// .update_diagnostic_entries(
+// LanguageServerId(1),
+// Path::new("/dir/a.rs").to_owned(),
+// None,
+// vec![DiagnosticEntry {
+// range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::ERROR,
+// is_primary: true,
+// message: "syntax error b1".to_string(),
+// ..Default::default()
+// },
+// }],
+// cx,
+// )
+// .unwrap();
+
+// assert_eq!(
+// project.diagnostic_summary(cx),
+// DiagnosticSummary {
+// error_count: 2,
+// warning_count: 0,
+// }
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+// let text = "
+// fn a() {
+// f1();
+// }
+// fn b() {
+// f2();
+// }
+// fn c() {
+// f3();
+// }
+// "
+// .unindent();
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.rs": text.clone(),
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// let mut fake_server = fake_servers.next().await.unwrap();
+// let lsp_document_version = fake_server
+// .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+// .await
+// .text_document
+// .version;
+
+// // Simulate editing the buffer after the language server computes some edits.
+// buffer.update(cx, |buffer, cx| {
+// buffer.edit(
+// [(
+// Point::new(0, 0)..Point::new(0, 0),
+// "// above first function\n",
+// )],
+// None,
+// cx,
+// );
+// buffer.edit(
+// [(
+// Point::new(2, 0)..Point::new(2, 0),
+// " // inside first function\n",
+// )],
+// None,
+// cx,
+// );
+// buffer.edit(
+// [(
+// Point::new(6, 4)..Point::new(6, 4),
+// "// inside second function ",
+// )],
+// None,
+// cx,
+// );
+
+// assert_eq!(
+// buffer.text(),
+// "
+// // above first function
+// fn a() {
+// // inside first function
+// f1();
+// }
+// fn b() {
+// // inside second function f2();
+// }
+// fn c() {
+// f3();
+// }
+// "
+// .unindent()
+// );
+// });
+
+// let edits = project
+// .update(cx, |project, cx| {
+// project.edits_from_lsp(
+// &buffer,
+// vec![
+// // replace body of first function
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 0),
+// lsp2::Position::new(3, 0),
+// ),
+// new_text: "
+// fn a() {
+// f10();
+// }
+// "
+// .unindent(),
+// },
+// // edit inside second function
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(4, 6),
+// lsp2::Position::new(4, 6),
+// ),
+// new_text: "00".into(),
+// },
+// // edit inside third function via two distinct edits
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(7, 5),
+// lsp2::Position::new(7, 5),
+// ),
+// new_text: "4000".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(7, 5),
+// lsp2::Position::new(7, 6),
+// ),
+// new_text: "".into(),
+// },
+// ],
+// LanguageServerId(0),
+// Some(lsp_document_version),
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+
+// buffer.update(cx, |buffer, cx| {
+// for (range, new_text) in edits {
+// buffer.edit([(range, new_text)], None, cx);
+// }
+// assert_eq!(
+// buffer.text(),
+// "
+// // above first function
+// fn a() {
+// // inside first function
+// f10();
+// }
+// fn b() {
+// // inside second function f200();
+// }
+// fn c() {
+// f4000();
+// }
+// "
+// .unindent()
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let text = "
+// use a::b;
+// use a::c;
+
+// fn f() {
+// b();
+// c();
+// }
+// "
+// .unindent();
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.rs": text.clone(),
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// // Simulate the language server sending us a small edit in the form of a very large diff.
+// // Rust-analyzer does this when performing a merge-imports code action.
+// let edits = project
+// .update(cx, |project, cx| {
+// project.edits_from_lsp(
+// &buffer,
+// [
+// // Replace the first use statement without editing the semicolon.
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 4),
+// lsp2::Position::new(0, 8),
+// ),
+// new_text: "a::{b, c}".into(),
+// },
+// // Reinsert the remainder of the file between the semicolon and the final
+// // newline of the file.
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 9),
+// lsp2::Position::new(0, 9),
+// ),
+// new_text: "\n\n".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 9),
+// lsp2::Position::new(0, 9),
+// ),
+// new_text: "
+// fn f() {
+// b();
+// c();
+// }"
+// .unindent(),
+// },
+// // Delete everything after the first newline of the file.
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 0),
+// lsp2::Position::new(7, 0),
+// ),
+// new_text: "".into(),
+// },
+// ],
+// LanguageServerId(0),
+// None,
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+
+// buffer.update(cx, |buffer, cx| {
+// let edits = edits
+// .into_iter()
+// .map(|(range, text)| {
+// (
+// range.start.to_point(buffer)..range.end.to_point(buffer),
+// text,
+// )
+// })
+// .collect::<Vec<_>>();
+
+// assert_eq!(
+// edits,
+// [
+// (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+// (Point::new(1, 0)..Point::new(2, 0), "".into())
+// ]
+// );
+
+// for (range, new_text) in edits {
+// buffer.edit([(range, new_text)], None, cx);
+// }
+// assert_eq!(
+// buffer.text(),
+// "
+// use a::{b, c};
+
+// fn f() {
+// b();
+// c();
+// }
+// "
+// .unindent()
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let text = "
+// use a::b;
+// use a::c;
+
+// fn f() {
+// b();
+// c();
+// }
+// "
+// .unindent();
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.rs": text.clone(),
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// // Simulate the language server sending us edits in a non-ordered fashion,
+// // with ranges sometimes being inverted or pointing to invalid locations.
+// let edits = project
+// .update(cx, |project, cx| {
+// project.edits_from_lsp(
+// &buffer,
+// [
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 9),
+// lsp2::Position::new(0, 9),
+// ),
+// new_text: "\n\n".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 8),
+// lsp2::Position::new(0, 4),
+// ),
+// new_text: "a::{b, c}".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 0),
+// lsp2::Position::new(99, 0),
+// ),
+// new_text: "".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 9),
+// lsp2::Position::new(0, 9),
+// ),
+// new_text: "
+// fn f() {
+// b();
+// c();
+// }"
+// .unindent(),
+// },
+// ],
+// LanguageServerId(0),
+// None,
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+
+// buffer.update(cx, |buffer, cx| {
+// let edits = edits
+// .into_iter()
+// .map(|(range, text)| {
+// (
+// range.start.to_point(buffer)..range.end.to_point(buffer),
+// text,
+// )
+// })
+// .collect::<Vec<_>>();
+
+// assert_eq!(
+// edits,
+// [
+// (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+// (Point::new(1, 0)..Point::new(2, 0), "".into())
+// ]
+// );
+
+// for (range, new_text) in edits {
+// buffer.edit([(range, new_text)], None, cx);
+// }
+// assert_eq!(
+// buffer.text(),
+// "
+// use a::{b, c};
+
+// fn f() {
+// b();
+// c();
+// }
+// "
+// .unindent()
+// );
+// });
+// }
+
+// fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
+// buffer: &Buffer,
+// range: Range<T>,
+// ) -> Vec<(String, Option<DiagnosticSeverity>)> {
+// let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
+// for chunk in buffer.snapshot().chunks(range, true) {
+// if chunks.last().map_or(false, |prev_chunk| {
+// prev_chunk.1 == chunk.diagnostic_severity
+// }) {
+// chunks.last_mut().unwrap().0.push_str(chunk.text);
+// } else {
+// chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
+// }
+// }
+// chunks
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_definition(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.rs": "const fn a() { A }",
+// "b.rs": "const y: i32 = crate::a()",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+// .await
+// .unwrap();
+
+// let fake_server = fake_servers.next().await.unwrap();
+// fake_server.handle_request::<lsp2::request::GotoDefinition, _, _>(|params, _| async move {
+// let params = params.text_document_position_params;
+// assert_eq!(
+// params.text_document.uri.to_file_path().unwrap(),
+// Path::new("/dir/b.rs"),
+// );
+// assert_eq!(params.position, lsp2::Position::new(0, 22));
+
+// Ok(Some(lsp2::GotoDefinitionResponse::Scalar(
+// lsp2::Location::new(
+// lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+// lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+// ),
+// )))
+// });
+
+// let mut definitions = project
+// .update(cx, |project, cx| project.definition(&buffer, 22, cx))
+// .await
+// .unwrap();
+
+// // Assert no new language server started
+// cx.foreground().run_until_parked();
+// assert!(fake_servers.try_next().is_err());
+
+// assert_eq!(definitions.len(), 1);
+// let definition = definitions.pop().unwrap();
+// cx.update(|cx| {
+// let target_buffer = definition.target.buffer.read(cx);
+// assert_eq!(
+// target_buffer
+// .file()
+// .unwrap()
+// .as_local()
+// .unwrap()
+// .abs_path(cx),
+// Path::new("/dir/a.rs"),
+// );
+// assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
+// assert_eq!(
+// list_worktrees(&project, cx),
+// [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
+// );
+
+// drop(definition);
+// });
+// cx.read(|cx| {
+// assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
+// });
+
+// fn list_worktrees<'a>(
+// project: &'a ModelHandle<Project>,
+// cx: &'a AppContext,
+// ) -> Vec<(&'a Path, bool)> {
+// project
+// .read(cx)
+// .worktrees(cx)
+// .map(|worktree| {
+// let worktree = worktree.read(cx);
+// (
+// worktree.as_local().unwrap().abs_path().as_ref(),
+// worktree.is_visible(),
+// )
+// })
+// .collect::<Vec<_>>()
+// }
+// }
+
+// #[gpui::test]
+// async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "TypeScript".into(),
+// path_suffixes: vec!["ts".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_typescript::language_typescript()),
+// );
+// let mut fake_language_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// capabilities: lsp2::ServerCapabilities {
+// completion_provider: Some(lsp2::CompletionOptions {
+// trigger_characters: Some(vec![":".to_string()]),
+// ..Default::default()
+// }),
+// ..Default::default()
+// },
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.ts": "",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+// .await
+// .unwrap();
+
+// let fake_server = fake_language_servers.next().await.unwrap();
+
+// let text = "let a = b.fqn";
+// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+// let completions = project.update(cx, |project, cx| {
+// project.completions(&buffer, text.len(), cx)
+// });
+
+// fake_server
+// .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
+// Ok(Some(lsp2::CompletionResponse::Array(vec![
+// lsp2::CompletionItem {
+// label: "fullyQualifiedName?".into(),
+// insert_text: Some("fullyQualifiedName".into()),
+// ..Default::default()
+// },
+// ])))
+// })
+// .next()
+// .await;
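+// // The server provided no edit range, so the range to replace is inferred from
+// // the partial word behind the cursor ("fqn").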
+// let completions = completions.await.unwrap();
+// let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+// assert_eq!(completions.len(), 1);
+// assert_eq!(completions[0].new_text, "fullyQualifiedName");
+// assert_eq!(
+// completions[0].old_range.to_offset(&snapshot),
+// text.len() - 3..text.len()
+// );
+
+// let text = "let a = \"atoms/cmp\"";
+// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+// let completions = project.update(cx, |project, cx| {
+// project.completions(&buffer, text.len() - 1, cx)
+// });
+
+// fake_server
+// .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
+// Ok(Some(lsp2::CompletionResponse::Array(vec![
+// lsp2::CompletionItem {
+// label: "component".into(),
+// ..Default::default()
+// },
+// ])))
+// })
+// .next()
+// .await;
+// let completions = completions.await.unwrap();
+// let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+// assert_eq!(completions.len(), 1);
+// assert_eq!(completions[0].new_text, "component");
+// assert_eq!(
+// completions[0].old_range.to_offset(&snapshot),
+// text.len() - 4..text.len() - 1
+// );
+// }
+
+// #[gpui::test]
+// async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "TypeScript".into(),
+// path_suffixes: vec!["ts".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_typescript::language_typescript()),
+// );
+// let mut fake_language_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// capabilities: lsp2::ServerCapabilities {
+// completion_provider: Some(lsp2::CompletionOptions {
+// trigger_characters: Some(vec![":".to_string()]),
+// ..Default::default()
+// }),
+// ..Default::default()
+// },
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.ts": "",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+// .await
+// .unwrap();
+
+// let fake_server = fake_language_servers.next().await.unwrap();
+
+// let text = "let a = b.fqn";
+// buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
+// let completions = project.update(cx, |project, cx| {
+// project.completions(&buffer, text.len(), cx)
+// });
+
+// fake_server
+// .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
+// Ok(Some(lsp2::CompletionResponse::Array(vec![
+// lsp2::CompletionItem {
+// label: "fullyQualifiedName?".into(),
+// insert_text: Some("fully\rQualified\r\nName".into()),
+// ..Default::default()
+// },
+// ])))
+// })
+// .next()
+// .await;
+// let completions = completions.await.unwrap();
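+// // Carriage returns in the server's insert text are normalized to plain newlines.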
+// assert_eq!(completions.len(), 1);
+// assert_eq!(completions[0].new_text, "fully\nQualified\nName");
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "TypeScript".into(),
+// path_suffixes: vec!["ts".to_string()],
+// ..Default::default()
+// },
+// None,
+// );
+// let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.ts": "a",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
+// .await
+// .unwrap();
+
+// let fake_server = fake_language_servers.next().await.unwrap();
+
+// // Language server returns code actions that contain commands, and not edits.
+// let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
+// fake_server
+// .handle_request::<lsp2::request::CodeActionRequest, _, _>(|_, _| async move {
+// Ok(Some(vec![
+// lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
+// title: "The code action".into(),
+// command: Some(lsp2::Command {
+// title: "The command".into(),
+// command: "_the/command".into(),
+// arguments: Some(vec![json!("the-argument")]),
+// }),
+// ..Default::default()
+// }),
+// lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
+// title: "two".into(),
+// ..Default::default()
+// }),
+// ]))
+// })
+// .next()
+// .await;
+
+// let action = actions.await.unwrap()[0].clone();
+// let apply = project.update(cx, |project, cx| {
+// project.apply_code_action(buffer.clone(), action, true, cx)
+// });
+
+// // Resolving the code action does not populate its edits. In the absence of
+// // edits, we must execute the given command.
+// fake_server.handle_request::<lsp2::request::CodeActionResolveRequest, _, _>(
+// |action, _| async move { Ok(action) },
+// );
+
+// // While executing the command, the language server sends the editor
+// // a `workspaceEdit` request.
+// fake_server
+// .handle_request::<lsp2::request::ExecuteCommand, _, _>({
+// let fake = fake_server.clone();
+// move |params, _| {
+// assert_eq!(params.command, "_the/command");
+// let fake = fake.clone();
+// async move {
+// fake.server
+// .request::<lsp2::request::ApplyWorkspaceEdit>(
+// lsp2::ApplyWorkspaceEditParams {
+// label: None,
+// edit: lsp2::WorkspaceEdit {
+// changes: Some(
+// [(
+// lsp2::Url::from_file_path("/dir/a.ts").unwrap(),
+// vec![lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 0),
+// lsp2::Position::new(0, 0),
+// ),
+// new_text: "X".into(),
+// }],
+// )]
+// .into_iter()
+// .collect(),
+// ),
+// ..Default::default()
+// },
+// },
+// )
+// .await
+// .unwrap();
+// Ok(Some(json!(null)))
+// }
+// }
+// })
+// .next()
+// .await;
+
+// // Applying the code action returns a project transaction containing the edits
+// // sent by the language server in its `workspaceEdit` request.
+// let transaction = apply.await.unwrap();
+// assert!(transaction.0.contains_key(&buffer));
+// buffer.update(cx, |buffer, cx| {
+// assert_eq!(buffer.text(), "Xa");
+// buffer.undo(cx);
+// assert_eq!(buffer.text(), "a");
+// });
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_save_file(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "file1": "the old contents",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+// .await
+// .unwrap();
+// buffer.update(cx, |buffer, cx| {
+// assert_eq!(buffer.text(), "the old contents");
+// buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
+// });
+
+// project
+// .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+// .await
+// .unwrap();
+
+// let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
+// assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
+// }
+
+// #[gpui::test]
+// async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "file1": "the old contents",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+// .await
+// .unwrap();
+// buffer.update(cx, |buffer, cx| {
+// buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
+// });
+
+// project
+// .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
+// .await
+// .unwrap();
+
+// let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
+// assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
+// }
+
+// #[gpui::test]
+// async fn test_save_as(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({})).await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// let languages = project.read_with(cx, |project, _| project.languages().clone());
+// languages.register(
+// "/some/path",
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".into()],
+// ..Default::default()
+// },
+// tree_sitter_rust::language(),
+// vec![],
+// |_| Default::default(),
+// );
+
+// let buffer = project.update(cx, |project, cx| {
+// project.create_buffer("", None, cx).unwrap()
+// });
+// buffer.update(cx, |buffer, cx| {
+// buffer.edit([(0..0, "abc")], None, cx);
+// assert!(buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+// assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
+// });
+// project
+// .update(cx, |project, cx| {
+// project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
+
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, cx| {
+// assert_eq!(
+// buffer.file().unwrap().full_path(cx),
+// Path::new("dir/file1.rs")
+// );
+// assert!(!buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+// assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
+// });
+
+// let opened_buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/dir/file1.rs", cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(opened_buffer, buffer);
+// }
+
+// #[gpui::test(retries = 5)]
+// async fn test_rescan_and_remote_updates(
+// deterministic: Arc<Deterministic>,
+// cx: &mut gpui::TestAppContext,
+// ) {
+// init_test(cx);
+// cx.foreground().allow_parking();
+
+// let dir = temp_tree(json!({
+// "a": {
+// "file1": "",
+// "file2": "",
+// "file3": "",
+// },
+// "b": {
+// "c": {
+// "file4": "",
+// "file5": "",
+// }
+// }
+// }));
+
+// let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
+// let rpc = project.read_with(cx, |p, _| p.client.clone());
+
+// let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
+// let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
+// async move { buffer.await.unwrap() }
+// };
+// let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
+// project.read_with(cx, |project, cx| {
+// let tree = project.worktrees(cx).next().unwrap();
+// tree.read(cx)
+// .entry_for_path(path)
+// .unwrap_or_else(|| panic!("no entry for path {}", path))
+// .id
+// })
+// };
+
+// let buffer2 = buffer_for_path("a/file2", cx).await;
+// let buffer3 = buffer_for_path("a/file3", cx).await;
+// let buffer4 = buffer_for_path("b/c/file4", cx).await;
+// let buffer5 = buffer_for_path("b/c/file5", cx).await;
+
+// let file2_id = id_for_path("a/file2", cx);
+// let file3_id = id_for_path("a/file3", cx);
+// let file4_id = id_for_path("b/c/file4", cx);
+
+// // Create a remote copy of this worktree.
+// let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
+
+// let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
+
+// let updates = Arc::new(Mutex::new(Vec::new()));
+// tree.update(cx, |tree, cx| {
+// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+// let updates = updates.clone();
+// move |update| {
+// updates.lock().push(update);
+// async { true }
+// }
+// });
+// });
+
+// let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
+// deterministic.run_until_parked();
+
+// cx.read(|cx| {
+// assert!(!buffer2.read(cx).is_dirty());
+// assert!(!buffer3.read(cx).is_dirty());
+// assert!(!buffer4.read(cx).is_dirty());
+// assert!(!buffer5.read(cx).is_dirty());
+// });
+
+// // Rename and delete files and directories.
+// tree.flush_fs_events(cx).await;
+// std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
+// std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
+// std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
+// std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
+// tree.flush_fs_events(cx).await;
+
+// let expected_paths = vec![
+// "a",
+// "a/file1",
+// "a/file2.new",
+// "b",
+// "d",
+// "d/file3",
+// "d/file4",
+// ];
+
+// cx.read(|app| {
+// assert_eq!(
+// tree.read(app)
+// .paths()
+// .map(|p| p.to_str().unwrap())
+// .collect::<Vec<_>>(),
+// expected_paths
+// );
+
+// assert_eq!(id_for_path("a/file2.new", cx), file2_id);
+// assert_eq!(id_for_path("d/file3", cx), file3_id);
+// assert_eq!(id_for_path("d/file4", cx), file4_id);
+
+// assert_eq!(
+// buffer2.read(app).file().unwrap().path().as_ref(),
+// Path::new("a/file2.new")
+// );
+// assert_eq!(
+// buffer3.read(app).file().unwrap().path().as_ref(),
+// Path::new("d/file3")
+// );
+// assert_eq!(
+// buffer4.read(app).file().unwrap().path().as_ref(),
+// Path::new("d/file4")
+// );
+// assert_eq!(
+// buffer5.read(app).file().unwrap().path().as_ref(),
+// Path::new("b/c/file5")
+// );
+
+// assert!(!buffer2.read(app).file().unwrap().is_deleted());
+// assert!(!buffer3.read(app).file().unwrap().is_deleted());
+// assert!(!buffer4.read(app).file().unwrap().is_deleted());
+// assert!(buffer5.read(app).file().unwrap().is_deleted());
+// });
+
+// // Update the remote worktree. Check that it becomes consistent with the
+// // local worktree.
+// deterministic.run_until_parked();
+// remote.update(cx, |remote, _| {
+// for update in updates.lock().drain(..) {
+// remote.as_remote_mut().unwrap().update_from_remote(update);
+// }
+// });
+// deterministic.run_until_parked();
+// remote.read_with(cx, |remote, _| {
+// assert_eq!(
+// remote
+// .paths()
+// .map(|p| p.to_str().unwrap())
+// .collect::<Vec<_>>(),
+// expected_paths
+// );
+// });
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_buffer_identity_across_renames(
+// deterministic: Arc<Deterministic>,
+// cx: &mut gpui::TestAppContext,
+// ) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a": {
+// "file1": "",
+// }
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs, [Path::new("/dir")], cx).await;
+// let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
+// let tree_id = tree.read_with(cx, |tree, _| tree.id());
+
+// let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
+// project.read_with(cx, |project, cx| {
+// let tree = project.worktrees(cx).next().unwrap();
+// tree.read(cx)
+// .entry_for_path(path)
+// .unwrap_or_else(|| panic!("no entry for path {}", path))
+// .id
+// })
+// };
+
+// let dir_id = id_for_path("a", cx);
+// let file_id = id_for_path("a/file1", cx);
+// let buffer = project
+// .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
+// .await
+// .unwrap();
+// buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
+
+// project
+// .update(cx, |project, cx| {
+// project.rename_entry(dir_id, Path::new("b"), cx)
+// })
+// .unwrap()
+// .await
+// .unwrap();
+// deterministic.run_until_parked();
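+// // Entry ids survive the rename, and the open buffer remains clean.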
+// assert_eq!(id_for_path("b", cx), dir_id);
+// assert_eq!(id_for_path("b/file1", cx), file_id);
+// buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
+// }
+
+// #[gpui::test]
+// async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "a.txt": "a-contents",
+// "b.txt": "b-contents",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// // Spawn multiple tasks to open paths, repeating some paths.
+// let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
+// (
+// p.open_local_buffer("/dir/a.txt", cx),
+// p.open_local_buffer("/dir/b.txt", cx),
+// p.open_local_buffer("/dir/a.txt", cx),
+// )
+// });
+
+// let buffer_a_1 = buffer_a_1.await.unwrap();
+// let buffer_a_2 = buffer_a_2.await.unwrap();
+// let buffer_b = buffer_b.await.unwrap();
+// assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
+// assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
+
+// // There is only one buffer per path.
+// let buffer_a_id = buffer_a_1.id();
+// assert_eq!(buffer_a_2.id(), buffer_a_id);
+
+// // Open the same path again while it is still open.
+// drop(buffer_a_1);
+// let buffer_a_3 = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
+// .await
+// .unwrap();
+
+// // There's still only one buffer per path.
+// assert_eq!(buffer_a_3.id(), buffer_a_id);
+// }
+
+// #[gpui::test]
+// async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "file1": "abc",
+// "file2": "def",
+// "file3": "ghi",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// let buffer1 = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+// .await
+// .unwrap();
+// let events = Rc::new(RefCell::new(Vec::new()));
+
+// // initially, the buffer isn't dirty.
+// buffer1.update(cx, |buffer, cx| {
+// cx.subscribe(&buffer1, {
+// let events = events.clone();
+// move |_, _, event, _| match event {
+// BufferEvent::Operation(_) => {}
+// _ => events.borrow_mut().push(event.clone()),
+// }
+// })
+// .detach();
+
+// assert!(!buffer.is_dirty());
+// assert!(events.borrow().is_empty());
+
+// buffer.edit([(1..2, "")], None, cx);
+// });
+
+// // after the first edit, the buffer is dirty, and emits a dirtied event.
+// buffer1.update(cx, |buffer, cx| {
+// assert!(buffer.text() == "ac");
+// assert!(buffer.is_dirty());
+// assert_eq!(
+// *events.borrow(),
+// &[language2::Event::Edited, language2::Event::DirtyChanged]
+// );
+// events.borrow_mut().clear();
+// buffer.did_save(
+// buffer.version(),
+// buffer.as_rope().fingerprint(),
+// buffer.file().unwrap().mtime(),
+// cx,
+// );
+// });
+
+// // after saving, the buffer is not dirty, and emits a saved event.
+// buffer1.update(cx, |buffer, cx| {
+// assert!(!buffer.is_dirty());
+// assert_eq!(*events.borrow(), &[language2::Event::Saved]);
+// events.borrow_mut().clear();
+
+// buffer.edit([(1..1, "B")], None, cx);
+// buffer.edit([(2..2, "D")], None, cx);
+// });
+
+// // after editing again, the buffer is dirty, and emits another dirty event.
+// buffer1.update(cx, |buffer, cx| {
+//         assert_eq!(buffer.text(), "aBDc");
+// assert!(buffer.is_dirty());
+// assert_eq!(
+// *events.borrow(),
+// &[
+// language2::Event::Edited,
+// language2::Event::DirtyChanged,
+// language2::Event::Edited,
+// ],
+// );
+// events.borrow_mut().clear();
+
+// // After restoring the buffer to its previously-saved state,
+// // the buffer is not considered dirty anymore.
+// buffer.edit([(1..3, "")], None, cx);
+//         assert_eq!(buffer.text(), "ac");
+// assert!(!buffer.is_dirty());
+// });
+
+// assert_eq!(
+// *events.borrow(),
+// &[language2::Event::Edited, language2::Event::DirtyChanged]
+// );
+
+// // When a file is deleted, the buffer is considered dirty.
+// let events = Rc::new(RefCell::new(Vec::new()));
+// let buffer2 = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
+// .await
+// .unwrap();
+// buffer2.update(cx, |_, cx| {
+// cx.subscribe(&buffer2, {
+// let events = events.clone();
+// move |_, _, event, _| events.borrow_mut().push(event.clone())
+// })
+// .detach();
+// });
+
+// fs.remove_file("/dir/file2".as_ref(), Default::default())
+// .await
+// .unwrap();
+// cx.foreground().run_until_parked();
+// buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
+// assert_eq!(
+// *events.borrow(),
+// &[
+// language2::Event::DirtyChanged,
+// language2::Event::FileHandleChanged
+// ]
+// );
+
+// // When a file is already dirty when deleted, we don't emit a Dirtied event.
+// let events = Rc::new(RefCell::new(Vec::new()));
+// let buffer3 = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
+// .await
+// .unwrap();
+// buffer3.update(cx, |_, cx| {
+// cx.subscribe(&buffer3, {
+// let events = events.clone();
+// move |_, _, event, _| events.borrow_mut().push(event.clone())
+// })
+// .detach();
+// });
+
+// buffer3.update(cx, |buffer, cx| {
+// buffer.edit([(0..0, "x")], None, cx);
+// });
+// events.borrow_mut().clear();
+// fs.remove_file("/dir/file3".as_ref(), Default::default())
+// .await
+// .unwrap();
+// cx.foreground().run_until_parked();
+// assert_eq!(*events.borrow(), &[language2::Event::FileHandleChanged]);
+// cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
+// }
+
+// #[gpui2::test]
+// async fn test_buffer_file_changes_on_disk(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let initial_contents = "aaa\nbbbbb\nc\n";
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "the-file": initial_contents,
+// }),
+// )
+// .await;
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
+// .await
+// .unwrap();
+
+// let anchors = (0..3)
+// .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
+// .collect::<Vec<_>>();
+
+// // Change the file on disk, adding two new lines of text, and removing
+// // one line.
+// buffer.read_with(cx, |buffer, _| {
+// assert!(!buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+// });
+// let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
+// fs.save(
+// "/dir/the-file".as_ref(),
+// &new_contents.into(),
+// LineEnding::Unix,
+// )
+// .await
+// .unwrap();
+
+// // Because the buffer was not modified, it is reloaded from disk. Its
+// // contents are edited according to the diff between the old and new
+// // file contents.
+// cx.foreground().run_until_parked();
+// buffer.update(cx, |buffer, _| {
+// assert_eq!(buffer.text(), new_contents);
+// assert!(!buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+
+// let anchor_positions = anchors
+// .iter()
+// .map(|anchor| anchor.to_point(&*buffer))
+// .collect::<Vec<_>>();
+// assert_eq!(
+// anchor_positions,
+// [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
+// );
+// });
+
+// // Modify the buffer
+// buffer.update(cx, |buffer, cx| {
+// buffer.edit([(0..0, " ")], None, cx);
+// assert!(buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+// });
+
+// // Change the file on disk again, adding blank lines to the beginning.
+// fs.save(
+// "/dir/the-file".as_ref(),
+// &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
+// LineEnding::Unix,
+// )
+// .await
+// .unwrap();
+
+// // Because the buffer is modified, it doesn't reload from disk, but is
+// // marked as having a conflict.
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, _| {
+// assert!(buffer.has_conflict());
+// });
+// }
+
+// #[gpui2::test]
+// async fn test_buffer_line_endings(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "file1": "a\nb\nc\n",
+// "file2": "one\r\ntwo\r\nthree\r\n",
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+// let buffer1 = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
+// .await
+// .unwrap();
+// let buffer2 = project
+// .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
+// .await
+// .unwrap();
+
+// buffer1.read_with(cx, |buffer, _| {
+// assert_eq!(buffer.text(), "a\nb\nc\n");
+// assert_eq!(buffer.line_ending(), LineEnding::Unix);
+// });
+// buffer2.read_with(cx, |buffer, _| {
+// assert_eq!(buffer.text(), "one\ntwo\nthree\n");
+// assert_eq!(buffer.line_ending(), LineEnding::Windows);
+// });
+
+// // Change a file's line endings on disk from unix to windows. The buffer's
+// // state updates correctly.
+// fs.save(
+// "/dir/file1".as_ref(),
+// &"aaa\nb\nc\n".into(),
+// LineEnding::Windows,
+// )
+// .await
+// .unwrap();
+// cx.foreground().run_until_parked();
+// buffer1.read_with(cx, |buffer, _| {
+// assert_eq!(buffer.text(), "aaa\nb\nc\n");
+// assert_eq!(buffer.line_ending(), LineEnding::Windows);
+// });
+
+// // Save a file with windows line endings. The file is written correctly.
+// buffer2.update(cx, |buffer, cx| {
+// buffer.set_text("one\ntwo\nthree\nfour\n", cx);
+// });
+// project
+// .update(cx, |project, cx| project.save_buffer(buffer2, cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// fs.load("/dir/file2".as_ref()).await.unwrap(),
+// "one\r\ntwo\r\nthree\r\nfour\r\n",
+// );
+// }
+
+// #[gpui2::test]
+// async fn test_grouped_diagnostics(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/the-dir",
+// json!({
+// "a.rs": "
+// fn foo(mut v: Vec<usize>) {
+// for x in &v {
+// v.push(1);
+// }
+// }
+// "
+// .unindent(),
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
+// .await
+// .unwrap();
+
+//     let buffer_uri = lsp2::Url::from_file_path("/the-dir/a.rs").unwrap();
+//     let message = lsp2::PublishDiagnosticsParams {
+// uri: buffer_uri.clone(),
+// diagnostics: vec![
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
+// severity: Some(DiagnosticSeverity::WARNING),
+// message: "error 1".to_string(),
+//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+//                     location: lsp2::Location {
+// uri: buffer_uri.clone(),
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 8),
+// lsp2::Position::new(1, 9),
+// ),
+// },
+// message: "error 1 hint 1".to_string(),
+// }]),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
+// severity: Some(DiagnosticSeverity::HINT),
+// message: "error 1 hint 1".to_string(),
+//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+//                     location: lsp2::Location {
+// uri: buffer_uri.clone(),
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 8),
+// lsp2::Position::new(1, 9),
+// ),
+// },
+// message: "original diagnostic".to_string(),
+// }]),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(2, 8), lsp2::Position::new(2, 17)),
+// severity: Some(DiagnosticSeverity::ERROR),
+// message: "error 2".to_string(),
+// related_information: Some(vec![
+//                     lsp2::DiagnosticRelatedInformation {
+//                         location: lsp2::Location {
+// uri: buffer_uri.clone(),
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 13),
+// lsp2::Position::new(1, 15),
+// ),
+// },
+// message: "error 2 hint 1".to_string(),
+// },
+//                     lsp2::DiagnosticRelatedInformation {
+//                         location: lsp2::Location {
+// uri: buffer_uri.clone(),
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 13),
+// lsp2::Position::new(1, 15),
+// ),
+// },
+// message: "error 2 hint 2".to_string(),
+// },
+// ]),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
+// severity: Some(DiagnosticSeverity::HINT),
+// message: "error 2 hint 1".to_string(),
+//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+//                     location: lsp2::Location {
+// uri: buffer_uri.clone(),
+// range: lsp2::Range::new(
+// lsp2::Position::new(2, 8),
+// lsp2::Position::new(2, 17),
+// ),
+// },
+// message: "original diagnostic".to_string(),
+// }]),
+// ..Default::default()
+// },
+// lsp2::Diagnostic {
+// range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
+// severity: Some(DiagnosticSeverity::HINT),
+// message: "error 2 hint 2".to_string(),
+//                 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
+//                     location: lsp2::Location {
+// uri: buffer_uri,
+// range: lsp2::Range::new(
+// lsp2::Position::new(2, 8),
+// lsp2::Position::new(2, 17),
+// ),
+// },
+// message: "original diagnostic".to_string(),
+// }]),
+// ..Default::default()
+// },
+// ],
+// version: None,
+// };
+
+// project
+// .update(cx, |p, cx| {
+// p.update_diagnostics(LanguageServerId(0), message, &[], cx)
+// })
+// .unwrap();
+// let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
+
+// assert_eq!(
+// buffer
+// .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+// .collect::<Vec<_>>(),
+// &[
+// DiagnosticEntry {
+// range: Point::new(1, 8)..Point::new(1, 9),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::WARNING,
+// message: "error 1".to_string(),
+// group_id: 1,
+// is_primary: true,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(1, 8)..Point::new(1, 9),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::HINT,
+// message: "error 1 hint 1".to_string(),
+// group_id: 1,
+// is_primary: false,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(1, 13)..Point::new(1, 15),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::HINT,
+// message: "error 2 hint 1".to_string(),
+// group_id: 0,
+// is_primary: false,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(1, 13)..Point::new(1, 15),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::HINT,
+// message: "error 2 hint 2".to_string(),
+// group_id: 0,
+// is_primary: false,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(2, 8)..Point::new(2, 17),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::ERROR,
+// message: "error 2".to_string(),
+// group_id: 0,
+// is_primary: true,
+// ..Default::default()
+// }
+// }
+// ]
+// );
+
+// assert_eq!(
+// buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
+// &[
+// DiagnosticEntry {
+// range: Point::new(1, 13)..Point::new(1, 15),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::HINT,
+// message: "error 2 hint 1".to_string(),
+// group_id: 0,
+// is_primary: false,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(1, 13)..Point::new(1, 15),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::HINT,
+// message: "error 2 hint 2".to_string(),
+// group_id: 0,
+// is_primary: false,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(2, 8)..Point::new(2, 17),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::ERROR,
+// message: "error 2".to_string(),
+// group_id: 0,
+// is_primary: true,
+// ..Default::default()
+// }
+// }
+// ]
+// );
+
+// assert_eq!(
+// buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
+// &[
+// DiagnosticEntry {
+// range: Point::new(1, 8)..Point::new(1, 9),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::WARNING,
+// message: "error 1".to_string(),
+// group_id: 1,
+// is_primary: true,
+// ..Default::default()
+// }
+// },
+// DiagnosticEntry {
+// range: Point::new(1, 8)..Point::new(1, 9),
+// diagnostic: Diagnostic {
+// severity: DiagnosticSeverity::HINT,
+// message: "error 1 hint 1".to_string(),
+// group_id: 1,
+// is_primary: false,
+// ..Default::default()
+// }
+// },
+// ]
+// );
+// }
+
+// #[gpui2::test]
+// async fn test_rename(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let mut language = Language::new(
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// );
+// let mut fake_servers = language
+// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+// capabilities: lsp2::ServerCapabilities {
+// rename_provider: Some(lsp2::OneOf::Right(lsp2::RenameOptions {
+// prepare_provider: Some(true),
+// work_done_progress_options: Default::default(),
+// })),
+// ..Default::default()
+// },
+// ..Default::default()
+// }))
+// .await;
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "one.rs": "const ONE: usize = 1;",
+// "two.rs": "const TWO: usize = one::ONE + one::ONE;"
+// }),
+// )
+// .await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+// let buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/dir/one.rs", cx)
+// })
+// .await
+// .unwrap();
+
+// let fake_server = fake_servers.next().await.unwrap();
+
+// let response = project.update(cx, |project, cx| {
+// project.prepare_rename(buffer.clone(), 7, cx)
+// });
+// fake_server
+// .handle_request::<lsp2::request::PrepareRenameRequest, _, _>(|params, _| async move {
+// assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
+// assert_eq!(params.position, lsp2::Position::new(0, 7));
+// Ok(Some(lsp2::PrepareRenameResponse::Range(lsp2::Range::new(
+// lsp2::Position::new(0, 6),
+// lsp2::Position::new(0, 9),
+// ))))
+// })
+// .next()
+// .await
+// .unwrap();
+// let range = response.await.unwrap().unwrap();
+// let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
+// assert_eq!(range, 6..9);
+
+// let response = project.update(cx, |project, cx| {
+// project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
+// });
+// fake_server
+// .handle_request::<lsp2::request::Rename, _, _>(|params, _| async move {
+// assert_eq!(
+// params.text_document_position.text_document.uri.as_str(),
+// "file:///dir/one.rs"
+// );
+// assert_eq!(
+// params.text_document_position.position,
+// lsp2::Position::new(0, 7)
+// );
+// assert_eq!(params.new_name, "THREE");
+//             Ok(Some(lsp2::WorkspaceEdit {
+// changes: Some(
+// [
+// (
+// lsp2::Url::from_file_path("/dir/one.rs").unwrap(),
+// vec![lsp2::TextEdit::new(
+// lsp2::Range::new(
+// lsp2::Position::new(0, 6),
+// lsp2::Position::new(0, 9),
+// ),
+// "THREE".to_string(),
+// )],
+// ),
+// (
+// lsp2::Url::from_file_path("/dir/two.rs").unwrap(),
+// vec![
+// lsp2::TextEdit::new(
+// lsp2::Range::new(
+// lsp2::Position::new(0, 24),
+// lsp2::Position::new(0, 27),
+// ),
+// "THREE".to_string(),
+// ),
+// lsp2::TextEdit::new(
+// lsp2::Range::new(
+// lsp2::Position::new(0, 35),
+// lsp2::Position::new(0, 38),
+// ),
+// "THREE".to_string(),
+// ),
+// ],
+// ),
+// ]
+// .into_iter()
+// .collect(),
+// ),
+// ..Default::default()
+// }))
+// })
+// .next()
+// .await
+// .unwrap();
+// let mut transaction = response.await.unwrap().0;
+// assert_eq!(transaction.len(), 2);
+// assert_eq!(
+// transaction
+// .remove_entry(&buffer)
+// .unwrap()
+// .0
+// .read_with(cx, |buffer, _| buffer.text()),
+// "const THREE: usize = 1;"
+// );
+// assert_eq!(
+// transaction
+// .into_keys()
+// .next()
+// .unwrap()
+// .read_with(cx, |buffer, _| buffer.text()),
+// "const TWO: usize = one::THREE + one::THREE;"
+// );
+// }
+
+// #[gpui2::test]
+// async fn test_search(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "one.rs": "const ONE: usize = 1;",
+// "two.rs": "const TWO: usize = one::ONE + one::ONE;",
+// "three.rs": "const THREE: usize = one::ONE + two::TWO;",
+// "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
+// }),
+// )
+// .await;
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("two.rs".to_string(), vec![6..9]),
+// ("three.rs".to_string(), vec![37..40])
+// ])
+// );
+
+// let buffer_4 = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/dir/four.rs", cx)
+// })
+// .await
+// .unwrap();
+// buffer_4.update(cx, |buffer, cx| {
+// let text = "two::TWO";
+// buffer.edit([(20..28, text), (31..43, text)], None, cx);
+// });
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("two.rs".to_string(), vec![6..9]),
+// ("three.rs".to_string(), vec![37..40]),
+// ("four.rs".to_string(), vec![25..28, 36..39])
+// ])
+// );
+// }
+
+// #[gpui2::test]
+// async fn test_search_with_inclusions(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let search_query = "file";
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "one.rs": r#"// Rust file one"#,
+// "one.ts": r#"// TypeScript file one"#,
+// "two.rs": r#"// Rust file two"#,
+// "two.ts": r#"// TypeScript file two"#,
+// }),
+// )
+// .await;
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// assert!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![PathMatcher::new("*.odd").unwrap()],
+// Vec::new()
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap()
+// .is_empty(),
+// "If no inclusions match, no files should be returned"
+// );
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![PathMatcher::new("*.rs").unwrap()],
+// Vec::new()
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.rs".to_string(), vec![8..12]),
+// ("two.rs".to_string(), vec![8..12]),
+// ]),
+// "Rust only search should give only Rust files"
+// );
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap(),
+// ],
+// Vec::new()
+// ).unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.ts".to_string(), vec![14..18]),
+// ("two.ts".to_string(), vec![14..18]),
+// ]),
+// "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
+// );
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![
+// PathMatcher::new("*.rs").unwrap(),
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap(),
+// ],
+// Vec::new()
+// ).unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.rs".to_string(), vec![8..12]),
+// ("one.ts".to_string(), vec![14..18]),
+// ("two.rs".to_string(), vec![8..12]),
+// ("two.ts".to_string(), vec![14..18]),
+// ]),
+// "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
+// );
+// }
+
+// #[gpui2::test]
+// async fn test_search_with_exclusions(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let search_query = "file";
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "one.rs": r#"// Rust file one"#,
+// "one.ts": r#"// TypeScript file one"#,
+// "two.rs": r#"// Rust file two"#,
+// "two.ts": r#"// TypeScript file two"#,
+// }),
+// )
+// .await;
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// Vec::new(),
+// vec![PathMatcher::new("*.odd").unwrap()],
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.rs".to_string(), vec![8..12]),
+// ("one.ts".to_string(), vec![14..18]),
+// ("two.rs".to_string(), vec![8..12]),
+// ("two.ts".to_string(), vec![14..18]),
+// ]),
+// "If no exclusions match, all files should be returned"
+// );
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// Vec::new(),
+// vec![PathMatcher::new("*.rs").unwrap()],
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.ts".to_string(), vec![14..18]),
+// ("two.ts".to_string(), vec![14..18]),
+// ]),
+// "Rust exclusion search should give only TypeScript files"
+// );
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// Vec::new(),
+// vec![
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap(),
+// ],
+// ).unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.rs".to_string(), vec![8..12]),
+// ("two.rs".to_string(), vec![8..12]),
+// ]),
+// "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
+// );
+
+// assert!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// Vec::new(),
+// vec![
+// PathMatcher::new("*.rs").unwrap(),
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap(),
+// ],
+// ).unwrap(),
+// cx
+// )
+// .await
+// .unwrap().is_empty(),
+// "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
+// );
+// }
+
+// #[gpui2::test]
+// async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui2::TestAppContext) {
+// init_test(cx);
+
+// let search_query = "file";
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/dir",
+// json!({
+// "one.rs": r#"// Rust file one"#,
+// "one.ts": r#"// TypeScript file one"#,
+// "two.rs": r#"// Rust file two"#,
+// "two.ts": r#"// TypeScript file two"#,
+// }),
+// )
+// .await;
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// assert!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![PathMatcher::new("*.odd").unwrap()],
+// vec![PathMatcher::new("*.odd").unwrap()],
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap()
+// .is_empty(),
+// "If both no exclusions and inclusions match, exclusions should win and return nothing"
+// );
+
+// assert!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![PathMatcher::new("*.ts").unwrap()],
+// vec![PathMatcher::new("*.ts").unwrap()],
+// ).unwrap(),
+// cx
+// )
+// .await
+// .unwrap()
+// .is_empty(),
+// "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
+// );
+
+// assert!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap()
+// ],
+// vec![
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap()
+// ],
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap()
+// .is_empty(),
+// "Non-matching inclusions and exclusions should not change that."
+// );
+
+// assert_eq!(
+// search(
+// &project,
+// SearchQuery::text(
+// search_query,
+// false,
+// true,
+// vec![
+// PathMatcher::new("*.ts").unwrap(),
+// PathMatcher::new("*.odd").unwrap()
+// ],
+// vec![
+// PathMatcher::new("*.rs").unwrap(),
+// PathMatcher::new("*.odd").unwrap()
+// ],
+// )
+// .unwrap(),
+// cx
+// )
+// .await
+// .unwrap(),
+// HashMap::from_iter([
+// ("one.ts".to_string(), vec![14..18]),
+// ("two.ts".to_string(), vec![14..18]),
+// ]),
+// "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
+// );
+// }
+
+// #[test]
+// fn test_glob_literal_prefix() {
+// assert_eq!(glob_literal_prefix("**/*.js"), "");
+// assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
+// assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
+// assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
+// }
+
+// async fn search(
+// project: &ModelHandle<Project>,
+// query: SearchQuery,
+//     cx: &mut gpui2::TestAppContext,
+// ) -> Result<HashMap<String, Vec<Range<usize>>>> {
+// let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
+// let mut result = HashMap::default();
+// while let Some((buffer, range)) = search_rx.next().await {
+// result.entry(buffer).or_insert(range);
+// }
+// Ok(result
+// .into_iter()
+// .map(|(buffer, ranges)| {
+// buffer.read_with(cx, |buffer, _| {
+// let path = buffer.file().unwrap().path().to_string_lossy().to_string();
+// let ranges = ranges
+// .into_iter()
+// .map(|range| range.to_offset(buffer))
+// .collect::<Vec<_>>();
+// (path, ranges)
+// })
+// })
+// .collect())
+// }
+
+// fn init_test(cx: &mut gpui2::TestAppContext) {
+// cx.foreground().forbid_parking();
+
+// cx.update(|cx| {
+// cx.set_global(SettingsStore::test(cx));
+// language2::init(cx);
+// Project::init_settings(cx);
+// });
+// }
@@ -0,0 +1,458 @@
+use aho_corasick::{AhoCorasick, AhoCorasickBuilder};
+use anyhow::{Context, Result};
+use client2::proto;
+use globset::{Glob, GlobMatcher};
+use itertools::Itertools;
+use language2::{char_kind, BufferSnapshot};
+use regex::{Regex, RegexBuilder};
+use smol::future::yield_now;
+use std::{
+ borrow::Cow,
+ io::{BufRead, BufReader, Read},
+ ops::Range,
+ path::{Path, PathBuf},
+ sync::Arc,
+};
+
+#[derive(Clone, Debug)]
+pub struct SearchInputs {
+ query: Arc<str>,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
+}
+
+impl SearchInputs {
+ pub fn as_str(&self) -> &str {
+ self.query.as_ref()
+ }
+ pub fn files_to_include(&self) -> &[PathMatcher] {
+ &self.files_to_include
+ }
+ pub fn files_to_exclude(&self) -> &[PathMatcher] {
+ &self.files_to_exclude
+ }
+}
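+
+/// A project-wide search: either a literal text query, backed by an
+/// Aho-Corasick automaton, or a regex query. Both variants carry optional
+/// replacement text plus the shared path include/exclude filters.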
+#[derive(Clone, Debug)]
+pub enum SearchQuery {
+ Text {
+ search: Arc<AhoCorasick>,
+ replacement: Option<String>,
+ whole_word: bool,
+ case_sensitive: bool,
+ inner: SearchInputs,
+ },
+
+ Regex {
+ regex: Regex,
+ replacement: Option<String>,
+ multiline: bool,
+ whole_word: bool,
+ case_sensitive: bool,
+ inner: SearchInputs,
+ },
+}
+
+#[derive(Clone, Debug)]
+pub struct PathMatcher {
+ maybe_path: PathBuf,
+ glob: GlobMatcher,
+}
+
+impl std::fmt::Display for PathMatcher {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ self.maybe_path.to_string_lossy().fmt(f)
+ }
+}
+
+impl PathMatcher {
+ pub fn new(maybe_glob: &str) -> Result<Self, globset::Error> {
+ Ok(PathMatcher {
+            glob: Glob::new(maybe_glob)?.compile_matcher(),
+ maybe_path: PathBuf::from(maybe_glob),
+ })
+ }
+
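+    /// A path matches if it starts with the literal path this matcher was
+    /// created from, or if the compiled glob matches it. A hedged sketch of
+    /// the intended behavior (not a doctest):
+    ///
+    /// ```ignore
+    /// let glob = PathMatcher::new("*.rs").unwrap();
+    /// assert!(glob.is_match("src/main.rs")); // via the glob
+    /// let literal = PathMatcher::new("node_modules").unwrap();
+    /// assert!(literal.is_match("node_modules/lodash")); // via the literal prefix
+    /// ```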
+ pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
+ other.as_ref().starts_with(&self.maybe_path) || self.glob.is_match(other)
+ }
+}
+
+impl SearchQuery {
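+    /// Builds a plain-text query backed by Aho-Corasick. A minimal usage
+    /// sketch, mirroring the commented-out `test_search` above (not a doctest):
+    ///
+    /// ```ignore
+    /// let query = SearchQuery::text("TWO", false, true, Vec::new(), Vec::new())?;
+    /// assert!(!query.is_regex() && query.case_sensitive());
+    /// ```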
+ pub fn text(
+ query: impl ToString,
+ whole_word: bool,
+ case_sensitive: bool,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
+ ) -> Result<Self> {
+ let query = query.to_string();
+ let search = AhoCorasickBuilder::new()
+ .ascii_case_insensitive(!case_sensitive)
+ .build(&[&query])?;
+ let inner = SearchInputs {
+ query: query.into(),
+ files_to_exclude,
+ files_to_include,
+ };
+ Ok(Self::Text {
+ search: Arc::new(search),
+ replacement: None,
+ whole_word,
+ case_sensitive,
+ inner,
+ })
+ }
+
+ pub fn regex(
+ query: impl ToString,
+ whole_word: bool,
+ case_sensitive: bool,
+ files_to_include: Vec<PathMatcher>,
+ files_to_exclude: Vec<PathMatcher>,
+ ) -> Result<Self> {
+ let mut query = query.to_string();
+ let initial_query = Arc::from(query.as_str());
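+        // For whole-word matching, wrap the pattern in `\b` word boundaries,
+        // so e.g. "TWO" matches `TWO` but not the `TWO` inside `NETWORK`.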
+ if whole_word {
+ let mut word_query = String::new();
+ word_query.push_str("\\b");
+ word_query.push_str(&query);
+ word_query.push_str("\\b");
+            query = word_query;
+ }
+
+ let multiline = query.contains('\n') || query.contains("\\n");
+ let regex = RegexBuilder::new(&query)
+ .case_insensitive(!case_sensitive)
+ .multi_line(multiline)
+ .build()?;
+ let inner = SearchInputs {
+ query: initial_query,
+ files_to_exclude,
+ files_to_include,
+ };
+ Ok(Self::Regex {
+ regex,
+ replacement: None,
+ multiline,
+ whole_word,
+ case_sensitive,
+ inner,
+ })
+ }
+
+ pub fn from_proto(message: proto::SearchProject) -> Result<Self> {
+ if message.regex {
+ Self::regex(
+ message.query,
+ message.whole_word,
+ message.case_sensitive,
+ deserialize_path_matches(&message.files_to_include)?,
+ deserialize_path_matches(&message.files_to_exclude)?,
+ )
+ } else {
+ Self::text(
+ message.query,
+ message.whole_word,
+ message.case_sensitive,
+ deserialize_path_matches(&message.files_to_include)?,
+ deserialize_path_matches(&message.files_to_exclude)?,
+ )
+ }
+ }
+ pub fn with_replacement(mut self, new_replacement: String) -> Self {
+ match self {
+ Self::Text {
+ ref mut replacement,
+ ..
+ }
+ | Self::Regex {
+ ref mut replacement,
+ ..
+ } => {
+ *replacement = Some(new_replacement);
+ self
+ }
+ }
+ }
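+    /// Serializes the query for the wire. The include/exclude matchers are
+    /// flattened into comma-separated strings; `deserialize_path_matches`
+    /// (at the bottom of this file) is the inverse used by `from_proto`.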
+ pub fn to_proto(&self, project_id: u64) -> proto::SearchProject {
+ proto::SearchProject {
+ project_id,
+ query: self.as_str().to_string(),
+ regex: self.is_regex(),
+ whole_word: self.whole_word(),
+ case_sensitive: self.case_sensitive(),
+ files_to_include: self
+ .files_to_include()
+ .iter()
+ .map(|matcher| matcher.to_string())
+ .join(","),
+ files_to_exclude: self
+ .files_to_exclude()
+ .iter()
+ .map(|matcher| matcher.to_string())
+ .join(","),
+ }
+ }
+
+ pub fn detect<T: Read>(&self, stream: T) -> Result<bool> {
+ if self.as_str().is_empty() {
+ return Ok(false);
+ }
+
+ match self {
+ Self::Text { search, .. } => {
+ let mat = search.stream_find_iter(stream).next();
+ match mat {
+ Some(Ok(_)) => Ok(true),
+ Some(Err(err)) => Err(err.into()),
+ None => Ok(false),
+ }
+ }
+ Self::Regex {
+ regex, multiline, ..
+ } => {
+ let mut reader = BufReader::new(stream);
+ if *multiline {
+ let mut text = String::new();
+ if let Err(err) = reader.read_to_string(&mut text) {
+ Err(err.into())
+ } else {
+ Ok(regex.find(&text).is_some())
+ }
+ } else {
+ for line in reader.lines() {
+ let line = line?;
+ if regex.find(&line).is_some() {
+ return Ok(true);
+ }
+ }
+ Ok(false)
+ }
+ }
+ }
+ }
+ /// Returns the replacement text for this `SearchQuery`.
+ pub fn replacement(&self) -> Option<&str> {
+ match self {
+ SearchQuery::Text { replacement, .. } | SearchQuery::Regex { replacement, .. } => {
+ replacement.as_deref()
+ }
+ }
+ }
+    /// Replaces a search hit if a replacement is set. `text` is assumed to be
+    /// exactly one match of this `SearchQuery`, with no leftovers on either side.
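+    ///
+    /// A hedged, illustrative sketch of the regex case (capture groups work
+    /// as in the `regex` crate; not a doctest):
+    ///
+    /// ```ignore
+    /// let query = SearchQuery::regex(r"(\w+)\.rs", false, true, Vec::new(), Vec::new())?
+    ///     .with_replacement("$1.txt".to_string());
+    /// assert_eq!(query.replacement_for("main.rs").as_deref(), Some("main.txt"));
+    /// ```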
+ pub fn replacement_for<'a>(&self, text: &'a str) -> Option<Cow<'a, str>> {
+ match self {
+ SearchQuery::Text { replacement, .. } => replacement.clone().map(Cow::from),
+ SearchQuery::Regex {
+ regex, replacement, ..
+ } => {
+ if let Some(replacement) = replacement {
+ Some(regex.replace(text, replacement))
+ } else {
+ None
+ }
+ }
+ }
+ }
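+    /// Finds every match in `buffer`, or only within `subrange` when given;
+    /// the returned offsets are relative to the start of the searched region.
+    /// Long scans cooperatively yield every `YIELD_INTERVAL` candidates so
+    /// they don't starve the rest of the executor.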
+ pub async fn search(
+ &self,
+ buffer: &BufferSnapshot,
+ subrange: Option<Range<usize>>,
+ ) -> Vec<Range<usize>> {
+ const YIELD_INTERVAL: usize = 20000;
+
+ if self.as_str().is_empty() {
+ return Default::default();
+ }
+
+ let range_offset = subrange.as_ref().map(|r| r.start).unwrap_or(0);
+ let rope = if let Some(range) = subrange {
+ buffer.as_rope().slice(range)
+ } else {
+ buffer.as_rope().clone()
+ };
+
+ let mut matches = Vec::new();
+ match self {
+ Self::Text {
+ search, whole_word, ..
+ } => {
+ for (ix, mat) in search
+ .stream_find_iter(rope.bytes_in_range(0..rope.len()))
+ .enumerate()
+ {
+ if (ix + 1) % YIELD_INTERVAL == 0 {
+ yield_now().await;
+ }
+
+ let mat = mat.unwrap();
+ if *whole_word {
+ let scope = buffer.language_scope_at(range_offset + mat.start());
+ let kind = |c| char_kind(&scope, c);
+
+ let prev_kind = rope.reversed_chars_at(mat.start()).next().map(kind);
+ let start_kind = kind(rope.chars_at(mat.start()).next().unwrap());
+ let end_kind = kind(rope.reversed_chars_at(mat.end()).next().unwrap());
+ let next_kind = rope.chars_at(mat.end()).next().map(kind);
+ if Some(start_kind) == prev_kind || Some(end_kind) == next_kind {
+ continue;
+ }
+ }
+ matches.push(mat.start()..mat.end())
+ }
+ }
+
+ Self::Regex {
+ regex, multiline, ..
+ } => {
+ if *multiline {
+ let text = rope.to_string();
+ for (ix, mat) in regex.find_iter(&text).enumerate() {
+ if (ix + 1) % YIELD_INTERVAL == 0 {
+ yield_now().await;
+ }
+
+ matches.push(mat.start()..mat.end());
+ }
+ } else {
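+                    // Stream the rope chunk by chunk, reassembling complete
+                    // lines before running the regex; the trailing "\n" chunk
+                    // flushes the final (possibly newline-less) line.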
+ let mut line = String::new();
+ let mut line_offset = 0;
+ for (chunk_ix, chunk) in rope.chunks().chain(["\n"]).enumerate() {
+ if (chunk_ix + 1) % YIELD_INTERVAL == 0 {
+ yield_now().await;
+ }
+
+ for (newline_ix, text) in chunk.split('\n').enumerate() {
+ if newline_ix > 0 {
+ for mat in regex.find_iter(&line) {
+ let start = line_offset + mat.start();
+ let end = line_offset + mat.end();
+ matches.push(start..end);
+ }
+
+ line_offset += line.len() + 1;
+ line.clear();
+ }
+ line.push_str(text);
+ }
+ }
+ }
+ }
+ }
+
+ matches
+ }
+
+ pub fn as_str(&self) -> &str {
+ self.as_inner().as_str()
+ }
+
+ pub fn whole_word(&self) -> bool {
+ match self {
+ Self::Text { whole_word, .. } => *whole_word,
+ Self::Regex { whole_word, .. } => *whole_word,
+ }
+ }
+
+ pub fn case_sensitive(&self) -> bool {
+ match self {
+ Self::Text { case_sensitive, .. } => *case_sensitive,
+ Self::Regex { case_sensitive, .. } => *case_sensitive,
+ }
+ }
+
+ pub fn is_regex(&self) -> bool {
+ matches!(self, Self::Regex { .. })
+ }
+
+ pub fn files_to_include(&self) -> &[PathMatcher] {
+ self.as_inner().files_to_include()
+ }
+
+ pub fn files_to_exclude(&self) -> &[PathMatcher] {
+ self.as_inner().files_to_exclude()
+ }
+
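+    /// Decides whether a file participates in this search: it must not match
+    /// any exclusion, and must match at least one inclusion (an empty
+    /// inclusion list admits everything). Untitled buffers (`None`) are
+    /// searched only when no inclusions are configured.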
+ pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
+ match file_path {
+ Some(file_path) => {
+ !self
+ .files_to_exclude()
+ .iter()
+ .any(|exclude_glob| exclude_glob.is_match(file_path))
+ && (self.files_to_include().is_empty()
+ || self
+ .files_to_include()
+ .iter()
+ .any(|include_glob| include_glob.is_match(file_path)))
+ }
+ None => self.files_to_include().is_empty(),
+ }
+ }
+ pub fn as_inner(&self) -> &SearchInputs {
+ match self {
+ Self::Regex { inner, .. } | Self::Text { inner, .. } => inner,
+ }
+ }
+}
+
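+/// Parses a comma-separated glob list back into `PathMatcher`s, trimming
+/// whitespace and skipping empty segments; this is the inverse of the joining
+/// done in `SearchQuery::to_proto`. E.g. `"*.rs, *.ts"` yields two matchers.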
+fn deserialize_path_matches(glob_set: &str) -> anyhow::Result<Vec<PathMatcher>> {
+ glob_set
+ .split(',')
+ .map(str::trim)
+ .filter(|glob_str| !glob_str.is_empty())
+ .map(|glob_str| {
+ PathMatcher::new(glob_str)
+ .with_context(|| format!("deserializing path match glob {glob_str}"))
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn path_matcher_creation_for_valid_paths() {
+ for valid_path in [
+ "file",
+ "Cargo.toml",
+ ".DS_Store",
+ "~/dir/another_dir/",
+ "./dir/file",
+ "dir/[a-z].txt",
+ "../dir/filé",
+ ] {
+ let path_matcher = PathMatcher::new(valid_path).unwrap_or_else(|e| {
+ panic!("Valid path {valid_path} should be accepted, but got: {e}")
+ });
+ assert!(
+ path_matcher.is_match(valid_path),
+ "Path matcher for valid path {valid_path} should match itself"
+ )
+ }
+ }
+
+ #[test]
+ fn path_matcher_creation_for_globs() {
+ for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
+ match PathMatcher::new(invalid_glob) {
+ Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
+ Err(_expected) => {}
+ }
+ }
+
+ for valid_glob in [
+ "dir/?ile",
+ "dir/*.txt",
+ "dir/**/file",
+ "dir/[a-z].txt",
+ "{dir,file}",
+ ] {
+ match PathMatcher::new(valid_glob) {
+ Ok(_expected) => {}
+ Err(e) => panic!("Valid glob {valid_glob} should be accepted, but got: {e}"),
+ }
+ }
+ }
+}
@@ -0,0 +1,124 @@
+use crate::Project;
+use gpui::{AnyWindowHandle, ModelContext, ModelHandle, WeakModelHandle};
+use std::path::{Path, PathBuf};
+use terminal::{
+ terminal_settings::{self, TerminalSettings, VenvSettingsContent},
+ Terminal, TerminalBuilder,
+};
+
+#[cfg(target_os = "macos")]
+use std::os::unix::ffi::OsStrExt;
+
+pub struct Terminals {
+ pub(crate) local_handles: Vec<WeakModelHandle<terminal::Terminal>>,
+}
+
+impl Project {
+ pub fn create_terminal(
+ &mut self,
+ working_directory: Option<PathBuf>,
+ window: AnyWindowHandle,
+ cx: &mut ModelContext<Self>,
+ ) -> anyhow::Result<ModelHandle<Terminal>> {
+ if self.is_remote() {
+ return Err(anyhow::anyhow!(
+ "creating terminals as a guest is not supported yet"
+ ));
+ } else {
+ let settings = settings::get::<TerminalSettings>(cx);
+ let python_settings = settings.detect_venv.clone();
+ let shell = settings.shell.clone();
+
+ let terminal = TerminalBuilder::new(
+ working_directory.clone(),
+ shell.clone(),
+ settings.env.clone(),
+ Some(settings.blinking.clone()),
+ settings.alternate_scroll,
+ window,
+ )
+ .map(|builder| {
+ let terminal_handle = cx.add_model(|cx| builder.subscribe(cx));
+
+ self.terminals
+ .local_handles
+ .push(terminal_handle.downgrade());
+
+ let id = terminal_handle.id();
+ cx.observe_release(&terminal_handle, move |project, _terminal, cx| {
+ let handles = &mut project.terminals.local_handles;
+
+ if let Some(index) = handles.iter().position(|terminal| terminal.id() == id) {
+ handles.remove(index);
+ cx.notify();
+ }
+ })
+ .detach();
+
+ if let Some(python_settings) = &python_settings.as_option() {
+ let activate_script_path =
+ self.find_activate_script_path(&python_settings, working_directory);
+ self.activate_python_virtual_environment(
+ activate_script_path,
+ &terminal_handle,
+ cx,
+ );
+ }
+ terminal_handle
+ });
+
+ terminal
+ }
+ }
+
+ pub fn find_activate_script_path(
+ &mut self,
+ settings: &VenvSettingsContent,
+ working_directory: Option<PathBuf>,
+ ) -> Option<PathBuf> {
+ // When we are unable to resolve the working directory, the terminal builder
+ // defaults to '/'. We should probably encode this directly somewhere, but for
+ // now, let's just hard code it here.
+ let working_directory = working_directory.unwrap_or_else(|| Path::new("/").to_path_buf());
+ let activate_script_name = match settings.activate_script {
+ terminal_settings::ActivateScript::Default => "activate",
+ terminal_settings::ActivateScript::Csh => "activate.csh",
+ terminal_settings::ActivateScript::Fish => "activate.fish",
+ terminal_settings::ActivateScript::Nushell => "activate.nu",
+ };
+
+ for virtual_environment_name in settings.directories {
+ let mut path = working_directory.join(virtual_environment_name);
+ path.push("bin/");
+ path.push(activate_script_name);
+
+ if path.exists() {
+ return Some(path);
+ }
+ }
+
+ None
+ }
+
+ fn activate_python_virtual_environment(
+ &mut self,
+ activate_script: Option<PathBuf>,
+ terminal_handle: &ModelHandle<Terminal>,
+ cx: &mut ModelContext<Project>,
+ ) {
+ if let Some(activate_script) = activate_script {
+ // Paths are not strings so we need to jump through some hoops to format the command without `format!`
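+            // e.g. for a script at the hypothetical path
+            // "/work/.venv/bin/activate", this sends the bytes of
+            // "source /work/.venv/bin/activate\n" to the terminal.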
+ let mut command = Vec::from("source ".as_bytes());
+ command.extend_from_slice(activate_script.as_os_str().as_bytes());
+ command.push(b'\n');
+
+ terminal_handle.update(cx, |this, _| this.input_bytes(command));
+ }
+ }
+
+ pub fn local_terminal_handles(&self) -> &Vec<WeakModelHandle<terminal::Terminal>> {
+ &self.terminals.local_handles
+ }
+}
+
+// TODO: Add a few tests for adding and removing terminal tabs
@@ -0,0 +1,4387 @@
+use crate::{
+ copy_recursive, ignore::IgnoreStack, DiagnosticSummary, ProjectEntryId, RemoveOptions,
+};
+use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
+use anyhow::{anyhow, Context, Result};
+use client2::{proto, Client};
+use clock::ReplicaId;
+use collections::{HashMap, HashSet, VecDeque};
+use fs::{
+ repository::{GitFileStatus, GitRepository, RepoPath},
+ Fs,
+};
+use futures::{
+ channel::{
+ mpsc::{self, UnboundedSender},
+ oneshot,
+ },
+ select_biased,
+ task::Poll,
+ FutureExt, Stream, StreamExt,
+};
+use fuzzy2::CharBag;
+use git::{DOT_GIT, GITIGNORE};
+use gpui2::{AppContext, AsyncAppContext, EventEmitter, Executor, Handle, ModelContext, Task};
+use language2::{
+ proto::{
+ deserialize_fingerprint, deserialize_version, serialize_fingerprint, serialize_line_ending,
+ serialize_version,
+ },
+ Buffer, DiagnosticEntry, File as _, LineEnding, PointUtf16, Rope, RopeFingerprint, Unclipped,
+};
+use lsp2::LanguageServerId;
+use parking_lot::Mutex;
+use postage::{
+ barrier,
+ prelude::{Sink as _, Stream as _},
+ watch,
+};
+use smol::channel::{self, Sender};
+use std::{
+ any::Any,
+ cmp::{self, Ordering},
+ convert::TryFrom,
+ ffi::OsStr,
+ fmt,
+ future::Future,
+ mem,
+ ops::{AddAssign, Deref, DerefMut, Sub},
+ path::{Path, PathBuf},
+ pin::Pin,
+ sync::{
+ atomic::{AtomicUsize, Ordering::SeqCst},
+ Arc,
+ },
+ time::{Duration, SystemTime},
+};
+use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
+use util::{paths::HOME, ResultExt};
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
+pub struct WorktreeId(usize);
+
+pub enum Worktree {
+ Local(LocalWorktree),
+ Remote(RemoteWorktree),
+}
+
+pub struct LocalWorktree {
+ snapshot: LocalSnapshot,
+ scan_requests_tx: channel::Sender<ScanRequest>,
+ path_prefixes_to_scan_tx: channel::Sender<Arc<Path>>,
+ is_scanning: (watch::Sender<bool>, watch::Receiver<bool>),
+ _background_scanner_task: Task<()>,
+ share: Option<ShareState>,
+ diagnostics: HashMap<
+ Arc<Path>,
+ Vec<(
+ LanguageServerId,
+ Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ )>,
+ >,
+ diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
+ client: Arc<Client>,
+ fs: Arc<dyn Fs>,
+ visible: bool,
+}
+
+struct ScanRequest {
+ relative_paths: Vec<Arc<Path>>,
+ done: barrier::Sender,
+}
+
+pub struct RemoteWorktree {
+ snapshot: Snapshot,
+ background_snapshot: Arc<Mutex<Snapshot>>,
+ project_id: u64,
+ client: Arc<Client>,
+ updates_tx: Option<UnboundedSender<proto::UpdateWorktree>>,
+ snapshot_subscriptions: VecDeque<(usize, oneshot::Sender<()>)>,
+ replica_id: ReplicaId,
+ diagnostic_summaries: HashMap<Arc<Path>, HashMap<LanguageServerId, DiagnosticSummary>>,
+ visible: bool,
+ disconnected: bool,
+}
+
+#[derive(Clone)]
+pub struct Snapshot {
+ id: WorktreeId,
+ abs_path: Arc<Path>,
+ root_name: String,
+ root_char_bag: CharBag,
+ entries_by_path: SumTree<Entry>,
+ entries_by_id: SumTree<PathEntry>,
+ repository_entries: TreeMap<RepositoryWorkDirectory, RepositoryEntry>,
+
+ /// A number that increases every time the worktree begins scanning
+ /// a set of paths from the filesystem. This scanning could be caused
+ /// by some operation performed on the worktree, such as reading or
+ /// writing a file, or by an event reported by the filesystem.
+ scan_id: usize,
+
+ /// The latest scan id that has completed, and whose preceding scans
+ /// have all completed. The current `scan_id` could be more than one
+ /// greater than the `completed_scan_id` if operations are performed
+ /// on the worktree while it is processing a file-system event.
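+    /// For example, a `scan_id` of 5 with a `completed_scan_id` of 3 means
+    /// that scans 4 and 5 are still in progress.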
+ completed_scan_id: usize,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct RepositoryEntry {
+ pub(crate) work_directory: WorkDirectoryEntry,
+ pub(crate) branch: Option<Arc<str>>,
+}
+
+impl RepositoryEntry {
+ pub fn branch(&self) -> Option<Arc<str>> {
+ self.branch.clone()
+ }
+
+ pub fn work_directory_id(&self) -> ProjectEntryId {
+ *self.work_directory
+ }
+
+ pub fn work_directory(&self, snapshot: &Snapshot) -> Option<RepositoryWorkDirectory> {
+ snapshot
+ .entry_for_id(self.work_directory_id())
+ .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
+ }
+
+ pub fn build_update(&self, _: &Self) -> proto::RepositoryEntry {
+ proto::RepositoryEntry {
+ work_directory_id: self.work_directory_id().to_proto(),
+ branch: self.branch.as_ref().map(|str| str.to_string()),
+ }
+ }
+}
+
+impl From<&RepositoryEntry> for proto::RepositoryEntry {
+ fn from(value: &RepositoryEntry) -> Self {
+ proto::RepositoryEntry {
+ work_directory_id: value.work_directory.to_proto(),
+ branch: value.branch.as_ref().map(|str| str.to_string()),
+ }
+ }
+}
+
+/// This path corresponds to the 'content path' (the folder that contains the `.git` entry)
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct RepositoryWorkDirectory(pub(crate) Arc<Path>);
+
+impl Default for RepositoryWorkDirectory {
+ fn default() -> Self {
+ RepositoryWorkDirectory(Arc::from(Path::new("")))
+ }
+}
+
+impl AsRef<Path> for RepositoryWorkDirectory {
+ fn as_ref(&self) -> &Path {
+ self.0.as_ref()
+ }
+}
+
+#[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
+pub struct WorkDirectoryEntry(ProjectEntryId);
+
+impl WorkDirectoryEntry {
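+    /// Returns `path` relative to this repository's work directory, or `None`
+    /// if the work directory entry no longer exists or `path` lies outside it.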
+ pub(crate) fn relativize(&self, worktree: &Snapshot, path: &Path) -> Option<RepoPath> {
+ worktree.entry_for_id(self.0).and_then(|entry| {
+ path.strip_prefix(&entry.path)
+ .ok()
+ .map(move |path| path.into())
+ })
+ }
+}
+
+impl Deref for WorkDirectoryEntry {
+ type Target = ProjectEntryId;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl From<ProjectEntryId> for WorkDirectoryEntry {
+ fn from(value: ProjectEntryId) -> Self {
+ WorkDirectoryEntry(value)
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct LocalSnapshot {
+ snapshot: Snapshot,
+ /// All of the gitignore files in the worktree, indexed by their relative path.
+ /// The boolean indicates whether the gitignore needs to be updated.
+ ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, bool)>,
+ /// All of the git repositories in the worktree, indexed by the project entry
+ /// id of their parent directory.
+ git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
+}
+
+struct BackgroundScannerState {
+ snapshot: LocalSnapshot,
+ scanned_dirs: HashSet<ProjectEntryId>,
+ path_prefixes_to_scan: HashSet<Arc<Path>>,
+ paths_to_scan: HashSet<Arc<Path>>,
+ /// The ids of all of the entries that were removed from the snapshot
+ /// as part of the current update. These entry ids may be re-used
+ /// if the same inode is discovered at a new path, or if the given
+ /// path is re-created after being deleted.
+ removed_entry_ids: HashMap<u64, ProjectEntryId>,
+ changed_paths: Vec<Arc<Path>>,
+ prev_snapshot: Snapshot,
+}
+
+#[derive(Debug, Clone)]
+pub struct LocalRepositoryEntry {
+ pub(crate) git_dir_scan_id: usize,
+ pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
+ /// Path to the actual .git folder.
+ /// Note: if .git is a file, this points to the folder indicated by the .git file
+ pub(crate) git_dir_path: Arc<Path>,
+}
+
+impl Deref for LocalSnapshot {
+ type Target = Snapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.snapshot
+ }
+}
+
+impl DerefMut for LocalSnapshot {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.snapshot
+ }
+}
+
+enum ScanState {
+ Started,
+ Updated {
+ snapshot: LocalSnapshot,
+ changes: UpdatedEntriesSet,
+ barrier: Option<barrier::Sender>,
+ scanning: bool,
+ },
+}
+
+struct ShareState {
+ project_id: u64,
+ snapshots_tx:
+ mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
+ resume_updates: watch::Sender<()>,
+ _maintain_remote_snapshot: Task<Option<()>>,
+}
+
+pub enum Event {
+ UpdatedEntries(UpdatedEntriesSet),
+ UpdatedGitRepositories(UpdatedGitRepositoriesSet),
+}
+
+impl EventEmitter for Worktree {
+ type Event = Event;
+}
+
+impl Worktree {
+ pub async fn local(
+ client: Arc<Client>,
+ path: impl Into<Arc<Path>>,
+ visible: bool,
+ fs: Arc<dyn Fs>,
+ next_entry_id: Arc<AtomicUsize>,
+ cx: &mut AsyncAppContext,
+ ) -> Result<Handle<Self>> {
+        // After determining whether the root entry is a file or a directory, populate the
+        // snapshot's "root name", which is used for fuzzy matching.
+ let abs_path = path.into();
+ let metadata = fs
+ .metadata(&abs_path)
+ .await
+ .context("failed to stat worktree path")?;
+
+ Ok(cx.add_model(move |cx: &mut ModelContext<Worktree>| {
+ let root_name = abs_path
+ .file_name()
+ .map_or(String::new(), |f| f.to_string_lossy().to_string());
+
+ let mut snapshot = LocalSnapshot {
+ ignores_by_parent_abs_path: Default::default(),
+ git_repositories: Default::default(),
+ snapshot: Snapshot {
+ id: WorktreeId::from_usize(cx.model_id()),
+ abs_path: abs_path.clone(),
+ root_name: root_name.clone(),
+ root_char_bag: root_name.chars().map(|c| c.to_ascii_lowercase()).collect(),
+ entries_by_path: Default::default(),
+ entries_by_id: Default::default(),
+ repository_entries: Default::default(),
+ scan_id: 1,
+ completed_scan_id: 0,
+ },
+ };
+
+ if let Some(metadata) = metadata {
+ snapshot.insert_entry(
+ Entry::new(
+ Arc::from(Path::new("")),
+ &metadata,
+ &next_entry_id,
+ snapshot.root_char_bag,
+ ),
+ fs.as_ref(),
+ );
+ }
+
+ let (scan_requests_tx, scan_requests_rx) = channel::unbounded();
+ let (path_prefixes_to_scan_tx, path_prefixes_to_scan_rx) = channel::unbounded();
+ let (scan_states_tx, mut scan_states_rx) = mpsc::unbounded();
+
+ cx.spawn_weak(|this, mut cx| async move {
+ while let Some((state, this)) = scan_states_rx.next().await.zip(this.upgrade(&cx)) {
+ this.update(&mut cx, |this, cx| {
+ let this = this.as_local_mut().unwrap();
+ match state {
+ ScanState::Started => {
+ *this.is_scanning.0.borrow_mut() = true;
+ }
+ ScanState::Updated {
+ snapshot,
+ changes,
+ barrier,
+ scanning,
+ } => {
+ *this.is_scanning.0.borrow_mut() = scanning;
+ this.set_snapshot(snapshot, changes, cx);
+ drop(barrier);
+ }
+ }
+ cx.notify();
+ });
+ }
+ })
+ .detach();
+
+ let background_scanner_task = cx.background().spawn({
+ let fs = fs.clone();
+ let snapshot = snapshot.clone();
+ let background = cx.background().clone();
+ async move {
+ let events = fs.watch(&abs_path, Duration::from_millis(100)).await;
+ BackgroundScanner::new(
+ snapshot,
+ next_entry_id,
+ fs,
+ scan_states_tx,
+ background,
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ )
+ .run(events)
+ .await;
+ }
+ });
+
+ Worktree::Local(LocalWorktree {
+ snapshot,
+ is_scanning: watch::channel_with(true),
+ share: None,
+ scan_requests_tx,
+ path_prefixes_to_scan_tx,
+ _background_scanner_task: background_scanner_task,
+ diagnostics: Default::default(),
+ diagnostic_summaries: Default::default(),
+ client,
+ fs,
+ visible,
+ })
+ }))
+ }
+
+
+ pub fn remote(
+ project_remote_id: u64,
+ replica_id: ReplicaId,
+ worktree: proto::WorktreeMetadata,
+ client: Arc<Client>,
+ cx: &mut AppContext,
+ ) -> Handle<Self> {
+ cx.add_model(|cx: &mut ModelContext<Self>| {
+ let snapshot = Snapshot {
+ id: WorktreeId(worktree.id as usize),
+ abs_path: Arc::from(PathBuf::from(worktree.abs_path)),
+ root_name: worktree.root_name.clone(),
+ root_char_bag: worktree
+ .root_name
+ .chars()
+ .map(|c| c.to_ascii_lowercase())
+ .collect(),
+ entries_by_path: Default::default(),
+ entries_by_id: Default::default(),
+ repository_entries: Default::default(),
+ scan_id: 1,
+ completed_scan_id: 0,
+ };
+
+ let (updates_tx, mut updates_rx) = mpsc::unbounded();
+ let background_snapshot = Arc::new(Mutex::new(snapshot.clone()));
+ let (mut snapshot_updated_tx, mut snapshot_updated_rx) = watch::channel();
+
+ cx.background()
+ .spawn({
+ let background_snapshot = background_snapshot.clone();
+ async move {
+ while let Some(update) = updates_rx.next().await {
+ if let Err(error) =
+ background_snapshot.lock().apply_remote_update(update)
+ {
+ log::error!("error applying worktree update: {}", error);
+ }
+ snapshot_updated_tx.send(()).await.ok();
+ }
+ }
+ })
+ .detach();
+
+ cx.spawn_weak(|this, mut cx| async move {
+ while (snapshot_updated_rx.recv().await).is_some() {
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ let this = this.as_remote_mut().unwrap();
+ this.snapshot = this.background_snapshot.lock().clone();
+ cx.emit(Event::UpdatedEntries(Arc::from([])));
+ cx.notify();
+ while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
+ if this.observed_snapshot(*scan_id) {
+ let (_, tx) = this.snapshot_subscriptions.pop_front().unwrap();
+ let _ = tx.send(());
+ } else {
+ break;
+ }
+ }
+ });
+ } else {
+ break;
+ }
+ }
+ })
+ .detach();
+
+ Worktree::Remote(RemoteWorktree {
+ project_id: project_remote_id,
+ replica_id,
+ snapshot: snapshot.clone(),
+ background_snapshot,
+ updates_tx: Some(updates_tx),
+ snapshot_subscriptions: Default::default(),
+ client: client.clone(),
+ diagnostic_summaries: Default::default(),
+ visible: worktree.visible,
+ disconnected: false,
+ })
+ })
+ }
+
+ pub fn as_local(&self) -> Option<&LocalWorktree> {
+ if let Worktree::Local(worktree) = self {
+ Some(worktree)
+ } else {
+ None
+ }
+ }
+
+ pub fn as_remote(&self) -> Option<&RemoteWorktree> {
+ if let Worktree::Remote(worktree) = self {
+ Some(worktree)
+ } else {
+ None
+ }
+ }
+
+ pub fn as_local_mut(&mut self) -> Option<&mut LocalWorktree> {
+ if let Worktree::Local(worktree) = self {
+ Some(worktree)
+ } else {
+ None
+ }
+ }
+
+ pub fn as_remote_mut(&mut self) -> Option<&mut RemoteWorktree> {
+ if let Worktree::Remote(worktree) = self {
+ Some(worktree)
+ } else {
+ None
+ }
+ }
+
+ pub fn is_local(&self) -> bool {
+ matches!(self, Worktree::Local(_))
+ }
+
+ pub fn is_remote(&self) -> bool {
+ !self.is_local()
+ }
+
+ pub fn snapshot(&self) -> Snapshot {
+ match self {
+ Worktree::Local(worktree) => worktree.snapshot().snapshot,
+ Worktree::Remote(worktree) => worktree.snapshot(),
+ }
+ }
+
+ pub fn scan_id(&self) -> usize {
+ match self {
+ Worktree::Local(worktree) => worktree.snapshot.scan_id,
+ Worktree::Remote(worktree) => worktree.snapshot.scan_id,
+ }
+ }
+
+ pub fn completed_scan_id(&self) -> usize {
+ match self {
+ Worktree::Local(worktree) => worktree.snapshot.completed_scan_id,
+ Worktree::Remote(worktree) => worktree.snapshot.completed_scan_id,
+ }
+ }
+
+ pub fn is_visible(&self) -> bool {
+ match self {
+ Worktree::Local(worktree) => worktree.visible,
+ Worktree::Remote(worktree) => worktree.visible,
+ }
+ }
+
+ pub fn replica_id(&self) -> ReplicaId {
+ match self {
+ Worktree::Local(_) => 0,
+ Worktree::Remote(worktree) => worktree.replica_id,
+ }
+ }
+
+ pub fn diagnostic_summaries(
+ &self,
+ ) -> impl Iterator<Item = (Arc<Path>, LanguageServerId, DiagnosticSummary)> + '_ {
+ match self {
+ Worktree::Local(worktree) => &worktree.diagnostic_summaries,
+ Worktree::Remote(worktree) => &worktree.diagnostic_summaries,
+ }
+ .iter()
+ .flat_map(|(path, summaries)| {
+ summaries
+ .iter()
+ .map(move |(&server_id, &summary)| (path.clone(), server_id, summary))
+ })
+ }
+
+ pub fn abs_path(&self) -> Arc<Path> {
+ match self {
+ Worktree::Local(worktree) => worktree.abs_path.clone(),
+ Worktree::Remote(worktree) => worktree.abs_path.clone(),
+ }
+ }
+
+ pub fn root_file(&self, cx: &mut ModelContext<Self>) -> Option<Arc<File>> {
+ let entry = self.root_entry()?;
+ Some(File::for_entry(entry.clone(), cx.handle()))
+ }
+}
+
+impl LocalWorktree {
+ pub fn contains_abs_path(&self, path: &Path) -> bool {
+ path.starts_with(&self.abs_path)
+ }
+
+ pub(crate) fn load_buffer(
+ &mut self,
+ id: u64,
+ path: &Path,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<Handle<Buffer>>> {
+ let path = Arc::from(path);
+ cx.spawn(move |this, mut cx| async move {
+ let (file, contents, diff_base) = this
+ .update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
+ .await?;
+ let text_buffer = cx
+ .background()
+ .spawn(async move { text::Buffer::new(0, id, contents) })
+ .await;
+ Ok(cx.add_model(|_| Buffer::build(text_buffer, diff_base, Some(Arc::new(file)))))
+ })
+ }
+
+ pub fn diagnostics_for_path(
+ &self,
+ path: &Path,
+ ) -> Vec<(
+ LanguageServerId,
+ Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ )> {
+ self.diagnostics.get(path).cloned().unwrap_or_default()
+ }
+
+ pub fn clear_diagnostics_for_language_server(
+ &mut self,
+ server_id: LanguageServerId,
+ _: &mut ModelContext<Worktree>,
+ ) {
+ let worktree_id = self.id().to_proto();
+ self.diagnostic_summaries
+ .retain(|path, summaries_by_server_id| {
+ if summaries_by_server_id.remove(&server_id).is_some() {
+ if let Some(share) = self.share.as_ref() {
+ self.client
+ .send(proto::UpdateDiagnosticSummary {
+ project_id: share.project_id,
+ worktree_id,
+ summary: Some(proto::DiagnosticSummary {
+ path: path.to_string_lossy().to_string(),
+ language_server_id: server_id.0 as u64,
+ error_count: 0,
+ warning_count: 0,
+ }),
+ })
+ .log_err();
+ }
+ !summaries_by_server_id.is_empty()
+ } else {
+ true
+ }
+ });
+
+ self.diagnostics.retain(|_, diagnostics_by_server_id| {
+ if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
+ diagnostics_by_server_id.remove(ix);
+ !diagnostics_by_server_id.is_empty()
+ } else {
+ true
+ }
+ });
+ }
+
+ pub fn update_diagnostics(
+ &mut self,
+ server_id: LanguageServerId,
+ worktree_path: Arc<Path>,
+ diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
+ _: &mut ModelContext<Worktree>,
+ ) -> Result<bool> {
+ let summaries_by_server_id = self
+ .diagnostic_summaries
+ .entry(worktree_path.clone())
+ .or_default();
+
+ let old_summary = summaries_by_server_id
+ .remove(&server_id)
+ .unwrap_or_default();
+
+ let new_summary = DiagnosticSummary::new(&diagnostics);
+ if new_summary.is_empty() {
+ if let Some(diagnostics_by_server_id) = self.diagnostics.get_mut(&worktree_path) {
+ if let Ok(ix) = diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
+ diagnostics_by_server_id.remove(ix);
+ }
+ if diagnostics_by_server_id.is_empty() {
+ self.diagnostics.remove(&worktree_path);
+ }
+ }
+ } else {
+ summaries_by_server_id.insert(server_id, new_summary);
+ let diagnostics_by_server_id =
+ self.diagnostics.entry(worktree_path.clone()).or_default();
+ match diagnostics_by_server_id.binary_search_by_key(&server_id, |e| e.0) {
+ Ok(ix) => {
+ diagnostics_by_server_id[ix] = (server_id, diagnostics);
+ }
+ Err(ix) => {
+ diagnostics_by_server_id.insert(ix, (server_id, diagnostics));
+ }
+ }
+ }
+
+ if !old_summary.is_empty() || !new_summary.is_empty() {
+ if let Some(share) = self.share.as_ref() {
+ self.client
+ .send(proto::UpdateDiagnosticSummary {
+ project_id: share.project_id,
+ worktree_id: self.id().to_proto(),
+ summary: Some(proto::DiagnosticSummary {
+ path: worktree_path.to_string_lossy().to_string(),
+ language_server_id: server_id.0 as u64,
+ error_count: new_summary.error_count as u32,
+ warning_count: new_summary.warning_count as u32,
+ }),
+ })
+ .log_err();
+ }
+ }
+
+ Ok(!old_summary.is_empty() || !new_summary.is_empty())
+ }
+
+ fn set_snapshot(
+ &mut self,
+ new_snapshot: LocalSnapshot,
+ entry_changes: UpdatedEntriesSet,
+ cx: &mut ModelContext<Worktree>,
+ ) {
+ let repo_changes = self.changed_repos(&self.snapshot, &new_snapshot);
+
+ self.snapshot = new_snapshot;
+
+ if let Some(share) = self.share.as_mut() {
+ share
+ .snapshots_tx
+ .unbounded_send((
+ self.snapshot.clone(),
+ entry_changes.clone(),
+ repo_changes.clone(),
+ ))
+ .ok();
+ }
+
+ if !entry_changes.is_empty() {
+ cx.emit(Event::UpdatedEntries(entry_changes));
+ }
+ if !repo_changes.is_empty() {
+ cx.emit(Event::UpdatedGitRepositories(repo_changes));
+ }
+ }
+
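+ /// Compute which git repositories were added, removed, or rescanned between
+ /// two snapshots. Both snapshots' repository lists are ordered by entry id,
+ /// so a single merge pass over the two iterators suffices.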
+ fn changed_repos(
+ &self,
+ old_snapshot: &LocalSnapshot,
+ new_snapshot: &LocalSnapshot,
+ ) -> UpdatedGitRepositoriesSet {
+ let mut changes = Vec::new();
+ let mut old_repos = old_snapshot.git_repositories.iter().peekable();
+ let mut new_repos = new_snapshot.git_repositories.iter().peekable();
+ loop {
+ match (new_repos.peek().map(clone), old_repos.peek().map(clone)) {
+ (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => {
+ match Ord::cmp(&new_entry_id, &old_entry_id) {
+ Ordering::Less => {
+ if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: None,
+ },
+ ));
+ }
+ new_repos.next();
+ }
+ Ordering::Equal => {
+ if new_repo.git_dir_scan_id != old_repo.git_dir_scan_id {
+ if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
+ let old_repo = old_snapshot
+ .repository_entries
+ .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .cloned();
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: old_repo,
+ },
+ ));
+ }
+ }
+ new_repos.next();
+ old_repos.next();
+ }
+ Ordering::Greater => {
+ if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) {
+ let old_repo = old_snapshot
+ .repository_entries
+ .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .cloned();
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: old_repo,
+ },
+ ));
+ }
+ old_repos.next();
+ }
+ }
+ }
+ (Some((entry_id, _)), None) => {
+ if let Some(entry) = new_snapshot.entry_for_id(entry_id) {
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: None,
+ },
+ ));
+ }
+ new_repos.next();
+ }
+ (None, Some((entry_id, _))) => {
+ if let Some(entry) = old_snapshot.entry_for_id(entry_id) {
+ let old_repo = old_snapshot
+ .repository_entries
+ .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .cloned();
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: old_repo,
+ },
+ ));
+ }
+ old_repos.next();
+ }
+ (None, None) => break,
+ }
+ }
+
+ fn clone<T: Clone, U: Clone>(value: &(&T, &U)) -> (T, U) {
+ (value.0.clone(), value.1.clone())
+ }
+
+ changes.into()
+ }
+
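+ /// Returns a future that resolves once the current background scan has
+ /// finished.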
+ pub fn scan_complete(&self) -> impl Future<Output = ()> {
+ let mut is_scanning_rx = self.is_scanning.1.clone();
+ async move {
+ let mut is_scanning = is_scanning_rx.borrow().clone();
+ while is_scanning {
+ if let Some(value) = is_scanning_rx.recv().await {
+ is_scanning = value;
+ } else {
+ break;
+ }
+ }
+ }
+ }
+
+ pub fn snapshot(&self) -> LocalSnapshot {
+ self.snapshot.clone()
+ }
+
+ pub fn metadata_proto(&self) -> proto::WorktreeMetadata {
+ proto::WorktreeMetadata {
+ id: self.id().to_proto(),
+ root_name: self.root_name().to_string(),
+ visible: self.visible,
+ abs_path: self.abs_path().as_os_str().to_string_lossy().into(),
+ }
+ }
+
+ fn load(
+ &self,
+ path: &Path,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<(File, String, Option<String>)>> {
+ let path = Arc::from(path);
+ let abs_path = self.absolutize(&path);
+ let fs = self.fs.clone();
+ let entry = self.refresh_entry(path.clone(), None, cx);
+
+ cx.spawn(|this, cx| async move {
+ let text = fs.load(&abs_path).await?;
+ let entry = entry.await?;
+
+ let mut index_task = None;
+ let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
+ if let Some(repo) = snapshot.repository_for_path(&path) {
+ let repo_path = repo.work_directory.relativize(&snapshot, &path).unwrap();
+ if let Some(repo) = snapshot.git_repositories.get(&*repo.work_directory) {
+ let repo = repo.repo_ptr.clone();
+ index_task = Some(
+ cx.background()
+ .spawn(async move { repo.lock().load_index_text(&repo_path) }),
+ );
+ }
+ }
+
+ let diff_base = if let Some(index_task) = index_task {
+ index_task.await
+ } else {
+ None
+ };
+
+ Ok((
+ File {
+ entry_id: entry.id,
+ worktree: this,
+ path: entry.path,
+ mtime: entry.mtime,
+ is_local: true,
+ is_deleted: false,
+ },
+ text,
+ diff_base,
+ ))
+ })
+ }
+
+ pub fn save_buffer(
+ &self,
+ buffer_handle: Handle<Buffer>,
+ path: Arc<Path>,
+ has_changed_file: bool,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<()>> {
+ let handle = cx.handle();
+ let buffer = buffer_handle.read(cx);
+
+ let rpc = self.client.clone();
+ let buffer_id = buffer.remote_id();
+ let project_id = self.share.as_ref().map(|share| share.project_id);
+
+ let text = buffer.as_rope().clone();
+ let fingerprint = text.fingerprint();
+ let version = buffer.version();
+ let save = self.write_file(path, text, buffer.line_ending(), cx);
+
+ cx.as_mut().spawn(|mut cx| async move {
+ let entry = save.await?;
+
+ if has_changed_file {
+ let new_file = Arc::new(File {
+ entry_id: entry.id,
+ worktree: handle,
+ path: entry.path,
+ mtime: entry.mtime,
+ is_local: true,
+ is_deleted: false,
+ });
+
+ if let Some(project_id) = project_id {
+ rpc.send(proto::UpdateBufferFile {
+ project_id,
+ buffer_id,
+ file: Some(new_file.to_proto()),
+ })
+ .log_err();
+ }
+
+ buffer_handle.update(&mut cx, |buffer, cx| {
+ buffer.file_updated(new_file, cx).detach();
+ });
+ }
+
+ if let Some(project_id) = project_id {
+ rpc.send(proto::BufferSaved {
+ project_id,
+ buffer_id,
+ version: serialize_version(&version),
+ mtime: Some(entry.mtime.into()),
+ fingerprint: serialize_fingerprint(fingerprint),
+ })?;
+ }
+
+ buffer_handle.update(&mut cx, |buffer, cx| {
+ buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
+ });
+
+ Ok(())
+ })
+ }
+
+ /// Find the deepest path in the worktree's entries that is an ancestor of the given path, falling back to the worktree root.
+ fn lowest_ancestor(&self, path: &Path) -> PathBuf {
+ let mut lowest_ancestor = None;
+ for path in path.ancestors() {
+ if self.entry_for_path(path).is_some() {
+ lowest_ancestor = Some(path.to_path_buf());
+ break;
+ }
+ }
+
+ lowest_ancestor.unwrap_or_else(|| PathBuf::from(""))
+ }
+
+ pub fn create_entry(
+ &self,
+ path: impl Into<Arc<Path>>,
+ is_dir: bool,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<Entry>> {
+ let path = path.into();
+ let lowest_ancestor = self.lowest_ancestor(&path);
+ let abs_path = self.absolutize(&path);
+ let fs = self.fs.clone();
+ let write = cx.background().spawn(async move {
+ if is_dir {
+ fs.create_dir(&abs_path).await
+ } else {
+ fs.save(&abs_path, &Default::default(), Default::default())
+ .await
+ }
+ });
+
+ cx.spawn(|this, mut cx| async move {
+ write.await?;
+ let (result, refreshes) = this.update(&mut cx, |this, cx| {
+ let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
+ let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
+ for refresh_path in refresh_paths.ancestors() {
+ if refresh_path == Path::new("") {
+ continue;
+ }
+ let refresh_full_path = lowest_ancestor.join(refresh_path);
+
+ refreshes.push(this.as_local_mut().unwrap().refresh_entry(
+ refresh_full_path.into(),
+ None,
+ cx,
+ ));
+ }
+ (
+ this.as_local_mut().unwrap().refresh_entry(path, None, cx),
+ refreshes,
+ )
+ });
+ for refresh in refreshes {
+ refresh.await.log_err();
+ }
+
+ result.await
+ })
+ }
+
+ pub fn write_file(
+ &self,
+ path: impl Into<Arc<Path>>,
+ text: Rope,
+ line_ending: LineEnding,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<Entry>> {
+ let path = path.into();
+ let abs_path = self.absolutize(&path);
+ let fs = self.fs.clone();
+ let write = cx
+ .background()
+ .spawn(async move { fs.save(&abs_path, &text, line_ending).await });
+
+ cx.spawn(|this, mut cx| async move {
+ write.await?;
+ this.update(&mut cx, |this, cx| {
+ this.as_local_mut().unwrap().refresh_entry(path, None, cx)
+ })
+ .await
+ })
+ }
+
+ pub fn delete_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Option<Task<Result<()>>> {
+ let entry = self.entry_for_id(entry_id)?.clone();
+ let abs_path = self.absolutize(&entry.path);
+ let fs = self.fs.clone();
+
+ let delete = cx.background().spawn(async move {
+ if entry.is_file() {
+ fs.remove_file(&abs_path, Default::default()).await?;
+ } else {
+ fs.remove_dir(
+ &abs_path,
+ RemoveOptions {
+ recursive: true,
+ ignore_if_not_exists: false,
+ },
+ )
+ .await?;
+ }
+ anyhow::Ok(entry.path)
+ });
+
+ Some(cx.spawn(|this, mut cx| async move {
+ let path = delete.await?;
+ this.update(&mut cx, |this, _| {
+ this.as_local_mut()
+ .unwrap()
+ .refresh_entries_for_paths(vec![path])
+ })
+ .recv()
+ .await;
+ Ok(())
+ }))
+ }
+
+ pub fn rename_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ new_path: impl Into<Arc<Path>>,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Option<Task<Result<Entry>>> {
+ let old_path = self.entry_for_id(entry_id)?.path.clone();
+ let new_path = new_path.into();
+ let abs_old_path = self.absolutize(&old_path);
+ let abs_new_path = self.absolutize(&new_path);
+ let fs = self.fs.clone();
+ let rename = cx.background().spawn(async move {
+ fs.rename(&abs_old_path, &abs_new_path, Default::default())
+ .await
+ });
+
+ Some(cx.spawn(|this, mut cx| async move {
+ rename.await?;
+ this.update(&mut cx, |this, cx| {
+ this.as_local_mut()
+ .unwrap()
+ .refresh_entry(new_path.clone(), Some(old_path), cx)
+ })
+ .await
+ }))
+ }
+
+ pub fn copy_entry(
+ &self,
+ entry_id: ProjectEntryId,
+ new_path: impl Into<Arc<Path>>,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Option<Task<Result<Entry>>> {
+ let old_path = self.entry_for_id(entry_id)?.path.clone();
+ let new_path = new_path.into();
+ let abs_old_path = self.absolutize(&old_path);
+ let abs_new_path = self.absolutize(&new_path);
+ let fs = self.fs.clone();
+ let copy = cx.background().spawn(async move {
+ copy_recursive(
+ fs.as_ref(),
+ &abs_old_path,
+ &abs_new_path,
+ Default::default(),
+ )
+ .await
+ });
+
+ Some(cx.spawn(|this, mut cx| async move {
+ copy.await?;
+ this.update(&mut cx, |this, cx| {
+ this.as_local_mut()
+ .unwrap()
+ .refresh_entry(new_path.clone(), None, cx)
+ })
+ .await
+ }))
+ }
+
+ pub fn expand_entry(
+ &mut self,
+ entry_id: ProjectEntryId,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Option<Task<Result<()>>> {
+ let path = self.entry_for_id(entry_id)?.path.clone();
+ let mut refresh = self.refresh_entries_for_paths(vec![path]);
+ Some(cx.background().spawn(async move {
+ refresh.next().await;
+ Ok(())
+ }))
+ }
+
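+ /// Ask the background scanner to rescan the given worktree-relative paths.
+ /// The returned receiver yields once the scanner has processed the request.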
+ pub fn refresh_entries_for_paths(&self, paths: Vec<Arc<Path>>) -> barrier::Receiver {
+ let (tx, rx) = barrier::channel();
+ self.scan_requests_tx
+ .try_send(ScanRequest {
+ relative_paths: paths,
+ done: tx,
+ })
+ .ok();
+ rx
+ }
+
+ pub fn add_path_prefix_to_scan(&self, path_prefix: Arc<Path>) {
+ self.path_prefixes_to_scan_tx.try_send(path_prefix).ok();
+ }
+
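+ /// Rescan `path` (and `old_path`, if this refresh is part of a rename) and
+ /// return the freshly scanned entry for `path`.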
+ fn refresh_entry(
+ &self,
+ path: Arc<Path>,
+ old_path: Option<Arc<Path>>,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<Entry>> {
+ let paths = if let Some(old_path) = old_path.as_ref() {
+ vec![old_path.clone(), path.clone()]
+ } else {
+ vec![path.clone()]
+ };
+ let mut refresh = self.refresh_entries_for_paths(paths);
+ cx.spawn_weak(move |this, mut cx| async move {
+ refresh.recv().await;
+ this.upgrade(&cx)
+ .ok_or_else(|| anyhow!("worktree was dropped"))?
+ .update(&mut cx, |this, _| {
+ this.entry_for_path(path)
+ .cloned()
+ .ok_or_else(|| anyhow!("failed to read path after update"))
+ })
+ })
+ }
+
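+ /// Stream this worktree's snapshots to the given callback as chunked
+ /// `proto::UpdateWorktree` messages, beginning with a full initial update.
+ /// If the callback returns `false`, sending pauses until updates are
+ /// resumed (e.g. after a reconnect).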
+ pub fn observe_updates<F, Fut>(
+ &mut self,
+ project_id: u64,
+ cx: &mut ModelContext<Worktree>,
+ callback: F,
+ ) -> oneshot::Receiver<()>
+ where
+ F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
+ Fut: Send + Future<Output = bool>,
+ {
+ #[cfg(any(test, feature = "test-support"))]
+ const MAX_CHUNK_SIZE: usize = 2;
+ #[cfg(not(any(test, feature = "test-support")))]
+ const MAX_CHUNK_SIZE: usize = 256;
+
+ let (share_tx, share_rx) = oneshot::channel();
+
+ if let Some(share) = self.share.as_mut() {
+ share_tx.send(()).ok();
+ *share.resume_updates.borrow_mut() = ();
+ return share_rx;
+ }
+
+ let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
+ let (snapshots_tx, mut snapshots_rx) =
+ mpsc::unbounded::<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>();
+ snapshots_tx
+ .unbounded_send((self.snapshot(), Arc::from([]), Arc::from([])))
+ .ok();
+
+ let worktree_id = cx.model_id() as u64;
+ let _maintain_remote_snapshot = cx.background().spawn(async move {
+ let mut is_first = true;
+ while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
+ let update = if is_first {
+ is_first = false;
+ snapshot.build_initial_update(project_id, worktree_id)
+ } else {
+ snapshot.build_update(project_id, worktree_id, entry_changes, repo_changes)
+ };
+
+ for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
+ let _ = resume_updates_rx.try_recv();
+ loop {
+ let result = callback(update.clone());
+ if result.await {
+ break;
+ } else {
+ log::info!("waiting to resume updates");
+ if resume_updates_rx.next().await.is_none() {
+ return Some(());
+ }
+ }
+ }
+ }
+ }
+ share_tx.send(()).ok();
+ Some(())
+ });
+
+ self.share = Some(ShareState {
+ project_id,
+ snapshots_tx,
+ resume_updates: resume_updates_tx,
+ _maintain_remote_snapshot,
+ });
+ share_rx
+ }
+
+ pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
+ let client = self.client.clone();
+
+ for (path, summaries) in &self.diagnostic_summaries {
+ for (&server_id, summary) in summaries {
+ if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
+ project_id,
+ worktree_id: cx.model_id() as u64,
+ summary: Some(summary.to_proto(server_id, &path)),
+ }) {
+ return Task::ready(Err(e));
+ }
+ }
+ }
+
+ let rx = self.observe_updates(project_id, cx, move |update| {
+ client.request(update).map(|result| result.is_ok())
+ });
+ cx.foreground()
+ .spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
+ }
+
+ pub fn unshare(&mut self) {
+ self.share.take();
+ }
+
+ pub fn is_shared(&self) -> bool {
+ self.share.is_some()
+ }
+}
+
+impl RemoteWorktree {
+ fn snapshot(&self) -> Snapshot {
+ self.snapshot.clone()
+ }
+
+ pub fn disconnected_from_host(&mut self) {
+ self.updates_tx.take();
+ self.snapshot_subscriptions.clear();
+ self.disconnected = true;
+ }
+
+ pub fn save_buffer(
+ &self,
+ buffer_handle: Handle<Buffer>,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<()>> {
+ let buffer = buffer_handle.read(cx);
+ let buffer_id = buffer.remote_id();
+ let version = buffer.version();
+ let rpc = self.client.clone();
+ let project_id = self.project_id;
+ cx.as_mut().spawn(|mut cx| async move {
+ let response = rpc
+ .request(proto::SaveBuffer {
+ project_id,
+ buffer_id,
+ version: serialize_version(&version),
+ })
+ .await?;
+ let version = deserialize_version(&response.version);
+ let fingerprint = deserialize_fingerprint(&response.fingerprint)?;
+ let mtime = response
+ .mtime
+ .ok_or_else(|| anyhow!("missing mtime"))?
+ .into();
+
+ buffer_handle.update(&mut cx, |buffer, cx| {
+ buffer.did_save(version.clone(), fingerprint, mtime, cx);
+ });
+
+ Ok(())
+ })
+ }
+
+ pub fn update_from_remote(&mut self, update: proto::UpdateWorktree) {
+ if let Some(updates_tx) = &self.updates_tx {
+ updates_tx
+ .unbounded_send(update)
+ .expect("consumer runs to completion");
+ }
+ }
+
+ fn observed_snapshot(&self, scan_id: usize) -> bool {
+ self.completed_scan_id >= scan_id
+ }
+
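+ /// Wait until this remote worktree has observed a snapshot with at least
+ /// the given scan id, failing if the host disconnects first.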
+ pub(crate) fn wait_for_snapshot(&mut self, scan_id: usize) -> impl Future<Output = Result<()>> {
+ let (tx, rx) = oneshot::channel();
+ if self.observed_snapshot(scan_id) {
+ let _ = tx.send(());
+ } else if self.disconnected {
+ drop(tx);
+ } else {
+ match self
+ .snapshot_subscriptions
+ .binary_search_by_key(&scan_id, |probe| probe.0)
+ {
+ Ok(ix) | Err(ix) => self.snapshot_subscriptions.insert(ix, (scan_id, tx)),
+ }
+ }
+
+ async move {
+ rx.await?;
+ Ok(())
+ }
+ }
+
+ pub fn update_diagnostic_summary(
+ &mut self,
+ path: Arc<Path>,
+ summary: &proto::DiagnosticSummary,
+ ) {
+ let server_id = LanguageServerId(summary.language_server_id as usize);
+ let summary = DiagnosticSummary {
+ error_count: summary.error_count as usize,
+ warning_count: summary.warning_count as usize,
+ };
+
+ if summary.is_empty() {
+ if let Some(summaries) = self.diagnostic_summaries.get_mut(&path) {
+ summaries.remove(&server_id);
+ if summaries.is_empty() {
+ self.diagnostic_summaries.remove(&path);
+ }
+ }
+ } else {
+ self.diagnostic_summaries
+ .entry(path)
+ .or_default()
+ .insert(server_id, summary);
+ }
+ }
+
+ pub fn insert_entry(
+ &mut self,
+ entry: proto::Entry,
+ scan_id: usize,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<Entry>> {
+ let wait_for_snapshot = self.wait_for_snapshot(scan_id);
+ cx.spawn(|this, mut cx| async move {
+ wait_for_snapshot.await?;
+ this.update(&mut cx, |worktree, _| {
+ let worktree = worktree.as_remote_mut().unwrap();
+ let mut snapshot = worktree.background_snapshot.lock();
+ let entry = snapshot.insert_entry(entry);
+ worktree.snapshot = snapshot.clone();
+ entry
+ })
+ })
+ }
+
+ pub(crate) fn delete_entry(
+ &mut self,
+ id: ProjectEntryId,
+ scan_id: usize,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<()>> {
+ let wait_for_snapshot = self.wait_for_snapshot(scan_id);
+ cx.spawn(|this, mut cx| async move {
+ wait_for_snapshot.await?;
+ this.update(&mut cx, |worktree, _| {
+ let worktree = worktree.as_remote_mut().unwrap();
+ let mut snapshot = worktree.background_snapshot.lock();
+ snapshot.delete_entry(id);
+ worktree.snapshot = snapshot.clone();
+ });
+ Ok(())
+ })
+ }
+}
+
+impl Snapshot {
+ pub fn id(&self) -> WorktreeId {
+ self.id
+ }
+
+ pub fn abs_path(&self) -> &Arc<Path> {
+ &self.abs_path
+ }
+
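+ /// Convert a worktree-relative path to an absolute path; the root (empty)
+ /// path maps to the worktree's own absolute path.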
+ pub fn absolutize(&self, path: &Path) -> PathBuf {
+ if path.file_name().is_some() {
+ self.abs_path.join(path)
+ } else {
+ self.abs_path.to_path_buf()
+ }
+ }
+
+ pub fn contains_entry(&self, entry_id: ProjectEntryId) -> bool {
+ self.entries_by_id.get(&entry_id, &()).is_some()
+ }
+
+ pub(crate) fn insert_entry(&mut self, entry: proto::Entry) -> Result<Entry> {
+ let entry = Entry::try_from((&self.root_char_bag, entry))?;
+ let old_entry = self.entries_by_id.insert_or_replace(
+ PathEntry {
+ id: entry.id,
+ path: entry.path.clone(),
+ is_ignored: entry.is_ignored,
+ scan_id: 0,
+ },
+ &(),
+ );
+ if let Some(old_entry) = old_entry {
+ self.entries_by_path.remove(&PathKey(old_entry.path), &());
+ }
+ self.entries_by_path.insert_or_replace(entry.clone(), &());
+ Ok(entry)
+ }
+
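+ /// Remove the given entry and all of its descendants from the snapshot,
+ /// returning the removed entry's path.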
+ fn delete_entry(&mut self, entry_id: ProjectEntryId) -> Option<Arc<Path>> {
+ let removed_entry = self.entries_by_id.remove(&entry_id, &())?;
+ self.entries_by_path = {
+ let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
+ let mut new_entries_by_path =
+ cursor.slice(&TraversalTarget::Path(&removed_entry.path), Bias::Left, &());
+ while let Some(entry) = cursor.item() {
+ if entry.path.starts_with(&removed_entry.path) {
+ self.entries_by_id.remove(&entry.id, &());
+ cursor.next(&());
+ } else {
+ break;
+ }
+ }
+ new_entries_by_path.append(cursor.suffix(&()), &());
+ new_entries_by_path
+ };
+
+ Some(removed_entry.path)
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn status_for_file(&self, path: impl Into<PathBuf>) -> Option<GitFileStatus> {
+ let path = path.into();
+ self.entries_by_path
+ .get(&PathKey(Arc::from(path)), &())
+ .and_then(|entry| entry.git_status)
+ }
+
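+ /// Apply an update received from the host, bringing this snapshot's
+ /// entries and repositories up to the host's scan id.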
+ pub(crate) fn apply_remote_update(&mut self, mut update: proto::UpdateWorktree) -> Result<()> {
+ let mut entries_by_path_edits = Vec::new();
+ let mut entries_by_id_edits = Vec::new();
+
+ for entry_id in update.removed_entries {
+ let entry_id = ProjectEntryId::from_proto(entry_id);
+ entries_by_id_edits.push(Edit::Remove(entry_id));
+ if let Some(entry) = self.entry_for_id(entry_id) {
+ entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
+ }
+ }
+
+ for entry in update.updated_entries {
+ let entry = Entry::try_from((&self.root_char_bag, entry))?;
+ if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
+ entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
+ }
+ if let Some(old_entry) = self.entries_by_path.get(&PathKey(entry.path.clone()), &()) {
+ if old_entry.id != entry.id {
+ entries_by_id_edits.push(Edit::Remove(old_entry.id));
+ }
+ }
+ entries_by_id_edits.push(Edit::Insert(PathEntry {
+ id: entry.id,
+ path: entry.path.clone(),
+ is_ignored: entry.is_ignored,
+ scan_id: 0,
+ }));
+ entries_by_path_edits.push(Edit::Insert(entry));
+ }
+
+ self.entries_by_path.edit(entries_by_path_edits, &());
+ self.entries_by_id.edit(entries_by_id_edits, &());
+
+ update.removed_repositories.sort_unstable();
+ self.repository_entries.retain(|_, entry| {
+ update
+ .removed_repositories
+ .binary_search(&entry.work_directory.to_proto())
+ .is_err()
+ });
+
+ for repository in update.updated_repositories {
+ let work_directory_entry: WorkDirectoryEntry =
+ ProjectEntryId::from_proto(repository.work_directory_id).into();
+
+ if let Some(entry) = self.entry_for_id(*work_directory_entry) {
+ let work_directory = RepositoryWorkDirectory(entry.path.clone());
+ if self.repository_entries.get(&work_directory).is_some() {
+ self.repository_entries.update(&work_directory, |repo| {
+ repo.branch = repository.branch.map(Into::into);
+ });
+ } else {
+ self.repository_entries.insert(
+ work_directory,
+ RepositoryEntry {
+ work_directory: work_directory_entry,
+ branch: repository.branch.map(Into::into),
+ },
+ )
+ }
+ } else {
+ log::error!("no work directory entry for repository {:?}", repository)
+ }
+ }
+
+ self.scan_id = update.scan_id as usize;
+ if update.is_last_update {
+ self.completed_scan_id = update.scan_id as usize;
+ }
+
+ Ok(())
+ }
+
+ pub fn file_count(&self) -> usize {
+ self.entries_by_path.summary().file_count
+ }
+
+ pub fn visible_file_count(&self) -> usize {
+ self.entries_by_path.summary().non_ignored_file_count
+ }
+
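+ /// Start a traversal at the `start_offset`th entry of the requested kind,
+ /// seeking by the tree's aggregated counts rather than walking entries one
+ /// by one.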
+ fn traverse_from_offset(
+ &self,
+ include_dirs: bool,
+ include_ignored: bool,
+ start_offset: usize,
+ ) -> Traversal {
+ let mut cursor = self.entries_by_path.cursor();
+ cursor.seek(
+ &TraversalTarget::Count {
+ count: start_offset,
+ include_dirs,
+ include_ignored,
+ },
+ Bias::Right,
+ &(),
+ );
+ Traversal {
+ cursor,
+ include_dirs,
+ include_ignored,
+ }
+ }
+
+ fn traverse_from_path(
+ &self,
+ include_dirs: bool,
+ include_ignored: bool,
+ path: &Path,
+ ) -> Traversal {
+ let mut cursor = self.entries_by_path.cursor();
+ cursor.seek(&TraversalTarget::Path(path), Bias::Left, &());
+ Traversal {
+ cursor,
+ include_dirs,
+ include_ignored,
+ }
+ }
+
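+ /// Iterate over file entries, starting at the given offset. A minimal
+ /// usage sketch (assuming a `snapshot` in scope), skipping ignored files:
+ ///
+ /// ```ignore
+ /// for entry in snapshot.files(false, 0) {
+ /// println!("{:?}", entry.path);
+ /// }
+ /// ```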
+ pub fn files(&self, include_ignored: bool, start: usize) -> Traversal {
+ self.traverse_from_offset(false, include_ignored, start)
+ }
+
+ pub fn entries(&self, include_ignored: bool) -> Traversal {
+ self.traverse_from_offset(true, include_ignored, 0)
+ }
+
+ pub fn repositories(&self) -> impl Iterator<Item = (&Arc<Path>, &RepositoryEntry)> {
+ self.repository_entries
+ .iter()
+ .map(|(path, entry)| (&path.0, entry))
+ }
+
+ /// Get the repository whose work directory is exactly the given path.
+ pub fn repository_for_work_directory(&self, path: &Path) -> Option<RepositoryEntry> {
+ self.repository_entries
+ .get(&RepositoryWorkDirectory(path.into()))
+ .cloned()
+ }
+
+ /// Get the repository whose work directory contains the given path.
+ pub fn repository_for_path(&self, path: &Path) -> Option<RepositoryEntry> {
+ self.repository_and_work_directory_for_path(path)
+ .map(|e| e.1)
+ }
+
+ pub fn repository_and_work_directory_for_path(
+ &self,
+ path: &Path,
+ ) -> Option<(RepositoryWorkDirectory, RepositoryEntry)> {
+ self.repository_entries
+ .iter()
+ .filter(|(workdir_path, _)| path.starts_with(workdir_path))
+ .last()
+ .map(|(path, repo)| (path.clone(), repo.clone()))
+ }
+
+ /// Given an ordered iterator of entries, returns an iterator of those entries,
+ /// along with their containing git repository.
+ pub fn entries_with_repositories<'a>(
+ &'a self,
+ entries: impl 'a + Iterator<Item = &'a Entry>,
+ ) -> impl 'a + Iterator<Item = (&'a Entry, Option<&'a RepositoryEntry>)> {
+ let mut containing_repos = Vec::<(&Arc<Path>, &RepositoryEntry)>::new();
+ let mut repositories = self.repositories().peekable();
+ entries.map(move |entry| {
+ while let Some((repo_path, _)) = containing_repos.last() {
+ if !entry.path.starts_with(repo_path) {
+ containing_repos.pop();
+ } else {
+ break;
+ }
+ }
+ while let Some((repo_path, _)) = repositories.peek() {
+ if entry.path.starts_with(repo_path) {
+ containing_repos.push(repositories.next().unwrap());
+ } else {
+ break;
+ }
+ }
+ let repo = containing_repos.last().map(|(_, repo)| *repo);
+ (entry, repo)
+ })
+ }
+
+ /// Update the `git_status` of the given entries such that files'
+ /// statuses bubble up to their ancestor directories.
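+ /// This is a single forward pass with a cursor over the `GitStatuses`
+ /// summary dimension: each directory's status is derived from the aggregate
+ /// counts of the entries beneath it, preferring conflicts over
+ /// modifications over additions.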
+ pub fn propagate_git_statuses(&self, result: &mut [Entry]) {
+ let mut cursor = self
+ .entries_by_path
+ .cursor::<(TraversalProgress, GitStatuses)>();
+ let mut entry_stack = Vec::<(usize, GitStatuses)>::new();
+
+ let mut result_ix = 0;
+ loop {
+ let next_entry = result.get(result_ix);
+ let containing_entry = entry_stack.last().map(|(ix, _)| &result[*ix]);
+
+ let entry_to_finish = match (containing_entry, next_entry) {
+ (Some(_), None) => entry_stack.pop(),
+ (Some(containing_entry), Some(next_path)) => {
+ if !next_path.path.starts_with(&containing_entry.path) {
+ entry_stack.pop()
+ } else {
+ None
+ }
+ }
+ (None, Some(_)) => None,
+ (None, None) => break,
+ };
+
+ if let Some((entry_ix, prev_statuses)) = entry_to_finish {
+ cursor.seek_forward(
+ &TraversalTarget::PathSuccessor(&result[entry_ix].path),
+ Bias::Left,
+ &(),
+ );
+
+ let statuses = cursor.start().1 - prev_statuses;
+
+ result[entry_ix].git_status = if statuses.conflict > 0 {
+ Some(GitFileStatus::Conflict)
+ } else if statuses.modified > 0 {
+ Some(GitFileStatus::Modified)
+ } else if statuses.added > 0 {
+ Some(GitFileStatus::Added)
+ } else {
+ None
+ };
+ } else {
+ if result[result_ix].is_dir() {
+ cursor.seek_forward(
+ &TraversalTarget::Path(&result[result_ix].path),
+ Bias::Left,
+ &(),
+ );
+ entry_stack.push((result_ix, cursor.start().1));
+ }
+ result_ix += 1;
+ }
+ }
+ }
+
+ pub fn paths(&self) -> impl Iterator<Item = &Arc<Path>> {
+ let empty_path = Path::new("");
+ self.entries_by_path
+ .cursor::<()>()
+ .filter(move |entry| entry.path.as_ref() != empty_path)
+ .map(|entry| &entry.path)
+ }
+
+ fn child_entries<'a>(&'a self, parent_path: &'a Path) -> ChildEntriesIter<'a> {
+ let mut cursor = self.entries_by_path.cursor();
+ cursor.seek(&TraversalTarget::Path(parent_path), Bias::Right, &());
+ let traversal = Traversal {
+ cursor,
+ include_dirs: true,
+ include_ignored: true,
+ };
+ ChildEntriesIter {
+ traversal,
+ parent_path,
+ }
+ }
+
+ pub fn descendent_entries<'a>(
+ &'a self,
+ include_dirs: bool,
+ include_ignored: bool,
+ parent_path: &'a Path,
+ ) -> DescendentEntriesIter<'a> {
+ let mut cursor = self.entries_by_path.cursor();
+ cursor.seek(&TraversalTarget::Path(parent_path), Bias::Left, &());
+ let mut traversal = Traversal {
+ cursor,
+ include_dirs,
+ include_ignored,
+ };
+
+ if traversal.end_offset() == traversal.start_offset() {
+ traversal.advance();
+ }
+
+ DescendentEntriesIter {
+ traversal,
+ parent_path,
+ }
+ }
+
+ pub fn root_entry(&self) -> Option<&Entry> {
+ self.entry_for_path("")
+ }
+
+ pub fn root_name(&self) -> &str {
+ &self.root_name
+ }
+
+ pub fn root_git_entry(&self) -> Option<RepositoryEntry> {
+ self.repository_entries
+ .get(&RepositoryWorkDirectory(Path::new("").into()))
+ .map(|entry| entry.to_owned())
+ }
+
+ pub fn git_entries(&self) -> impl Iterator<Item = &RepositoryEntry> {
+ self.repository_entries.values()
+ }
+
+ pub fn scan_id(&self) -> usize {
+ self.scan_id
+ }
+
+ pub fn entry_for_path(&self, path: impl AsRef<Path>) -> Option<&Entry> {
+ let path = path.as_ref();
+ self.traverse_from_path(true, true, path)
+ .entry()
+ .and_then(|entry| {
+ if entry.path.as_ref() == path {
+ Some(entry)
+ } else {
+ None
+ }
+ })
+ }
+
+ pub fn entry_for_id(&self, id: ProjectEntryId) -> Option<&Entry> {
+ let entry = self.entries_by_id.get(&id, &())?;
+ self.entry_for_path(&entry.path)
+ }
+
+ pub fn inode_for_path(&self, path: impl AsRef<Path>) -> Option<u64> {
+ self.entry_for_path(path.as_ref()).map(|e| e.inode)
+ }
+}
+
+impl LocalSnapshot {
+ pub(crate) fn get_local_repo(&self, repo: &RepositoryEntry) -> Option<&LocalRepositoryEntry> {
+ self.git_repositories.get(&repo.work_directory.0)
+ }
+
+ pub(crate) fn local_repo_for_path(
+ &self,
+ path: &Path,
+ ) -> Option<(RepositoryWorkDirectory, &LocalRepositoryEntry)> {
+ let (path, repo) = self.repository_and_work_directory_for_path(path)?;
+ Some((path, self.git_repositories.get(&repo.work_directory_id())?))
+ }
+
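+ /// Build a `proto::UpdateWorktree` message describing the given entry and
+ /// repository changes relative to this snapshot.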
+ fn build_update(
+ &self,
+ project_id: u64,
+ worktree_id: u64,
+ entry_changes: UpdatedEntriesSet,
+ repo_changes: UpdatedGitRepositoriesSet,
+ ) -> proto::UpdateWorktree {
+ let mut updated_entries = Vec::new();
+ let mut removed_entries = Vec::new();
+ let mut updated_repositories = Vec::new();
+ let mut removed_repositories = Vec::new();
+
+ for (_, entry_id, path_change) in entry_changes.iter() {
+ if let PathChange::Removed = path_change {
+ removed_entries.push(entry_id.0 as u64);
+ } else if let Some(entry) = self.entry_for_id(*entry_id) {
+ updated_entries.push(proto::Entry::from(entry));
+ }
+ }
+
+ for (work_dir_path, change) in repo_changes.iter() {
+ let new_repo = self
+ .repository_entries
+ .get(&RepositoryWorkDirectory(work_dir_path.clone()));
+ match (&change.old_repository, new_repo) {
+ (Some(old_repo), Some(new_repo)) => {
+ updated_repositories.push(new_repo.build_update(old_repo));
+ }
+ (None, Some(new_repo)) => {
+ updated_repositories.push(proto::RepositoryEntry::from(new_repo));
+ }
+ (Some(old_repo), None) => {
+ removed_repositories.push(old_repo.work_directory.0.to_proto());
+ }
+ _ => {}
+ }
+ }
+
+ removed_entries.sort_unstable();
+ updated_entries.sort_unstable_by_key(|e| e.id);
+ removed_repositories.sort_unstable();
+ updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
+
+ // TODO - optimize, knowing that removed_entries are sorted.
+ removed_entries.retain(|id| updated_entries.binary_search_by_key(id, |e| e.id).is_err());
+
+ proto::UpdateWorktree {
+ project_id,
+ worktree_id,
+ abs_path: self.abs_path().to_string_lossy().into(),
+ root_name: self.root_name().to_string(),
+ updated_entries,
+ removed_entries,
+ scan_id: self.scan_id as u64,
+ is_last_update: self.completed_scan_id == self.scan_id,
+ updated_repositories,
+ removed_repositories,
+ }
+ }
+
+ fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
+ let mut updated_entries = self
+ .entries_by_path
+ .iter()
+ .map(proto::Entry::from)
+ .collect::<Vec<_>>();
+ updated_entries.sort_unstable_by_key(|e| e.id);
+
+ let mut updated_repositories = self
+ .repository_entries
+ .values()
+ .map(proto::RepositoryEntry::from)
+ .collect::<Vec<_>>();
+ updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
+
+ proto::UpdateWorktree {
+ project_id,
+ worktree_id,
+ abs_path: self.abs_path().to_string_lossy().into(),
+ root_name: self.root_name().to_string(),
+ updated_entries,
+ removed_entries: Vec::new(),
+ scan_id: self.scan_id as u64,
+ is_last_update: self.completed_scan_id == self.scan_id,
+ updated_repositories,
+ removed_repositories: Vec::new(),
+ }
+ }
+
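+ /// Insert an entry into the snapshot, reloading the corresponding
+ /// gitignore if the entry is a `.gitignore` file and preserving the kind of
+ /// directories whose contents were already loaded.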
+ fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
+ if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
+ let abs_path = self.abs_path.join(&entry.path);
+ match smol::block_on(build_gitignore(&abs_path, fs)) {
+ Ok(ignore) => {
+ self.ignores_by_parent_abs_path
+ .insert(abs_path.parent().unwrap().into(), (Arc::new(ignore), true));
+ }
+ Err(error) => {
+ log::error!(
+ "error loading .gitignore file {:?} - {:?}",
+ &entry.path,
+ error
+ );
+ }
+ }
+ }
+
+ if entry.kind == EntryKind::PendingDir {
+ if let Some(existing_entry) =
+ self.entries_by_path.get(&PathKey(entry.path.clone()), &())
+ {
+ entry.kind = existing_entry.kind;
+ }
+ }
+
+ let scan_id = self.scan_id;
+ let removed = self.entries_by_path.insert_or_replace(entry.clone(), &());
+ if let Some(removed) = removed {
+ if removed.id != entry.id {
+ self.entries_by_id.remove(&removed.id, &());
+ }
+ }
+ self.entries_by_id.insert_or_replace(
+ PathEntry {
+ id: entry.id,
+ path: entry.path.clone(),
+ is_ignored: entry.is_ignored,
+ scan_id,
+ },
+ &(),
+ );
+
+ entry
+ }
+
+ fn ancestor_inodes_for_path(&self, path: &Path) -> TreeSet<u64> {
+ let mut inodes = TreeSet::default();
+ for ancestor in path.ancestors().skip(1) {
+ if let Some(entry) = self.entry_for_path(ancestor) {
+ inodes.insert(entry.inode);
+ }
+ }
+ inodes
+ }
+
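+ /// Build the stack of `.gitignore` files that apply at the given absolute
+ /// path, from the worktree root downward, short-circuiting to
+ /// `IgnoreStack::all()` as soon as an ancestor is itself ignored.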
+ fn ignore_stack_for_abs_path(&self, abs_path: &Path, is_dir: bool) -> Arc<IgnoreStack> {
+ let mut new_ignores = Vec::new();
+ for ancestor in abs_path.ancestors().skip(1) {
+ if let Some((ignore, _)) = self.ignores_by_parent_abs_path.get(ancestor) {
+ new_ignores.push((ancestor, Some(ignore.clone())));
+ } else {
+ new_ignores.push((ancestor, None));
+ }
+ }
+
+ let mut ignore_stack = IgnoreStack::none();
+ for (parent_abs_path, ignore) in new_ignores.into_iter().rev() {
+ if ignore_stack.is_abs_path_ignored(parent_abs_path, true) {
+ ignore_stack = IgnoreStack::all();
+ break;
+ } else if let Some(ignore) = ignore {
+ ignore_stack = ignore_stack.append(parent_abs_path.into(), ignore);
+ }
+ }
+
+ if ignore_stack.is_abs_path_ignored(abs_path, is_dir) {
+ ignore_stack = IgnoreStack::all();
+ }
+
+ ignore_stack
+ }
+
+ #[cfg(test)]
+ pub(crate) fn expanded_entries(&self) -> impl Iterator<Item = &Entry> {
+ self.entries_by_path
+ .cursor::<()>()
+ .filter(|entry| entry.kind == EntryKind::Dir && (entry.is_external || entry.is_ignored))
+ }
+
+ #[cfg(test)]
+ pub fn check_invariants(&self, git_state: bool) {
+ use pretty_assertions::assert_eq;
+
+ assert_eq!(
+ self.entries_by_path
+ .cursor::<()>()
+ .map(|e| (&e.path, e.id))
+ .collect::<Vec<_>>(),
+ self.entries_by_id
+ .cursor::<()>()
+ .map(|e| (&e.path, e.id))
+ .collect::<collections::BTreeSet<_>>()
+ .into_iter()
+ .collect::<Vec<_>>(),
+ "entries_by_path and entries_by_id are inconsistent"
+ );
+
+ let mut files = self.files(true, 0);
+ let mut visible_files = self.files(false, 0);
+ for entry in self.entries_by_path.cursor::<()>() {
+ if entry.is_file() {
+ assert_eq!(files.next().unwrap().inode, entry.inode);
+ if !entry.is_ignored && !entry.is_external {
+ assert_eq!(visible_files.next().unwrap().inode, entry.inode);
+ }
+ }
+ }
+
+ assert!(files.next().is_none());
+ assert!(visible_files.next().is_none());
+
+ let mut bfs_paths = Vec::new();
+ let mut stack = self
+ .root_entry()
+ .map(|e| e.path.as_ref())
+ .into_iter()
+ .collect::<Vec<_>>();
+ while let Some(path) = stack.pop() {
+ bfs_paths.push(path);
+ let ix = stack.len();
+ for child_entry in self.child_entries(path) {
+ stack.insert(ix, &child_entry.path);
+ }
+ }
+
+ let dfs_paths_via_iter = self
+ .entries_by_path
+ .cursor::<()>()
+ .map(|e| e.path.as_ref())
+ .collect::<Vec<_>>();
+ assert_eq!(bfs_paths, dfs_paths_via_iter);
+
+ let dfs_paths_via_traversal = self
+ .entries(true)
+ .map(|e| e.path.as_ref())
+ .collect::<Vec<_>>();
+ assert_eq!(dfs_paths_via_traversal, dfs_paths_via_iter);
+
+ if git_state {
+ for ignore_parent_abs_path in self.ignores_by_parent_abs_path.keys() {
+ let ignore_parent_path =
+ ignore_parent_abs_path.strip_prefix(&self.abs_path).unwrap();
+ assert!(self.entry_for_path(&ignore_parent_path).is_some());
+ assert!(self
+ .entry_for_path(ignore_parent_path.join(&*GITIGNORE))
+ .is_some());
+ }
+ }
+ }
+
+ #[cfg(test)]
+ pub fn entries_without_ids(&self, include_ignored: bool) -> Vec<(&Path, u64, bool)> {
+ let mut paths = Vec::new();
+ for entry in self.entries_by_path.cursor::<()>() {
+ if include_ignored || !entry.is_ignored {
+ paths.push((entry.path.as_ref(), entry.inode, entry.is_ignored));
+ }
+ }
+ paths.sort_by(|a, b| a.0.cmp(b.0));
+ paths
+ }
+}
+
+impl BackgroundScannerState {
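+ /// Whether the background scanner should descend into the given directory:
+ /// non-ignored, non-external directories always, plus `.git` directories,
+ /// previously scanned directories, and directories covered by an explicit
+ /// scan request.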
+ fn should_scan_directory(&self, entry: &Entry) -> bool {
+ (!entry.is_external && !entry.is_ignored)
+ || entry.path.file_name() == Some(&*DOT_GIT)
+ || self.scanned_dirs.contains(&entry.id) // If we've ever scanned it, keep scanning
+ || self
+ .paths_to_scan
+ .iter()
+ .any(|p| p.starts_with(&entry.path))
+ || self
+ .path_prefixes_to_scan
+ .iter()
+ .any(|p| entry.path.starts_with(p))
+ }
+
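+ /// Queue a directory for scanning, capturing the containing git
+ /// repository's staged statuses up front when the directory isn't ignored.
+ /// Ancestor inodes are tracked to avoid looping on symlink cycles.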
+ fn enqueue_scan_dir(&self, abs_path: Arc<Path>, entry: &Entry, scan_job_tx: &Sender<ScanJob>) {
+ let path = entry.path.clone();
+ let ignore_stack = self.snapshot.ignore_stack_for_abs_path(&abs_path, true);
+ let mut ancestor_inodes = self.snapshot.ancestor_inodes_for_path(&path);
+ let mut containing_repository = None;
+ if !ignore_stack.is_all() {
+ if let Some((workdir_path, repo)) = self.snapshot.local_repo_for_path(&path) {
+ if let Ok(repo_path) = path.strip_prefix(&workdir_path.0) {
+ containing_repository = Some((
+ workdir_path,
+ repo.repo_ptr.clone(),
+ repo.repo_ptr.lock().staged_statuses(repo_path),
+ ));
+ }
+ }
+ }
+ if !ancestor_inodes.contains(&entry.inode) {
+ ancestor_inodes.insert(entry.inode);
+ scan_job_tx
+ .try_send(ScanJob {
+ abs_path,
+ path,
+ ignore_stack,
+ scan_queue: scan_job_tx.clone(),
+ ancestor_inodes,
+ is_external: entry.is_external,
+ containing_repository,
+ })
+ .unwrap();
+ }
+ }
+
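+ /// Keep entry ids stable by reusing the id of a previously removed entry
+ /// with the same inode, or of an existing entry at the same path.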
+ fn reuse_entry_id(&mut self, entry: &mut Entry) {
+ if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
+ entry.id = removed_entry_id;
+ } else if let Some(existing_entry) = self.snapshot.entry_for_path(&entry.path) {
+ entry.id = existing_entry.id;
+ }
+ }
+
+ fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
+ self.reuse_entry_id(&mut entry);
+ let entry = self.snapshot.insert_entry(entry, fs);
+ if entry.path.file_name() == Some(&DOT_GIT) {
+ self.build_git_repository(entry.path.clone(), fs);
+ }
+
+ #[cfg(test)]
+ self.snapshot.check_invariants(false);
+
+ entry
+ }
+
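+ /// Insert the scanned children of a directory into the snapshot, marking
+ /// the parent as loaded and recording its `.gitignore` if one was found.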
+ fn populate_dir(
+ &mut self,
+ parent_path: &Arc<Path>,
+ entries: impl IntoIterator<Item = Entry>,
+ ignore: Option<Arc<Gitignore>>,
+ ) {
+ let mut parent_entry = if let Some(parent_entry) = self
+ .snapshot
+ .entries_by_path
+ .get(&PathKey(parent_path.clone()), &())
+ {
+ parent_entry.clone()
+ } else {
+ log::warn!(
+ "populating a directory {:?} that has been removed",
+ parent_path
+ );
+ return;
+ };
+
+ match parent_entry.kind {
+ EntryKind::PendingDir | EntryKind::UnloadedDir => parent_entry.kind = EntryKind::Dir,
+ EntryKind::Dir => {}
+ _ => return,
+ }
+
+ if let Some(ignore) = ignore {
+ let abs_parent_path = self.snapshot.abs_path.join(&parent_path).into();
+ self.snapshot
+ .ignores_by_parent_abs_path
+ .insert(abs_parent_path, (ignore, false));
+ }
+
+ let parent_entry_id = parent_entry.id;
+ self.scanned_dirs.insert(parent_entry_id);
+ let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
+ let mut entries_by_id_edits = Vec::new();
+
+ for entry in entries {
+ entries_by_id_edits.push(Edit::Insert(PathEntry {
+ id: entry.id,
+ path: entry.path.clone(),
+ is_ignored: entry.is_ignored,
+ scan_id: self.snapshot.scan_id,
+ }));
+ entries_by_path_edits.push(Edit::Insert(entry));
+ }
+
+ self.snapshot
+ .entries_by_path
+ .edit(entries_by_path_edits, &());
+ self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
+
+ if let Err(ix) = self.changed_paths.binary_search(parent_path) {
+ self.changed_paths.insert(ix, parent_path.clone());
+ }
+
+ #[cfg(test)]
+ self.snapshot.check_invariants(false);
+ }
+
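+ /// Remove the given path and all of its descendants from the snapshot,
+ /// remembering removed entry ids by inode so they can be reused if the
+ /// same files reappear.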
+ fn remove_path(&mut self, path: &Path) {
+ let mut new_entries;
+ let removed_entries;
+ {
+ let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>();
+ new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
+ removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
+ new_entries.append(cursor.suffix(&()), &());
+ }
+ self.snapshot.entries_by_path = new_entries;
+
+ let mut entries_by_id_edits = Vec::new();
+ for entry in removed_entries.cursor::<()>() {
+ let removed_entry_id = self
+ .removed_entry_ids
+ .entry(entry.inode)
+ .or_insert(entry.id);
+ *removed_entry_id = cmp::max(*removed_entry_id, entry.id);
+ entries_by_id_edits.push(Edit::Remove(entry.id));
+ }
+ self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
+
+ if path.file_name() == Some(&GITIGNORE) {
+ let abs_parent_path = self.snapshot.abs_path.join(path.parent().unwrap());
+ if let Some((_, needs_update)) = self
+ .snapshot
+ .ignores_by_parent_abs_path
+ .get_mut(abs_parent_path.as_path())
+ {
+ *needs_update = true;
+ }
+ }
+
+ #[cfg(test)]
+ self.snapshot.check_invariants(false);
+ }
+
+ fn reload_repositories(&mut self, changed_paths: &[Arc<Path>], fs: &dyn Fs) {
+ let scan_id = self.snapshot.scan_id;
+
+ // Find each of the .git directories that contain any of the given paths.
+ let mut prev_dot_git_dir = None;
+ for changed_path in changed_paths {
+ let Some(dot_git_dir) = changed_path
+ .ancestors()
+ .find(|ancestor| ancestor.file_name() == Some(&*DOT_GIT))
+ else {
+ continue;
+ };
+
+ // Avoid processing the same repository multiple times, if multiple paths
+ // within it have changed.
+ if prev_dot_git_dir == Some(dot_git_dir) {
+ continue;
+ }
+ prev_dot_git_dir = Some(dot_git_dir);
+
+ // If there is already a repository for this .git directory, reload
+ // the status for all of its files.
+ let repository = self
+ .snapshot
+ .git_repositories
+ .iter()
+ .find_map(|(entry_id, repo)| {
+ (repo.git_dir_path.as_ref() == dot_git_dir).then(|| (*entry_id, repo.clone()))
+ });
+ match repository {
+ None => {
+ self.build_git_repository(dot_git_dir.into(), fs);
+ }
+ Some((entry_id, repository)) => {
+ if repository.git_dir_scan_id == scan_id {
+ continue;
+ }
+ let Some(work_dir) = self
+ .snapshot
+ .entry_for_id(entry_id)
+ .map(|entry| RepositoryWorkDirectory(entry.path.clone()))
+ else {
+ continue;
+ };
+
+ log::info!("reload git repository {:?}", dot_git_dir);
+ let repository = repository.repo_ptr.lock();
+ let branch = repository.branch_name();
+ repository.reload_index();
+
+ self.snapshot
+ .git_repositories
+ .update(&entry_id, |entry| entry.git_dir_scan_id = scan_id);
+ self.snapshot
+ .snapshot
+ .repository_entries
+ .update(&work_dir, |entry| entry.branch = branch.map(Into::into));
+
+ self.update_git_statuses(&work_dir, &*repository);
+ }
+ }
+ }
+
+ // Remove any git repositories whose .git entry no longer exists.
+ let snapshot = &mut self.snapshot;
+ let mut repositories = mem::take(&mut snapshot.git_repositories);
+ let mut repository_entries = mem::take(&mut snapshot.repository_entries);
+ repositories.retain(|work_directory_id, _| {
+ snapshot
+ .entry_for_id(*work_directory_id)
+ .map_or(false, |entry| {
+ snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
+ })
+ });
+ repository_entries.retain(|_, entry| repositories.get(&entry.work_directory.0).is_some());
+ snapshot.git_repositories = repositories;
+ snapshot.repository_entries = repository_entries;
+ }
+
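+ /// Open the repository at the given `.git` path and register it in the
+ /// snapshot, returning its work directory, repository handle, and staged
+ /// statuses. Returns `None` for repositories nested inside `.git` metadata
+ /// or repositories that are already known.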
+ fn build_git_repository(
+ &mut self,
+ dot_git_path: Arc<Path>,
+ fs: &dyn Fs,
+ ) -> Option<(
+ RepositoryWorkDirectory,
+ Arc<Mutex<dyn GitRepository>>,
+ TreeMap<RepoPath, GitFileStatus>,
+ )> {
+ log::info!("build git repository {:?}", dot_git_path);
+
+ let work_dir_path: Arc<Path> = dot_git_path.parent().unwrap().into();
+
+ // Guard against repositories whose work directory lies inside another repository's .git metadata
+ if work_dir_path.iter().any(|component| component == *DOT_GIT) {
+ return None;
+ };
+
+ let work_dir_id = self
+ .snapshot
+ .entry_for_path(work_dir_path.clone())
+ .map(|entry| entry.id)?;
+
+ if self.snapshot.git_repositories.get(&work_dir_id).is_some() {
+ return None;
+ }
+
+ let abs_path = self.snapshot.abs_path.join(&dot_git_path);
+ let repository = fs.open_repo(abs_path.as_path())?;
+ let work_directory = RepositoryWorkDirectory(work_dir_path.clone());
+
+ let repo_lock = repository.lock();
+ self.snapshot.repository_entries.insert(
+ work_directory.clone(),
+ RepositoryEntry {
+ work_directory: work_dir_id.into(),
+ branch: repo_lock.branch_name().map(Into::into),
+ },
+ );
+
+ let staged_statuses = self.update_git_statuses(&work_directory, &*repo_lock);
+ drop(repo_lock);
+
+ self.snapshot.git_repositories.insert(
+ work_dir_id,
+ LocalRepositoryEntry {
+ git_dir_scan_id: 0,
+ repo_ptr: repository.clone(),
+ git_dir_path: dot_git_path.clone(),
+ },
+ );
+
+ Some((work_directory, repository, staged_statuses))
+ }
+
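+ /// Recompute the git status of every file under the given work directory,
+ /// recording which paths changed, and return the repository's staged
+ /// statuses.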
+ fn update_git_statuses(
+ &mut self,
+ work_directory: &RepositoryWorkDirectory,
+ repo: &dyn GitRepository,
+ ) -> TreeMap<RepoPath, GitFileStatus> {
+ let staged_statuses = repo.staged_statuses(Path::new(""));
+
+ let mut changes = vec![];
+ let mut edits = vec![];
+
+ for mut entry in self
+ .snapshot
+ .descendent_entries(false, false, &work_directory.0)
+ .cloned()
+ {
+ let Ok(repo_path) = entry.path.strip_prefix(&work_directory.0) else {
+ continue;
+ };
+ let repo_path = RepoPath(repo_path.to_path_buf());
+ let git_file_status = combine_git_statuses(
+ staged_statuses.get(&repo_path).copied(),
+ repo.unstaged_status(&repo_path, entry.mtime),
+ );
+ if entry.git_status != git_file_status {
+ entry.git_status = git_file_status;
+ changes.push(entry.path.clone());
+ edits.push(Edit::Insert(entry));
+ }
+ }
+
+ self.snapshot.entries_by_path.edit(edits, &());
+ util::extend_sorted(&mut self.changed_paths, changes, usize::MAX, Ord::cmp);
+ staged_statuses
+ }
+}
+
+async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
+ let contents = fs.load(abs_path).await?;
+ let parent = abs_path.parent().unwrap_or_else(|| Path::new("/"));
+ let mut builder = GitignoreBuilder::new(parent);
+ for line in contents.lines() {
+ builder.add_line(Some(abs_path.into()), line)?;
+ }
+ Ok(builder.build()?)
+}
+
+impl WorktreeId {
+ pub fn from_usize(handle_id: usize) -> Self {
+ Self(handle_id)
+ }
+
+ pub(crate) fn from_proto(id: u64) -> Self {
+ Self(id as usize)
+ }
+
+ pub fn to_proto(&self) -> u64 {
+ self.0 as u64
+ }
+
+ pub fn to_usize(&self) -> usize {
+ self.0
+ }
+}
+
+impl fmt::Display for WorktreeId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl Deref for Worktree {
+ type Target = Snapshot;
+
+ fn deref(&self) -> &Self::Target {
+ match self {
+ Worktree::Local(worktree) => &worktree.snapshot,
+ Worktree::Remote(worktree) => &worktree.snapshot,
+ }
+ }
+}
+
+impl Deref for LocalWorktree {
+ type Target = LocalSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.snapshot
+ }
+}
+
+impl Deref for RemoteWorktree {
+ type Target = Snapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.snapshot
+ }
+}
+
+impl fmt::Debug for LocalWorktree {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.snapshot.fmt(f)
+ }
+}
+
+impl fmt::Debug for Snapshot {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ struct EntriesById<'a>(&'a SumTree<PathEntry>);
+ struct EntriesByPath<'a>(&'a SumTree<Entry>);
+
+ impl<'a> fmt::Debug for EntriesByPath<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map()
+ .entries(self.0.iter().map(|entry| (&entry.path, entry.id)))
+ .finish()
+ }
+ }
+
+ impl<'a> fmt::Debug for EntriesById<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.0.iter()).finish()
+ }
+ }
+
+ f.debug_struct("Snapshot")
+ .field("id", &self.id)
+ .field("root_name", &self.root_name)
+ .field("entries_by_path", &EntriesByPath(&self.entries_by_path))
+ .field("entries_by_id", &EntriesById(&self.entries_by_id))
+ .finish()
+ }
+}
+
+#[derive(Clone, PartialEq)]
+pub struct File {
+ pub worktree: Handle<Worktree>,
+ pub path: Arc<Path>,
+ pub mtime: SystemTime,
+ pub(crate) entry_id: ProjectEntryId,
+ pub(crate) is_local: bool,
+ pub(crate) is_deleted: bool,
+}
+
+impl language2::File for File {
+ fn as_local(&self) -> Option<&dyn language2::LocalFile> {
+ if self.is_local {
+ Some(self)
+ } else {
+ None
+ }
+ }
+
+ fn mtime(&self) -> SystemTime {
+ self.mtime
+ }
+
+ fn path(&self) -> &Arc<Path> {
+ &self.path
+ }
+
+ fn full_path(&self, cx: &AppContext) -> PathBuf {
+ let mut full_path = PathBuf::new();
+ let worktree = self.worktree.read(cx);
+
+ if worktree.is_visible() {
+ full_path.push(worktree.root_name());
+ } else {
+ let path = worktree.abs_path();
+
+ if worktree.is_local() && path.starts_with(HOME.as_path()) {
+ full_path.push("~");
+ full_path.push(path.strip_prefix(HOME.as_path()).unwrap());
+ } else {
+ full_path.push(path)
+ }
+ }
+
+ if self.path.components().next().is_some() {
+ full_path.push(&self.path);
+ }
+
+ full_path
+ }
+
+ /// Returns the last component of this handle's absolute path. If this handle refers to the root
+ /// of its worktree, then this method will return the name of the worktree itself.
+ fn file_name<'a>(&'a self, cx: &'a AppContext) -> &'a OsStr {
+ self.path
+ .file_name()
+ .unwrap_or_else(|| OsStr::new(&self.worktree.read(cx).root_name))
+ }
+
+ fn worktree_id(&self) -> usize {
+ self.worktree.id()
+ }
+
+ fn is_deleted(&self) -> bool {
+ self.is_deleted
+ }
+
+ fn as_any(&self) -> &dyn Any {
+ self
+ }
+
+ fn to_proto(&self) -> rpc::proto::File {
+ rpc::proto::File {
+ worktree_id: self.worktree.id() as u64,
+ entry_id: self.entry_id.to_proto(),
+ path: self.path.to_string_lossy().into(),
+ mtime: Some(self.mtime.into()),
+ is_deleted: self.is_deleted,
+ }
+ }
+}
+
+impl language2::LocalFile for File {
+ fn abs_path(&self, cx: &AppContext) -> PathBuf {
+ self.worktree
+ .read(cx)
+ .as_local()
+ .unwrap()
+ .abs_path
+ .join(&self.path)
+ }
+
+ fn load(&self, cx: &AppContext) -> Task<Result<String>> {
+ let worktree = self.worktree.read(cx).as_local().unwrap();
+ let abs_path = worktree.absolutize(&self.path);
+ let fs = worktree.fs.clone();
+ cx.background()
+ .spawn(async move { fs.load(&abs_path).await })
+ }
+
+ fn buffer_reloaded(
+ &self,
+ buffer_id: u64,
+ version: &clock::Global,
+ fingerprint: RopeFingerprint,
+ line_ending: LineEnding,
+ mtime: SystemTime,
+ cx: &mut AppContext,
+ ) {
+ let worktree = self.worktree.read(cx).as_local().unwrap();
+ if let Some(project_id) = worktree.share.as_ref().map(|share| share.project_id) {
+ worktree
+ .client
+ .send(proto::BufferReloaded {
+ project_id,
+ buffer_id,
+ version: serialize_version(version),
+ mtime: Some(mtime.into()),
+ fingerprint: serialize_fingerprint(fingerprint),
+ line_ending: serialize_line_ending(line_ending) as i32,
+ })
+ .log_err();
+ }
+ }
+}
+
+impl File {
+ pub fn for_entry(entry: Entry, worktree: Handle<Worktree>) -> Arc<Self> {
+ Arc::new(Self {
+ worktree,
+ path: entry.path.clone(),
+ mtime: entry.mtime,
+ entry_id: entry.id,
+ is_local: true,
+ is_deleted: false,
+ })
+ }
+
+ pub fn from_proto(
+ proto: rpc::proto::File,
+ worktree: Handle<Worktree>,
+ cx: &AppContext,
+ ) -> Result<Self> {
+ let worktree_id = worktree
+ .read(cx)
+ .as_remote()
+ .ok_or_else(|| anyhow!("not remote"))?
+ .id();
+
+ if worktree_id.to_proto() != proto.worktree_id {
+ return Err(anyhow!("worktree id does not match file"));
+ }
+
+ Ok(Self {
+ worktree,
+ path: Path::new(&proto.path).into(),
+ mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
+ entry_id: ProjectEntryId::from_proto(proto.entry_id),
+ is_local: false,
+ is_deleted: proto.is_deleted,
+ })
+ }
+
+ pub fn from_dyn(file: Option<&Arc<dyn language2::File>>) -> Option<&Self> {
+ file.and_then(|f| f.as_any().downcast_ref())
+ }
+
+ pub fn worktree_id(&self, cx: &AppContext) -> WorktreeId {
+ self.worktree.read(cx).id()
+ }
+
+ pub fn project_entry_id(&self, _: &AppContext) -> Option<ProjectEntryId> {
+ if self.is_deleted {
+ None
+ } else {
+ Some(self.entry_id)
+ }
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Entry {
+ pub id: ProjectEntryId,
+ pub kind: EntryKind,
+ pub path: Arc<Path>,
+ pub inode: u64,
+ pub mtime: SystemTime,
+ pub is_symlink: bool,
+
+ /// Whether this entry is ignored by Git.
+ ///
+ /// We only scan ignored entries once the directory is expanded and
+ /// exclude them from searches.
+ pub is_ignored: bool,
+
+ /// Whether this entry's canonical path is outside of the worktree.
+ /// This means the entry is only accessible from the worktree root via a
+ /// symlink.
+ ///
+ /// We only scan entries outside of the worktree once the symlinked
+ /// directory is expanded. External entries are treated like gitignored
+ /// entries in that they are not included in searches.
+ pub is_external: bool,
+ pub git_status: Option<GitFileStatus>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum EntryKind {
+ UnloadedDir,
+ PendingDir,
+ Dir,
+ File(CharBag),
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum PathChange {
+ /// A filesystem entry was created.
+ Added,
+ /// A filesystem entry was removed.
+ Removed,
+ /// A filesystem entry was updated.
+ Updated,
+ /// A filesystem entry was either updated or added. We don't know
+ /// whether or not it already existed, because the path had not
+ /// been loaded before the event.
+ AddedOrUpdated,
+ /// A filesystem entry was found during the initial scan of the worktree.
+ Loaded,
+}
+
+pub struct GitRepositoryChange {
+ /// The previous state of the repository, if it already existed.
+ pub old_repository: Option<RepositoryEntry>,
+}
+
+pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
+pub type UpdatedGitRepositoriesSet = Arc<[(Arc<Path>, GitRepositoryChange)]>;
+
+impl Entry {
+ fn new(
+ path: Arc<Path>,
+ metadata: &fs::Metadata,
+ next_entry_id: &AtomicUsize,
+ root_char_bag: CharBag,
+ ) -> Self {
+ Self {
+ id: ProjectEntryId::new(next_entry_id),
+ kind: if metadata.is_dir {
+ EntryKind::PendingDir
+ } else {
+ EntryKind::File(char_bag_for_path(root_char_bag, &path))
+ },
+ path,
+ inode: metadata.inode,
+ mtime: metadata.mtime,
+ is_symlink: metadata.is_symlink,
+ is_ignored: false,
+ is_external: false,
+ git_status: None,
+ }
+ }
+
+ pub fn is_dir(&self) -> bool {
+ self.kind.is_dir()
+ }
+
+ pub fn is_file(&self) -> bool {
+ self.kind.is_file()
+ }
+
+ pub fn git_status(&self) -> Option<GitFileStatus> {
+ self.git_status
+ }
+}
+
+impl EntryKind {
+ pub fn is_dir(&self) -> bool {
+ matches!(
+ self,
+ EntryKind::Dir | EntryKind::PendingDir | EntryKind::UnloadedDir
+ )
+ }
+
+ pub fn is_unloaded(&self) -> bool {
+ matches!(self, EntryKind::UnloadedDir)
+ }
+
+ pub fn is_file(&self) -> bool {
+ matches!(self, EntryKind::File(_))
+ }
+}
+
+impl sum_tree::Item for Entry {
+ type Summary = EntrySummary;
+
+ fn summary(&self) -> Self::Summary {
+ let non_ignored_count = if self.is_ignored || self.is_external {
+ 0
+ } else {
+ 1
+ };
+ let file_count;
+ let non_ignored_file_count;
+ if self.is_file() {
+ file_count = 1;
+ non_ignored_file_count = non_ignored_count;
+ } else {
+ file_count = 0;
+ non_ignored_file_count = 0;
+ }
+
+ let mut statuses = GitStatuses::default();
+ match self.git_status {
+ Some(status) => match status {
+ GitFileStatus::Added => statuses.added = 1,
+ GitFileStatus::Modified => statuses.modified = 1,
+ GitFileStatus::Conflict => statuses.conflict = 1,
+ },
+ None => {}
+ }
+
+ EntrySummary {
+ max_path: self.path.clone(),
+ count: 1,
+ non_ignored_count,
+ file_count,
+ non_ignored_file_count,
+ statuses,
+ }
+ }
+}
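+
+// Worked example (illustrative, not part of the original change): folding the
+// summaries of a directory, a tracked file, and a gitignored file, as the
+// sum-tree does when these entries are siblings:
+//
+//   dir:          count=1  non_ignored=1  files=0  non_ignored_files=0
+//   tracked file: count=1  non_ignored=1  files=1  non_ignored_files=1
+//   ignored file: count=1  non_ignored=0  files=1  non_ignored_files=0
+//   ------------------------------------------------------------------
+//   combined:     count=3  non_ignored=2  files=2  non_ignored_files=1
+//
+// `max_path` takes the rightmost path, which is what lets cursors seek by path.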
+
+impl sum_tree::KeyedItem for Entry {
+ type Key = PathKey;
+
+ fn key(&self) -> Self::Key {
+ PathKey(self.path.clone())
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct EntrySummary {
+ max_path: Arc<Path>,
+ count: usize,
+ non_ignored_count: usize,
+ file_count: usize,
+ non_ignored_file_count: usize,
+ statuses: GitStatuses,
+}
+
+impl Default for EntrySummary {
+ fn default() -> Self {
+ Self {
+ max_path: Arc::from(Path::new("")),
+ count: 0,
+ non_ignored_count: 0,
+ file_count: 0,
+ non_ignored_file_count: 0,
+ statuses: Default::default(),
+ }
+ }
+}
+
+impl sum_tree::Summary for EntrySummary {
+ type Context = ();
+
+ fn add_summary(&mut self, rhs: &Self, _: &()) {
+ self.max_path = rhs.max_path.clone();
+ self.count += rhs.count;
+ self.non_ignored_count += rhs.non_ignored_count;
+ self.file_count += rhs.file_count;
+ self.non_ignored_file_count += rhs.non_ignored_file_count;
+ self.statuses += rhs.statuses;
+ }
+}
+
+#[derive(Clone, Debug)]
+struct PathEntry {
+ id: ProjectEntryId,
+ path: Arc<Path>,
+ is_ignored: bool,
+ scan_id: usize,
+}
+
+impl sum_tree::Item for PathEntry {
+ type Summary = PathEntrySummary;
+
+ fn summary(&self) -> Self::Summary {
+ PathEntrySummary { max_id: self.id }
+ }
+}
+
+impl sum_tree::KeyedItem for PathEntry {
+ type Key = ProjectEntryId;
+
+ fn key(&self) -> Self::Key {
+ self.id
+ }
+}
+
+#[derive(Clone, Debug, Default)]
+struct PathEntrySummary {
+ max_id: ProjectEntryId,
+}
+
+impl sum_tree::Summary for PathEntrySummary {
+ type Context = ();
+
+ fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
+ self.max_id = summary.max_id;
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, PathEntrySummary> for ProjectEntryId {
+ fn add_summary(&mut self, summary: &'a PathEntrySummary, _: &()) {
+ *self = summary.max_id;
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd)]
+pub struct PathKey(Arc<Path>);
+
+impl Default for PathKey {
+ fn default() -> Self {
+ Self(Path::new("").into())
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
+ fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
+ self.0 = summary.max_path.clone();
+ }
+}
+
+struct BackgroundScanner {
+ state: Mutex<BackgroundScannerState>,
+ fs: Arc<dyn Fs>,
+ status_updates_tx: UnboundedSender<ScanState>,
+ executor: Executor,
+ scan_requests_rx: channel::Receiver<ScanRequest>,
+ path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+ next_entry_id: Arc<AtomicUsize>,
+ phase: BackgroundScannerPhase,
+}
+
+#[derive(PartialEq)]
+enum BackgroundScannerPhase {
+ InitialScan,
+ EventsReceivedDuringInitialScan,
+ Events,
+}
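+
+// Note (illustrative): `run` advances the phase monotonically from
+// `InitialScan` to `EventsReceivedDuringInitialScan` to `Events`, and
+// `build_change_set` consults it to decide whether an entry that appears only
+// in the new snapshot should be reported as `Loaded` or `Added`.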
+
+impl BackgroundScanner {
+ fn new(
+ snapshot: LocalSnapshot,
+ next_entry_id: Arc<AtomicUsize>,
+ fs: Arc<dyn Fs>,
+ status_updates_tx: UnboundedSender<ScanState>,
+ executor: Executor,
+ scan_requests_rx: channel::Receiver<ScanRequest>,
+ path_prefixes_to_scan_rx: channel::Receiver<Arc<Path>>,
+ ) -> Self {
+ Self {
+ fs,
+ status_updates_tx,
+ executor,
+ scan_requests_rx,
+ path_prefixes_to_scan_rx,
+ next_entry_id,
+ state: Mutex::new(BackgroundScannerState {
+ prev_snapshot: snapshot.snapshot.clone(),
+ snapshot,
+ scanned_dirs: Default::default(),
+ path_prefixes_to_scan: Default::default(),
+ paths_to_scan: Default::default(),
+ removed_entry_ids: Default::default(),
+ changed_paths: Default::default(),
+ }),
+ phase: BackgroundScannerPhase::InitialScan,
+ }
+ }
+
+ async fn run(
+ &mut self,
+ mut fs_events_rx: Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>>,
+ ) {
+ use futures::FutureExt as _;
+
+ // Populate ignores above the root.
+ let root_abs_path = self.state.lock().snapshot.abs_path.clone();
+ for ancestor in root_abs_path.ancestors().skip(1) {
+ if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
+ {
+ self.state
+ .lock()
+ .snapshot
+ .ignores_by_parent_abs_path
+ .insert(ancestor.into(), (ignore.into(), false));
+ }
+ }
+
+ let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ {
+ let mut state = self.state.lock();
+ state.snapshot.scan_id += 1;
+ if let Some(mut root_entry) = state.snapshot.root_entry().cloned() {
+ let ignore_stack = state
+ .snapshot
+ .ignore_stack_for_abs_path(&root_abs_path, true);
+ if ignore_stack.is_all() {
+ root_entry.is_ignored = true;
+ state.insert_entry(root_entry.clone(), self.fs.as_ref());
+ }
+ state.enqueue_scan_dir(root_abs_path, &root_entry, &scan_job_tx);
+ }
+ };
+
+ // Perform an initial scan of the directory.
+ drop(scan_job_tx);
+ self.scan_dirs(true, scan_job_rx).await;
+ {
+ let mut state = self.state.lock();
+ state.snapshot.completed_scan_id = state.snapshot.scan_id;
+ }
+
+ self.send_status_update(false, None);
+
+        // Process any FS events that occurred while performing the initial scan.
+        // For these events, the reported changes cannot be as precise, because we
+        // didn't have the previous state loaded yet.
+ self.phase = BackgroundScannerPhase::EventsReceivedDuringInitialScan;
+ if let Poll::Ready(Some(events)) = futures::poll!(fs_events_rx.next()) {
+ let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
+ while let Poll::Ready(Some(more_events)) = futures::poll!(fs_events_rx.next()) {
+ paths.extend(more_events.into_iter().map(|e| e.path));
+ }
+ self.process_events(paths).await;
+ }
+
+ // Continue processing events until the worktree is dropped.
+ self.phase = BackgroundScannerPhase::Events;
+ loop {
+ select_biased! {
+ // Process any path refresh requests from the worktree. Prioritize
+ // these before handling changes reported by the filesystem.
+ request = self.scan_requests_rx.recv().fuse() => {
+ let Ok(request) = request else { break };
+ if !self.process_scan_request(request, false).await {
+ return;
+ }
+ }
+
+ path_prefix = self.path_prefixes_to_scan_rx.recv().fuse() => {
+ let Ok(path_prefix) = path_prefix else { break };
+ log::trace!("adding path prefix {:?}", path_prefix);
+
+ let did_scan = self.forcibly_load_paths(&[path_prefix.clone()]).await;
+ if did_scan {
+ let abs_path =
+ {
+ let mut state = self.state.lock();
+ state.path_prefixes_to_scan.insert(path_prefix.clone());
+ state.snapshot.abs_path.join(&path_prefix)
+ };
+
+ if let Some(abs_path) = self.fs.canonicalize(&abs_path).await.log_err() {
+ self.process_events(vec![abs_path]).await;
+ }
+ }
+ }
+
+ events = fs_events_rx.next().fuse() => {
+ let Some(events) = events else { break };
+ let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
+ while let Poll::Ready(Some(more_events)) = futures::poll!(fs_events_rx.next()) {
+ paths.extend(more_events.into_iter().map(|e| e.path));
+ }
+ self.process_events(paths.clone()).await;
+ }
+ }
+ }
+ }
+
+ async fn process_scan_request(&self, mut request: ScanRequest, scanning: bool) -> bool {
+ log::debug!("rescanning paths {:?}", request.relative_paths);
+
+ request.relative_paths.sort_unstable();
+ self.forcibly_load_paths(&request.relative_paths).await;
+
+ let root_path = self.state.lock().snapshot.abs_path.clone();
+ let root_canonical_path = match self.fs.canonicalize(&root_path).await {
+ Ok(path) => path,
+ Err(err) => {
+ log::error!("failed to canonicalize root path: {}", err);
+ return false;
+ }
+ };
+ let abs_paths = request
+ .relative_paths
+ .iter()
+ .map(|path| {
+ if path.file_name().is_some() {
+ root_canonical_path.join(path)
+ } else {
+ root_canonical_path.clone()
+ }
+ })
+ .collect::<Vec<_>>();
+
+ self.reload_entries_for_paths(
+ root_path,
+ root_canonical_path,
+ &request.relative_paths,
+ abs_paths,
+ None,
+ )
+ .await;
+ self.send_status_update(scanning, Some(request.done))
+ }
+
+ async fn process_events(&mut self, mut abs_paths: Vec<PathBuf>) {
+ let root_path = self.state.lock().snapshot.abs_path.clone();
+ let root_canonical_path = match self.fs.canonicalize(&root_path).await {
+ Ok(path) => path,
+ Err(err) => {
+ log::error!("failed to canonicalize root path: {}", err);
+ return;
+ }
+ };
+
+ let mut relative_paths = Vec::with_capacity(abs_paths.len());
+ abs_paths.sort_unstable();
+ abs_paths.dedup_by(|a, b| a.starts_with(&b));
+ abs_paths.retain(|abs_path| {
+ let snapshot = &self.state.lock().snapshot;
+ {
+ let relative_path: Arc<Path> =
+ if let Ok(path) = abs_path.strip_prefix(&root_canonical_path) {
+ path.into()
+ } else {
+ log::error!(
+ "ignoring event {abs_path:?} outside of root path {root_canonical_path:?}",
+ );
+ return false;
+ };
+
+ let parent_dir_is_loaded = relative_path.parent().map_or(true, |parent| {
+ snapshot
+ .entry_for_path(parent)
+ .map_or(false, |entry| entry.kind == EntryKind::Dir)
+ });
+ if !parent_dir_is_loaded {
+ log::debug!("ignoring event {relative_path:?} within unloaded directory");
+ return false;
+ }
+
+ relative_paths.push(relative_path);
+ true
+ }
+ });
+
+ if relative_paths.is_empty() {
+ return;
+ }
+
+ log::debug!("received fs events {:?}", relative_paths);
+
+ let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ self.reload_entries_for_paths(
+ root_path,
+ root_canonical_path,
+ &relative_paths,
+ abs_paths,
+ Some(scan_job_tx.clone()),
+ )
+ .await;
+ drop(scan_job_tx);
+ self.scan_dirs(false, scan_job_rx).await;
+
+ let (scan_job_tx, scan_job_rx) = channel::unbounded();
+ self.update_ignore_statuses(scan_job_tx).await;
+ self.scan_dirs(false, scan_job_rx).await;
+
+ {
+ let mut state = self.state.lock();
+ state.reload_repositories(&relative_paths, self.fs.as_ref());
+ state.snapshot.completed_scan_id = state.snapshot.scan_id;
+ for (_, entry_id) in mem::take(&mut state.removed_entry_ids) {
+ state.scanned_dirs.remove(&entry_id);
+ }
+ }
+
+ self.send_status_update(false, None);
+ }
+
+ async fn forcibly_load_paths(&self, paths: &[Arc<Path>]) -> bool {
+ let (scan_job_tx, mut scan_job_rx) = channel::unbounded();
+ {
+ let mut state = self.state.lock();
+ let root_path = state.snapshot.abs_path.clone();
+ for path in paths {
+ for ancestor in path.ancestors() {
+ if let Some(entry) = state.snapshot.entry_for_path(ancestor) {
+ if entry.kind == EntryKind::UnloadedDir {
+ let abs_path = root_path.join(ancestor);
+ state.enqueue_scan_dir(abs_path.into(), entry, &scan_job_tx);
+ state.paths_to_scan.insert(path.clone());
+ break;
+ }
+ }
+ }
+ }
+ drop(scan_job_tx);
+ }
+ while let Some(job) = scan_job_rx.next().await {
+ self.scan_dir(&job).await.log_err();
+ }
+
+ mem::take(&mut self.state.lock().paths_to_scan).len() > 0
+ }
+
+ async fn scan_dirs(
+ &self,
+ enable_progress_updates: bool,
+ scan_jobs_rx: channel::Receiver<ScanJob>,
+ ) {
+ use futures::FutureExt as _;
+
+ if self
+ .status_updates_tx
+ .unbounded_send(ScanState::Started)
+ .is_err()
+ {
+ return;
+ }
+
+ let progress_update_count = AtomicUsize::new(0);
+ self.executor
+ .scoped(|scope| {
+ for _ in 0..self.executor.num_cpus() {
+ scope.spawn(async {
+ let mut last_progress_update_count = 0;
+ let progress_update_timer = self.progress_timer(enable_progress_updates).fuse();
+ futures::pin_mut!(progress_update_timer);
+
+ loop {
+ select_biased! {
+ // Process any path refresh requests before moving on to process
+ // the scan queue, so that user operations are prioritized.
+ request = self.scan_requests_rx.recv().fuse() => {
+ let Ok(request) = request else { break };
+ if !self.process_scan_request(request, true).await {
+ return;
+ }
+ }
+
+ // Send periodic progress updates to the worktree. Use an atomic counter
+ // to ensure that only one of the workers sends a progress update after
+ // the update interval elapses.
+ _ = progress_update_timer => {
+ match progress_update_count.compare_exchange(
+ last_progress_update_count,
+ last_progress_update_count + 1,
+ SeqCst,
+ SeqCst
+ ) {
+ Ok(_) => {
+ last_progress_update_count += 1;
+ self.send_status_update(true, None);
+ }
+ Err(count) => {
+ last_progress_update_count = count;
+ }
+ }
+ progress_update_timer.set(self.progress_timer(enable_progress_updates).fuse());
+ }
+
+ // Recursively load directories from the file system.
+ job = scan_jobs_rx.recv().fuse() => {
+ let Ok(job) = job else { break };
+ if let Err(err) = self.scan_dir(&job).await {
+ if job.path.as_ref() != Path::new("") {
+ log::error!("error scanning directory {:?}: {}", job.abs_path, err);
+ }
+ }
+ }
+ }
+ }
+ })
+ }
+ })
+ .await;
+ }
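+
+    // Illustrative sketch (not part of the original change): the
+    // compare-exchange throttle used above, in isolation. Each worker bumps a
+    // shared counter; only the worker that wins the race sends the update. It
+    // relies on the `AtomicUsize` and `SeqCst` imports already used in this file.
+    //
+    // fn try_claim_progress_update(counter: &AtomicUsize, last_seen: &mut usize) -> bool {
+    //     match counter.compare_exchange(*last_seen, *last_seen + 1, SeqCst, SeqCst) {
+    //         Ok(_) => {
+    //             *last_seen += 1;
+    //             true // we won the race; send the status update
+    //         }
+    //         Err(current) => {
+    //             *last_seen = current; // another worker already sent it
+    //             false
+    //         }
+    //     }
+    // }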
+
+ fn send_status_update(&self, scanning: bool, barrier: Option<barrier::Sender>) -> bool {
+ let mut state = self.state.lock();
+ if state.changed_paths.is_empty() && scanning {
+ return true;
+ }
+
+ let new_snapshot = state.snapshot.clone();
+ let old_snapshot = mem::replace(&mut state.prev_snapshot, new_snapshot.snapshot.clone());
+ let changes = self.build_change_set(&old_snapshot, &new_snapshot, &state.changed_paths);
+ state.changed_paths.clear();
+
+ self.status_updates_tx
+ .unbounded_send(ScanState::Updated {
+ snapshot: new_snapshot,
+ changes,
+ scanning,
+ barrier,
+ })
+ .is_ok()
+ }
+
+ async fn scan_dir(&self, job: &ScanJob) -> Result<()> {
+ log::debug!("scan directory {:?}", job.path);
+
+ let mut ignore_stack = job.ignore_stack.clone();
+ let mut new_ignore = None;
+ let (root_abs_path, root_char_bag, next_entry_id) = {
+ let snapshot = &self.state.lock().snapshot;
+ (
+ snapshot.abs_path().clone(),
+ snapshot.root_char_bag,
+ self.next_entry_id.clone(),
+ )
+ };
+
+ let mut dotgit_path = None;
+ let mut root_canonical_path = None;
+ let mut new_entries: Vec<Entry> = Vec::new();
+ let mut new_jobs: Vec<Option<ScanJob>> = Vec::new();
+ let mut child_paths = self.fs.read_dir(&job.abs_path).await?;
+ while let Some(child_abs_path) = child_paths.next().await {
+ let child_abs_path: Arc<Path> = match child_abs_path {
+ Ok(child_abs_path) => child_abs_path.into(),
+ Err(error) => {
+ log::error!("error processing entry {:?}", error);
+ continue;
+ }
+ };
+
+ let child_name = child_abs_path.file_name().unwrap();
+ let child_path: Arc<Path> = job.path.join(child_name).into();
+ let child_metadata = match self.fs.metadata(&child_abs_path).await {
+ Ok(Some(metadata)) => metadata,
+ Ok(None) => continue,
+ Err(err) => {
+ log::error!("error processing {:?}: {:?}", child_abs_path, err);
+ continue;
+ }
+ };
+
+ // If we find a .gitignore, add it to the stack of ignores used to determine which paths are ignored
+ if child_name == *GITIGNORE {
+ match build_gitignore(&child_abs_path, self.fs.as_ref()).await {
+ Ok(ignore) => {
+ let ignore = Arc::new(ignore);
+ ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
+ new_ignore = Some(ignore);
+ }
+ Err(error) => {
+ log::error!(
+ "error loading .gitignore file {:?} - {:?}",
+ child_name,
+ error
+ );
+ }
+ }
+
+                // Update ignore status of any child entries we've already processed to reflect the
+                // ignore file in the current directory. Because `.gitignore` starts with a `.`,
+                // it tends to be encountered early, so these entries should rarely be numerous.
+                // Update the ignore stack associated with any new jobs as well.
+ let mut new_jobs = new_jobs.iter_mut();
+ for entry in &mut new_entries {
+ let entry_abs_path = root_abs_path.join(&entry.path);
+ entry.is_ignored =
+ ignore_stack.is_abs_path_ignored(&entry_abs_path, entry.is_dir());
+
+ if entry.is_dir() {
+ if let Some(job) = new_jobs.next().expect("missing scan job for entry") {
+ job.ignore_stack = if entry.is_ignored {
+ IgnoreStack::all()
+ } else {
+ ignore_stack.clone()
+ };
+ }
+ }
+ }
+ }
+ // If we find a .git, we'll need to load the repository.
+ else if child_name == *DOT_GIT {
+ dotgit_path = Some(child_path.clone());
+ }
+
+ let mut child_entry = Entry::new(
+ child_path.clone(),
+ &child_metadata,
+ &next_entry_id,
+ root_char_bag,
+ );
+
+ if job.is_external {
+ child_entry.is_external = true;
+ } else if child_metadata.is_symlink {
+ let canonical_path = match self.fs.canonicalize(&child_abs_path).await {
+ Ok(path) => path,
+ Err(err) => {
+ log::error!(
+ "error reading target of symlink {:?}: {:?}",
+ child_abs_path,
+ err
+ );
+ continue;
+ }
+ };
+
+                // Lazily canonicalize the root path in order to determine whether
+                // symlinks point outside of the worktree.
+ let root_canonical_path = match &root_canonical_path {
+ Some(path) => path,
+ None => match self.fs.canonicalize(&root_abs_path).await {
+ Ok(path) => root_canonical_path.insert(path),
+ Err(err) => {
+ log::error!("error canonicalizing root {:?}: {:?}", root_abs_path, err);
+ continue;
+ }
+ },
+ };
+
+ if !canonical_path.starts_with(root_canonical_path) {
+ child_entry.is_external = true;
+ }
+ }
+
+ if child_entry.is_dir() {
+ child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, true);
+
+                // Avoid recursing forever in the case of a recursive symlink.
+ if !job.ancestor_inodes.contains(&child_entry.inode) {
+ let mut ancestor_inodes = job.ancestor_inodes.clone();
+ ancestor_inodes.insert(child_entry.inode);
+
+ new_jobs.push(Some(ScanJob {
+ abs_path: child_abs_path,
+ path: child_path,
+ is_external: child_entry.is_external,
+ ignore_stack: if child_entry.is_ignored {
+ IgnoreStack::all()
+ } else {
+ ignore_stack.clone()
+ },
+ ancestor_inodes,
+ scan_queue: job.scan_queue.clone(),
+ containing_repository: job.containing_repository.clone(),
+ }));
+ } else {
+ new_jobs.push(None);
+ }
+ } else {
+ child_entry.is_ignored = ignore_stack.is_abs_path_ignored(&child_abs_path, false);
+ if !child_entry.is_ignored {
+ if let Some((repository_dir, repository, staged_statuses)) =
+ &job.containing_repository
+ {
+ if let Ok(repo_path) = child_entry.path.strip_prefix(&repository_dir.0) {
+ let repo_path = RepoPath(repo_path.into());
+ child_entry.git_status = combine_git_statuses(
+ staged_statuses.get(&repo_path).copied(),
+ repository
+ .lock()
+ .unstaged_status(&repo_path, child_entry.mtime),
+ );
+ }
+ }
+ }
+ }
+
+ new_entries.push(child_entry);
+ }
+
+ let mut state = self.state.lock();
+
+ // Identify any subdirectories that should not be scanned.
+ let mut job_ix = 0;
+ for entry in &mut new_entries {
+ state.reuse_entry_id(entry);
+ if entry.is_dir() {
+ if state.should_scan_directory(&entry) {
+ job_ix += 1;
+ } else {
+ log::debug!("defer scanning directory {:?}", entry.path);
+ entry.kind = EntryKind::UnloadedDir;
+ new_jobs.remove(job_ix);
+ }
+ }
+ }
+
+ state.populate_dir(&job.path, new_entries, new_ignore);
+
+ let repository =
+ dotgit_path.and_then(|path| state.build_git_repository(path, self.fs.as_ref()));
+
+ for new_job in new_jobs {
+ if let Some(mut new_job) = new_job {
+ if let Some(containing_repository) = &repository {
+ new_job.containing_repository = Some(containing_repository.clone());
+ }
+
+ job.scan_queue
+ .try_send(new_job)
+ .expect("channel is unbounded");
+ }
+ }
+
+ Ok(())
+ }
+
+ async fn reload_entries_for_paths(
+ &self,
+ root_abs_path: Arc<Path>,
+ root_canonical_path: PathBuf,
+ relative_paths: &[Arc<Path>],
+ abs_paths: Vec<PathBuf>,
+ scan_queue_tx: Option<Sender<ScanJob>>,
+ ) {
+ let metadata = futures::future::join_all(
+ abs_paths
+ .iter()
+ .map(|abs_path| async move {
+ let metadata = self.fs.metadata(&abs_path).await?;
+ if let Some(metadata) = metadata {
+ let canonical_path = self.fs.canonicalize(&abs_path).await?;
+ anyhow::Ok(Some((metadata, canonical_path)))
+ } else {
+ Ok(None)
+ }
+ })
+ .collect::<Vec<_>>(),
+ )
+ .await;
+
+ let mut state = self.state.lock();
+ let snapshot = &mut state.snapshot;
+ let is_idle = snapshot.completed_scan_id == snapshot.scan_id;
+ let doing_recursive_update = scan_queue_tx.is_some();
+ snapshot.scan_id += 1;
+ if is_idle && !doing_recursive_update {
+ snapshot.completed_scan_id = snapshot.scan_id;
+ }
+
+ // Remove any entries for paths that no longer exist or are being recursively
+ // refreshed. Do this before adding any new entries, so that renames can be
+ // detected regardless of the order of the paths.
+ for (path, metadata) in relative_paths.iter().zip(metadata.iter()) {
+ if matches!(metadata, Ok(None)) || doing_recursive_update {
+ log::trace!("remove path {:?}", path);
+ state.remove_path(path);
+ }
+ }
+
+ for (path, metadata) in relative_paths.iter().zip(metadata.iter()) {
+ let abs_path: Arc<Path> = root_abs_path.join(&path).into();
+ match metadata {
+ Ok(Some((metadata, canonical_path))) => {
+ let ignore_stack = state
+ .snapshot
+ .ignore_stack_for_abs_path(&abs_path, metadata.is_dir);
+
+ let mut fs_entry = Entry::new(
+ path.clone(),
+ metadata,
+ self.next_entry_id.as_ref(),
+ state.snapshot.root_char_bag,
+ );
+ fs_entry.is_ignored = ignore_stack.is_all();
+ fs_entry.is_external = !canonical_path.starts_with(&root_canonical_path);
+
+ if !fs_entry.is_ignored {
+ if !fs_entry.is_dir() {
+ if let Some((work_dir, repo)) =
+ state.snapshot.local_repo_for_path(&path)
+ {
+ if let Ok(repo_path) = path.strip_prefix(work_dir.0) {
+ let repo_path = RepoPath(repo_path.into());
+ let repo = repo.repo_ptr.lock();
+ fs_entry.git_status = repo.status(&repo_path, fs_entry.mtime);
+ }
+ }
+ }
+ }
+
+ if let (Some(scan_queue_tx), true) = (&scan_queue_tx, fs_entry.is_dir()) {
+ if state.should_scan_directory(&fs_entry) {
+ state.enqueue_scan_dir(abs_path, &fs_entry, scan_queue_tx);
+ } else {
+ fs_entry.kind = EntryKind::UnloadedDir;
+ }
+ }
+
+ state.insert_entry(fs_entry, self.fs.as_ref());
+ }
+ Ok(None) => {
+ self.remove_repo_path(&path, &mut state.snapshot);
+ }
+ Err(err) => {
+ // TODO - create a special 'error' entry in the entries tree to mark this
+ log::error!("error reading file on event {:?}", err);
+ }
+ }
+ }
+
+ util::extend_sorted(
+ &mut state.changed_paths,
+ relative_paths.iter().cloned(),
+ usize::MAX,
+ Ord::cmp,
+ );
+ }
+
+ fn remove_repo_path(&self, path: &Path, snapshot: &mut LocalSnapshot) -> Option<()> {
+ if !path
+ .components()
+ .any(|component| component.as_os_str() == *DOT_GIT)
+ {
+ if let Some(repository) = snapshot.repository_for_work_directory(path) {
+ let entry = repository.work_directory.0;
+ snapshot.git_repositories.remove(&entry);
+ snapshot
+ .snapshot
+ .repository_entries
+ .remove(&RepositoryWorkDirectory(path.into()));
+ return Some(());
+ }
+ }
+
+ // TODO statuses
+ // Track when a .git is removed and iterate over the file system there
+
+ Some(())
+ }
+
+ async fn update_ignore_statuses(&self, scan_job_tx: Sender<ScanJob>) {
+ use futures::FutureExt as _;
+
+ let mut snapshot = self.state.lock().snapshot.clone();
+ let mut ignores_to_update = Vec::new();
+ let mut ignores_to_delete = Vec::new();
+ let abs_path = snapshot.abs_path.clone();
+ for (parent_abs_path, (_, needs_update)) in &mut snapshot.ignores_by_parent_abs_path {
+ if let Ok(parent_path) = parent_abs_path.strip_prefix(&abs_path) {
+ if *needs_update {
+ *needs_update = false;
+ if snapshot.snapshot.entry_for_path(parent_path).is_some() {
+ ignores_to_update.push(parent_abs_path.clone());
+ }
+ }
+
+ let ignore_path = parent_path.join(&*GITIGNORE);
+ if snapshot.snapshot.entry_for_path(ignore_path).is_none() {
+ ignores_to_delete.push(parent_abs_path.clone());
+ }
+ }
+ }
+
+ for parent_abs_path in ignores_to_delete {
+ snapshot.ignores_by_parent_abs_path.remove(&parent_abs_path);
+ self.state
+ .lock()
+ .snapshot
+ .ignores_by_parent_abs_path
+ .remove(&parent_abs_path);
+ }
+
+ let (ignore_queue_tx, ignore_queue_rx) = channel::unbounded();
+ ignores_to_update.sort_unstable();
+ let mut ignores_to_update = ignores_to_update.into_iter().peekable();
+ while let Some(parent_abs_path) = ignores_to_update.next() {
+ while ignores_to_update
+ .peek()
+ .map_or(false, |p| p.starts_with(&parent_abs_path))
+ {
+ ignores_to_update.next().unwrap();
+ }
+
+ let ignore_stack = snapshot.ignore_stack_for_abs_path(&parent_abs_path, true);
+ smol::block_on(ignore_queue_tx.send(UpdateIgnoreStatusJob {
+ abs_path: parent_abs_path,
+ ignore_stack,
+ ignore_queue: ignore_queue_tx.clone(),
+ scan_queue: scan_job_tx.clone(),
+ }))
+ .unwrap();
+ }
+ drop(ignore_queue_tx);
+
+ self.executor
+ .scoped(|scope| {
+ for _ in 0..self.executor.num_cpus() {
+ scope.spawn(async {
+ loop {
+ select_biased! {
+ // Process any path refresh requests before moving on to process
+ // the queue of ignore statuses.
+ request = self.scan_requests_rx.recv().fuse() => {
+ let Ok(request) = request else { break };
+ if !self.process_scan_request(request, true).await {
+ return;
+ }
+ }
+
+ // Recursively process directories whose ignores have changed.
+ job = ignore_queue_rx.recv().fuse() => {
+ let Ok(job) = job else { break };
+ self.update_ignore_status(job, &snapshot).await;
+ }
+ }
+ }
+ });
+ }
+ })
+ .await;
+ }
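+
+    // Worked example (illustrative): the peekable loop above collapses nested
+    // ignore updates into their outermost root. Given the sorted list
+    //   [/a, /a/b, /a/b/c, /d]
+    // only jobs for /a and /d are enqueued; /a/b and /a/b/c are skipped because
+    // the recursive job for /a revisits them via `job.ignore_queue`.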
+
+ async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
+ log::trace!("update ignore status {:?}", job.abs_path);
+
+ let mut ignore_stack = job.ignore_stack;
+ if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
+ ignore_stack = ignore_stack.append(job.abs_path.clone(), ignore.clone());
+ }
+
+ let mut entries_by_id_edits = Vec::new();
+ let mut entries_by_path_edits = Vec::new();
+ let path = job.abs_path.strip_prefix(&snapshot.abs_path).unwrap();
+ for mut entry in snapshot.child_entries(path).cloned() {
+ let was_ignored = entry.is_ignored;
+ let abs_path: Arc<Path> = snapshot.abs_path().join(&entry.path).into();
+ entry.is_ignored = ignore_stack.is_abs_path_ignored(&abs_path, entry.is_dir());
+ if entry.is_dir() {
+ let child_ignore_stack = if entry.is_ignored {
+ IgnoreStack::all()
+ } else {
+ ignore_stack.clone()
+ };
+
+                // Scan any directories that were previously ignored but have not
+                // yet been scanned.
+ if was_ignored && !entry.is_ignored && entry.kind.is_unloaded() {
+ let state = self.state.lock();
+ if state.should_scan_directory(&entry) {
+ state.enqueue_scan_dir(abs_path.clone(), &entry, &job.scan_queue);
+ }
+ }
+
+ job.ignore_queue
+ .send(UpdateIgnoreStatusJob {
+ abs_path: abs_path.clone(),
+ ignore_stack: child_ignore_stack,
+ ignore_queue: job.ignore_queue.clone(),
+ scan_queue: job.scan_queue.clone(),
+ })
+ .await
+ .unwrap();
+ }
+
+ if entry.is_ignored != was_ignored {
+ let mut path_entry = snapshot.entries_by_id.get(&entry.id, &()).unwrap().clone();
+ path_entry.scan_id = snapshot.scan_id;
+ path_entry.is_ignored = entry.is_ignored;
+ entries_by_id_edits.push(Edit::Insert(path_entry));
+ entries_by_path_edits.push(Edit::Insert(entry));
+ }
+ }
+
+ let state = &mut self.state.lock();
+ for edit in &entries_by_path_edits {
+ if let Edit::Insert(entry) = edit {
+ if let Err(ix) = state.changed_paths.binary_search(&entry.path) {
+ state.changed_paths.insert(ix, entry.path.clone());
+ }
+ }
+ }
+
+ state
+ .snapshot
+ .entries_by_path
+ .edit(entries_by_path_edits, &());
+ state.snapshot.entries_by_id.edit(entries_by_id_edits, &());
+ }
+
+ fn build_change_set(
+ &self,
+ old_snapshot: &Snapshot,
+ new_snapshot: &Snapshot,
+ event_paths: &[Arc<Path>],
+ ) -> UpdatedEntriesSet {
+ use BackgroundScannerPhase::*;
+ use PathChange::{Added, AddedOrUpdated, Loaded, Removed, Updated};
+
+ // Identify which paths have changed. Use the known set of changed
+ // parent paths to optimize the search.
+ let mut changes = Vec::new();
+ let mut old_paths = old_snapshot.entries_by_path.cursor::<PathKey>();
+ let mut new_paths = new_snapshot.entries_by_path.cursor::<PathKey>();
+ let mut last_newly_loaded_dir_path = None;
+ old_paths.next(&());
+ new_paths.next(&());
+ for path in event_paths {
+ let path = PathKey(path.clone());
+ if old_paths.item().map_or(false, |e| e.path < path.0) {
+ old_paths.seek_forward(&path, Bias::Left, &());
+ }
+ if new_paths.item().map_or(false, |e| e.path < path.0) {
+ new_paths.seek_forward(&path, Bias::Left, &());
+ }
+ loop {
+ match (old_paths.item(), new_paths.item()) {
+ (Some(old_entry), Some(new_entry)) => {
+ if old_entry.path > path.0
+ && new_entry.path > path.0
+ && !old_entry.path.starts_with(&path.0)
+ && !new_entry.path.starts_with(&path.0)
+ {
+ break;
+ }
+
+ match Ord::cmp(&old_entry.path, &new_entry.path) {
+ Ordering::Less => {
+ changes.push((old_entry.path.clone(), old_entry.id, Removed));
+ old_paths.next(&());
+ }
+ Ordering::Equal => {
+ if self.phase == EventsReceivedDuringInitialScan {
+ if old_entry.id != new_entry.id {
+ changes.push((
+ old_entry.path.clone(),
+ old_entry.id,
+ Removed,
+ ));
+ }
+ // If the worktree was not fully initialized when this event was generated,
+ // we can't know whether this entry was added during the scan or whether
+ // it was merely updated.
+ changes.push((
+ new_entry.path.clone(),
+ new_entry.id,
+ AddedOrUpdated,
+ ));
+ } else if old_entry.id != new_entry.id {
+ changes.push((old_entry.path.clone(), old_entry.id, Removed));
+ changes.push((new_entry.path.clone(), new_entry.id, Added));
+ } else if old_entry != new_entry {
+ if old_entry.kind.is_unloaded() {
+ last_newly_loaded_dir_path = Some(&new_entry.path);
+ changes.push((
+ new_entry.path.clone(),
+ new_entry.id,
+ Loaded,
+ ));
+ } else {
+ changes.push((
+ new_entry.path.clone(),
+ new_entry.id,
+ Updated,
+ ));
+ }
+ }
+ old_paths.next(&());
+ new_paths.next(&());
+ }
+ Ordering::Greater => {
+ let is_newly_loaded = self.phase == InitialScan
+ || last_newly_loaded_dir_path
+ .as_ref()
+ .map_or(false, |dir| new_entry.path.starts_with(&dir));
+ changes.push((
+ new_entry.path.clone(),
+ new_entry.id,
+ if is_newly_loaded { Loaded } else { Added },
+ ));
+ new_paths.next(&());
+ }
+ }
+ }
+ (Some(old_entry), None) => {
+ changes.push((old_entry.path.clone(), old_entry.id, Removed));
+ old_paths.next(&());
+ }
+ (None, Some(new_entry)) => {
+ let is_newly_loaded = self.phase == InitialScan
+ || last_newly_loaded_dir_path
+ .as_ref()
+ .map_or(false, |dir| new_entry.path.starts_with(&dir));
+ changes.push((
+ new_entry.path.clone(),
+ new_entry.id,
+ if is_newly_loaded { Loaded } else { Added },
+ ));
+ new_paths.next(&());
+ }
+ (None, None) => break,
+ }
+ }
+ }
+
+ changes.into()
+ }
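+
+    // Worked example (illustrative): in the `Events` phase, diffing old entries
+    // [a, b] against new entries [b, c], where b kept its id but changed, yields
+    //   (a, Removed)  - the old cursor is behind the new one
+    //   (b, Updated)  - equal paths, same id, entry differs
+    //   (c, Added)    - present only in the new snapshot
+    // During `InitialScan`, c would instead be reported as `Loaded`.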
+
+ async fn progress_timer(&self, running: bool) {
+ if !running {
+ return futures::future::pending().await;
+ }
+
+ #[cfg(any(test, feature = "test-support"))]
+ if self.fs.is_fake() {
+ return self.executor.simulate_random_delay().await;
+ }
+
+ smol::Timer::after(Duration::from_millis(100)).await;
+ }
+}
+
+fn char_bag_for_path(root_char_bag: CharBag, path: &Path) -> CharBag {
+ let mut result = root_char_bag;
+ result.extend(
+ path.to_string_lossy()
+ .chars()
+ .map(|c| c.to_ascii_lowercase()),
+ );
+ result
+}
+
+struct ScanJob {
+ abs_path: Arc<Path>,
+ path: Arc<Path>,
+ ignore_stack: Arc<IgnoreStack>,
+ scan_queue: Sender<ScanJob>,
+ ancestor_inodes: TreeSet<u64>,
+ is_external: bool,
+ containing_repository: Option<(
+ RepositoryWorkDirectory,
+ Arc<Mutex<dyn GitRepository>>,
+ TreeMap<RepoPath, GitFileStatus>,
+ )>,
+}
+
+struct UpdateIgnoreStatusJob {
+ abs_path: Arc<Path>,
+ ignore_stack: Arc<IgnoreStack>,
+ ignore_queue: Sender<UpdateIgnoreStatusJob>,
+ scan_queue: Sender<ScanJob>,
+}
+
+// todo!("re-enable when we have tests")
+// pub trait WorktreeModelHandle {
+// #[cfg(any(test, feature = "test-support"))]
+// fn flush_fs_events<'a>(
+// &self,
+// cx: &'a gpui::TestAppContext,
+// ) -> futures::future::LocalBoxFuture<'a, ()>;
+// }
+
+// impl WorktreeModelHandle for Handle<Worktree> {
+//     // The worktree's FS event stream sometimes delivers "redundant" events for FS changes that
+// // occurred before the worktree was constructed. These events can cause the worktree to perform
+// // extra directory scans, and emit extra scan-state notifications.
+// //
+// // This function mutates the worktree's directory and waits for those mutations to be picked up,
+// // to ensure that all redundant FS events have already been processed.
+// #[cfg(any(test, feature = "test-support"))]
+// fn flush_fs_events<'a>(
+// &self,
+// cx: &'a gpui::TestAppContext,
+// ) -> futures::future::LocalBoxFuture<'a, ()> {
+// let filename = "fs-event-sentinel";
+// let tree = self.clone();
+// let (fs, root_path) = self.read_with(cx, |tree, _| {
+// let tree = tree.as_local().unwrap();
+// (tree.fs.clone(), tree.abs_path().clone())
+// });
+
+// async move {
+// fs.create_file(&root_path.join(filename), Default::default())
+// .await
+// .unwrap();
+// tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
+// .await;
+
+// fs.remove_file(&root_path.join(filename), Default::default())
+// .await
+// .unwrap();
+// tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
+// .await;
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+// }
+// .boxed_local()
+// }
+// }
+
+#[derive(Clone, Debug)]
+struct TraversalProgress<'a> {
+ max_path: &'a Path,
+ count: usize,
+ non_ignored_count: usize,
+ file_count: usize,
+ non_ignored_file_count: usize,
+}
+
+impl<'a> TraversalProgress<'a> {
+ fn count(&self, include_dirs: bool, include_ignored: bool) -> usize {
+ match (include_ignored, include_dirs) {
+ (true, true) => self.count,
+ (true, false) => self.file_count,
+ (false, true) => self.non_ignored_count,
+ (false, false) => self.non_ignored_file_count,
+ }
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, EntrySummary> for TraversalProgress<'a> {
+ fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
+ self.max_path = summary.max_path.as_ref();
+ self.count += summary.count;
+ self.non_ignored_count += summary.non_ignored_count;
+ self.file_count += summary.file_count;
+ self.non_ignored_file_count += summary.non_ignored_file_count;
+ }
+}
+
+impl<'a> Default for TraversalProgress<'a> {
+ fn default() -> Self {
+ Self {
+ max_path: Path::new(""),
+ count: 0,
+ non_ignored_count: 0,
+ file_count: 0,
+ non_ignored_file_count: 0,
+ }
+ }
+}
+
+#[derive(Clone, Debug, Default, Copy)]
+struct GitStatuses {
+ added: usize,
+ modified: usize,
+ conflict: usize,
+}
+
+impl AddAssign for GitStatuses {
+ fn add_assign(&mut self, rhs: Self) {
+ self.added += rhs.added;
+ self.modified += rhs.modified;
+ self.conflict += rhs.conflict;
+ }
+}
+
+impl Sub for GitStatuses {
+ type Output = GitStatuses;
+
+ fn sub(self, rhs: Self) -> Self::Output {
+ GitStatuses {
+ added: self.added - rhs.added,
+ modified: self.modified - rhs.modified,
+ conflict: self.conflict - rhs.conflict,
+ }
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, EntrySummary> for GitStatuses {
+ fn add_summary(&mut self, summary: &'a EntrySummary, _: &()) {
+ *self += summary.statuses
+ }
+}
+
+pub struct Traversal<'a> {
+ cursor: sum_tree::Cursor<'a, Entry, TraversalProgress<'a>>,
+ include_ignored: bool,
+ include_dirs: bool,
+}
+
+impl<'a> Traversal<'a> {
+ pub fn advance(&mut self) -> bool {
+ self.cursor.seek_forward(
+ &TraversalTarget::Count {
+ count: self.end_offset() + 1,
+ include_dirs: self.include_dirs,
+ include_ignored: self.include_ignored,
+ },
+ Bias::Left,
+ &(),
+ )
+ }
+
+ pub fn advance_to_sibling(&mut self) -> bool {
+ while let Some(entry) = self.cursor.item() {
+ self.cursor.seek_forward(
+ &TraversalTarget::PathSuccessor(&entry.path),
+ Bias::Left,
+ &(),
+ );
+ if let Some(entry) = self.cursor.item() {
+ if (self.include_dirs || !entry.is_dir())
+ && (self.include_ignored || !entry.is_ignored)
+ {
+ return true;
+ }
+ }
+ }
+ false
+ }
+
+ pub fn entry(&self) -> Option<&'a Entry> {
+ self.cursor.item()
+ }
+
+ pub fn start_offset(&self) -> usize {
+ self.cursor
+ .start()
+ .count(self.include_dirs, self.include_ignored)
+ }
+
+ pub fn end_offset(&self) -> usize {
+ self.cursor
+ .end(&())
+ .count(self.include_dirs, self.include_ignored)
+ }
+}
+
+impl<'a> Iterator for Traversal<'a> {
+ type Item = &'a Entry;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(item) = self.entry() {
+ self.advance();
+ Some(item)
+ } else {
+ None
+ }
+ }
+}
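+
+// Illustrative usage (not part of the original change), assuming a constructor
+// along the lines of `Snapshot::traverse_from_offset(include_dirs,
+// include_ignored, start_offset)` defined elsewhere in this crate:
+//
+// let non_ignored_file_count = snapshot
+//     .traverse_from_offset(false, false, 0)
+//     .count();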
+
+#[derive(Debug)]
+enum TraversalTarget<'a> {
+ Path(&'a Path),
+ PathSuccessor(&'a Path),
+ Count {
+ count: usize,
+ include_ignored: bool,
+ include_dirs: bool,
+ },
+}
+
+impl<'a, 'b> SeekTarget<'a, EntrySummary, TraversalProgress<'a>> for TraversalTarget<'b> {
+ fn cmp(&self, cursor_location: &TraversalProgress<'a>, _: &()) -> Ordering {
+ match self {
+ TraversalTarget::Path(path) => path.cmp(&cursor_location.max_path),
+ TraversalTarget::PathSuccessor(path) => {
+ if !cursor_location.max_path.starts_with(path) {
+ Ordering::Equal
+ } else {
+ Ordering::Greater
+ }
+ }
+ TraversalTarget::Count {
+ count,
+ include_dirs,
+ include_ignored,
+ } => Ord::cmp(
+ count,
+ &cursor_location.count(*include_dirs, *include_ignored),
+ ),
+ }
+ }
+}
+
+impl<'a, 'b> SeekTarget<'a, EntrySummary, (TraversalProgress<'a>, GitStatuses)>
+ for TraversalTarget<'b>
+{
+ fn cmp(&self, cursor_location: &(TraversalProgress<'a>, GitStatuses), _: &()) -> Ordering {
+ self.cmp(&cursor_location.0, &())
+ }
+}
+
+struct ChildEntriesIter<'a> {
+ parent_path: &'a Path,
+ traversal: Traversal<'a>,
+}
+
+impl<'a> Iterator for ChildEntriesIter<'a> {
+ type Item = &'a Entry;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(item) = self.traversal.entry() {
+ if item.path.starts_with(&self.parent_path) {
+ self.traversal.advance_to_sibling();
+ return Some(item);
+ }
+ }
+ None
+ }
+}
+
+pub struct DescendentEntriesIter<'a> {
+ parent_path: &'a Path,
+ traversal: Traversal<'a>,
+}
+
+impl<'a> Iterator for DescendentEntriesIter<'a> {
+ type Item = &'a Entry;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(item) = self.traversal.entry() {
+ if item.path.starts_with(&self.parent_path) {
+ self.traversal.advance();
+ return Some(item);
+ }
+ }
+ None
+ }
+}
+
+impl<'a> From<&'a Entry> for proto::Entry {
+ fn from(entry: &'a Entry) -> Self {
+ Self {
+ id: entry.id.to_proto(),
+ is_dir: entry.is_dir(),
+ path: entry.path.to_string_lossy().into(),
+ inode: entry.inode,
+ mtime: Some(entry.mtime.into()),
+ is_symlink: entry.is_symlink,
+ is_ignored: entry.is_ignored,
+ is_external: entry.is_external,
+ git_status: entry.git_status.map(git_status_to_proto),
+ }
+ }
+}
+
+impl<'a> TryFrom<(&'a CharBag, proto::Entry)> for Entry {
+ type Error = anyhow::Error;
+
+ fn try_from((root_char_bag, entry): (&'a CharBag, proto::Entry)) -> Result<Self> {
+ if let Some(mtime) = entry.mtime {
+ let kind = if entry.is_dir {
+ EntryKind::Dir
+ } else {
+ let mut char_bag = *root_char_bag;
+ char_bag.extend(entry.path.chars().map(|c| c.to_ascii_lowercase()));
+ EntryKind::File(char_bag)
+ };
+ let path: Arc<Path> = PathBuf::from(entry.path).into();
+ Ok(Entry {
+ id: ProjectEntryId::from_proto(entry.id),
+ kind,
+ path,
+ inode: entry.inode,
+ mtime: mtime.into(),
+ is_symlink: entry.is_symlink,
+ is_ignored: entry.is_ignored,
+ is_external: entry.is_external,
+ git_status: git_status_from_proto(entry.git_status),
+ })
+ } else {
+ Err(anyhow!(
+ "missing mtime in remote worktree entry {:?}",
+ entry.path
+ ))
+ }
+ }
+}
+
+fn combine_git_statuses(
+ staged: Option<GitFileStatus>,
+ unstaged: Option<GitFileStatus>,
+) -> Option<GitFileStatus> {
+ if let Some(staged) = staged {
+ if let Some(unstaged) = unstaged {
+ if unstaged != staged {
+ Some(GitFileStatus::Modified)
+ } else {
+ Some(staged)
+ }
+ } else {
+ Some(staged)
+ }
+ } else {
+ unstaged
+ }
+}
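+
+// Resolution table for the function above (illustrative, derived directly from
+// the code):
+//
+//   staged       unstaged          combined
+//   Some(s)      Some(u), u != s   Some(Modified)
+//   Some(s)      Some(s)           Some(s)
+//   Some(s)      None              Some(s)
+//   None         any               unstaged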
+
+fn git_status_from_proto(git_status: Option<i32>) -> Option<GitFileStatus> {
+ git_status.and_then(|status| {
+ proto::GitStatus::from_i32(status).map(|status| match status {
+ proto::GitStatus::Added => GitFileStatus::Added,
+ proto::GitStatus::Modified => GitFileStatus::Modified,
+ proto::GitStatus::Conflict => GitFileStatus::Conflict,
+ })
+ })
+}
+
+fn git_status_to_proto(status: GitFileStatus) -> i32 {
+ match status {
+ GitFileStatus::Added => proto::GitStatus::Added as i32,
+ GitFileStatus::Modified => proto::GitStatus::Modified as i32,
+ GitFileStatus::Conflict => proto::GitStatus::Conflict as i32,
+ }
+}
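+
+// Note (illustrative): for the three known variants these two functions are
+// inverses, e.g.
+//   git_status_from_proto(Some(git_status_to_proto(GitFileStatus::Added)))
+//       == Some(GitFileStatus::Added)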
@@ -0,0 +1,2141 @@
+// use crate::{
+// worktree::{Event, Snapshot, WorktreeModelHandle},
+// Entry, EntryKind, PathChange, Worktree,
+// };
+// use anyhow::Result;
+// use client2::Client;
+// use fs::{repository::GitFileStatus, FakeFs, Fs, RealFs, RemoveOptions};
+// use git::GITIGNORE;
+// use gpui::{executor::Deterministic, ModelContext, Task, TestAppContext};
+// use parking_lot::Mutex;
+// use postage::stream::Stream;
+// use pretty_assertions::assert_eq;
+// use rand::prelude::*;
+// use serde_json::json;
+// use std::{
+// env,
+// fmt::Write,
+// mem,
+// path::{Path, PathBuf},
+// sync::Arc,
+// };
+// use util::{http::FakeHttpClient, test::temp_tree, ResultExt};
+
+// #[gpui::test]
+// async fn test_traversal(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// ".gitignore": "a/b\n",
+// "a": {
+// "b": "",
+// "c": "",
+// }
+// }),
+// )
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root"),
+// true,
+// fs,
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(false)
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![
+// Path::new(""),
+// Path::new(".gitignore"),
+// Path::new("a"),
+// Path::new("a/c"),
+// ]
+// );
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![
+// Path::new(""),
+// Path::new(".gitignore"),
+// Path::new("a"),
+// Path::new("a/b"),
+// Path::new("a/c"),
+// ]
+// );
+// })
+// }
+
+// #[gpui::test]
+// async fn test_descendent_entries(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// "a": "",
+// "b": {
+// "c": {
+// "d": ""
+// },
+// "e": {}
+// },
+// "f": "",
+// "g": {
+// "h": {}
+// },
+// "i": {
+// "j": {
+// "k": ""
+// },
+// "l": {
+
+// }
+// },
+// ".gitignore": "i/j\n",
+// }),
+// )
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root"),
+// true,
+// fs,
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.descendent_entries(false, false, Path::new("b"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![Path::new("b/c/d"),]
+// );
+// assert_eq!(
+// tree.descendent_entries(true, false, Path::new("b"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![
+// Path::new("b"),
+// Path::new("b/c"),
+// Path::new("b/c/d"),
+// Path::new("b/e"),
+// ]
+// );
+
+// assert_eq!(
+// tree.descendent_entries(false, false, Path::new("g"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// Vec::<PathBuf>::new()
+// );
+// assert_eq!(
+// tree.descendent_entries(true, false, Path::new("g"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![Path::new("g"), Path::new("g/h"),]
+// );
+// });
+
+// // Expand gitignored directory.
+// tree.read_with(cx, |tree, _| {
+// tree.as_local()
+// .unwrap()
+// .refresh_entries_for_paths(vec![Path::new("i/j").into()])
+// })
+// .recv()
+// .await;
+
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.descendent_entries(false, false, Path::new("i"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// Vec::<PathBuf>::new()
+// );
+// assert_eq!(
+// tree.descendent_entries(false, true, Path::new("i"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![Path::new("i/j/k")]
+// );
+// assert_eq!(
+// tree.descendent_entries(true, false, Path::new("i"))
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![Path::new("i"), Path::new("i/l"),]
+// );
+// })
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_circular_symlinks(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// "lib": {
+// "a": {
+// "a.txt": ""
+// },
+// "b": {
+// "b.txt": ""
+// }
+// }
+// }),
+// )
+// .await;
+// fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
+// fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root"),
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(false)
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![
+// Path::new(""),
+// Path::new("lib"),
+// Path::new("lib/a"),
+// Path::new("lib/a/a.txt"),
+// Path::new("lib/a/lib"),
+// Path::new("lib/b"),
+// Path::new("lib/b/b.txt"),
+// Path::new("lib/b/lib"),
+// ]
+// );
+// });
+
+// fs.rename(
+// Path::new("/root/lib/a/lib"),
+// Path::new("/root/lib/a/lib-2"),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+// executor.run_until_parked();
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(false)
+// .map(|entry| entry.path.as_ref())
+// .collect::<Vec<_>>(),
+// vec![
+// Path::new(""),
+// Path::new("lib"),
+// Path::new("lib/a"),
+// Path::new("lib/a/a.txt"),
+// Path::new("lib/a/lib-2"),
+// Path::new("lib/b"),
+// Path::new("lib/b/b.txt"),
+// Path::new("lib/b/lib"),
+// ]
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// "dir1": {
+// "deps": {
+// // symlinks here
+// },
+// "src": {
+// "a.rs": "",
+// "b.rs": "",
+// },
+// },
+// "dir2": {
+// "src": {
+// "c.rs": "",
+// "d.rs": "",
+// }
+// },
+// "dir3": {
+// "deps": {},
+// "src": {
+// "e.rs": "",
+// "f.rs": "",
+// },
+// }
+// }),
+// )
+// .await;
+
+// // These symlinks point to directories outside of the worktree's root, dir1.
+// fs.insert_symlink("/root/dir1/deps/dep-dir2", "../../dir2".into())
+// .await;
+// fs.insert_symlink("/root/dir1/deps/dep-dir3", "../../dir3".into())
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root/dir1"),
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// let tree_updates = Arc::new(Mutex::new(Vec::new()));
+// tree.update(cx, |_, cx| {
+// let tree_updates = tree_updates.clone();
+// cx.subscribe(&tree, move |_, _, event, _| {
+// if let Event::UpdatedEntries(update) = event {
+// tree_updates.lock().extend(
+// update
+// .iter()
+// .map(|(path, _, change)| (path.clone(), *change)),
+// );
+// }
+// })
+// .detach();
+// });
+
+// // The symlinked directories are not scanned by default.
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_external))
+// .collect::<Vec<_>>(),
+// vec![
+// (Path::new(""), false),
+// (Path::new("deps"), false),
+// (Path::new("deps/dep-dir2"), true),
+// (Path::new("deps/dep-dir3"), true),
+// (Path::new("src"), false),
+// (Path::new("src/a.rs"), false),
+// (Path::new("src/b.rs"), false),
+// ]
+// );
+
+// assert_eq!(
+// tree.entry_for_path("deps/dep-dir2").unwrap().kind,
+// EntryKind::UnloadedDir
+// );
+// });
+
+// // Expand one of the symlinked directories.
+// tree.read_with(cx, |tree, _| {
+// tree.as_local()
+// .unwrap()
+// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3").into()])
+// })
+// .recv()
+// .await;
+
+// // The expanded directory's contents are loaded. Subdirectories are
+// // not scanned yet.
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_external))
+// .collect::<Vec<_>>(),
+// vec![
+// (Path::new(""), false),
+// (Path::new("deps"), false),
+// (Path::new("deps/dep-dir2"), true),
+// (Path::new("deps/dep-dir3"), true),
+// (Path::new("deps/dep-dir3/deps"), true),
+// (Path::new("deps/dep-dir3/src"), true),
+// (Path::new("src"), false),
+// (Path::new("src/a.rs"), false),
+// (Path::new("src/b.rs"), false),
+// ]
+// );
+// });
+// assert_eq!(
+// mem::take(&mut *tree_updates.lock()),
+// &[
+// (Path::new("deps/dep-dir3").into(), PathChange::Loaded),
+// (Path::new("deps/dep-dir3/deps").into(), PathChange::Loaded),
+// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded)
+// ]
+// );
+
+// // Expand a subdirectory of one of the symlinked directories.
+// tree.read_with(cx, |tree, _| {
+// tree.as_local()
+// .unwrap()
+// .refresh_entries_for_paths(vec![Path::new("deps/dep-dir3/src").into()])
+// })
+// .recv()
+// .await;
+
+// // The expanded subdirectory's contents are loaded.
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_external))
+// .collect::<Vec<_>>(),
+// vec![
+// (Path::new(""), false),
+// (Path::new("deps"), false),
+// (Path::new("deps/dep-dir2"), true),
+// (Path::new("deps/dep-dir3"), true),
+// (Path::new("deps/dep-dir3/deps"), true),
+// (Path::new("deps/dep-dir3/src"), true),
+// (Path::new("deps/dep-dir3/src/e.rs"), true),
+// (Path::new("deps/dep-dir3/src/f.rs"), true),
+// (Path::new("src"), false),
+// (Path::new("src/a.rs"), false),
+// (Path::new("src/b.rs"), false),
+// ]
+// );
+// });
+
+// assert_eq!(
+// mem::take(&mut *tree_updates.lock()),
+// &[
+// (Path::new("deps/dep-dir3/src").into(), PathChange::Loaded),
+// (
+// Path::new("deps/dep-dir3/src/e.rs").into(),
+// PathChange::Loaded
+// ),
+// (
+// Path::new("deps/dep-dir3/src/f.rs").into(),
+// PathChange::Loaded
+// )
+// ]
+// );
+// }
+
+// #[gpui::test]
+// async fn test_open_gitignored_files(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// ".gitignore": "node_modules\n",
+// "one": {
+// "node_modules": {
+// "a": {
+// "a1.js": "a1",
+// "a2.js": "a2",
+// },
+// "b": {
+// "b1.js": "b1",
+// "b2.js": "b2",
+// },
+// "c": {
+// "c1.js": "c1",
+// "c2.js": "c2",
+// }
+// },
+// },
+// "two": {
+// "x.js": "",
+// "y.js": "",
+// },
+// }),
+// )
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root"),
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+// .collect::<Vec<_>>(),
+// vec![
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("one"), false),
+// (Path::new("one/node_modules"), true),
+// (Path::new("two"), false),
+// (Path::new("two/x.js"), false),
+// (Path::new("two/y.js"), false),
+// ]
+// );
+// });
+
+// // Open a file that is nested inside of a gitignored directory that
+// // has not yet been expanded.
+// let prev_read_dir_count = fs.read_dir_call_count();
+// let buffer = tree
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .load_buffer(0, "one/node_modules/b/b1.js".as_ref(), cx)
+// })
+// .await
+// .unwrap();
+
+// tree.read_with(cx, |tree, cx| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+// .collect::<Vec<_>>(),
+// vec![
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("one"), false),
+// (Path::new("one/node_modules"), true),
+// (Path::new("one/node_modules/a"), true),
+// (Path::new("one/node_modules/b"), true),
+// (Path::new("one/node_modules/b/b1.js"), true),
+// (Path::new("one/node_modules/b/b2.js"), true),
+// (Path::new("one/node_modules/c"), true),
+// (Path::new("two"), false),
+// (Path::new("two/x.js"), false),
+// (Path::new("two/y.js"), false),
+// ]
+// );
+
+// assert_eq!(
+// buffer.read(cx).file().unwrap().path().as_ref(),
+// Path::new("one/node_modules/b/b1.js")
+// );
+
+// // Only the newly-expanded directories are scanned.
+// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 2);
+// });
+
+// // Open another file in a different subdirectory of the same
+// // gitignored directory.
+// let prev_read_dir_count = fs.read_dir_call_count();
+// let buffer = tree
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .load_buffer(0, "one/node_modules/a/a2.js".as_ref(), cx)
+// })
+// .await
+// .unwrap();
+
+// tree.read_with(cx, |tree, cx| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+// .collect::<Vec<_>>(),
+// vec![
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("one"), false),
+// (Path::new("one/node_modules"), true),
+// (Path::new("one/node_modules/a"), true),
+// (Path::new("one/node_modules/a/a1.js"), true),
+// (Path::new("one/node_modules/a/a2.js"), true),
+// (Path::new("one/node_modules/b"), true),
+// (Path::new("one/node_modules/b/b1.js"), true),
+// (Path::new("one/node_modules/b/b2.js"), true),
+// (Path::new("one/node_modules/c"), true),
+// (Path::new("two"), false),
+// (Path::new("two/x.js"), false),
+// (Path::new("two/y.js"), false),
+// ]
+// );
+
+// assert_eq!(
+// buffer.read(cx).file().unwrap().path().as_ref(),
+// Path::new("one/node_modules/a/a2.js")
+// );
+
+// // Only the newly-expanded directory is scanned.
+// assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 1);
+// });
+
+// // No work happens when files and directories change within an unloaded directory.
+// let prev_fs_call_count = fs.read_dir_call_count() + fs.metadata_call_count();
+// fs.create_dir("/root/one/node_modules/c/lib".as_ref())
+// .await
+// .unwrap();
+// cx.foreground().run_until_parked();
+// assert_eq!(
+// fs.read_dir_call_count() + fs.metadata_call_count() - prev_fs_call_count,
+// 0
+// );
+// }
+
+// #[gpui::test]
+// async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// ".gitignore": "node_modules\n",
+// "a": {
+// "a.js": "",
+// },
+// "b": {
+// "b.js": "",
+// },
+// "node_modules": {
+// "c": {
+// "c.js": "",
+// },
+// "d": {
+// "d.js": "",
+// "e": {
+// "e1.js": "",
+// "e2.js": "",
+// },
+// "f": {
+// "f1.js": "",
+// "f2.js": "",
+// }
+// },
+// },
+// }),
+// )
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root"),
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// // Open a file within the gitignored directory, forcing some of its
+// // subdirectories to be read, but not all.
+// let read_dir_count_1 = fs.read_dir_call_count();
+// tree.read_with(cx, |tree, _| {
+// tree.as_local()
+// .unwrap()
+// .refresh_entries_for_paths(vec![Path::new("node_modules/d/d.js").into()])
+// })
+// .recv()
+// .await;
+
+// // Those subdirectories are now loaded.
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|e| (e.path.as_ref(), e.is_ignored))
+// .collect::<Vec<_>>(),
+// &[
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("a"), false),
+// (Path::new("a/a.js"), false),
+// (Path::new("b"), false),
+// (Path::new("b/b.js"), false),
+// (Path::new("node_modules"), true),
+// (Path::new("node_modules/c"), true),
+// (Path::new("node_modules/d"), true),
+// (Path::new("node_modules/d/d.js"), true),
+// (Path::new("node_modules/d/e"), true),
+// (Path::new("node_modules/d/f"), true),
+// ]
+// );
+// });
+// let read_dir_count_2 = fs.read_dir_call_count();
+// assert_eq!(read_dir_count_2 - read_dir_count_1, 2);
+
+// // Update the gitignore so that node_modules is no longer ignored,
+// // but a subdirectory is ignored
+// fs.save("/root/.gitignore".as_ref(), &"e".into(), Default::default())
+// .await
+// .unwrap();
+// cx.foreground().run_until_parked();
+
+// // All of the directories that are no longer ignored are now loaded.
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(
+// tree.entries(true)
+// .map(|e| (e.path.as_ref(), e.is_ignored))
+// .collect::<Vec<_>>(),
+// &[
+// (Path::new(""), false),
+// (Path::new(".gitignore"), false),
+// (Path::new("a"), false),
+// (Path::new("a/a.js"), false),
+// (Path::new("b"), false),
+// (Path::new("b/b.js"), false),
+// // This directory is no longer ignored
+// (Path::new("node_modules"), false),
+// (Path::new("node_modules/c"), false),
+// (Path::new("node_modules/c/c.js"), false),
+// (Path::new("node_modules/d"), false),
+// (Path::new("node_modules/d/d.js"), false),
+// // This subdirectory is now ignored
+// (Path::new("node_modules/d/e"), true),
+// (Path::new("node_modules/d/f"), false),
+// (Path::new("node_modules/d/f/f1.js"), false),
+// (Path::new("node_modules/d/f/f2.js"), false),
+// ]
+// );
+// });
+
+// // Each of the newly-loaded directories is scanned only once.
+// let read_dir_count_3 = fs.read_dir_call_count();
+// assert_eq!(read_dir_count_3 - read_dir_count_2, 2);
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_rescan_with_gitignore(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
+// "tree": {
+// ".git": {},
+// ".gitignore": "ignored-dir\n",
+// "tracked-dir": {
+// "tracked-file1": "",
+// "ancestor-ignored-file1": "",
+// },
+// "ignored-dir": {
+// "ignored-file1": ""
+// }
+// }
+// }),
+// )
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// "/root/tree".as_ref(),
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// tree.read_with(cx, |tree, _| {
+// tree.as_local()
+// .unwrap()
+// .refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
+// })
+// .recv()
+// .await;
+
+// cx.read(|cx| {
+// let tree = tree.read(cx);
+// assert!(
+// !tree
+// .entry_for_path("tracked-dir/tracked-file1")
+// .unwrap()
+// .is_ignored
+// );
+// assert!(
+// tree.entry_for_path("tracked-dir/ancestor-ignored-file1")
+// .unwrap()
+// .is_ignored
+// );
+// assert!(
+// tree.entry_for_path("ignored-dir/ignored-file1")
+// .unwrap()
+// .is_ignored
+// );
+// });
+
+// fs.create_file(
+// "/root/tree/tracked-dir/tracked-file2".as_ref(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+// fs.create_file(
+// "/root/tree/tracked-dir/ancestor-ignored-file2".as_ref(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+// fs.create_file(
+// "/root/tree/ignored-dir/ignored-file2".as_ref(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.foreground().run_until_parked();
+// cx.read(|cx| {
+// let tree = tree.read(cx);
+// assert!(
+// !tree
+// .entry_for_path("tracked-dir/tracked-file2")
+// .unwrap()
+// .is_ignored
+// );
+// assert!(
+// tree.entry_for_path("tracked-dir/ancestor-ignored-file2")
+// .unwrap()
+// .is_ignored
+// );
+// assert!(
+// tree.entry_for_path("ignored-dir/ignored-file2")
+// .unwrap()
+// .is_ignored
+// );
+// assert!(tree.entry_for_path(".git").unwrap().is_ignored);
+// });
+// }
+
+// #[gpui::test]
+// async fn test_write_file(cx: &mut TestAppContext) {
+// let dir = temp_tree(json!({
+// ".git": {},
+// ".gitignore": "ignored-dir\n",
+// "tracked-dir": {},
+// "ignored-dir": {}
+// }));
+
+// let tree = Worktree::local(
+// build_client(cx),
+// dir.path(),
+// true,
+// Arc::new(RealFs),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+// tree.flush_fs_events(cx).await;
+
+// tree.update(cx, |tree, cx| {
+// tree.as_local().unwrap().write_file(
+// Path::new("tracked-dir/file.txt"),
+// "hello".into(),
+// Default::default(),
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+// tree.update(cx, |tree, cx| {
+// tree.as_local().unwrap().write_file(
+// Path::new("ignored-dir/file.txt"),
+// "world".into(),
+// Default::default(),
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+
+// tree.read_with(cx, |tree, _| {
+// let tracked = tree.entry_for_path("tracked-dir/file.txt").unwrap();
+// let ignored = tree.entry_for_path("ignored-dir/file.txt").unwrap();
+// assert!(!tracked.is_ignored);
+// assert!(ignored.is_ignored);
+// });
+// }
+
+// #[gpui::test(iterations = 30)]
+// async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// "b": {},
+// "c": {},
+// "d": {},
+// }),
+// )
+// .await;
+
+// let tree = Worktree::local(
+// build_client(cx),
+// "/root".as_ref(),
+// true,
+// fs,
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
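+// // A hedged note: mirror the worktree's snapshot by applying each update it
+// // broadcasts while the initial scan is still running.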
+// let snapshot1 = tree.update(cx, |tree, cx| {
+// let tree = tree.as_local_mut().unwrap();
+// let snapshot = Arc::new(Mutex::new(tree.snapshot()));
+// let _ = tree.observe_updates(0, cx, {
+// let snapshot = snapshot.clone();
+// move |update| {
+// snapshot.lock().apply_remote_update(update).unwrap();
+// async { true }
+// }
+// });
+// snapshot
+// });
+
+// let entry = tree
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .create_entry("a/e".as_ref(), true, cx)
+// })
+// .await
+// .unwrap();
+// assert!(entry.is_dir());
+
+// cx.foreground().run_until_parked();
+// tree.read_with(cx, |tree, _| {
+// assert_eq!(tree.entry_for_path("a/e").unwrap().kind, EntryKind::Dir);
+// });
+
+// let snapshot2 = tree.update(cx, |tree, _| tree.as_local().unwrap().snapshot());
+// assert_eq!(
+// snapshot1.lock().entries(true).collect::<Vec<_>>(),
+// snapshot2.entries(true).collect::<Vec<_>>()
+// );
+// }
+
+// #[gpui::test]
+// async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
+// let client_fake = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+// let fs_fake = FakeFs::new(cx.background());
+// fs_fake
+// .insert_tree(
+// "/root",
+// json!({
+// "a": {},
+// }),
+// )
+// .await;
+
+// let tree_fake = Worktree::local(
+// client_fake,
+// "/root".as_ref(),
+// true,
+// fs_fake,
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// let entry = tree_fake
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+// })
+// .await
+// .unwrap();
+// assert!(entry.is_file());
+
+// cx.foreground().run_until_parked();
+// tree_fake.read_with(cx, |tree, _| {
+// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+// assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+// });
+
+// let client_real = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
+
+// let fs_real = Arc::new(RealFs);
+// let temp_root = temp_tree(json!({
+// "a": {}
+// }));
+
+// let tree_real = Worktree::local(
+// client_real,
+// temp_root.path(),
+// true,
+// fs_real,
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// let entry = tree_real
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .create_entry("a/b/c/d.txt".as_ref(), false, cx)
+// })
+// .await
+// .unwrap();
+// assert!(entry.is_file());
+
+// cx.foreground().run_until_parked();
+// tree_real.read_with(cx, |tree, _| {
+// assert!(tree.entry_for_path("a/b/c/d.txt").unwrap().is_file());
+// assert!(tree.entry_for_path("a/b/c/").unwrap().is_dir());
+// assert!(tree.entry_for_path("a/b/").unwrap().is_dir());
+// });
+
+// // Test the smallest change: create a file whose parent directories already exist.
+// let entry = tree_real
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .create_entry("a/b/c/e.txt".as_ref(), false, cx)
+// })
+// .await
+// .unwrap();
+// assert!(entry.is_file());
+
+// cx.foreground().run_until_parked();
+// tree_real.read_with(cx, |tree, _| {
+// assert!(tree.entry_for_path("a/b/c/e.txt").unwrap().is_file());
+// });
+
+// // Test the largest change: create a file whose ancestor directories must all be created.
+// let entry = tree_real
+// .update(cx, |tree, cx| {
+// tree.as_local_mut()
+// .unwrap()
+// .create_entry("d/e/f/g.txt".as_ref(), false, cx)
+// })
+// .await
+// .unwrap();
+// assert!(entry.is_file());
+
+// cx.foreground().run_until_parked();
+// tree_real.read_with(cx, |tree, _| {
+// assert!(tree.entry_for_path("d/e/f/g.txt").unwrap().is_file());
+// assert!(tree.entry_for_path("d/e/f").unwrap().is_dir());
+// assert!(tree.entry_for_path("d/e/").unwrap().is_dir());
+// assert!(tree.entry_for_path("d/").unwrap().is_dir());
+// });
+// }
+
+// #[gpui::test(iterations = 100)]
+// async fn test_random_worktree_operations_during_initial_scan(
+// cx: &mut TestAppContext,
+// mut rng: StdRng,
+// ) {
+// let operations = env::var("OPERATIONS")
+// .map(|o| o.parse().unwrap())
+// .unwrap_or(5);
+// let initial_entries = env::var("INITIAL_ENTRIES")
+// .map(|o| o.parse().unwrap())
+// .unwrap_or(20);
+
+// let root_dir = Path::new("/test");
+// let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
+// fs.as_fake().insert_tree(root_dir, json!({})).await;
+// for _ in 0..initial_entries {
+// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+// }
+// log::info!("generated initial tree");
+
+// let worktree = Worktree::local(
+// build_client(cx),
+// root_dir,
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// let mut snapshots = vec![worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot())];
+// let updates = Arc::new(Mutex::new(Vec::new()));
+// worktree.update(cx, |tree, cx| {
+// check_worktree_change_events(tree, cx);
+
+// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+// let updates = updates.clone();
+// move |update| {
+// updates.lock().push(update);
+// async { true }
+// }
+// });
+// });
+
+// for _ in 0..operations {
+// worktree
+// .update(cx, |worktree, cx| {
+// randomly_mutate_worktree(worktree, &mut rng, cx)
+// })
+// .await
+// .log_err();
+// worktree.read_with(cx, |tree, _| {
+// tree.as_local().unwrap().snapshot().check_invariants(true)
+// });
+
+// if rng.gen_bool(0.6) {
+// snapshots.push(worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot()));
+// }
+// }
+
+// worktree
+// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+// .await;
+
+// cx.foreground().run_until_parked();
+
+// let final_snapshot = worktree.read_with(cx, |tree, _| {
+// let tree = tree.as_local().unwrap();
+// let snapshot = tree.snapshot();
+// snapshot.check_invariants(true);
+// snapshot
+// });
+
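+// // Replaying the captured updates onto each earlier snapshot should converge
+// // on the final snapshot's entries.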
+// for (i, snapshot) in snapshots.into_iter().enumerate().rev() {
+// let mut updated_snapshot = snapshot.clone();
+// for update in updates.lock().iter() {
+// if update.scan_id >= updated_snapshot.scan_id() as u64 {
+// updated_snapshot
+// .apply_remote_update(update.clone())
+// .unwrap();
+// }
+// }
+
+// assert_eq!(
+// updated_snapshot.entries(true).collect::<Vec<_>>(),
+// final_snapshot.entries(true).collect::<Vec<_>>(),
+// "wrong updates after snapshot {i}: {snapshot:#?} {updates:#?}",
+// );
+// }
+// }
+
+// #[gpui::test(iterations = 100)]
+// async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng) {
+// let operations = env::var("OPERATIONS")
+// .map(|o| o.parse().unwrap())
+// .unwrap_or(40);
+// let initial_entries = env::var("INITIAL_ENTRIES")
+// .map(|o| o.parse().unwrap())
+// .unwrap_or(20);
+
+// let root_dir = Path::new("/test");
+// let fs = FakeFs::new(cx.background()) as Arc<dyn Fs>;
+// fs.as_fake().insert_tree(root_dir, json!({})).await;
+// for _ in 0..initial_entries {
+// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+// }
+// log::info!("generated initial tree");
+
+// let worktree = Worktree::local(
+// build_client(cx),
+// root_dir,
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// let updates = Arc::new(Mutex::new(Vec::new()));
+// worktree.update(cx, |tree, cx| {
+// check_worktree_change_events(tree, cx);
+
+// let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+// let updates = updates.clone();
+// move |update| {
+// updates.lock().push(update);
+// async { true }
+// }
+// });
+// });
+
+// worktree
+// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+// .await;
+
+// fs.as_fake().pause_events();
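+// // Pause file system events so they can be flushed below in randomly-sized batches.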
+// let mut snapshots = Vec::new();
+// let mut mutations_len = operations;
+// while mutations_len > 1 {
+// if rng.gen_bool(0.2) {
+// worktree
+// .update(cx, |worktree, cx| {
+// randomly_mutate_worktree(worktree, &mut rng, cx)
+// })
+// .await
+// .log_err();
+// } else {
+// randomly_mutate_fs(&fs, root_dir, 1.0, &mut rng).await;
+// }
+
+// let buffered_event_count = fs.as_fake().buffered_event_count();
+// if buffered_event_count > 0 && rng.gen_bool(0.3) {
+// let len = rng.gen_range(0..=buffered_event_count);
+// log::info!("flushing {} events", len);
+// fs.as_fake().flush_events(len);
+// } else {
+// randomly_mutate_fs(&fs, root_dir, 0.6, &mut rng).await;
+// mutations_len -= 1;
+// }
+
+// cx.foreground().run_until_parked();
+// if rng.gen_bool(0.2) {
+// log::info!("storing snapshot {}", snapshots.len());
+// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+// snapshots.push(snapshot);
+// }
+// }
+
+// log::info!("quiescing");
+// fs.as_fake().flush_events(usize::MAX);
+// cx.foreground().run_until_parked();
+
+// let snapshot = worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+// snapshot.check_invariants(true);
+// let expanded_paths = snapshot
+// .expanded_entries()
+// .map(|e| e.path.clone())
+// .collect::<Vec<_>>();
+
+// {
+// let new_worktree = Worktree::local(
+// build_client(cx),
+// root_dir,
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+// new_worktree
+// .update(cx, |tree, _| tree.as_local_mut().unwrap().scan_complete())
+// .await;
+// new_worktree
+// .update(cx, |tree, _| {
+// tree.as_local_mut()
+// .unwrap()
+// .refresh_entries_for_paths(expanded_paths)
+// })
+// .recv()
+// .await;
+// let new_snapshot =
+// new_worktree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
+// assert_eq!(
+// snapshot.entries_without_ids(true),
+// new_snapshot.entries_without_ids(true)
+// );
+// }
+
+// for (i, mut prev_snapshot) in snapshots.into_iter().enumerate().rev() {
+// for update in updates.lock().iter() {
+// if update.scan_id >= prev_snapshot.scan_id() as u64 {
+// prev_snapshot.apply_remote_update(update.clone()).unwrap();
+// }
+// }
+
+// assert_eq!(
+// prev_snapshot
+// .entries(true)
+// .map(ignore_pending_dir)
+// .collect::<Vec<_>>(),
+// snapshot
+// .entries(true)
+// .map(ignore_pending_dir)
+// .collect::<Vec<_>>(),
+// "wrong updates after snapshot {i}: {updates:#?}",
+// );
+// }
+
+// fn ignore_pending_dir(entry: &Entry) -> Entry {
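+// // Collapse every directory kind to `EntryKind::Dir` so that pending and
+// // fully-scanned directories compare equal.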
+// let mut entry = entry.clone();
+// if entry.kind.is_dir() {
+// entry.kind = EntryKind::Dir
+// }
+// entry
+// }
+// }
+
+// // The worktree's `UpdatedEntries` event can be used to follow along with
+// // all changes to the worktree's snapshot.
+// fn check_worktree_change_events(tree: &mut Worktree, cx: &mut ModelContext<Worktree>) {
+// let mut entries = tree.entries(true).cloned().collect::<Vec<_>>();
+// cx.subscribe(&cx.handle(), move |tree, _, event, _| {
+// if let Event::UpdatedEntries(changes) = event {
+// for (path, _, change_type) in changes.iter() {
+// let entry = tree.entry_for_path(&path).cloned();
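+// // Locate the path's position in the sorted entry list, whether or not an
+// // entry is currently present at that path.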
+// let ix = match entries.binary_search_by_key(&path, |e| &e.path) {
+// Ok(ix) | Err(ix) => ix,
+// };
+// match change_type {
+// PathChange::Added => entries.insert(ix, entry.unwrap()),
+// PathChange::Removed => drop(entries.remove(ix)),
+// PathChange::Updated => {
+// let entry = entry.unwrap();
+// let existing_entry = entries.get_mut(ix).unwrap();
+// assert_eq!(existing_entry.path, entry.path);
+// *existing_entry = entry;
+// }
+// PathChange::AddedOrUpdated | PathChange::Loaded => {
+// let entry = entry.unwrap();
+// if entries.get(ix).map(|e| &e.path) == Some(&entry.path) {
+// *entries.get_mut(ix).unwrap() = entry;
+// } else {
+// entries.insert(ix, entry);
+// }
+// }
+// }
+// }
+
+// let new_entries = tree.entries(true).cloned().collect::<Vec<_>>();
+// assert_eq!(entries, new_entries, "incorrect changes: {:?}", changes);
+// }
+// })
+// .detach();
+// }
+
+// fn randomly_mutate_worktree(
+// worktree: &mut Worktree,
+// rng: &mut impl Rng,
+// cx: &mut ModelContext<Worktree>,
+// ) -> Task<Result<()>> {
+// log::info!("mutating worktree");
+// let worktree = worktree.as_local_mut().unwrap();
+// let snapshot = worktree.snapshot();
+// let entry = snapshot.entries(false).choose(rng).unwrap();
+
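+// // Pick a mutation at random: delete, rename, or create/overwrite an entry,
+// // each with roughly equal probability (the root entry only receives creations).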
+// match rng.gen_range(0_u32..100) {
+// 0..=33 if entry.path.as_ref() != Path::new("") => {
+// log::info!("deleting entry {:?} ({})", entry.path, entry.id.0);
+// worktree.delete_entry(entry.id, cx).unwrap()
+// }
+// ..=66 if entry.path.as_ref() != Path::new("") => {
+// let other_entry = snapshot.entries(false).choose(rng).unwrap();
+// let new_parent_path = if other_entry.is_dir() {
+// other_entry.path.clone()
+// } else {
+// other_entry.path.parent().unwrap().into()
+// };
+// let mut new_path = new_parent_path.join(random_filename(rng));
+// if new_path.starts_with(&entry.path) {
+// new_path = random_filename(rng).into();
+// }
+
+// log::info!(
+// "renaming entry {:?} ({}) to {:?}",
+// entry.path,
+// entry.id.0,
+// new_path
+// );
+// let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+// cx.foreground().spawn(async move {
+// task.await?;
+// Ok(())
+// })
+// }
+// _ => {
+// let task = if entry.is_dir() {
+// let child_path = entry.path.join(random_filename(rng));
+// let is_dir = rng.gen_bool(0.3);
+// log::info!(
+// "creating {} at {:?}",
+// if is_dir { "dir" } else { "file" },
+// child_path,
+// );
+// worktree.create_entry(child_path, is_dir, cx)
+// } else {
+// log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
+// worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
+// };
+// cx.foreground().spawn(async move {
+// task.await?;
+// Ok(())
+// })
+// }
+// }
+// }
+
+// async fn randomly_mutate_fs(
+// fs: &Arc<dyn Fs>,
+// root_path: &Path,
+// insertion_probability: f64,
+// rng: &mut impl Rng,
+// ) {
+// log::info!("mutating fs");
+// let mut files = Vec::new();
+// let mut dirs = Vec::new();
+// for path in fs.as_fake().paths(false) {
+// if path.starts_with(root_path) {
+// if fs.is_file(&path).await {
+// files.push(path);
+// } else {
+// dirs.push(path);
+// }
+// }
+// }
+
+// if (files.is_empty() && dirs.len() == 1) || rng.gen_bool(insertion_probability) {
+// let path = dirs.choose(rng).unwrap();
+// let new_path = path.join(random_filename(rng));
+
+// if rng.gen() {
+// log::info!(
+// "creating dir {:?}",
+// new_path.strip_prefix(root_path).unwrap()
+// );
+// fs.create_dir(&new_path).await.unwrap();
+// } else {
+// log::info!(
+// "creating file {:?}",
+// new_path.strip_prefix(root_path).unwrap()
+// );
+// fs.create_file(&new_path, Default::default()).await.unwrap();
+// }
+// } else if rng.gen_bool(0.05) {
+// let ignore_dir_path = dirs.choose(rng).unwrap();
+// let ignore_path = ignore_dir_path.join(&*GITIGNORE);
+
+// let subdirs = dirs
+// .iter()
+// .filter(|d| d.starts_with(&ignore_dir_path))
+// .cloned()
+// .collect::<Vec<_>>();
+// let subfiles = files
+// .iter()
+// .filter(|d| d.starts_with(&ignore_dir_path))
+// .cloned()
+// .collect::<Vec<_>>();
+// let files_to_ignore = {
+// let len = rng.gen_range(0..=subfiles.len());
+// subfiles.choose_multiple(rng, len)
+// };
+// let dirs_to_ignore = {
+// let len = rng.gen_range(0..subdirs.len());
+// subdirs.choose_multiple(rng, len)
+// };
+
+// let mut ignore_contents = String::new();
+// for path_to_ignore in files_to_ignore.chain(dirs_to_ignore) {
+// writeln!(
+// ignore_contents,
+// "{}",
+// path_to_ignore
+// .strip_prefix(&ignore_dir_path)
+// .unwrap()
+// .to_str()
+// .unwrap()
+// )
+// .unwrap();
+// }
+// log::info!(
+// "creating gitignore {:?} with contents:\n{}",
+// ignore_path.strip_prefix(&root_path).unwrap(),
+// ignore_contents
+// );
+// fs.save(
+// &ignore_path,
+// &ignore_contents.as_str().into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+// } else {
+// let old_path = {
+// let file_path = files.choose(rng);
+// let dir_path = dirs[1..].choose(rng);
+// file_path.into_iter().chain(dir_path).choose(rng).unwrap()
+// };
+
+// let is_rename = rng.gen();
+// if is_rename {
+// let new_path_parent = dirs
+// .iter()
+// .filter(|d| !d.starts_with(old_path))
+// .choose(rng)
+// .unwrap();
+
+// let overwrite_existing_dir =
+// !old_path.starts_with(&new_path_parent) && rng.gen_bool(0.3);
+// let new_path = if overwrite_existing_dir {
+// fs.remove_dir(
+// &new_path_parent,
+// RemoveOptions {
+// recursive: true,
+// ignore_if_not_exists: true,
+// },
+// )
+// .await
+// .unwrap();
+// new_path_parent.to_path_buf()
+// } else {
+// new_path_parent.join(random_filename(rng))
+// };
+
+// log::info!(
+// "renaming {:?} to {}{:?}",
+// old_path.strip_prefix(&root_path).unwrap(),
+// if overwrite_existing_dir {
+// "overwrite "
+// } else {
+// ""
+// },
+// new_path.strip_prefix(&root_path).unwrap()
+// );
+// fs.rename(
+// &old_path,
+// &new_path,
+// fs::RenameOptions {
+// overwrite: true,
+// ignore_if_exists: true,
+// },
+// )
+// .await
+// .unwrap();
+// } else if fs.is_file(&old_path).await {
+// log::info!(
+// "deleting file {:?}",
+// old_path.strip_prefix(&root_path).unwrap()
+// );
+// fs.remove_file(old_path, Default::default()).await.unwrap();
+// } else {
+// log::info!(
+// "deleting dir {:?}",
+// old_path.strip_prefix(&root_path).unwrap()
+// );
+// fs.remove_dir(
+// &old_path,
+// RemoveOptions {
+// recursive: true,
+// ignore_if_not_exists: true,
+// },
+// )
+// .await
+// .unwrap();
+// }
+// }
+// }
+
+// fn random_filename(rng: &mut impl Rng) -> String {
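+// // Produce a random six-character alphanumeric name.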
+// (0..6)
+// .map(|_| rng.sample(rand::distributions::Alphanumeric))
+// .map(char::from)
+// .collect()
+// }
+
+// #[gpui::test]
+// async fn test_rename_work_directory(cx: &mut TestAppContext) {
+// let root = temp_tree(json!({
+// "projects": {
+// "project1": {
+// "a": "",
+// "b": "",
+// }
+// },
+
+// }));
+// let root_path = root.path();
+
+// let tree = Worktree::local(
+// build_client(cx),
+// root_path,
+// true,
+// Arc::new(RealFs),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// let repo = git_init(&root_path.join("projects/project1"));
+// git_add("a", &repo);
+// git_commit("init", &repo);
+// std::fs::write(root_path.join("projects/project1/a"), "aa").ok();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// tree.flush_fs_events(cx).await;
+
+// cx.read(|cx| {
+// let tree = tree.read(cx);
+// let (work_dir, _) = tree.repositories().next().unwrap();
+// assert_eq!(work_dir.as_ref(), Path::new("projects/project1"));
+// assert_eq!(
+// tree.status_for_file(Path::new("projects/project1/a")),
+// Some(GitFileStatus::Modified)
+// );
+// assert_eq!(
+// tree.status_for_file(Path::new("projects/project1/b")),
+// Some(GitFileStatus::Added)
+// );
+// });
+
+// std::fs::rename(
+// root_path.join("projects/project1"),
+// root_path.join("projects/project2"),
+// )
+// .ok();
+// tree.flush_fs_events(cx).await;
+
+// cx.read(|cx| {
+// let tree = tree.read(cx);
+// let (work_dir, _) = tree.repositories().next().unwrap();
+// assert_eq!(work_dir.as_ref(), Path::new("projects/project2"));
+// assert_eq!(
+// tree.status_for_file(Path::new("projects/project2/a")),
+// Some(GitFileStatus::Modified)
+// );
+// assert_eq!(
+// tree.status_for_file(Path::new("projects/project2/b")),
+// Some(GitFileStatus::Added)
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+// let root = temp_tree(json!({
+// "c.txt": "",
+// "dir1": {
+// ".git": {},
+// "deps": {
+// "dep1": {
+// ".git": {},
+// "src": {
+// "a.txt": ""
+// }
+// }
+// },
+// "src": {
+// "b.txt": ""
+// }
+// },
+// }));
+
+// let tree = Worktree::local(
+// build_client(cx),
+// root.path(),
+// true,
+// Arc::new(RealFs),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+// tree.flush_fs_events(cx).await;
+
+// tree.read_with(cx, |tree, _cx| {
+// let tree = tree.as_local().unwrap();
+
+// assert!(tree.repository_for_path("c.txt".as_ref()).is_none());
+
+// let entry = tree.repository_for_path("dir1/src/b.txt".as_ref()).unwrap();
+// assert_eq!(
+// entry
+// .work_directory(tree)
+// .map(|directory| directory.as_ref().to_owned()),
+// Some(Path::new("dir1").to_owned())
+// );
+
+// let entry = tree
+// .repository_for_path("dir1/deps/dep1/src/a.txt".as_ref())
+// .unwrap();
+// assert_eq!(
+// entry
+// .work_directory(tree)
+// .map(|directory| directory.as_ref().to_owned()),
+// Some(Path::new("dir1/deps/dep1").to_owned())
+// );
+
+// let entries = tree.files(false, 0);
+
+// let paths_with_repos = tree
+// .entries_with_repositories(entries)
+// .map(|(entry, repo)| {
+// (
+// entry.path.as_ref(),
+// repo.and_then(|repo| {
+// repo.work_directory(&tree)
+// .map(|work_directory| work_directory.0.to_path_buf())
+// }),
+// )
+// })
+// .collect::<Vec<_>>();
+
+// assert_eq!(
+// paths_with_repos,
+// &[
+// (Path::new("c.txt"), None),
+// (
+// Path::new("dir1/deps/dep1/src/a.txt"),
+// Some(Path::new("dir1/deps/dep1").into())
+// ),
+// (Path::new("dir1/src/b.txt"), Some(Path::new("dir1").into())),
+// ]
+// );
+// });
+
+// let repo_update_events = Arc::new(Mutex::new(vec![]));
+// tree.update(cx, |_, cx| {
+// let repo_update_events = repo_update_events.clone();
+// cx.subscribe(&tree, move |_, _, event, _| {
+// if let Event::UpdatedGitRepositories(update) = event {
+// repo_update_events.lock().push(update.clone());
+// }
+// })
+// .detach();
+// });
+
+// std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+// tree.flush_fs_events(cx).await;
+
+// assert_eq!(
+// repo_update_events.lock()[0]
+// .iter()
+// .map(|e| e.0.clone())
+// .collect::<Vec<Arc<Path>>>(),
+// vec![Path::new("dir1").into()]
+// );
+
+// std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+// tree.flush_fs_events(cx).await;
+
+// tree.read_with(cx, |tree, _cx| {
+// let tree = tree.as_local().unwrap();
+
+// assert!(tree
+// .repository_for_path("dir1/src/b.txt".as_ref())
+// .is_none());
+// });
+// }
+
+// #[gpui::test]
+// async fn test_git_status(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+// const IGNORE_RULE: &'static str = "**/target";
+
+// let root = temp_tree(json!({
+// "project": {
+// "a.txt": "a",
+// "b.txt": "bb",
+// "c": {
+// "d": {
+// "e.txt": "eee"
+// }
+// },
+// "f.txt": "ffff",
+// "target": {
+// "build_file": "???"
+// },
+// ".gitignore": IGNORE_RULE
+// },
+
+// }));
+
+// const A_TXT: &'static str = "a.txt";
+// const B_TXT: &'static str = "b.txt";
+// const E_TXT: &'static str = "c/d/e.txt";
+// const F_TXT: &'static str = "f.txt";
+// const DOTGITIGNORE: &'static str = ".gitignore";
+// const BUILD_FILE: &'static str = "target/build_file";
+// let project_path = Path::new("project");
+
+// // Set up git repository before creating the worktree.
+// let work_dir = root.path().join("project");
+// let mut repo = git_init(work_dir.as_path());
+// repo.add_ignore_rule(IGNORE_RULE).unwrap();
+// git_add(A_TXT, &repo);
+// git_add(E_TXT, &repo);
+// git_add(DOTGITIGNORE, &repo);
+// git_commit("Initial commit", &repo);
+
+// let tree = Worktree::local(
+// build_client(cx),
+// root.path(),
+// true,
+// Arc::new(RealFs),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// tree.flush_fs_events(cx).await;
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+// deterministic.run_until_parked();
+
+// // Check that the right git state is observed on startup
+// tree.read_with(cx, |tree, _cx| {
+// let snapshot = tree.snapshot();
+// assert_eq!(snapshot.repositories().count(), 1);
+// let (dir, _) = snapshot.repositories().next().unwrap();
+// assert_eq!(dir.as_ref(), Path::new("project"));
+
+// assert_eq!(
+// snapshot.status_for_file(project_path.join(B_TXT)),
+// Some(GitFileStatus::Added)
+// );
+// assert_eq!(
+// snapshot.status_for_file(project_path.join(F_TXT)),
+// Some(GitFileStatus::Added)
+// );
+// });
+
+// // Modify a file in the working copy.
+// std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
+// tree.flush_fs_events(cx).await;
+// deterministic.run_until_parked();
+
+// // The worktree detects that the file's git status has changed.
+// tree.read_with(cx, |tree, _cx| {
+// let snapshot = tree.snapshot();
+// assert_eq!(
+// snapshot.status_for_file(project_path.join(A_TXT)),
+// Some(GitFileStatus::Modified)
+// );
+// });
+
+// // Create a commit in the git repository.
+// git_add(A_TXT, &repo);
+// git_add(B_TXT, &repo);
+// git_commit("Committing modified and added", &repo);
+// tree.flush_fs_events(cx).await;
+// deterministic.run_until_parked();
+
+// // The worktree detects that the files' git status have changed.
+// tree.read_with(cx, |tree, _cx| {
+// let snapshot = tree.snapshot();
+// assert_eq!(
+// snapshot.status_for_file(project_path.join(F_TXT)),
+// Some(GitFileStatus::Added)
+// );
+// assert_eq!(snapshot.status_for_file(project_path.join(B_TXT)), None);
+// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+// });
+
+// // Modify files in the working copy and perform git operations on other files.
+// git_reset(0, &repo);
+// git_remove_index(Path::new(B_TXT), &repo);
+// git_stash(&mut repo);
+// std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
+// std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
+// tree.flush_fs_events(cx).await;
+// deterministic.run_until_parked();
+
+// // Check that the more complex repo changes (reset, unstage, stash) are tracked.
+// tree.read_with(cx, |tree, _cx| {
+// let snapshot = tree.snapshot();
+
+// assert_eq!(snapshot.status_for_file(project_path.join(A_TXT)), None);
+// assert_eq!(
+// snapshot.status_for_file(project_path.join(B_TXT)),
+// Some(GitFileStatus::Added)
+// );
+// assert_eq!(
+// snapshot.status_for_file(project_path.join(E_TXT)),
+// Some(GitFileStatus::Modified)
+// );
+// });
+
+// std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
+// std::fs::remove_dir_all(work_dir.join("c")).unwrap();
+// std::fs::write(
+// work_dir.join(DOTGITIGNORE),
+// [IGNORE_RULE, "f.txt"].join("\n"),
+// )
+// .unwrap();
+
+// git_add(Path::new(DOTGITIGNORE), &repo);
+// git_commit("Committing modified git ignore", &repo);
+
+// tree.flush_fs_events(cx).await;
+// deterministic.run_until_parked();
+
+// let mut renamed_dir_name = "first_directory/second_directory";
+// const RENAMED_FILE: &'static str = "rf.txt";
+
+// std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
+// std::fs::write(
+// work_dir.join(renamed_dir_name).join(RENAMED_FILE),
+// "new-contents",
+// )
+// .unwrap();
+
+// tree.flush_fs_events(cx).await;
+// deterministic.run_until_parked();
+
+// tree.read_with(cx, |tree, _cx| {
+// let snapshot = tree.snapshot();
+// assert_eq!(
+// snapshot.status_for_file(&project_path.join(renamed_dir_name).join(RENAMED_FILE)),
+// Some(GitFileStatus::Added)
+// );
+// });
+
+// renamed_dir_name = "new_first_directory/second_directory";
+
+// std::fs::rename(
+// work_dir.join("first_directory"),
+// work_dir.join("new_first_directory"),
+// )
+// .unwrap();
+
+// tree.flush_fs_events(cx).await;
+// deterministic.run_until_parked();
+
+// tree.read_with(cx, |tree, _cx| {
+// let snapshot = tree.snapshot();
+
+// assert_eq!(
+// snapshot.status_for_file(
+// project_path
+// .join(Path::new(renamed_dir_name))
+// .join(RENAMED_FILE)
+// ),
+// Some(GitFileStatus::Added)
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_propagate_git_statuses(cx: &mut TestAppContext) {
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree(
+// "/root",
+// json!({
+// ".git": {},
+// "a": {
+// "b": {
+// "c1.txt": "",
+// "c2.txt": "",
+// },
+// "d": {
+// "e1.txt": "",
+// "e2.txt": "",
+// "e3.txt": "",
+// }
+// },
+// "f": {
+// "no-status.txt": ""
+// },
+// "g": {
+// "h1.txt": "",
+// "h2.txt": ""
+// },
+
+// }),
+// )
+// .await;
+
+// fs.set_status_for_repo_via_git_operation(
+// &Path::new("/root/.git"),
+// &[
+// (Path::new("a/b/c1.txt"), GitFileStatus::Added),
+// (Path::new("a/d/e2.txt"), GitFileStatus::Modified),
+// (Path::new("g/h2.txt"), GitFileStatus::Conflict),
+// ],
+// );
+
+// let tree = Worktree::local(
+// build_client(cx),
+// Path::new("/root"),
+// true,
+// fs.clone(),
+// Default::default(),
+// &mut cx.to_async(),
+// )
+// .await
+// .unwrap();
+
+// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+// .await;
+
+// cx.foreground().run_until_parked();
+// let snapshot = tree.read_with(cx, |tree, _| tree.snapshot());
+
+// check_propagated_statuses(
+// &snapshot,
+// &[
+// (Path::new(""), Some(GitFileStatus::Conflict)),
+// (Path::new("a"), Some(GitFileStatus::Modified)),
+// (Path::new("a/b"), Some(GitFileStatus::Added)),
+// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+// (Path::new("a/b/c2.txt"), None),
+// (Path::new("a/d"), Some(GitFileStatus::Modified)),
+// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+// (Path::new("f"), None),
+// (Path::new("f/no-status.txt"), None),
+// (Path::new("g"), Some(GitFileStatus::Conflict)),
+// (Path::new("g/h2.txt"), Some(GitFileStatus::Conflict)),
+// ],
+// );
+
+// check_propagated_statuses(
+// &snapshot,
+// &[
+// (Path::new("a/b"), Some(GitFileStatus::Added)),
+// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+// (Path::new("a/b/c2.txt"), None),
+// (Path::new("a/d"), Some(GitFileStatus::Modified)),
+// (Path::new("a/d/e1.txt"), None),
+// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+// (Path::new("f"), None),
+// (Path::new("f/no-status.txt"), None),
+// (Path::new("g"), Some(GitFileStatus::Conflict)),
+// ],
+// );
+
+// check_propagated_statuses(
+// &snapshot,
+// &[
+// (Path::new("a/b/c1.txt"), Some(GitFileStatus::Added)),
+// (Path::new("a/b/c2.txt"), None),
+// (Path::new("a/d/e1.txt"), None),
+// (Path::new("a/d/e2.txt"), Some(GitFileStatus::Modified)),
+// (Path::new("f/no-status.txt"), None),
+// ],
+// );
+
+// #[track_caller]
+// fn check_propagated_statuses(
+// snapshot: &Snapshot,
+// expected_statuses: &[(&Path, Option<GitFileStatus>)],
+// ) {
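+// // The expectations encode that a directory surfaces the most severe status
+// // among its descendants (here Conflict > Modified > Added).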
+// let mut entries = expected_statuses
+// .iter()
+// .map(|(path, _)| snapshot.entry_for_path(path).unwrap().clone())
+// .collect::<Vec<_>>();
+// snapshot.propagate_git_statuses(&mut entries);
+// assert_eq!(
+// entries
+// .iter()
+// .map(|e| (e.path.as_ref(), e.git_status))
+// .collect::<Vec<_>>(),
+// expected_statuses
+// );
+// }
+// }
+
+// fn build_client(cx: &mut TestAppContext) -> Arc<Client> {
+// let http_client = FakeHttpClient::with_404_response();
+// cx.read(|cx| Client::new(http_client, cx))
+// }
+
+// #[track_caller]
+// fn git_init(path: &Path) -> git2::Repository {
+// git2::Repository::init(path).expect("Failed to initialize git repository")
+// }
+
+// #[track_caller]
+// fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
+// let path = path.as_ref();
+// let mut index = repo.index().expect("Failed to get index");
+// index.add_path(path).expect("Failed to add path");
+// index.write().expect("Failed to write index");
+// }
+
+// #[track_caller]
+// fn git_remove_index(path: &Path, repo: &git2::Repository) {
+// let mut index = repo.index().expect("Failed to get index");
+// index.remove_path(path).expect("Failed to remove path");
+// index.write().expect("Failed to write index");
+// }
+
+// #[track_caller]
+// fn git_commit(msg: &'static str, repo: &git2::Repository) {
+// use git2::Signature;
+
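+// // Write the index out as a tree and commit it, using the current HEAD as
+// // the parent commit when the repository already has one.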
+// let signature = Signature::now("test", "test@zed.dev").unwrap();
+// let oid = repo.index().unwrap().write_tree().unwrap();
+// let tree = repo.find_tree(oid).unwrap();
+// if let Ok(head) = repo.head() {
+// let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
+
+// let parent_commit = parent_obj.as_commit().unwrap();
+
+// repo.commit(
+// Some("HEAD"),
+// &signature,
+// &signature,
+// msg,
+// &tree,
+// &[parent_commit],
+// )
+// .expect("Failed to commit with parent");
+// } else {
+// repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
+// .expect("Failed to commit");
+// }
+// }
+
+// #[track_caller]
+// fn git_stash(repo: &mut git2::Repository) {
+// use git2::Signature;
+
+// let signature = Signature::now("test", "test@zed.dev").unwrap();
+// repo.stash_save(&signature, "N/A", None)
+// .expect("Failed to stash");
+// }
+
+// #[track_caller]
+// fn git_reset(offset: usize, repo: &git2::Repository) {
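+// // Soft-reset HEAD to one of its parents (selected by `offset`), leaving the
+// // index and working tree untouched.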
+// let head = repo.head().expect("Couldn't get repo head");
+// let object = head.peel(git2::ObjectType::Commit).unwrap();
+// let commit = object.as_commit().unwrap();
+// let new_head = commit
+// .parents()
+// .inspect(|parent| {
+// parent.message();
+// })
+// .nth(offset)
+// .expect("Not enough history");
+// repo.reset(&new_head.as_object(), git2::ResetType::Soft, None)
+// .expect("Could not reset");
+// }
+
+// #[allow(dead_code)]
+// #[track_caller]
+// fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
+// repo.statuses(None)
+// .unwrap()
+// .iter()
+// .map(|status| (status.path().unwrap().to_string(), status.status()))
+// .collect()
+// }