1mod ignore;
2pub mod lsp_command;
3pub mod lsp_ext_command;
4mod prettier_support;
5pub mod project_settings;
6pub mod search;
7pub mod terminals;
8pub mod worktree;
9
10#[cfg(test)]
11mod project_tests;
12#[cfg(test)]
13mod worktree_tests;
14
15use anyhow::{anyhow, Context as _, Result};
16use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
17use clock::ReplicaId;
18use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
19use copilot::Copilot;
20use futures::{
21 channel::{
22 mpsc::{self, UnboundedReceiver},
23 oneshot,
24 },
25 future::{try_join_all, Shared},
26 stream::FuturesUnordered,
27 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
28};
29use globset::{Glob, GlobSet, GlobSetBuilder};
30use gpui::{
31 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
32 Model, ModelContext, Task, WeakModel,
33};
34use itertools::Itertools;
35use language::{
36 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
37 point_to_lsp,
38 proto::{
39 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
40 serialize_anchor, serialize_version, split_operations,
41 },
42 range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability,
43 CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
44 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
45 LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16,
46 TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
47};
48use log::error;
49use lsp::{
50 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
51 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
52};
53use lsp_command::*;
54use node_runtime::NodeRuntime;
55use parking_lot::Mutex;
56use postage::watch;
57use prettier_support::{DefaultPrettier, PrettierInstance};
58use project_settings::{LspSettings, ProjectSettings};
59use rand::prelude::*;
60use search::SearchQuery;
61use serde::Serialize;
62use settings::{Settings, SettingsStore};
63use sha2::{Digest, Sha256};
64use similar::{ChangeTag, TextDiff};
65use smol::channel::{Receiver, Sender};
66use smol::lock::Semaphore;
67use std::{
68 cmp::{self, Ordering},
69 convert::TryInto,
70 hash::Hash,
71 mem,
72 num::NonZeroU32,
73 ops::Range,
74 path::{self, Component, Path, PathBuf},
75 process::Stdio,
76 str,
77 sync::{
78 atomic::{AtomicUsize, Ordering::SeqCst},
79 Arc,
80 },
81 time::{Duration, Instant},
82};
83use terminals::Terminals;
84use text::Anchor;
85use util::{
86 debug_panic, defer, http::HttpClient, merge_json_value_into,
87 paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
88};
89
90pub use fs::*;
91#[cfg(any(test, feature = "test-support"))]
92pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
93pub use worktree::*;
94
95const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
96
97pub trait Item {
98 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
99 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
100}
101
// Language server state is stored across three collections:
//     language_servers =>
//         a mapping from a unique server id to a LanguageServerState, which is either a task for a
//         server that is still starting, or a running server with its adapter and language server arcs
//     language_server_ids => a mapping from a worktree id and server name to the unique server id
//     language_server_statuses => a mapping from a unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, the id map is checked first to make sure a server isn't already
// available for that worktree. If one is, the startup finishes early. Otherwise, a new id is allocated
// and the Starting variant of LanguageServerState is stored in the language_servers map.
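//
// For example (an illustrative sketch, not code from this file), the lookup described above
// amounts to:
//
//     let key = (worktree_id, adapter.name.clone());
//     if let Some(server_id) = self.language_server_ids.get(&key) {
//         // Reuse the server that is already running (or starting) for this worktree.
//     } else {
//         // Allocate a new id and store LanguageServerState::Starting(..) in language_servers.
//     }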
116pub struct Project {
117 worktrees: Vec<WorktreeHandle>,
118 active_entry: Option<ProjectEntryId>,
119 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
120 languages: Arc<LanguageRegistry>,
121 supplementary_language_servers:
122 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
123 language_servers: HashMap<LanguageServerId, LanguageServerState>,
124 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
125 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
126 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
127 client: Arc<client::Client>,
128 next_entry_id: Arc<AtomicUsize>,
129 join_project_response_message_id: u32,
130 next_diagnostic_group_id: usize,
131 user_store: Model<UserStore>,
132 fs: Arc<dyn Fs>,
133 client_state: Option<ProjectClientState>,
134 collaborators: HashMap<proto::PeerId, Collaborator>,
135 client_subscriptions: Vec<client::Subscription>,
136 _subscriptions: Vec<gpui::Subscription>,
137 next_buffer_id: u64,
138 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
139 shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
140 #[allow(clippy::type_complexity)]
141 loading_buffers_by_path: HashMap<
142 ProjectPath,
143 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
144 >,
145 #[allow(clippy::type_complexity)]
146 loading_local_worktrees:
147 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
148 opened_buffers: HashMap<u64, OpenBuffer>,
149 local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
150 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A mapping from a buffer ID to `None` means that we've started waiting for that buffer but
    /// haven't finished loading it. Used for re-issuing buffer requests when peers temporarily
    /// disconnect.
153 incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>,
154 buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
155 buffers_being_formatted: HashSet<u64>,
156 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
157 git_diff_debouncer: DelayedDebounced,
158 nonce: u128,
159 _maintain_buffer_languages: Task<()>,
160 _maintain_workspace_config: Task<Result<()>>,
161 terminals: Terminals,
162 copilot_lsp_subscription: Option<gpui::Subscription>,
163 copilot_log_subscription: Option<lsp::Subscription>,
164 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
165 node: Option<Arc<dyn NodeRuntime>>,
166 default_prettier: DefaultPrettier,
167 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
168 prettier_instances: HashMap<PathBuf, PrettierInstance>,
169}
170
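/// Runs a task after a delay, cancelling any previously scheduled task that
/// hasn't fired yet. The project uses this to debounce git diff recalculation.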
171struct DelayedDebounced {
172 task: Option<Task<()>>,
173 cancel_channel: Option<oneshot::Sender<()>>,
174}
175
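/// Which language server a request should be routed to: the primary server
/// for the buffer's language, or a specific server identified by id.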
176pub enum LanguageServerToQuery {
177 Primary,
178 Other(LanguageServerId),
179}
180
181impl DelayedDebounced {
182 fn new() -> DelayedDebounced {
183 DelayedDebounced {
184 task: None,
185 cancel_channel: None,
186 }
187 }
188
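    /// Schedules `func` to run after `delay`, cancelling any previously
    /// scheduled call that has not yet fired.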
189 fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
190 where
191 F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
192 {
193 if let Some(channel) = self.cancel_channel.take() {
194 _ = channel.send(());
195 }
196
197 let (sender, mut receiver) = oneshot::channel::<()>();
198 self.cancel_channel = Some(sender);
199
200 let previous_task = self.task.take();
201 self.task = Some(cx.spawn(move |project, mut cx| async move {
202 let mut timer = cx.background_executor().timer(delay).fuse();
203 if let Some(previous_task) = previous_task {
204 previous_task.await;
205 }
206
207 futures::select_biased! {
208 _ = receiver => return,
209 _ = timer => {}
210 }
211
212 if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
213 task.await;
214 }
215 }));
216 }
217}
218
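/// A buffer's text as of a particular version that was reported to a language
/// server.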
219struct LspBufferSnapshot {
220 version: i32,
221 snapshot: TextBufferSnapshot,
222}
223
/// A message ordered with respect to buffer operations.
225enum BufferOrderedMessage {
226 Operation {
227 buffer_id: u64,
228 operation: proto::Operation,
229 },
230 LanguageServerUpdate {
231 language_server_id: LanguageServerId,
232 message: proto::update_language_server::Variant,
233 },
234 Resync,
235}
236
237enum LocalProjectUpdate {
238 WorktreesChanged,
239 CreateBufferForPeer {
240 peer_id: proto::PeerId,
241 buffer_id: u64,
242 },
243}
244
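/// The project's handle to an open buffer: held strongly while the project is
/// shared, weakly otherwise, or as a queue of operations received before the
/// buffer itself became available.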
245enum OpenBuffer {
246 Strong(Model<Buffer>),
247 Weak(WeakModel<Buffer>),
248 Operations(Vec<Operation>),
249}
250
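/// A handle to a worktree, held strongly while the project is shared or the
/// worktree is visible, and weakly otherwise.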
251#[derive(Clone)]
252enum WorktreeHandle {
253 Strong(Model<Worktree>),
254 Weak(WeakModel<Worktree>),
255}
256
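/// Whether this project is shared from the local machine or joined from a
/// remote host. A local project that isn't shared has no client state at all.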
257enum ProjectClientState {
258 Local {
259 remote_id: u64,
260 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
261 _send_updates: Task<Result<()>>,
262 },
263 Remote {
264 sharing_has_stopped: bool,
265 capability: Capability,
266 remote_id: u64,
267 replica_id: ReplicaId,
268 },
269}
270
271#[derive(Clone, Debug, PartialEq)]
272pub enum Event {
273 LanguageServerAdded(LanguageServerId),
274 LanguageServerRemoved(LanguageServerId),
275 LanguageServerLog(LanguageServerId, String),
276 Notification(String),
277 ActiveEntryChanged(Option<ProjectEntryId>),
278 ActivateProjectPanel,
279 WorktreeAdded,
280 WorktreeRemoved(WorktreeId),
281 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
282 DiskBasedDiagnosticsStarted {
283 language_server_id: LanguageServerId,
284 },
285 DiskBasedDiagnosticsFinished {
286 language_server_id: LanguageServerId,
287 },
288 DiagnosticsUpdated {
289 path: ProjectPath,
290 language_server_id: LanguageServerId,
291 },
292 RemoteIdChanged(Option<u64>),
293 DisconnectedFromHost,
294 Closed,
295 DeletedEntry(ProjectEntryId),
296 CollaboratorUpdated {
297 old_peer_id: proto::PeerId,
298 new_peer_id: proto::PeerId,
299 },
300 CollaboratorJoined(proto::PeerId),
301 CollaboratorLeft(proto::PeerId),
302 RefreshInlayHints,
303 RevealInProjectPanel(ProjectEntryId),
304}
305
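/// A language server that is either still starting up or already running.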
306pub enum LanguageServerState {
307 Starting(Task<Option<Arc<LanguageServer>>>),
308
309 Running {
310 language: Arc<Language>,
311 adapter: Arc<CachedLspAdapter>,
312 server: Arc<LanguageServer>,
313 watched_paths: HashMap<WorktreeId, GlobSet>,
314 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
315 },
316}
317
318#[derive(Serialize)]
319pub struct LanguageServerStatus {
320 pub name: String,
321 pub pending_work: BTreeMap<String, LanguageServerProgress>,
322 pub has_pending_diagnostic_updates: bool,
323 progress_tokens: HashSet<String>,
324}
325
326#[derive(Clone, Debug, Serialize)]
327pub struct LanguageServerProgress {
328 pub message: Option<String>,
329 pub percentage: Option<usize>,
330 #[serde(skip_serializing)]
331 pub last_update_at: Instant,
332}
333
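/// A path to a file or directory in a project, relative to the root of the
/// worktree that contains it.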
334#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
335pub struct ProjectPath {
336 pub worktree_id: WorktreeId,
337 pub path: Arc<Path>,
338}
339
340#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
341pub struct DiagnosticSummary {
342 pub error_count: usize,
343 pub warning_count: usize,
344}
345
346#[derive(Debug, Clone, PartialEq, Eq, Hash)]
347pub struct Location {
348 pub buffer: Model<Buffer>,
349 pub range: Range<language::Anchor>,
350}
351
352#[derive(Debug, Clone, PartialEq, Eq)]
353pub struct InlayHint {
354 pub position: language::Anchor,
355 pub label: InlayHintLabel,
356 pub kind: Option<InlayHintKind>,
357 pub padding_left: bool,
358 pub padding_right: bool,
359 pub tooltip: Option<InlayHintTooltip>,
360 pub resolve_state: ResolveState,
361}
362
363#[derive(Debug, Clone, PartialEq, Eq)]
364pub enum ResolveState {
365 Resolved,
366 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
367 Resolving,
368}
369
370impl InlayHint {
371 pub fn text(&self) -> String {
372 match &self.label {
373 InlayHintLabel::String(s) => s.to_owned(),
374 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
375 }
376 }
377}
378
379#[derive(Debug, Clone, PartialEq, Eq)]
380pub enum InlayHintLabel {
381 String(String),
382 LabelParts(Vec<InlayHintLabelPart>),
383}
384
385#[derive(Debug, Clone, PartialEq, Eq)]
386pub struct InlayHintLabelPart {
387 pub value: String,
388 pub tooltip: Option<InlayHintLabelPartTooltip>,
389 pub location: Option<(LanguageServerId, lsp::Location)>,
390}
391
392#[derive(Debug, Clone, PartialEq, Eq)]
393pub enum InlayHintTooltip {
394 String(String),
395 MarkupContent(MarkupContent),
396}
397
398#[derive(Debug, Clone, PartialEq, Eq)]
399pub enum InlayHintLabelPartTooltip {
400 String(String),
401 MarkupContent(MarkupContent),
402}
403
404#[derive(Debug, Clone, PartialEq, Eq)]
405pub struct MarkupContent {
406 pub kind: HoverBlockKind,
407 pub value: String,
408}
409
410#[derive(Debug, Clone)]
411pub struct LocationLink {
412 pub origin: Option<Location>,
413 pub target: Location,
414}
415
416#[derive(Debug)]
417pub struct DocumentHighlight {
418 pub range: Range<language::Anchor>,
419 pub kind: DocumentHighlightKind,
420}
421
422#[derive(Clone, Debug)]
423pub struct Symbol {
424 pub language_server_name: LanguageServerName,
425 pub source_worktree_id: WorktreeId,
426 pub path: ProjectPath,
427 pub label: CodeLabel,
428 pub name: String,
429 pub kind: lsp::SymbolKind,
430 pub range: Range<Unclipped<PointUtf16>>,
431 pub signature: [u8; 32],
432}
433
434#[derive(Clone, Debug, PartialEq)]
435pub struct HoverBlock {
436 pub text: String,
437 pub kind: HoverBlockKind,
438}
439
440#[derive(Clone, Debug, PartialEq, Eq)]
441pub enum HoverBlockKind {
442 PlainText,
443 Markdown,
444 Code { language: String },
445}
446
447#[derive(Debug)]
448pub struct Hover {
449 pub contents: Vec<HoverBlock>,
450 pub range: Option<Range<language::Anchor>>,
451 pub language: Option<Arc<Language>>,
452}
453
454impl Hover {
455 pub fn is_empty(&self) -> bool {
456 self.contents.iter().all(|block| block.text.is_empty())
457 }
458}
459
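/// The edits produced by an operation that can touch multiple buffers, mapping
/// each affected buffer to the transaction that was applied to it.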
460#[derive(Default)]
461pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
462
463impl DiagnosticSummary {
464 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
465 let mut this = Self {
466 error_count: 0,
467 warning_count: 0,
468 };
469
470 for entry in diagnostics {
471 if entry.diagnostic.is_primary {
472 match entry.diagnostic.severity {
473 DiagnosticSeverity::ERROR => this.error_count += 1,
474 DiagnosticSeverity::WARNING => this.warning_count += 1,
475 _ => {}
476 }
477 }
478 }
479
480 this
481 }
482
483 pub fn is_empty(&self) -> bool {
484 self.error_count == 0 && self.warning_count == 0
485 }
486
487 pub fn to_proto(
488 &self,
489 language_server_id: LanguageServerId,
490 path: &Path,
491 ) -> proto::DiagnosticSummary {
492 proto::DiagnosticSummary {
493 path: path.to_string_lossy().to_string(),
494 language_server_id: language_server_id.0 as u64,
495 error_count: self.error_count as u32,
496 warning_count: self.warning_count as u32,
497 }
498 }
499}
500
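/// A unique identifier for an entry (file or directory) in a project's
/// worktrees, allocated from a shared atomic counter.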
501#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
502pub struct ProjectEntryId(usize);
503
504impl ProjectEntryId {
505 pub const MAX: Self = Self(usize::MAX);
506
507 pub fn new(counter: &AtomicUsize) -> Self {
508 Self(counter.fetch_add(1, SeqCst))
509 }
510
511 pub fn from_proto(id: u64) -> Self {
512 Self(id as usize)
513 }
514
515 pub fn to_proto(&self) -> u64 {
516 self.0 as u64
517 }
518
519 pub fn to_usize(&self) -> usize {
520 self.0
521 }
522}
523
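/// What caused a formatting request: saving the buffer, or an explicit manual
/// command.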
524#[derive(Debug, Clone, Copy, PartialEq, Eq)]
525pub enum FormatTrigger {
526 Save,
527 Manual,
528}
529
530struct ProjectLspAdapterDelegate {
531 project: Model<Project>,
532 http_client: Arc<dyn HttpClient>,
533}
534
// Currently, formatting operations are represented differently depending on
// whether they come from a language server, an external command, or Prettier.
537enum FormatOperation {
538 Lsp(Vec<(Range<Anchor>, String)>),
539 External(Diff),
540 Prettier(Diff),
541}
542
543impl FormatTrigger {
544 fn from_proto(value: i32) -> FormatTrigger {
545 match value {
546 0 => FormatTrigger::Save,
547 1 => FormatTrigger::Manual,
548 _ => FormatTrigger::Save,
549 }
550 }
551}
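
/// A candidate for a project-wide search: either an already-open buffer or an
/// on-disk path that still needs to be scanned.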
552#[derive(Clone, Debug, PartialEq)]
553enum SearchMatchCandidate {
554 OpenBuffer {
555 buffer: Model<Buffer>,
        // This might be an unnamed file without a representation on the filesystem
557 path: Option<Arc<Path>>,
558 },
559 Path {
560 worktree_id: WorktreeId,
561 is_ignored: bool,
562 path: Arc<Path>,
563 },
564}
565
566type SearchMatchCandidateIndex = usize;
567impl SearchMatchCandidate {
568 fn path(&self) -> Option<Arc<Path>> {
569 match self {
570 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
571 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
572 }
573 }
574}
575
576impl Project {
577 pub fn init_settings(cx: &mut AppContext) {
578 ProjectSettings::register(cx);
579 }
580
581 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
582 Self::init_settings(cx);
583
584 client.add_model_message_handler(Self::handle_add_collaborator);
585 client.add_model_message_handler(Self::handle_update_project_collaborator);
586 client.add_model_message_handler(Self::handle_remove_collaborator);
587 client.add_model_message_handler(Self::handle_buffer_reloaded);
588 client.add_model_message_handler(Self::handle_buffer_saved);
589 client.add_model_message_handler(Self::handle_start_language_server);
590 client.add_model_message_handler(Self::handle_update_language_server);
591 client.add_model_message_handler(Self::handle_update_project);
592 client.add_model_message_handler(Self::handle_unshare_project);
593 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
594 client.add_model_message_handler(Self::handle_update_buffer_file);
595 client.add_model_request_handler(Self::handle_update_buffer);
596 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
597 client.add_model_message_handler(Self::handle_update_worktree);
598 client.add_model_message_handler(Self::handle_update_worktree_settings);
599 client.add_model_request_handler(Self::handle_create_project_entry);
600 client.add_model_request_handler(Self::handle_rename_project_entry);
601 client.add_model_request_handler(Self::handle_copy_project_entry);
602 client.add_model_request_handler(Self::handle_delete_project_entry);
603 client.add_model_request_handler(Self::handle_expand_project_entry);
604 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
605 client.add_model_request_handler(Self::handle_apply_code_action);
606 client.add_model_request_handler(Self::handle_on_type_formatting);
607 client.add_model_request_handler(Self::handle_inlay_hints);
608 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
609 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
610 client.add_model_request_handler(Self::handle_reload_buffers);
611 client.add_model_request_handler(Self::handle_synchronize_buffers);
612 client.add_model_request_handler(Self::handle_format_buffers);
613 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
614 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
615 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
616 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
617 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
618 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
619 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
620 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
621 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
622 client.add_model_request_handler(Self::handle_search_project);
623 client.add_model_request_handler(Self::handle_get_project_symbols);
624 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
625 client.add_model_request_handler(Self::handle_open_buffer_by_id);
626 client.add_model_request_handler(Self::handle_open_buffer_by_path);
627 client.add_model_request_handler(Self::handle_save_buffer);
628 client.add_model_message_handler(Self::handle_update_diff_base);
629 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
630 }
631
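    /// Creates a project that operates on files local to this machine.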
632 pub fn local(
633 client: Arc<Client>,
634 node: Arc<dyn NodeRuntime>,
635 user_store: Model<UserStore>,
636 languages: Arc<LanguageRegistry>,
637 fs: Arc<dyn Fs>,
638 cx: &mut AppContext,
639 ) -> Model<Self> {
640 cx.new_model(|cx: &mut ModelContext<Self>| {
641 let (tx, rx) = mpsc::unbounded();
642 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
643 .detach();
644 let copilot_lsp_subscription =
645 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
646 Self {
647 worktrees: Default::default(),
648 buffer_ordered_messages_tx: tx,
649 collaborators: Default::default(),
650 next_buffer_id: 0,
651 opened_buffers: Default::default(),
652 shared_buffers: Default::default(),
653 incomplete_remote_buffers: Default::default(),
654 loading_buffers_by_path: Default::default(),
655 loading_local_worktrees: Default::default(),
656 local_buffer_ids_by_path: Default::default(),
657 local_buffer_ids_by_entry_id: Default::default(),
658 buffer_snapshots: Default::default(),
659 join_project_response_message_id: 0,
660 client_state: None,
661 opened_buffer: watch::channel(),
662 client_subscriptions: Vec::new(),
663 _subscriptions: vec![
664 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
665 cx.on_release(Self::release),
666 cx.on_app_quit(Self::shutdown_language_servers),
667 ],
668 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
669 _maintain_workspace_config: Self::maintain_workspace_config(cx),
670 active_entry: None,
671 languages,
672 client,
673 user_store,
674 fs,
675 next_entry_id: Default::default(),
676 next_diagnostic_group_id: Default::default(),
677 supplementary_language_servers: HashMap::default(),
678 language_servers: Default::default(),
679 language_server_ids: Default::default(),
680 language_server_statuses: Default::default(),
681 last_workspace_edits_by_language_server: Default::default(),
682 buffers_being_formatted: Default::default(),
683 buffers_needing_diff: Default::default(),
684 git_diff_debouncer: DelayedDebounced::new(),
685 nonce: StdRng::from_entropy().gen(),
686 terminals: Terminals {
687 local_handles: Vec::new(),
688 },
689 copilot_lsp_subscription,
690 copilot_log_subscription: None,
691 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
692 node: Some(node),
693 default_prettier: DefaultPrettier::default(),
694 prettiers_per_worktree: HashMap::default(),
695 prettier_instances: HashMap::default(),
696 }
697 })
698 }
699
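    /// Joins a project that another user is sharing, identified by `remote_id`.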
700 pub async fn remote(
701 remote_id: u64,
702 client: Arc<Client>,
703 user_store: Model<UserStore>,
704 languages: Arc<LanguageRegistry>,
705 fs: Arc<dyn Fs>,
706 role: proto::ChannelRole,
707 mut cx: AsyncAppContext,
708 ) -> Result<Model<Self>> {
709 client.authenticate_and_connect(true, &cx).await?;
710
711 let subscription = client.subscribe_to_entity(remote_id)?;
712 let response = client
713 .request_envelope(proto::JoinProject {
714 project_id: remote_id,
715 })
716 .await?;
717 let this = cx.new_model(|cx| {
718 let replica_id = response.payload.replica_id as ReplicaId;
719
720 let mut worktrees = Vec::new();
721 for worktree in response.payload.worktrees {
722 let worktree =
723 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
724 worktrees.push(worktree);
725 }
726
727 let (tx, rx) = mpsc::unbounded();
728 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
729 .detach();
730 let copilot_lsp_subscription =
731 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
732 let mut this = Self {
733 worktrees: Vec::new(),
734 buffer_ordered_messages_tx: tx,
735 loading_buffers_by_path: Default::default(),
736 next_buffer_id: 0,
737 opened_buffer: watch::channel(),
738 shared_buffers: Default::default(),
739 incomplete_remote_buffers: Default::default(),
740 loading_local_worktrees: Default::default(),
741 local_buffer_ids_by_path: Default::default(),
742 local_buffer_ids_by_entry_id: Default::default(),
743 active_entry: None,
744 collaborators: Default::default(),
745 join_project_response_message_id: response.message_id,
746 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
747 _maintain_workspace_config: Self::maintain_workspace_config(cx),
748 languages,
749 user_store: user_store.clone(),
750 fs,
751 next_entry_id: Default::default(),
752 next_diagnostic_group_id: Default::default(),
753 client_subscriptions: Default::default(),
754 _subscriptions: vec![
755 cx.on_release(Self::release),
756 cx.on_app_quit(Self::shutdown_language_servers),
757 ],
758 client: client.clone(),
759 client_state: Some(ProjectClientState::Remote {
760 sharing_has_stopped: false,
761 capability: Capability::ReadWrite,
762 remote_id,
763 replica_id,
764 }),
765 supplementary_language_servers: HashMap::default(),
766 language_servers: Default::default(),
767 language_server_ids: Default::default(),
768 language_server_statuses: response
769 .payload
770 .language_servers
771 .into_iter()
772 .map(|server| {
773 (
774 LanguageServerId(server.id as usize),
775 LanguageServerStatus {
776 name: server.name,
777 pending_work: Default::default(),
778 has_pending_diagnostic_updates: false,
779 progress_tokens: Default::default(),
780 },
781 )
782 })
783 .collect(),
784 last_workspace_edits_by_language_server: Default::default(),
785 opened_buffers: Default::default(),
786 buffers_being_formatted: Default::default(),
787 buffers_needing_diff: Default::default(),
788 git_diff_debouncer: DelayedDebounced::new(),
789 buffer_snapshots: Default::default(),
790 nonce: StdRng::from_entropy().gen(),
791 terminals: Terminals {
792 local_handles: Vec::new(),
793 },
794 copilot_lsp_subscription,
795 copilot_log_subscription: None,
796 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
797 node: None,
798 default_prettier: DefaultPrettier::default(),
799 prettiers_per_worktree: HashMap::default(),
800 prettier_instances: HashMap::default(),
801 };
802 this.set_role(role, cx);
803 for worktree in worktrees {
804 let _ = this.add_worktree(&worktree, cx);
805 }
806 this
807 })?;
808 let subscription = subscription.set_model(&this, &mut cx);
809
810 let user_ids = response
811 .payload
812 .collaborators
813 .iter()
814 .map(|peer| peer.user_id)
815 .collect();
816 user_store
817 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
818 .await?;
819
820 this.update(&mut cx, |this, cx| {
821 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
822 this.client_subscriptions.push(subscription);
823 anyhow::Ok(())
824 })??;
825
826 Ok(this)
827 }
828
829 fn release(&mut self, cx: &mut AppContext) {
830 match &self.client_state {
831 Some(ProjectClientState::Local { .. }) => {
832 let _ = self.unshare_internal(cx);
833 }
834 Some(ProjectClientState::Remote { remote_id, .. }) => {
835 let _ = self.client.send(proto::LeaveProject {
836 project_id: *remote_id,
837 });
838 self.disconnected_from_host_internal(cx);
839 }
840 _ => {}
841 }
842 }
843
844 fn shutdown_language_servers(
845 &mut self,
846 _cx: &mut ModelContext<Self>,
847 ) -> impl Future<Output = ()> {
848 let shutdown_futures = self
849 .language_servers
850 .drain()
851 .map(|(_, server_state)| async {
852 use LanguageServerState::*;
853 match server_state {
854 Running { server, .. } => server.shutdown()?.await,
855 Starting(task) => task.await?.shutdown()?.await,
856 }
857 })
858 .collect::<Vec<_>>();
859
860 async move {
861 futures::future::join_all(shutdown_futures).await;
862 }
863 }
864
865 #[cfg(any(test, feature = "test-support"))]
866 pub async fn test(
867 fs: Arc<dyn Fs>,
868 root_paths: impl IntoIterator<Item = &Path>,
869 cx: &mut gpui::TestAppContext,
870 ) -> Model<Project> {
871 let mut languages = LanguageRegistry::test();
872 languages.set_executor(cx.executor());
873 let http_client = util::http::FakeHttpClient::with_404_response();
874 let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
875 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
876 let project = cx.update(|cx| {
877 Project::local(
878 client,
879 node_runtime::FakeNodeRuntime::new(),
880 user_store,
881 Arc::new(languages),
882 fs,
883 cx,
884 )
885 });
886 for path in root_paths {
887 let (tree, _) = project
888 .update(cx, |project, cx| {
889 project.find_or_create_local_worktree(path, true, cx)
890 })
891 .await
892 .unwrap();
893 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
894 .await;
895 }
896 project
897 }
898
899 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
900 let mut language_servers_to_start = Vec::new();
901 let mut language_formatters_to_check = Vec::new();
902 for buffer in self.opened_buffers.values() {
903 if let Some(buffer) = buffer.upgrade() {
904 let buffer = buffer.read(cx);
905 let buffer_file = File::from_dyn(buffer.file());
906 let buffer_language = buffer.language();
907 let settings = language_settings(buffer_language, buffer.file(), cx);
908 if let Some(language) = buffer_language {
909 if settings.enable_language_server {
910 if let Some(file) = buffer_file {
911 language_servers_to_start
912 .push((file.worktree.clone(), Arc::clone(language)));
913 }
914 }
915 language_formatters_to_check.push((
916 buffer_file.map(|f| f.worktree_id(cx)),
917 Arc::clone(language),
918 settings.clone(),
919 ));
920 }
921 }
922 }
923
924 let mut language_servers_to_stop = Vec::new();
925 let mut language_servers_to_restart = Vec::new();
926 let languages = self.languages.to_vec();
927
928 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
929 let current_lsp_settings = &self.current_lsp_settings;
930 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
931 let language = languages.iter().find_map(|l| {
932 let adapter = l
933 .lsp_adapters()
934 .iter()
935 .find(|adapter| &adapter.name == started_lsp_name)?;
936 Some((l, adapter))
937 });
938 if let Some((language, adapter)) = language {
939 let worktree = self.worktree_for_id(*worktree_id, cx);
940 let file = worktree.as_ref().and_then(|tree| {
941 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
942 });
943 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
944 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
945 } else if let Some(worktree) = worktree {
946 let server_name = &adapter.name.0;
947 match (
948 current_lsp_settings.get(server_name),
949 new_lsp_settings.get(server_name),
950 ) {
951 (None, None) => {}
952 (Some(_), None) | (None, Some(_)) => {
953 language_servers_to_restart.push((worktree, Arc::clone(language)));
954 }
955 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
956 if current_lsp_settings != new_lsp_settings {
957 language_servers_to_restart.push((worktree, Arc::clone(language)));
958 }
959 }
960 }
961 }
962 }
963 }
964 self.current_lsp_settings = new_lsp_settings;
965
966 // Stop all newly-disabled language servers.
967 for (worktree_id, adapter_name) in language_servers_to_stop {
968 self.stop_language_server(worktree_id, adapter_name, cx)
969 .detach();
970 }
971
972 let mut prettier_plugins_by_worktree = HashMap::default();
973 for (worktree, language, settings) in language_formatters_to_check {
974 if let Some(plugins) =
975 prettier_support::prettier_plugins_for_language(&language, &settings)
976 {
977 prettier_plugins_by_worktree
978 .entry(worktree)
                    .or_default()
980 .extend(plugins);
981 }
982 }
983 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
984 self.install_default_prettier(worktree, prettier_plugins, cx);
985 }
986
987 // Start all the newly-enabled language servers.
988 for (worktree, language) in language_servers_to_start {
989 let worktree_path = worktree.read(cx).abs_path();
990 self.start_language_servers(&worktree, worktree_path, language, cx);
991 }
992
993 // Restart all language servers with changed initialization options.
994 for (worktree, language) in language_servers_to_restart {
995 self.restart_language_servers(worktree, language, cx);
996 }
997
998 if self.copilot_lsp_subscription.is_none() {
999 if let Some(copilot) = Copilot::global(cx) {
1000 for buffer in self.opened_buffers.values() {
1001 if let Some(buffer) = buffer.upgrade() {
1002 self.register_buffer_with_copilot(&buffer, cx);
1003 }
1004 }
1005 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
1006 }
1007 }
1008
1009 cx.notify();
1010 }
1011
1012 pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> {
1013 self.opened_buffers
1014 .get(&remote_id)
1015 .and_then(|buffer| buffer.upgrade())
1016 }
1017
1018 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1019 &self.languages
1020 }
1021
1022 pub fn client(&self) -> Arc<Client> {
1023 self.client.clone()
1024 }
1025
1026 pub fn user_store(&self) -> Model<UserStore> {
1027 self.user_store.clone()
1028 }
1029
1030 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1031 self.opened_buffers
1032 .values()
1033 .filter_map(|b| b.upgrade())
1034 .collect()
1035 }
1036
1037 #[cfg(any(test, feature = "test-support"))]
1038 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1039 let path = path.into();
1040 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1041 self.opened_buffers.iter().any(|(_, buffer)| {
1042 if let Some(buffer) = buffer.upgrade() {
1043 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1044 if file.worktree == worktree && file.path() == &path.path {
1045 return true;
1046 }
1047 }
1048 }
1049 false
1050 })
1051 } else {
1052 false
1053 }
1054 }
1055
1056 pub fn fs(&self) -> &Arc<dyn Fs> {
1057 &self.fs
1058 }
1059
1060 pub fn remote_id(&self) -> Option<u64> {
1061 match self.client_state.as_ref()? {
1062 ProjectClientState::Local { remote_id, .. }
1063 | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
1064 }
1065 }
1066
1067 pub fn replica_id(&self) -> ReplicaId {
1068 match &self.client_state {
1069 Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
1070 _ => 0,
1071 }
1072 }
1073
1074 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1075 if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
1076 updates_tx
1077 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1078 .ok();
1079 }
1080 cx.notify();
1081 }
1082
1083 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1084 &self.collaborators
1085 }
1086
1087 pub fn host(&self) -> Option<&Collaborator> {
1088 self.collaborators.values().find(|c| c.replica_id == 0)
1089 }
1090
    /// Returns an iterator over all worktrees, including ones that don't appear in the project panel.
1092 pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1093 self.worktrees
1094 .iter()
1095 .filter_map(move |worktree| worktree.upgrade())
1096 }
1097
    /// Returns an iterator over all user-visible worktrees, the ones that appear in the project panel.
1099 pub fn visible_worktrees<'a>(
1100 &'a self,
1101 cx: &'a AppContext,
1102 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1103 self.worktrees.iter().filter_map(|worktree| {
1104 worktree.upgrade().and_then(|worktree| {
1105 if worktree.read(cx).is_visible() {
1106 Some(worktree)
1107 } else {
1108 None
1109 }
1110 })
1111 })
1112 }
1113
1114 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1115 self.visible_worktrees(cx)
1116 .map(|tree| tree.read(cx).root_name())
1117 }
1118
1119 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1120 self.worktrees()
1121 .find(|worktree| worktree.read(cx).id() == id)
1122 }
1123
1124 pub fn worktree_for_entry(
1125 &self,
1126 entry_id: ProjectEntryId,
1127 cx: &AppContext,
1128 ) -> Option<Model<Worktree>> {
1129 self.worktrees()
1130 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1131 }
1132
1133 pub fn worktree_id_for_entry(
1134 &self,
1135 entry_id: ProjectEntryId,
1136 cx: &AppContext,
1137 ) -> Option<WorktreeId> {
1138 self.worktree_for_entry(entry_id, cx)
1139 .map(|worktree| worktree.read(cx).id())
1140 }
1141
1142 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1143 paths.iter().all(|path| self.contains_path(path, cx))
1144 }
1145
1146 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1147 for worktree in self.worktrees() {
1148 let worktree = worktree.read(cx).as_local();
1149 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1150 return true;
1151 }
1152 }
1153 false
1154 }
1155
1156 pub fn create_entry(
1157 &mut self,
1158 project_path: impl Into<ProjectPath>,
1159 is_directory: bool,
1160 cx: &mut ModelContext<Self>,
1161 ) -> Task<Result<Option<Entry>>> {
1162 let project_path = project_path.into();
1163 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1164 return Task::ready(Ok(None));
1165 };
1166 if self.is_local() {
1167 worktree.update(cx, |worktree, cx| {
1168 worktree
1169 .as_local_mut()
1170 .unwrap()
1171 .create_entry(project_path.path, is_directory, cx)
1172 })
1173 } else {
1174 let client = self.client.clone();
1175 let project_id = self.remote_id().unwrap();
1176 cx.spawn(move |_, mut cx| async move {
1177 let response = client
1178 .request(proto::CreateProjectEntry {
1179 worktree_id: project_path.worktree_id.to_proto(),
1180 project_id,
1181 path: project_path.path.to_string_lossy().into(),
1182 is_directory,
1183 })
1184 .await?;
1185 match response.entry {
1186 Some(entry) => worktree
1187 .update(&mut cx, |worktree, cx| {
1188 worktree.as_remote_mut().unwrap().insert_entry(
1189 entry,
1190 response.worktree_scan_id as usize,
1191 cx,
1192 )
1193 })?
1194 .await
1195 .map(Some),
1196 None => Ok(None),
1197 }
1198 })
1199 }
1200 }
1201
1202 pub fn copy_entry(
1203 &mut self,
1204 entry_id: ProjectEntryId,
1205 new_path: impl Into<Arc<Path>>,
1206 cx: &mut ModelContext<Self>,
1207 ) -> Task<Result<Option<Entry>>> {
1208 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1209 return Task::ready(Ok(None));
1210 };
1211 let new_path = new_path.into();
1212 if self.is_local() {
1213 worktree.update(cx, |worktree, cx| {
1214 worktree
1215 .as_local_mut()
1216 .unwrap()
1217 .copy_entry(entry_id, new_path, cx)
1218 })
1219 } else {
1220 let client = self.client.clone();
1221 let project_id = self.remote_id().unwrap();
1222
1223 cx.spawn(move |_, mut cx| async move {
1224 let response = client
1225 .request(proto::CopyProjectEntry {
1226 project_id,
1227 entry_id: entry_id.to_proto(),
1228 new_path: new_path.to_string_lossy().into(),
1229 })
1230 .await?;
1231 match response.entry {
1232 Some(entry) => worktree
1233 .update(&mut cx, |worktree, cx| {
1234 worktree.as_remote_mut().unwrap().insert_entry(
1235 entry,
1236 response.worktree_scan_id as usize,
1237 cx,
1238 )
1239 })?
1240 .await
1241 .map(Some),
1242 None => Ok(None),
1243 }
1244 })
1245 }
1246 }
1247
1248 pub fn rename_entry(
1249 &mut self,
1250 entry_id: ProjectEntryId,
1251 new_path: impl Into<Arc<Path>>,
1252 cx: &mut ModelContext<Self>,
1253 ) -> Task<Result<Option<Entry>>> {
1254 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1255 return Task::ready(Ok(None));
1256 };
1257 let new_path = new_path.into();
1258 if self.is_local() {
1259 worktree.update(cx, |worktree, cx| {
1260 worktree
1261 .as_local_mut()
1262 .unwrap()
1263 .rename_entry(entry_id, new_path, cx)
1264 })
1265 } else {
1266 let client = self.client.clone();
1267 let project_id = self.remote_id().unwrap();
1268
1269 cx.spawn(move |_, mut cx| async move {
1270 let response = client
1271 .request(proto::RenameProjectEntry {
1272 project_id,
1273 entry_id: entry_id.to_proto(),
1274 new_path: new_path.to_string_lossy().into(),
1275 })
1276 .await?;
1277 match response.entry {
1278 Some(entry) => worktree
1279 .update(&mut cx, |worktree, cx| {
1280 worktree.as_remote_mut().unwrap().insert_entry(
1281 entry,
1282 response.worktree_scan_id as usize,
1283 cx,
1284 )
1285 })?
1286 .await
1287 .map(Some),
1288 None => Ok(None),
1289 }
1290 })
1291 }
1292 }
1293
1294 pub fn delete_entry(
1295 &mut self,
1296 entry_id: ProjectEntryId,
1297 cx: &mut ModelContext<Self>,
1298 ) -> Option<Task<Result<()>>> {
1299 let worktree = self.worktree_for_entry(entry_id, cx)?;
1300
1301 cx.emit(Event::DeletedEntry(entry_id));
1302
1303 if self.is_local() {
1304 worktree.update(cx, |worktree, cx| {
1305 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1306 })
1307 } else {
1308 let client = self.client.clone();
1309 let project_id = self.remote_id().unwrap();
1310 Some(cx.spawn(move |_, mut cx| async move {
1311 let response = client
1312 .request(proto::DeleteProjectEntry {
1313 project_id,
1314 entry_id: entry_id.to_proto(),
1315 })
1316 .await?;
1317 worktree
1318 .update(&mut cx, move |worktree, cx| {
1319 worktree.as_remote_mut().unwrap().delete_entry(
1320 entry_id,
1321 response.worktree_scan_id as usize,
1322 cx,
1323 )
1324 })?
1325 .await
1326 }))
1327 }
1328 }
1329
1330 pub fn expand_entry(
1331 &mut self,
1332 worktree_id: WorktreeId,
1333 entry_id: ProjectEntryId,
1334 cx: &mut ModelContext<Self>,
1335 ) -> Option<Task<Result<()>>> {
1336 let worktree = self.worktree_for_id(worktree_id, cx)?;
1337 if self.is_local() {
1338 worktree.update(cx, |worktree, cx| {
1339 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1340 })
1341 } else {
1342 let worktree = worktree.downgrade();
1343 let request = self.client.request(proto::ExpandProjectEntry {
1344 project_id: self.remote_id().unwrap(),
1345 entry_id: entry_id.to_proto(),
1346 });
1347 Some(cx.spawn(move |_, mut cx| async move {
1348 let response = request.await?;
1349 if let Some(worktree) = worktree.upgrade() {
1350 worktree
1351 .update(&mut cx, |worktree, _| {
1352 worktree
1353 .as_remote_mut()
1354 .unwrap()
1355 .wait_for_snapshot(response.worktree_scan_id as usize)
1356 })?
1357 .await?;
1358 }
1359 Ok(())
1360 }))
1361 }
1362 }
1363
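    /// Starts sharing this local project under the given `project_id`: buffer
    /// and worktree handles are upgraded to strong references, current language
    /// server statuses and local settings are sent to the server, and subsequent
    /// changes are streamed to collaborators.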
1364 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1365 if self.client_state.is_some() {
1366 return Err(anyhow!("project was already shared"));
1367 }
1368 self.client_subscriptions.push(
1369 self.client
1370 .subscribe_to_entity(project_id)?
1371 .set_model(&cx.handle(), &mut cx.to_async()),
1372 );
1373
1374 for open_buffer in self.opened_buffers.values_mut() {
1375 match open_buffer {
1376 OpenBuffer::Strong(_) => {}
1377 OpenBuffer::Weak(buffer) => {
1378 if let Some(buffer) = buffer.upgrade() {
1379 *open_buffer = OpenBuffer::Strong(buffer);
1380 }
1381 }
1382 OpenBuffer::Operations(_) => unreachable!(),
1383 }
1384 }
1385
1386 for worktree_handle in self.worktrees.iter_mut() {
1387 match worktree_handle {
1388 WorktreeHandle::Strong(_) => {}
1389 WorktreeHandle::Weak(worktree) => {
1390 if let Some(worktree) = worktree.upgrade() {
1391 *worktree_handle = WorktreeHandle::Strong(worktree);
1392 }
1393 }
1394 }
1395 }
1396
1397 for (server_id, status) in &self.language_server_statuses {
1398 self.client
1399 .send(proto::StartLanguageServer {
1400 project_id,
1401 server: Some(proto::LanguageServer {
1402 id: server_id.0 as u64,
1403 name: status.name.clone(),
1404 }),
1405 })
1406 .log_err();
1407 }
1408
1409 let store = cx.global::<SettingsStore>();
1410 for worktree in self.worktrees() {
1411 let worktree_id = worktree.read(cx).id().to_proto();
1412 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1413 self.client
1414 .send(proto::UpdateWorktreeSettings {
1415 project_id,
1416 worktree_id,
1417 path: path.to_string_lossy().into(),
1418 content: Some(content),
1419 })
1420 .log_err();
1421 }
1422 }
1423
1424 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1425 let client = self.client.clone();
1426 self.client_state = Some(ProjectClientState::Local {
1427 remote_id: project_id,
1428 updates_tx,
1429 _send_updates: cx.spawn(move |this, mut cx| async move {
1430 while let Some(update) = updates_rx.next().await {
1431 match update {
1432 LocalProjectUpdate::WorktreesChanged => {
1433 let worktrees = this.update(&mut cx, |this, _cx| {
1434 this.worktrees().collect::<Vec<_>>()
1435 })?;
1436 let update_project = this
1437 .update(&mut cx, |this, cx| {
1438 this.client.request(proto::UpdateProject {
1439 project_id,
1440 worktrees: this.worktree_metadata_protos(cx),
1441 })
1442 })?
1443 .await;
1444 if update_project.is_ok() {
1445 for worktree in worktrees {
1446 worktree.update(&mut cx, |worktree, cx| {
1447 let worktree = worktree.as_local_mut().unwrap();
1448 worktree.share(project_id, cx).detach_and_log_err(cx)
1449 })?;
1450 }
1451 }
1452 }
1453 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1454 let buffer = this.update(&mut cx, |this, _| {
1455 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1456 let shared_buffers =
1457 this.shared_buffers.entry(peer_id).or_default();
1458 if shared_buffers.insert(buffer_id) {
1459 if let OpenBuffer::Strong(buffer) = buffer {
1460 Some(buffer.clone())
1461 } else {
1462 None
1463 }
1464 } else {
1465 None
1466 }
1467 })?;
1468
1469 let Some(buffer) = buffer else { continue };
1470 let operations =
1471 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1472 let operations = operations.await;
1473 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1474
1475 let initial_state = proto::CreateBufferForPeer {
1476 project_id,
1477 peer_id: Some(peer_id),
1478 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1479 };
1480 if client.send(initial_state).log_err().is_some() {
1481 let client = client.clone();
1482 cx.background_executor()
1483 .spawn(async move {
1484 let mut chunks = split_operations(operations).peekable();
1485 while let Some(chunk) = chunks.next() {
1486 let is_last = chunks.peek().is_none();
1487 client.send(proto::CreateBufferForPeer {
1488 project_id,
1489 peer_id: Some(peer_id),
1490 variant: Some(
1491 proto::create_buffer_for_peer::Variant::Chunk(
1492 proto::BufferChunk {
1493 buffer_id,
1494 operations: chunk,
1495 is_last,
1496 },
1497 ),
1498 ),
1499 })?;
1500 }
1501 anyhow::Ok(())
1502 })
1503 .await
1504 .log_err();
1505 }
1506 }
1507 }
1508 }
1509 Ok(())
1510 }),
1511 });
1512
1513 self.metadata_changed(cx);
1514 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1515 cx.notify();
1516 Ok(())
1517 }
1518
1519 pub fn reshared(
1520 &mut self,
1521 message: proto::ResharedProject,
1522 cx: &mut ModelContext<Self>,
1523 ) -> Result<()> {
1524 self.shared_buffers.clear();
1525 self.set_collaborators_from_proto(message.collaborators, cx)?;
1526 self.metadata_changed(cx);
1527 Ok(())
1528 }
1529
1530 pub fn rejoined(
1531 &mut self,
1532 message: proto::RejoinedProject,
1533 message_id: u32,
1534 cx: &mut ModelContext<Self>,
1535 ) -> Result<()> {
1536 cx.update_global::<SettingsStore, _>(|store, cx| {
1537 for worktree in &self.worktrees {
1538 store
1539 .clear_local_settings(worktree.handle_id(), cx)
1540 .log_err();
1541 }
1542 });
1543
1544 self.join_project_response_message_id = message_id;
1545 self.set_worktrees_from_proto(message.worktrees, cx)?;
1546 self.set_collaborators_from_proto(message.collaborators, cx)?;
1547 self.language_server_statuses = message
1548 .language_servers
1549 .into_iter()
1550 .map(|server| {
1551 (
1552 LanguageServerId(server.id as usize),
1553 LanguageServerStatus {
1554 name: server.name,
1555 pending_work: Default::default(),
1556 has_pending_diagnostic_updates: false,
1557 progress_tokens: Default::default(),
1558 },
1559 )
1560 })
1561 .collect();
1562 self.buffer_ordered_messages_tx
1563 .unbounded_send(BufferOrderedMessage::Resync)
1564 .unwrap();
1565 cx.notify();
1566 Ok(())
1567 }
1568
1569 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1570 self.unshare_internal(cx)?;
1571 self.metadata_changed(cx);
1572 cx.notify();
1573 Ok(())
1574 }
1575
1576 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1577 if self.is_remote() {
1578 return Err(anyhow!("attempted to unshare a remote project"));
1579 }
1580
1581 if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
1582 self.collaborators.clear();
1583 self.shared_buffers.clear();
1584 self.client_subscriptions.clear();
1585
1586 for worktree_handle in self.worktrees.iter_mut() {
1587 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1588 let is_visible = worktree.update(cx, |worktree, _| {
1589 worktree.as_local_mut().unwrap().unshare();
1590 worktree.is_visible()
1591 });
1592 if !is_visible {
1593 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1594 }
1595 }
1596 }
1597
1598 for open_buffer in self.opened_buffers.values_mut() {
1599 // Wake up any tasks waiting for peers' edits to this buffer.
1600 if let Some(buffer) = open_buffer.upgrade() {
1601 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1602 }
1603
1604 if let OpenBuffer::Strong(buffer) = open_buffer {
1605 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1606 }
1607 }
1608
1609 self.client.send(proto::UnshareProject {
1610 project_id: remote_id,
1611 })?;
1612
1613 Ok(())
1614 } else {
1615 Err(anyhow!("attempted to unshare an unshared project"))
1616 }
1617 }
1618
1619 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1620 self.disconnected_from_host_internal(cx);
1621 cx.emit(Event::DisconnectedFromHost);
1622 cx.notify();
1623 }
1624
1625 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1626 let new_capability =
1627 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1628 Capability::ReadWrite
1629 } else {
1630 Capability::ReadOnly
1631 };
1632 if let Some(ProjectClientState::Remote { capability, .. }) = &mut self.client_state {
1633 if *capability == new_capability {
1634 return;
1635 }
1636
1637 *capability = new_capability;
1638 }
1639 for buffer in self.opened_buffers() {
1640 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1641 }
1642 }
1643
1644 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1645 if let Some(ProjectClientState::Remote {
1646 sharing_has_stopped,
1647 ..
1648 }) = &mut self.client_state
1649 {
1650 *sharing_has_stopped = true;
1651
1652 self.collaborators.clear();
1653
1654 for worktree in &self.worktrees {
1655 if let Some(worktree) = worktree.upgrade() {
1656 worktree.update(cx, |worktree, _| {
1657 if let Some(worktree) = worktree.as_remote_mut() {
1658 worktree.disconnected_from_host();
1659 }
1660 });
1661 }
1662 }
1663
1664 for open_buffer in self.opened_buffers.values_mut() {
1665 // Wake up any tasks waiting for peers' edits to this buffer.
1666 if let Some(buffer) = open_buffer.upgrade() {
1667 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1668 }
1669
1670 if let OpenBuffer::Strong(buffer) = open_buffer {
1671 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1672 }
1673 }
1674
1675 // Wake up all futures currently waiting on a buffer to get opened,
1676 // to give them a chance to fail now that we've disconnected.
1677 *self.opened_buffer.0.borrow_mut() = ();
1678 }
1679 }
1680
1681 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1682 cx.emit(Event::Closed);
1683 }
1684
1685 pub fn is_disconnected(&self) -> bool {
1686 match &self.client_state {
1687 Some(ProjectClientState::Remote {
1688 sharing_has_stopped,
1689 ..
1690 }) => *sharing_has_stopped,
1691 _ => false,
1692 }
1693 }
1694
1695 pub fn capability(&self) -> Capability {
1696 match &self.client_state {
1697 Some(ProjectClientState::Remote { capability, .. }) => *capability,
1698 Some(ProjectClientState::Local { .. }) | None => Capability::ReadWrite,
1699 }
1700 }
1701
1702 pub fn is_read_only(&self) -> bool {
1703 self.is_disconnected() || self.capability() == Capability::ReadOnly
1704 }
1705
1706 pub fn is_local(&self) -> bool {
1707 match &self.client_state {
1708 Some(ProjectClientState::Remote { .. }) => false,
1709 _ => true,
1710 }
1711 }
1712
1713 pub fn is_remote(&self) -> bool {
1714 !self.is_local()
1715 }
1716
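    /// Creates a new, in-memory buffer with the given text and language.
    /// Only supported on local projects for now.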
1717 pub fn create_buffer(
1718 &mut self,
1719 text: &str,
1720 language: Option<Arc<Language>>,
1721 cx: &mut ModelContext<Self>,
1722 ) -> Result<Model<Buffer>> {
1723 if self.is_remote() {
1724 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1725 }
1726 let id = post_inc(&mut self.next_buffer_id);
1727 let buffer = cx.new_model(|cx| {
1728 Buffer::new(self.replica_id(), id, text)
1729 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1730 });
1731 self.register_buffer(&buffer, cx)?;
1732 Ok(buffer)
1733 }
1734
1735 pub fn open_path(
1736 &mut self,
1737 path: ProjectPath,
1738 cx: &mut ModelContext<Self>,
1739 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1740 let task = self.open_buffer(path.clone(), cx);
1741 cx.spawn(move |_, cx| async move {
1742 let buffer = task.await?;
1743 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1744 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1745 })?;
1746
1747 let buffer: &AnyModel = &buffer;
1748 Ok((project_entry_id, buffer.clone()))
1749 })
1750 }
1751
1752 pub fn open_local_buffer(
1753 &mut self,
1754 abs_path: impl AsRef<Path>,
1755 cx: &mut ModelContext<Self>,
1756 ) -> Task<Result<Model<Buffer>>> {
1757 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1758 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1759 } else {
1760 Task::ready(Err(anyhow!("no such path")))
1761 }
1762 }
1763
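    /// Opens a buffer for the given project path. If the buffer is already open
    /// it is returned directly, and concurrent requests for the same path share
    /// a single load.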
1764 pub fn open_buffer(
1765 &mut self,
1766 path: impl Into<ProjectPath>,
1767 cx: &mut ModelContext<Self>,
1768 ) -> Task<Result<Model<Buffer>>> {
1769 let project_path = path.into();
1770 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1771 worktree
1772 } else {
1773 return Task::ready(Err(anyhow!("no such worktree")));
1774 };
1775
1776 // If there is already a buffer for the given path, then return it.
1777 let existing_buffer = self.get_open_buffer(&project_path, cx);
1778 if let Some(existing_buffer) = existing_buffer {
1779 return Task::ready(Ok(existing_buffer));
1780 }
1781
1782 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1783 // If the given path is already being loaded, then wait for that existing
1784 // task to complete and return the same buffer.
1785 hash_map::Entry::Occupied(e) => e.get().clone(),
1786
1787 // Otherwise, record the fact that this path is now being loaded.
1788 hash_map::Entry::Vacant(entry) => {
1789 let (mut tx, rx) = postage::watch::channel();
1790 entry.insert(rx.clone());
1791
1792 let load_buffer = if worktree.read(cx).is_local() {
1793 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1794 } else {
1795 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1796 };
1797
1798 let project_path = project_path.clone();
1799 cx.spawn(move |this, mut cx| async move {
1800 let load_result = load_buffer.await;
1801 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1802 // Record the fact that the buffer is no longer loading.
1803 this.loading_buffers_by_path.remove(&project_path);
1804 let buffer = load_result.map_err(Arc::new)?;
1805 Ok(buffer)
1806 })?);
1807 anyhow::Ok(())
1808 })
1809 .detach();
1810 rx
1811 }
1812 };
1813
1814 cx.background_executor().spawn(async move {
1815 wait_for_loading_buffer(loading_watch)
1816 .await
1817 .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
1818 })
1819 }
1820
1821 fn open_local_buffer_internal(
1822 &mut self,
1823 path: &Arc<Path>,
1824 worktree: &Model<Worktree>,
1825 cx: &mut ModelContext<Self>,
1826 ) -> Task<Result<Model<Buffer>>> {
1827 let buffer_id = post_inc(&mut self.next_buffer_id);
1828 let load_buffer = worktree.update(cx, |worktree, cx| {
1829 let worktree = worktree.as_local_mut().unwrap();
1830 worktree.load_buffer(buffer_id, path, cx)
1831 });
1832 cx.spawn(move |this, mut cx| async move {
1833 let buffer = load_buffer.await?;
1834 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1835 Ok(buffer)
1836 })
1837 }
1838
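    // Opens a buffer in a remote worktree by requesting it from the host over RPC and then
    // waiting for the corresponding buffer state to arrive.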
1839 fn open_remote_buffer_internal(
1840 &mut self,
1841 path: &Arc<Path>,
1842 worktree: &Model<Worktree>,
1843 cx: &mut ModelContext<Self>,
1844 ) -> Task<Result<Model<Buffer>>> {
1845 let rpc = self.client.clone();
1846 let project_id = self.remote_id().unwrap();
1847 let remote_worktree_id = worktree.read(cx).id();
1848 let path = path.clone();
1849 let path_string = path.to_string_lossy().to_string();
1850 cx.spawn(move |this, mut cx| async move {
1851 let response = rpc
1852 .request(proto::OpenBufferByPath {
1853 project_id,
1854 worktree_id: remote_worktree_id.to_proto(),
1855 path: path_string,
1856 })
1857 .await?;
1858 this.update(&mut cx, |this, cx| {
1859 this.wait_for_remote_buffer(response.buffer_id, cx)
1860 })?
1861 .await
1862 })
1863 }
1864
1865 /// LanguageServerName is owned, because it is inserted into a map
1866 pub fn open_local_buffer_via_lsp(
1867 &mut self,
1868 abs_path: lsp::Url,
1869 language_server_id: LanguageServerId,
1870 language_server_name: LanguageServerName,
1871 cx: &mut ModelContext<Self>,
1872 ) -> Task<Result<Model<Buffer>>> {
1873 cx.spawn(move |this, mut cx| async move {
1874 let abs_path = abs_path
1875 .to_file_path()
1876 .map_err(|_| anyhow!("can't convert URI to path"))?;
1877 let (worktree, relative_path) = if let Some(result) =
1878 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1879 {
1880 result
1881 } else {
1882 let worktree = this
1883 .update(&mut cx, |this, cx| {
1884 this.create_local_worktree(&abs_path, false, cx)
1885 })?
1886 .await?;
1887 this.update(&mut cx, |this, cx| {
1888 this.language_server_ids.insert(
1889 (worktree.read(cx).id(), language_server_name),
1890 language_server_id,
1891 );
1892 })
1893 .ok();
1894 (worktree, PathBuf::new())
1895 };
1896
1897 let project_path = ProjectPath {
1898 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1899 path: relative_path.into(),
1900 };
1901 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1902 .await
1903 })
1904 }
1905
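    /// Returns the buffer with the given remote id, requesting it from the host if this is a
    /// remote project and the buffer isn't already open locally.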
1906 pub fn open_buffer_by_id(
1907 &mut self,
1908 id: u64,
1909 cx: &mut ModelContext<Self>,
1910 ) -> Task<Result<Model<Buffer>>> {
1911 if let Some(buffer) = self.buffer_for_id(id) {
1912 Task::ready(Ok(buffer))
1913 } else if self.is_local() {
1914 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1915 } else if let Some(project_id) = self.remote_id() {
1916 let request = self
1917 .client
1918 .request(proto::OpenBufferById { project_id, id });
1919 cx.spawn(move |this, mut cx| async move {
1920 let buffer_id = request.await?.buffer_id;
1921 this.update(&mut cx, |this, cx| {
1922 this.wait_for_remote_buffer(buffer_id, cx)
1923 })?
1924 .await
1925 })
1926 } else {
1927 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1928 }
1929 }
1930
1931 pub fn save_buffers(
1932 &self,
1933 buffers: HashSet<Model<Buffer>>,
1934 cx: &mut ModelContext<Self>,
1935 ) -> Task<Result<()>> {
1936 cx.spawn(move |this, mut cx| async move {
1937 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1938 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1939 .ok()
1940 });
1941 try_join_all(save_tasks).await?;
1942 Ok(())
1943 })
1944 }
1945
1946 pub fn save_buffer(
1947 &self,
1948 buffer: Model<Buffer>,
1949 cx: &mut ModelContext<Self>,
1950 ) -> Task<Result<()>> {
1951 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1952 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1953 };
1954 let worktree = file.worktree.clone();
1955 let path = file.path.clone();
1956 worktree.update(cx, |worktree, cx| match worktree {
1957 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1958 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1959 })
1960 }
1961
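    /// Saves the buffer under a new path, unregistering it from its language servers first and
    /// re-detecting its language and re-registering it once the save completes.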
1962 pub fn save_buffer_as(
1963 &mut self,
1964 buffer: Model<Buffer>,
1965 abs_path: PathBuf,
1966 cx: &mut ModelContext<Self>,
1967 ) -> Task<Result<()>> {
1968 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1969 let old_file = File::from_dyn(buffer.read(cx).file())
1970 .filter(|f| f.is_local())
1971 .cloned();
1972 cx.spawn(move |this, mut cx| async move {
1973 if let Some(old_file) = &old_file {
1974 this.update(&mut cx, |this, cx| {
1975 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1976 })?;
1977 }
1978 let (worktree, path) = worktree_task.await?;
1979 worktree
1980 .update(&mut cx, |worktree, cx| match worktree {
1981 Worktree::Local(worktree) => {
1982 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1983 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1985 })?
1986 .await?;
1987
1988 this.update(&mut cx, |this, cx| {
1989 this.detect_language_for_buffer(&buffer, cx);
1990 this.register_buffer_with_language_servers(&buffer, cx);
1991 })?;
1992 Ok(())
1993 })
1994 }
1995
1996 pub fn get_open_buffer(
1997 &mut self,
1998 path: &ProjectPath,
1999 cx: &mut ModelContext<Self>,
2000 ) -> Option<Model<Buffer>> {
2001 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
2002 self.opened_buffers.values().find_map(|buffer| {
2003 let buffer = buffer.upgrade()?;
2004 let file = File::from_dyn(buffer.read(cx).file())?;
2005 if file.worktree == worktree && file.path() == &path.path {
2006 Some(buffer)
2007 } else {
2008 None
2009 }
2010 })
2011 }
2012
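    // Records a newly opened buffer in `opened_buffers`, wires up event and release
    // subscriptions, and registers it with language servers and Copilot.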
2013 fn register_buffer(
2014 &mut self,
2015 buffer: &Model<Buffer>,
2016 cx: &mut ModelContext<Self>,
2017 ) -> Result<()> {
2018 self.request_buffer_diff_recalculation(buffer, cx);
2019 buffer.update(cx, |buffer, _| {
2020 buffer.set_language_registry(self.languages.clone())
2021 });
2022
2023 let remote_id = buffer.read(cx).remote_id();
2024 let is_remote = self.is_remote();
2025 let open_buffer = if is_remote || self.is_shared() {
2026 OpenBuffer::Strong(buffer.clone())
2027 } else {
2028 OpenBuffer::Weak(buffer.downgrade())
2029 };
2030
2031 match self.opened_buffers.entry(remote_id) {
2032 hash_map::Entry::Vacant(entry) => {
2033 entry.insert(open_buffer);
2034 }
2035 hash_map::Entry::Occupied(mut entry) => {
2036 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2037 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2038 } else if entry.get().upgrade().is_some() {
2039 if is_remote {
2040 return Ok(());
2041 } else {
2042 debug_panic!("buffer {} was already registered", remote_id);
2043 Err(anyhow!("buffer {} was already registered", remote_id))?;
2044 }
2045 }
2046 entry.insert(open_buffer);
2047 }
2048 }
2049 cx.subscribe(buffer, |this, buffer, event, cx| {
2050 this.on_buffer_event(buffer, event, cx);
2051 })
2052 .detach();
2053
2054 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2055 if file.is_local {
2056 self.local_buffer_ids_by_path.insert(
2057 ProjectPath {
2058 worktree_id: file.worktree_id(cx),
2059 path: file.path.clone(),
2060 },
2061 remote_id,
2062 );
2063
2064 if let Some(entry_id) = file.entry_id {
2065 self.local_buffer_ids_by_entry_id
2066 .insert(entry_id, remote_id);
2067 }
2068 }
2069 }
2070
2071 self.detect_language_for_buffer(buffer, cx);
2072 self.register_buffer_with_language_servers(buffer, cx);
2073 self.register_buffer_with_copilot(buffer, cx);
2074 cx.observe_release(buffer, |this, buffer, cx| {
2075 if let Some(file) = File::from_dyn(buffer.file()) {
2076 if file.is_local() {
2077 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2078 for server in this.language_servers_for_buffer(buffer, cx) {
2079 server
2080 .1
2081 .notify::<lsp::notification::DidCloseTextDocument>(
2082 lsp::DidCloseTextDocumentParams {
2083 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2084 },
2085 )
2086 .log_err();
2087 }
2088 }
2089 }
2090 })
2091 .detach();
2092
2093 *self.opened_buffer.0.borrow_mut() = ();
2094 Ok(())
2095 }
2096
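    // Sends `textDocument/didOpen` for a local buffer to every running language server that
    // applies to its language, seeding diagnostics, completion triggers, and the buffer
    // snapshots used for incremental synchronization.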
2097 fn register_buffer_with_language_servers(
2098 &mut self,
2099 buffer_handle: &Model<Buffer>,
2100 cx: &mut ModelContext<Self>,
2101 ) {
2102 let buffer = buffer_handle.read(cx);
2103 let buffer_id = buffer.remote_id();
2104
2105 if let Some(file) = File::from_dyn(buffer.file()) {
2106 if !file.is_local() {
2107 return;
2108 }
2109
2110 let abs_path = file.abs_path(cx);
2111 let uri = lsp::Url::from_file_path(&abs_path)
2112 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2113 let initial_snapshot = buffer.text_snapshot();
2114 let language = buffer.language().cloned();
2115 let worktree_id = file.worktree_id(cx);
2116
2117 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2118 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2119 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2120 .log_err();
2121 }
2122 }
2123
2124 if let Some(language) = language {
2125 for adapter in language.lsp_adapters() {
2126 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2127 let server = self
2128 .language_server_ids
2129 .get(&(worktree_id, adapter.name.clone()))
2130 .and_then(|id| self.language_servers.get(id))
2131 .and_then(|server_state| {
2132 if let LanguageServerState::Running { server, .. } = server_state {
2133 Some(server.clone())
2134 } else {
2135 None
2136 }
2137 });
2138 let server = match server {
2139 Some(server) => server,
2140 None => continue,
2141 };
2142
2143 server
2144 .notify::<lsp::notification::DidOpenTextDocument>(
2145 lsp::DidOpenTextDocumentParams {
2146 text_document: lsp::TextDocumentItem::new(
2147 uri.clone(),
2148 language_id.unwrap_or_default(),
2149 0,
2150 initial_snapshot.text(),
2151 ),
2152 },
2153 )
2154 .log_err();
2155
2156 buffer_handle.update(cx, |buffer, cx| {
2157 buffer.set_completion_triggers(
2158 server
2159 .capabilities()
2160 .completion_provider
2161 .as_ref()
2162 .and_then(|provider| provider.trigger_characters.clone())
2163 .unwrap_or_default(),
2164 cx,
2165 );
2166 });
2167
2168 let snapshot = LspBufferSnapshot {
2169 version: 0,
2170 snapshot: initial_snapshot.clone(),
2171 };
2172 self.buffer_snapshots
2173 .entry(buffer_id)
2174 .or_default()
2175 .insert(server.server_id(), vec![snapshot]);
2176 }
2177 }
2178 }
2179 }
2180
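    // Clears the buffer's diagnostics for its current language servers and notifies them that
    // the document was closed, e.g. before the buffer is saved under a new path.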
2181 fn unregister_buffer_from_language_servers(
2182 &mut self,
2183 buffer: &Model<Buffer>,
2184 old_file: &File,
2185 cx: &mut ModelContext<Self>,
2186 ) {
2187 let old_path = match old_file.as_local() {
2188 Some(local) => local.abs_path(cx),
2189 None => return,
2190 };
2191
2192 buffer.update(cx, |buffer, cx| {
2193 let worktree_id = old_file.worktree_id(cx);
2194 let ids = &self.language_server_ids;
2195
2196 let language = buffer.language().cloned();
2197 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2198 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2199 buffer.update_diagnostics(server_id, Default::default(), cx);
2200 }
2201
2202 self.buffer_snapshots.remove(&buffer.remote_id());
2203 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2204 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2205 language_server
2206 .notify::<lsp::notification::DidCloseTextDocument>(
2207 lsp::DidCloseTextDocumentParams {
2208 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2209 },
2210 )
2211 .log_err();
2212 }
2213 });
2214 }
2215
2216 fn register_buffer_with_copilot(
2217 &self,
2218 buffer_handle: &Model<Buffer>,
2219 cx: &mut ModelContext<Self>,
2220 ) {
2221 if let Some(copilot) = Copilot::global(cx) {
2222 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2223 }
2224 }
2225
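    // Drains buffer-ordered messages from the channel in batches, accumulating buffer
    // operations per buffer and flushing them to the host before forwarding any language
    // server update, so that remote peers observe messages in order.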
2226 async fn send_buffer_ordered_messages(
2227 this: WeakModel<Self>,
2228 rx: UnboundedReceiver<BufferOrderedMessage>,
2229 mut cx: AsyncAppContext,
2230 ) -> Result<()> {
2231 const MAX_BATCH_SIZE: usize = 128;
2232
2233 let mut operations_by_buffer_id = HashMap::default();
2234 async fn flush_operations(
2235 this: &WeakModel<Project>,
2236 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2237 needs_resync_with_host: &mut bool,
2238 is_local: bool,
2239 cx: &mut AsyncAppContext,
2240 ) -> Result<()> {
2241 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2242 let request = this.update(cx, |this, _| {
2243 let project_id = this.remote_id()?;
2244 Some(this.client.request(proto::UpdateBuffer {
2245 buffer_id,
2246 project_id,
2247 operations,
2248 }))
2249 })?;
2250 if let Some(request) = request {
2251 if request.await.is_err() && !is_local {
2252 *needs_resync_with_host = true;
2253 break;
2254 }
2255 }
2256 }
2257 Ok(())
2258 }
2259
2260 let mut needs_resync_with_host = false;
2261 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2262
2263 while let Some(changes) = changes.next().await {
2264 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2265
2266 for change in changes {
2267 match change {
2268 BufferOrderedMessage::Operation {
2269 buffer_id,
2270 operation,
2271 } => {
2272 if needs_resync_with_host {
2273 continue;
2274 }
2275
2276 operations_by_buffer_id
2277 .entry(buffer_id)
2278 .or_insert(Vec::new())
2279 .push(operation);
2280 }
2281
2282 BufferOrderedMessage::Resync => {
2283 operations_by_buffer_id.clear();
2284 if this
2285 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2286 .await
2287 .is_ok()
2288 {
2289 needs_resync_with_host = false;
2290 }
2291 }
2292
2293 BufferOrderedMessage::LanguageServerUpdate {
2294 language_server_id,
2295 message,
2296 } => {
2297 flush_operations(
2298 &this,
2299 &mut operations_by_buffer_id,
2300 &mut needs_resync_with_host,
2301 is_local,
2302 &mut cx,
2303 )
2304 .await?;
2305
2306 this.update(&mut cx, |this, _| {
2307 if let Some(project_id) = this.remote_id() {
2308 this.client
2309 .send(proto::UpdateLanguageServer {
2310 project_id,
2311 language_server_id: language_server_id.0 as u64,
2312 variant: Some(message),
2313 })
2314 .log_err();
2315 }
2316 })?;
2317 }
2318 }
2319 }
2320
2321 flush_operations(
2322 &this,
2323 &mut operations_by_buffer_id,
2324 &mut needs_resync_with_host,
2325 is_local,
2326 &mut cx,
2327 )
2328 .await?;
2329 }
2330
2331 Ok(())
2332 }
2333
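    // Reacts to buffer events: forwards operations to collaborators, sends incremental or
    // full `didChange`/`didSave` notifications to language servers, and keeps the buffer-id
    // lookup tables in sync when a buffer's file handle changes.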
2334 fn on_buffer_event(
2335 &mut self,
2336 buffer: Model<Buffer>,
2337 event: &BufferEvent,
2338 cx: &mut ModelContext<Self>,
2339 ) -> Option<()> {
2340 if matches!(
2341 event,
2342 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2343 ) {
2344 self.request_buffer_diff_recalculation(&buffer, cx);
2345 }
2346
2347 match event {
2348 BufferEvent::Operation(operation) => {
2349 self.buffer_ordered_messages_tx
2350 .unbounded_send(BufferOrderedMessage::Operation {
2351 buffer_id: buffer.read(cx).remote_id(),
2352 operation: language::proto::serialize_operation(operation),
2353 })
2354 .ok();
2355 }
2356
2357 BufferEvent::Edited { .. } => {
2358 let buffer = buffer.read(cx);
2359 let file = File::from_dyn(buffer.file())?;
2360 let abs_path = file.as_local()?.abs_path(cx);
2361 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2362 let next_snapshot = buffer.text_snapshot();
2363
2364 let language_servers: Vec<_> = self
2365 .language_servers_for_buffer(buffer, cx)
2366 .map(|i| i.1.clone())
2367 .collect();
2368
2369 for language_server in language_servers {
2370 let language_server = language_server.clone();
2371
2372 let buffer_snapshots = self
2373 .buffer_snapshots
2374 .get_mut(&buffer.remote_id())
2375 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2376 let previous_snapshot = buffer_snapshots.last()?;
2377
2378 let build_incremental_change = || {
2379 buffer
2380 .edits_since::<(PointUtf16, usize)>(
2381 previous_snapshot.snapshot.version(),
2382 )
2383 .map(|edit| {
2384 let edit_start = edit.new.start.0;
2385 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2386 let new_text = next_snapshot
2387 .text_for_range(edit.new.start.1..edit.new.end.1)
2388 .collect();
2389 lsp::TextDocumentContentChangeEvent {
2390 range: Some(lsp::Range::new(
2391 point_to_lsp(edit_start),
2392 point_to_lsp(edit_end),
2393 )),
2394 range_length: None,
2395 text: new_text,
2396 }
2397 })
2398 .collect()
2399 };
2400
2401 let document_sync_kind = language_server
2402 .capabilities()
2403 .text_document_sync
2404 .as_ref()
2405 .and_then(|sync| match sync {
2406 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2407 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2408 });
2409
2410 let content_changes: Vec<_> = match document_sync_kind {
2411 Some(lsp::TextDocumentSyncKind::FULL) => {
2412 vec![lsp::TextDocumentContentChangeEvent {
2413 range: None,
2414 range_length: None,
2415 text: next_snapshot.text(),
2416 }]
2417 }
2418 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2419 _ => {
2420 #[cfg(any(test, feature = "test-support"))]
2421 {
2422 build_incremental_change()
2423 }
2424
2425 #[cfg(not(any(test, feature = "test-support")))]
2426 {
2427 continue;
2428 }
2429 }
2430 };
2431
2432 let next_version = previous_snapshot.version + 1;
2433
2434 buffer_snapshots.push(LspBufferSnapshot {
2435 version: next_version,
2436 snapshot: next_snapshot.clone(),
2437 });
2438
2439 language_server
2440 .notify::<lsp::notification::DidChangeTextDocument>(
2441 lsp::DidChangeTextDocumentParams {
2442 text_document: lsp::VersionedTextDocumentIdentifier::new(
2443 uri.clone(),
2444 next_version,
2445 ),
2446 content_changes,
2447 },
2448 )
2449 .log_err();
2450 }
2451 }
2452
2453 BufferEvent::Saved => {
2454 let file = File::from_dyn(buffer.read(cx).file())?;
2455 let worktree_id = file.worktree_id(cx);
2456 let abs_path = file.as_local()?.abs_path(cx);
2457 let text_document = lsp::TextDocumentIdentifier {
2458 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2459 };
2460
2461 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2462 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2463
2464 server
2465 .notify::<lsp::notification::DidSaveTextDocument>(
2466 lsp::DidSaveTextDocumentParams {
2467 text_document: text_document.clone(),
2468 text,
2469 },
2470 )
2471 .log_err();
2472 }
2473
2474 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2475 for language_server_id in language_server_ids {
2476 if let Some(LanguageServerState::Running {
2477 adapter,
2478 simulate_disk_based_diagnostics_completion,
2479 ..
2480 }) = self.language_servers.get_mut(&language_server_id)
2481 {
2482 // After saving a buffer using a language server that doesn't provide
2483 // a disk-based progress token, kick off a timer that will reset every
2484 // time the buffer is saved. If the timer eventually fires, simulate
2485 // disk-based diagnostics being finished so that other pieces of UI
2486 // (e.g., project diagnostics view, diagnostic status bar) can update.
2487 // We don't emit an event right away because the language server might take
2488 // some time to publish diagnostics.
2489 if adapter.disk_based_diagnostics_progress_token.is_none() {
2490 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2491 Duration::from_secs(1);
2492
2493 let task = cx.spawn(move |this, mut cx| async move {
2494 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2495 if let Some(this) = this.upgrade() {
2496 this.update(&mut cx, |this, cx| {
2497 this.disk_based_diagnostics_finished(
2498 language_server_id,
2499 cx,
2500 );
2501 this.buffer_ordered_messages_tx
2502 .unbounded_send(
2503 BufferOrderedMessage::LanguageServerUpdate {
2504 language_server_id,
                                                message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2506 },
2507 )
2508 .ok();
2509 }).ok();
2510 }
2511 });
2512 *simulate_disk_based_diagnostics_completion = Some(task);
2513 }
2514 }
2515 }
2516 }
2517 BufferEvent::FileHandleChanged => {
2518 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2519 return None;
2520 };
2521
2522 let remote_id = buffer.read(cx).remote_id();
2523 if let Some(entry_id) = file.entry_id {
2524 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2525 Some(_) => {
2526 return None;
2527 }
2528 None => {
2529 self.local_buffer_ids_by_entry_id
2530 .insert(entry_id, remote_id);
2531 }
2532 }
2533 };
2534 self.local_buffer_ids_by_path.insert(
2535 ProjectPath {
2536 worktree_id: file.worktree_id(cx),
2537 path: file.path.clone(),
2538 },
2539 remote_id,
2540 );
2541 }
2542 _ => {}
2543 }
2544
2545 None
2546 }
2547
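    // Schedules a git diff recalculation for the buffer, either on the next turn of the event
    // loop or debounced according to the `git.gutter_debounce` setting.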
2548 fn request_buffer_diff_recalculation(
2549 &mut self,
2550 buffer: &Model<Buffer>,
2551 cx: &mut ModelContext<Self>,
2552 ) {
2553 self.buffers_needing_diff.insert(buffer.downgrade());
2554 let first_insertion = self.buffers_needing_diff.len() == 1;
2555
2556 let settings = ProjectSettings::get_global(cx);
2557 let delay = if let Some(delay) = settings.git.gutter_debounce {
2558 delay
2559 } else {
2560 if first_insertion {
2561 let this = cx.weak_model();
2562 cx.defer(move |cx| {
2563 if let Some(this) = this.upgrade() {
2564 this.update(cx, |this, cx| {
2565 this.recalculate_buffer_diffs(cx).detach();
2566 });
2567 }
2568 });
2569 }
2570 return;
2571 };
2572
2573 const MIN_DELAY: u64 = 50;
2574 let delay = delay.max(MIN_DELAY);
2575 let duration = Duration::from_millis(delay);
2576
2577 self.git_diff_debouncer
2578 .fire_new(duration, cx, move |this, cx| {
2579 this.recalculate_buffer_diffs(cx)
2580 });
2581 }
2582
2583 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2584 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2585 cx.spawn(move |this, mut cx| async move {
2586 let tasks: Vec<_> = buffers
2587 .iter()
2588 .filter_map(|buffer| {
2589 let buffer = buffer.upgrade()?;
2590 buffer
2591 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2592 .ok()
2593 .flatten()
2594 })
2595 .collect();
2596
2597 futures::future::join_all(tasks).await;
2598
2599 this.update(&mut cx, |this, cx| {
2600 if !this.buffers_needing_diff.is_empty() {
2601 this.recalculate_buffer_diffs(cx).detach();
2602 } else {
2603 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2604 for buffer in buffers {
2605 if let Some(buffer) = buffer.upgrade() {
2606 buffer.update(cx, |_, cx| cx.notify());
2607 }
2608 }
2609 }
2610 })
2611 .ok();
2612 })
2613 }
2614
2615 fn language_servers_for_worktree(
2616 &self,
2617 worktree_id: WorktreeId,
2618 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2619 self.language_server_ids
2620 .iter()
2621 .filter_map(move |((language_server_worktree_id, _), id)| {
2622 if *language_server_worktree_id == worktree_id {
2623 if let Some(LanguageServerState::Running {
2624 adapter,
2625 language,
2626 server,
2627 ..
2628 }) = self.language_servers.get(id)
2629 {
2630 return Some((adapter, language, server));
2631 }
2632 }
2633 None
2634 })
2635 }
2636
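    // Watches the language registry: on a registry reload, open buffers are detached from
    // their language servers and have their languages cleared; whenever languages change,
    // plain-text buffers are re-detected and buffers with unknown injections are reparsed.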
2637 fn maintain_buffer_languages(
2638 languages: Arc<LanguageRegistry>,
2639 cx: &mut ModelContext<Project>,
2640 ) -> Task<()> {
2641 let mut subscription = languages.subscribe();
2642 let mut prev_reload_count = languages.reload_count();
2643 cx.spawn(move |project, mut cx| async move {
2644 while let Some(()) = subscription.next().await {
2645 if let Some(project) = project.upgrade() {
2646 // If the language registry has been reloaded, then remove and
2647 // re-assign the languages on all open buffers.
2648 let reload_count = languages.reload_count();
2649 if reload_count > prev_reload_count {
2650 prev_reload_count = reload_count;
2651 project
2652 .update(&mut cx, |this, cx| {
2653 let buffers = this
2654 .opened_buffers
2655 .values()
2656 .filter_map(|b| b.upgrade())
2657 .collect::<Vec<_>>();
2658 for buffer in buffers {
2659 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2660 {
2661 this.unregister_buffer_from_language_servers(
2662 &buffer, &f, cx,
2663 );
2664 buffer
2665 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2666 }
2667 }
2668 })
2669 .ok();
2670 }
2671
2672 project
2673 .update(&mut cx, |project, cx| {
2674 let mut plain_text_buffers = Vec::new();
2675 let mut buffers_with_unknown_injections = Vec::new();
2676 for buffer in project.opened_buffers.values() {
2677 if let Some(handle) = buffer.upgrade() {
2678 let buffer = &handle.read(cx);
2679 if buffer.language().is_none()
2680 || buffer.language() == Some(&*language::PLAIN_TEXT)
2681 {
2682 plain_text_buffers.push(handle);
2683 } else if buffer.contains_unknown_injections() {
2684 buffers_with_unknown_injections.push(handle);
2685 }
2686 }
2687 }
2688
2689 for buffer in plain_text_buffers {
2690 project.detect_language_for_buffer(&buffer, cx);
2691 project.register_buffer_with_language_servers(&buffer, cx);
2692 }
2693
2694 for buffer in buffers_with_unknown_injections {
2695 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2696 }
2697 })
2698 .ok();
2699 }
2700 }
2701 })
2702 }
2703
2704 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2705 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2706 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2707
2708 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2709 *settings_changed_tx.borrow_mut() = ();
2710 });
2711
2712 cx.spawn(move |this, mut cx| async move {
2713 while let Some(_) = settings_changed_rx.next().await {
2714 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2715 this.language_servers
2716 .values()
2717 .filter_map(|state| match state {
2718 LanguageServerState::Starting(_) => None,
2719 LanguageServerState::Running {
2720 adapter, server, ..
2721 } => Some((adapter.clone(), server.clone())),
2722 })
2723 .collect()
2724 })?;
2725
2726 for (adapter, server) in servers {
2727 let workspace_config = cx
2728 .update(|cx| adapter.workspace_configuration(server.root_path(), cx))?
2729 .await;
2730 server
2731 .notify::<lsp::notification::DidChangeConfiguration>(
2732 lsp::DidChangeConfigurationParams {
2733 settings: workspace_config.clone(),
2734 },
2735 )
2736 .ok();
2737 }
2738 }
2739
2740 drop(settings_observation);
2741 anyhow::Ok(())
2742 })
2743 }
2744
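    // Looks up a language for the buffer based on its path and contents; if one is available
    // synchronously, assigns it, which may start the corresponding language servers.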
2745 fn detect_language_for_buffer(
2746 &mut self,
2747 buffer_handle: &Model<Buffer>,
2748 cx: &mut ModelContext<Self>,
2749 ) -> Option<()> {
2750 // If the buffer has a language, set it and start the language server if we haven't already.
2751 let buffer = buffer_handle.read(cx);
2752 let full_path = buffer.file()?.full_path(cx);
2753 let content = buffer.as_rope();
2754 let new_language = self
2755 .languages
2756 .language_for_file(&full_path, Some(content))
2757 .now_or_never()?
2758 .ok()?;
2759 self.set_language_for_buffer(buffer_handle, new_language, cx);
2760 None
2761 }
2762
2763 pub fn set_language_for_buffer(
2764 &mut self,
2765 buffer: &Model<Buffer>,
2766 new_language: Arc<Language>,
2767 cx: &mut ModelContext<Self>,
2768 ) {
2769 buffer.update(cx, |buffer, cx| {
2770 if buffer.language().map_or(true, |old_language| {
2771 !Arc::ptr_eq(old_language, &new_language)
2772 }) {
2773 buffer.set_language(Some(new_language.clone()), cx);
2774 }
2775 });
2776
2777 let buffer_file = buffer.read(cx).file().cloned();
2778 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2779 let buffer_file = File::from_dyn(buffer_file.as_ref());
2780 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2781 if let Some(prettier_plugins) =
2782 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2783 {
2784 self.install_default_prettier(worktree, prettier_plugins, cx);
2785 };
2786 if let Some(file) = buffer_file {
2787 let worktree = file.worktree.clone();
2788 if let Some(tree) = worktree.read(cx).as_local() {
2789 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2790 }
2791 }
2792 }
2793
2794 fn start_language_servers(
2795 &mut self,
2796 worktree: &Model<Worktree>,
2797 worktree_path: Arc<Path>,
2798 language: Arc<Language>,
2799 cx: &mut ModelContext<Self>,
2800 ) {
2801 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2802 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2803 if !settings.enable_language_server {
2804 return;
2805 }
2806
2807 let worktree_id = worktree.read(cx).id();
2808 for adapter in language.lsp_adapters() {
2809 self.start_language_server(
2810 worktree_id,
2811 worktree_path.clone(),
2812 adapter.clone(),
2813 language.clone(),
2814 cx,
2815 );
2816 }
2817 }
2818
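    // Starts a single language server for the given worktree and adapter, unless one is
    // already tracked for that key or the adapter has exceeded its reinstall attempts. If
    // startup fails, the server is checked with its installation test binary and possibly
    // reinstalled.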
2819 fn start_language_server(
2820 &mut self,
2821 worktree_id: WorktreeId,
2822 worktree_path: Arc<Path>,
2823 adapter: Arc<CachedLspAdapter>,
2824 language: Arc<Language>,
2825 cx: &mut ModelContext<Self>,
2826 ) {
2827 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2828 return;
2829 }
2830
2831 let key = (worktree_id, adapter.name.clone());
2832 if self.language_server_ids.contains_key(&key) {
2833 return;
2834 }
2835
2836 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2837 let pending_server = match self.languages.create_pending_language_server(
2838 stderr_capture.clone(),
2839 language.clone(),
2840 adapter.clone(),
2841 Arc::clone(&worktree_path),
2842 ProjectLspAdapterDelegate::new(self, cx),
2843 cx,
2844 ) {
2845 Some(pending_server) => pending_server,
2846 None => return,
2847 };
2848
2849 let project_settings = ProjectSettings::get_global(cx);
2850 let lsp = project_settings.lsp.get(&adapter.name.0);
2851 let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
2852
2853 let server_id = pending_server.server_id;
2854 let container_dir = pending_server.container_dir.clone();
2855 let state = LanguageServerState::Starting({
2856 let adapter = adapter.clone();
2857 let server_name = adapter.name.0.clone();
2858 let language = language.clone();
2859 let key = key.clone();
2860
2861 cx.spawn(move |this, mut cx| async move {
2862 let result = Self::setup_and_insert_language_server(
2863 this.clone(),
2864 &worktree_path,
2865 override_options,
2866 pending_server,
2867 adapter.clone(),
2868 language.clone(),
2869 server_id,
2870 key,
2871 &mut cx,
2872 )
2873 .await;
2874
2875 match result {
2876 Ok(server) => {
2877 stderr_capture.lock().take();
2878 server
2879 }
2880
2881 Err(err) => {
2882 log::error!("failed to start language server {server_name:?}: {err}");
2883 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2884
2885 let this = this.upgrade()?;
2886 let container_dir = container_dir?;
2887
2888 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2889 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2890 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2891 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2892 return None;
2893 }
2894
2895 let installation_test_binary = adapter
2896 .installation_test_binary(container_dir.to_path_buf())
2897 .await;
2898
2899 this.update(&mut cx, |_, cx| {
2900 Self::check_errored_server(
2901 language,
2902 adapter,
2903 server_id,
2904 installation_test_binary,
2905 cx,
2906 )
2907 })
2908 .ok();
2909
2910 None
2911 }
2912 }
2913 })
2914 });
2915
2916 self.language_servers.insert(server_id, state);
2917 self.language_server_ids.insert(key, server_id);
2918 }
2919
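    // Shuts down the given language server, deletes its container directory, and then
    // restarts it for every worktree in the project.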
2920 fn reinstall_language_server(
2921 &mut self,
2922 language: Arc<Language>,
2923 adapter: Arc<CachedLspAdapter>,
2924 server_id: LanguageServerId,
2925 cx: &mut ModelContext<Self>,
2926 ) -> Option<Task<()>> {
2927 log::info!("beginning to reinstall server");
2928
2929 let existing_server = match self.language_servers.remove(&server_id) {
2930 Some(LanguageServerState::Running { server, .. }) => Some(server),
2931 _ => None,
2932 };
2933
2934 for worktree in &self.worktrees {
2935 if let Some(worktree) = worktree.upgrade() {
2936 let key = (worktree.read(cx).id(), adapter.name.clone());
2937 self.language_server_ids.remove(&key);
2938 }
2939 }
2940
2941 Some(cx.spawn(move |this, mut cx| async move {
2942 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2943 log::info!("shutting down existing server");
2944 task.await;
2945 }
2946
            // TODO: This is race-safe with regards to preventing new instances from
            // starting while deleting, but existing instances in other projects will be
            // left in a broken state.
2950 let Some(task) = this
2951 .update(&mut cx, |this, cx| {
2952 this.languages.delete_server_container(adapter.clone(), cx)
2953 })
2954 .log_err()
2955 else {
2956 return;
2957 };
2958 task.await;
2959
2960 this.update(&mut cx, |this, mut cx| {
2961 let worktrees = this.worktrees.clone();
2962 for worktree in worktrees {
2963 let worktree = match worktree.upgrade() {
2964 Some(worktree) => worktree.read(cx),
2965 None => continue,
2966 };
2967 let worktree_id = worktree.id();
2968 let root_path = worktree.abs_path();
2969
2970 this.start_language_server(
2971 worktree_id,
2972 root_path,
2973 adapter.clone(),
2974 language.clone(),
2975 &mut cx,
2976 );
2977 }
2978 })
2979 .ok();
2980 }))
2981 }
2982
2983 async fn setup_and_insert_language_server(
2984 this: WeakModel<Self>,
2985 worktree_path: &Path,
2986 override_initialization_options: Option<serde_json::Value>,
2987 pending_server: PendingLanguageServer,
2988 adapter: Arc<CachedLspAdapter>,
2989 language: Arc<Language>,
2990 server_id: LanguageServerId,
2991 key: (WorktreeId, LanguageServerName),
2992 cx: &mut AsyncAppContext,
2993 ) -> Result<Option<Arc<LanguageServer>>> {
2994 let language_server = Self::setup_pending_language_server(
2995 this.clone(),
2996 override_initialization_options,
2997 pending_server,
2998 worktree_path,
2999 adapter.clone(),
3000 server_id,
3001 cx,
3002 )
3003 .await?;
3004
3005 let this = match this.upgrade() {
3006 Some(this) => this,
3007 None => return Err(anyhow!("failed to upgrade project handle")),
3008 };
3009
3010 this.update(cx, |this, cx| {
3011 this.insert_newly_running_language_server(
3012 language,
3013 adapter,
3014 language_server.clone(),
3015 server_id,
3016 key,
3017 cx,
3018 )
3019 })??;
3020
3021 Ok(Some(language_server))
3022 }
3023
3024 async fn setup_pending_language_server(
3025 this: WeakModel<Self>,
3026 override_options: Option<serde_json::Value>,
3027 pending_server: PendingLanguageServer,
3028 worktree_path: &Path,
3029 adapter: Arc<CachedLspAdapter>,
3030 server_id: LanguageServerId,
3031 cx: &mut AsyncAppContext,
3032 ) -> Result<Arc<LanguageServer>> {
3033 let workspace_config = cx
3034 .update(|cx| adapter.workspace_configuration(worktree_path, cx))?
3035 .await;
3036 let language_server = pending_server.task.await?;
3037
3038 language_server
3039 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3040 let adapter = adapter.clone();
3041 let this = this.clone();
3042 move |mut params, mut cx| {
3043 let adapter = adapter.clone();
3044 if let Some(this) = this.upgrade() {
3045 adapter.process_diagnostics(&mut params);
3046 this.update(&mut cx, |this, cx| {
3047 this.update_diagnostics(
3048 server_id,
3049 params,
3050 &adapter.disk_based_diagnostic_sources,
3051 cx,
3052 )
3053 .log_err();
3054 })
3055 .ok();
3056 }
3057 }
3058 })
3059 .detach();
3060
3061 language_server
3062 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3063 let adapter = adapter.clone();
3064 let worktree_path = worktree_path.to_path_buf();
3065 move |params, cx| {
3066 let adapter = adapter.clone();
3067 let worktree_path = worktree_path.clone();
3068 async move {
3069 let workspace_config = cx
3070 .update(|cx| adapter.workspace_configuration(&worktree_path, cx))?
3071 .await;
3072 Ok(params
3073 .items
3074 .into_iter()
3075 .map(|item| {
3076 if let Some(section) = &item.section {
3077 workspace_config
3078 .get(section)
3079 .cloned()
3080 .unwrap_or(serde_json::Value::Null)
3081 } else {
3082 workspace_config.clone()
3083 }
3084 })
3085 .collect())
3086 }
3087 }
3088 })
3089 .detach();
3090
3091 // Even though we don't have handling for these requests, respond to them to
3092 // avoid stalling any language server like `gopls` which waits for a response
3093 // to these requests when initializing.
3094 language_server
3095 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3096 let this = this.clone();
3097 move |params, mut cx| {
3098 let this = this.clone();
3099 async move {
3100 this.update(&mut cx, |this, _| {
3101 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3102 {
3103 if let lsp::NumberOrString::String(token) = params.token {
3104 status.progress_tokens.insert(token);
3105 }
3106 }
3107 })?;
3108
3109 Ok(())
3110 }
3111 }
3112 })
3113 .detach();
3114
3115 language_server
3116 .on_request::<lsp::request::RegisterCapability, _, _>({
3117 let this = this.clone();
3118 move |params, mut cx| {
3119 let this = this.clone();
3120 async move {
3121 for reg in params.registrations {
3122 if reg.method == "workspace/didChangeWatchedFiles" {
3123 if let Some(options) = reg.register_options {
3124 let options = serde_json::from_value(options)?;
3125 this.update(&mut cx, |this, cx| {
3126 this.on_lsp_did_change_watched_files(
3127 server_id, options, cx,
3128 );
3129 })?;
3130 }
3131 }
3132 }
3133 Ok(())
3134 }
3135 }
3136 })
3137 .detach();
3138
3139 language_server
3140 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3141 let adapter = adapter.clone();
3142 let this = this.clone();
3143 move |params, cx| {
3144 Self::on_lsp_workspace_edit(
3145 this.clone(),
3146 params,
3147 server_id,
3148 adapter.clone(),
3149 cx,
3150 )
3151 }
3152 })
3153 .detach();
3154
3155 language_server
3156 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3157 let this = this.clone();
3158 move |(), mut cx| {
3159 let this = this.clone();
3160 async move {
3161 this.update(&mut cx, |project, cx| {
3162 cx.emit(Event::RefreshInlayHints);
3163 project.remote_id().map(|project_id| {
3164 project.client.send(proto::RefreshInlayHints { project_id })
3165 })
3166 })?
3167 .transpose()?;
3168 Ok(())
3169 }
3170 }
3171 })
3172 .detach();
3173
3174 let disk_based_diagnostics_progress_token =
3175 adapter.disk_based_diagnostics_progress_token.clone();
3176
3177 language_server
3178 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3179 if let Some(this) = this.upgrade() {
3180 this.update(&mut cx, |this, cx| {
3181 this.on_lsp_progress(
3182 params,
3183 server_id,
3184 disk_based_diagnostics_progress_token.clone(),
3185 cx,
3186 );
3187 })
3188 .ok();
3189 }
3190 })
3191 .detach();
3192 let mut initialization_options = adapter.adapter.initialization_options().await;
3193 match (&mut initialization_options, override_options) {
3194 (Some(initialization_options), Some(override_options)) => {
3195 merge_json_value_into(override_options, initialization_options);
3196 }
3197 (None, override_options) => initialization_options = override_options,
3198 _ => {}
3199 }
3200 let language_server = language_server.initialize(initialization_options).await?;
3201
3202 language_server
3203 .notify::<lsp::notification::DidChangeConfiguration>(
3204 lsp::DidChangeConfigurationParams {
3205 settings: workspace_config,
3206 },
3207 )
3208 .ok();
3209
3210 Ok(language_server)
3211 }
3212
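    // Stores a freshly initialized language server, announces it to collaborators, and sends
    // `textDocument/didOpen` for every already-open buffer in the worktree that matches the
    // server's language.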
3213 fn insert_newly_running_language_server(
3214 &mut self,
3215 language: Arc<Language>,
3216 adapter: Arc<CachedLspAdapter>,
3217 language_server: Arc<LanguageServer>,
3218 server_id: LanguageServerId,
3219 key: (WorktreeId, LanguageServerName),
3220 cx: &mut ModelContext<Self>,
3221 ) -> Result<()> {
        // If the language server for this key doesn't match the given server id, don't store
        // it. Dropping the server here will kill its process.
3224 if self
3225 .language_server_ids
3226 .get(&key)
3227 .map(|id| id != &server_id)
3228 .unwrap_or(false)
3229 {
3230 return Ok(());
3231 }
3232
        // Update the language_servers collection with the Running variant of
        // LanguageServerState, indicating that the server is initialized and ready.
3235 self.language_servers.insert(
3236 server_id,
3237 LanguageServerState::Running {
3238 adapter: adapter.clone(),
3239 language: language.clone(),
3240 watched_paths: Default::default(),
3241 server: language_server.clone(),
3242 simulate_disk_based_diagnostics_completion: None,
3243 },
3244 );
3245
3246 self.language_server_statuses.insert(
3247 server_id,
3248 LanguageServerStatus {
3249 name: language_server.name().to_string(),
3250 pending_work: Default::default(),
3251 has_pending_diagnostic_updates: false,
3252 progress_tokens: Default::default(),
3253 },
3254 );
3255
3256 cx.emit(Event::LanguageServerAdded(server_id));
3257
3258 if let Some(project_id) = self.remote_id() {
3259 self.client.send(proto::StartLanguageServer {
3260 project_id,
3261 server: Some(proto::LanguageServer {
3262 id: server_id.0 as u64,
3263 name: language_server.name().to_string(),
3264 }),
3265 })?;
3266 }
3267
3268 // Tell the language server about every open buffer in the worktree that matches the language.
3269 for buffer in self.opened_buffers.values() {
3270 if let Some(buffer_handle) = buffer.upgrade() {
3271 let buffer = buffer_handle.read(cx);
3272 let file = match File::from_dyn(buffer.file()) {
3273 Some(file) => file,
3274 None => continue,
3275 };
3276 let language = match buffer.language() {
3277 Some(language) => language,
3278 None => continue,
3279 };
3280
3281 if file.worktree.read(cx).id() != key.0
3282 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3283 {
3284 continue;
3285 }
3286
3287 let file = match file.as_local() {
3288 Some(file) => file,
3289 None => continue,
3290 };
3291
3292 let versions = self
3293 .buffer_snapshots
3294 .entry(buffer.remote_id())
3295 .or_default()
3296 .entry(server_id)
3297 .or_insert_with(|| {
3298 vec![LspBufferSnapshot {
3299 version: 0,
3300 snapshot: buffer.text_snapshot(),
3301 }]
3302 });
3303
3304 let snapshot = versions.last().unwrap();
3305 let version = snapshot.version;
3306 let initial_snapshot = &snapshot.snapshot;
3307 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3308 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3309 lsp::DidOpenTextDocumentParams {
3310 text_document: lsp::TextDocumentItem::new(
3311 uri,
3312 adapter
3313 .language_ids
3314 .get(language.name().as_ref())
3315 .cloned()
3316 .unwrap_or_default(),
3317 version,
3318 initial_snapshot.text(),
3319 ),
3320 },
3321 )?;
3322
3323 buffer_handle.update(cx, |buffer, cx| {
3324 buffer.set_completion_triggers(
3325 language_server
3326 .capabilities()
3327 .completion_provider
3328 .as_ref()
3329 .and_then(|provider| provider.trigger_characters.clone())
3330 .unwrap_or_default(),
3331 cx,
3332 )
3333 });
3334 }
3335 }
3336
3337 cx.notify();
3338 Ok(())
3339 }
3340
    // Stops the language server for the given worktree and adapter, returning the stopped
    // server's root path and the ids of all worktrees that no longer have a language server.
3343 fn stop_language_server(
3344 &mut self,
3345 worktree_id: WorktreeId,
3346 adapter_name: LanguageServerName,
3347 cx: &mut ModelContext<Self>,
3348 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
3349 let key = (worktree_id, adapter_name);
3350 if let Some(server_id) = self.language_server_ids.remove(&key) {
3351 log::info!("stopping language server {}", key.1 .0);
3352
3353 // Remove other entries for this language server as well
3354 let mut orphaned_worktrees = vec![worktree_id];
3355 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3356 for other_key in other_keys {
3357 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3358 self.language_server_ids.remove(&other_key);
3359 orphaned_worktrees.push(other_key.0);
3360 }
3361 }
3362
3363 for buffer in self.opened_buffers.values() {
3364 if let Some(buffer) = buffer.upgrade() {
3365 buffer.update(cx, |buffer, cx| {
3366 buffer.update_diagnostics(server_id, Default::default(), cx);
3367 });
3368 }
3369 }
3370 for worktree in &self.worktrees {
3371 if let Some(worktree) = worktree.upgrade() {
3372 worktree.update(cx, |worktree, cx| {
3373 if let Some(worktree) = worktree.as_local_mut() {
3374 worktree.clear_diagnostics_for_language_server(server_id, cx);
3375 }
3376 });
3377 }
3378 }
3379
3380 self.language_server_statuses.remove(&server_id);
3381 cx.notify();
3382
3383 let server_state = self.language_servers.remove(&server_id);
3384 cx.emit(Event::LanguageServerRemoved(server_id));
3385 cx.spawn(move |this, mut cx| async move {
3386 let mut root_path = None;
3387
3388 let server = match server_state {
3389 Some(LanguageServerState::Starting(task)) => task.await,
3390 Some(LanguageServerState::Running { server, .. }) => Some(server),
3391 None => None,
3392 };
3393
3394 if let Some(server) = server {
3395 root_path = Some(server.root_path().clone());
3396 if let Some(shutdown) = server.shutdown() {
3397 shutdown.await;
3398 }
3399 }
3400
3401 if let Some(this) = this.upgrade() {
3402 this.update(&mut cx, |this, cx| {
3403 this.language_server_statuses.remove(&server_id);
3404 cx.notify();
3405 })
3406 .ok();
3407 }
3408
3409 (root_path, orphaned_worktrees)
3410 })
3411 } else {
3412 Task::ready((None, Vec::new()))
3413 }
3414 }
3415
3416 pub fn restart_language_servers_for_buffers(
3417 &mut self,
3418 buffers: impl IntoIterator<Item = Model<Buffer>>,
3419 cx: &mut ModelContext<Self>,
3420 ) -> Option<()> {
3421 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3422 .into_iter()
3423 .filter_map(|buffer| {
3424 let buffer = buffer.read(cx);
3425 let file = File::from_dyn(buffer.file())?;
3426 let full_path = file.full_path(cx);
3427 let language = self
3428 .languages
3429 .language_for_file(&full_path, Some(buffer.as_rope()))
3430 .now_or_never()?
3431 .ok()?;
3432 Some((file.worktree.clone(), language))
3433 })
3434 .collect();
3435 for (worktree, language) in language_server_lookup_info {
3436 self.restart_language_servers(worktree, language, cx);
3437 }
3438
3439 None
3440 }
3441
3442 // TODO This will break in the case where the adapter's root paths and worktrees are not equal
3443 fn restart_language_servers(
3444 &mut self,
3445 worktree: Model<Worktree>,
3446 language: Arc<Language>,
3447 cx: &mut ModelContext<Self>,
3448 ) {
3449 let worktree_id = worktree.read(cx).id();
3450 let fallback_path = worktree.read(cx).abs_path();
3451
3452 let mut stops = Vec::new();
3453 for adapter in language.lsp_adapters() {
3454 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
3455 }
3456
3457 if stops.is_empty() {
3458 return;
3459 }
3460 let mut stops = stops.into_iter();
3461
3462 cx.spawn(move |this, mut cx| async move {
3463 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
3464 for stop in stops {
3465 let (_, worktrees) = stop.await;
3466 orphaned_worktrees.extend_from_slice(&worktrees);
3467 }
3468
3469 let this = match this.upgrade() {
3470 Some(this) => this,
3471 None => return,
3472 };
3473
3474 this.update(&mut cx, |this, cx| {
3475 // Attempt to restart using original server path. Fallback to passed in
3476 // path if we could not retrieve the root path
3477 let root_path = original_root_path
3478 .map(|path_buf| Arc::from(path_buf.as_path()))
3479 .unwrap_or(fallback_path);
3480
3481 this.start_language_servers(&worktree, root_path, language.clone(), cx);
3482
3483 // Lookup new server ids and set them for each of the orphaned worktrees
3484 for adapter in language.lsp_adapters() {
3485 if let Some(new_server_id) = this
3486 .language_server_ids
3487 .get(&(worktree_id, adapter.name.clone()))
3488 .cloned()
3489 {
3490 for &orphaned_worktree in &orphaned_worktrees {
3491 this.language_server_ids
3492 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
3493 }
3494 }
3495 }
3496 })
3497 .ok();
3498 })
3499 .detach();
3500 }
3501
3502 fn check_errored_server(
3503 language: Arc<Language>,
3504 adapter: Arc<CachedLspAdapter>,
3505 server_id: LanguageServerId,
3506 installation_test_binary: Option<LanguageServerBinary>,
3507 cx: &mut ModelContext<Self>,
3508 ) {
3509 if !adapter.can_be_reinstalled() {
3510 log::info!(
3511 "Validation check requested for {:?} but it cannot be reinstalled",
3512 adapter.name.0
3513 );
3514 return;
3515 }
3516
3517 cx.spawn(move |this, mut cx| async move {
3518 log::info!("About to spawn test binary");
3519
3520 // A lack of test binary counts as a failure
3521 let process = installation_test_binary.and_then(|binary| {
3522 smol::process::Command::new(&binary.path)
3523 .current_dir(&binary.path)
3524 .args(binary.arguments)
3525 .stdin(Stdio::piped())
3526 .stdout(Stdio::piped())
3527 .stderr(Stdio::inherit())
3528 .kill_on_drop(true)
3529 .spawn()
3530 .ok()
3531 });
3532
3533 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3534 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3535
3536 let mut errored = false;
3537 if let Some(mut process) = process {
3538 futures::select! {
3539 status = process.status().fuse() => match status {
3540 Ok(status) => errored = !status.success(),
3541 Err(_) => errored = true,
3542 },
3543
3544 _ = timeout => {
                        log::info!("test binary timed out; this counts as a success");
3546 _ = process.kill();
3547 }
3548 }
3549 } else {
3550 log::warn!("test binary failed to launch");
3551 errored = true;
3552 }
3553
3554 if errored {
3555 log::warn!("test binary check failed");
3556 let task = this
3557 .update(&mut cx, move |this, mut cx| {
3558 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3559 })
3560 .ok()
3561 .flatten();
3562
3563 if let Some(task) = task {
3564 task.await;
3565 }
3566 }
3567 })
3568 .detach();
3569 }
3570
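    // Handles `$/progress` notifications from a language server, treating tokens that match
    // the adapter's disk-based diagnostics token as diagnostic activity and surfacing all
    // other registered tokens as pending work on the server's status.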
3571 fn on_lsp_progress(
3572 &mut self,
3573 progress: lsp::ProgressParams,
3574 language_server_id: LanguageServerId,
3575 disk_based_diagnostics_progress_token: Option<String>,
3576 cx: &mut ModelContext<Self>,
3577 ) {
3578 let token = match progress.token {
3579 lsp::NumberOrString::String(token) => token,
3580 lsp::NumberOrString::Number(token) => {
3581 log::info!("skipping numeric progress token {}", token);
3582 return;
3583 }
3584 };
3585 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3586 let language_server_status =
3587 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3588 status
3589 } else {
3590 return;
3591 };
3592
3593 if !language_server_status.progress_tokens.contains(&token) {
3594 return;
3595 }
3596
3597 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3598 .as_ref()
3599 .map_or(false, |disk_based_token| {
3600 token.starts_with(disk_based_token)
3601 });
3602
3603 match progress {
3604 lsp::WorkDoneProgress::Begin(report) => {
3605 if is_disk_based_diagnostics_progress {
3606 language_server_status.has_pending_diagnostic_updates = true;
3607 self.disk_based_diagnostics_started(language_server_id, cx);
3608 self.buffer_ordered_messages_tx
3609 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3610 language_server_id,
3611 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3612 })
3613 .ok();
3614 } else {
3615 self.on_lsp_work_start(
3616 language_server_id,
3617 token.clone(),
3618 LanguageServerProgress {
3619 message: report.message.clone(),
3620 percentage: report.percentage.map(|p| p as usize),
3621 last_update_at: Instant::now(),
3622 },
3623 cx,
3624 );
3625 self.buffer_ordered_messages_tx
3626 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3627 language_server_id,
3628 message: proto::update_language_server::Variant::WorkStart(
3629 proto::LspWorkStart {
3630 token,
3631 message: report.message,
3632 percentage: report.percentage.map(|p| p as u32),
3633 },
3634 ),
3635 })
3636 .ok();
3637 }
3638 }
3639 lsp::WorkDoneProgress::Report(report) => {
3640 if !is_disk_based_diagnostics_progress {
3641 self.on_lsp_work_progress(
3642 language_server_id,
3643 token.clone(),
3644 LanguageServerProgress {
3645 message: report.message.clone(),
3646 percentage: report.percentage.map(|p| p as usize),
3647 last_update_at: Instant::now(),
3648 },
3649 cx,
3650 );
3651 self.buffer_ordered_messages_tx
3652 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3653 language_server_id,
3654 message: proto::update_language_server::Variant::WorkProgress(
3655 proto::LspWorkProgress {
3656 token,
3657 message: report.message,
3658 percentage: report.percentage.map(|p| p as u32),
3659 },
3660 ),
3661 })
3662 .ok();
3663 }
3664 }
3665 lsp::WorkDoneProgress::End(_) => {
3666 language_server_status.progress_tokens.remove(&token);
3667
3668 if is_disk_based_diagnostics_progress {
3669 language_server_status.has_pending_diagnostic_updates = false;
3670 self.disk_based_diagnostics_finished(language_server_id, cx);
3671 self.buffer_ordered_messages_tx
3672 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3673 language_server_id,
3674 message:
3675 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3676 Default::default(),
3677 ),
3678 })
3679 .ok();
3680 } else {
3681 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3682 self.buffer_ordered_messages_tx
3683 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3684 language_server_id,
3685 message: proto::update_language_server::Variant::WorkEnd(
3686 proto::LspWorkEnd { token },
3687 ),
3688 })
3689 .ok();
3690 }
3691 }
3692 }
3693 }
3694
3695 fn on_lsp_work_start(
3696 &mut self,
3697 language_server_id: LanguageServerId,
3698 token: String,
3699 progress: LanguageServerProgress,
3700 cx: &mut ModelContext<Self>,
3701 ) {
3702 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3703 status.pending_work.insert(token, progress);
3704 cx.notify();
3705 }
3706 }
3707
3708 fn on_lsp_work_progress(
3709 &mut self,
3710 language_server_id: LanguageServerId,
3711 token: String,
3712 progress: LanguageServerProgress,
3713 cx: &mut ModelContext<Self>,
3714 ) {
3715 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3716 let entry = status
3717 .pending_work
3718 .entry(token)
3719 .or_insert(LanguageServerProgress {
3720 message: Default::default(),
3721 percentage: Default::default(),
3722 last_update_at: progress.last_update_at,
3723 });
3724 if progress.message.is_some() {
3725 entry.message = progress.message;
3726 }
3727 if progress.percentage.is_some() {
3728 entry.percentage = progress.percentage;
3729 }
3730 entry.last_update_at = progress.last_update_at;
3731 cx.notify();
3732 }
3733 }
3734
3735 fn on_lsp_work_end(
3736 &mut self,
3737 language_server_id: LanguageServerId,
3738 token: String,
3739 cx: &mut ModelContext<Self>,
3740 ) {
3741 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3742 cx.emit(Event::RefreshInlayHints);
3743 status.pending_work.remove(&token);
3744 cx.notify();
3745 }
3746 }
3747
3748 fn on_lsp_did_change_watched_files(
3749 &mut self,
3750 language_server_id: LanguageServerId,
3751 params: DidChangeWatchedFilesRegistrationOptions,
3752 cx: &mut ModelContext<Self>,
3753 ) {
3754 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3755 self.language_servers.get_mut(&language_server_id)
3756 {
3757 let mut builders = HashMap::default();
3758 for watcher in params.watchers {
3759 for worktree in &self.worktrees {
3760 if let Some(worktree) = worktree.upgrade() {
3761 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3762 if let Some(abs_path) = tree.abs_path().to_str() {
3763 let relative_glob_pattern = match &watcher.glob_pattern {
3764 lsp::GlobPattern::String(s) => s
3765 .strip_prefix(abs_path)
3766 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3767 lsp::GlobPattern::Relative(rp) => {
3768 let base_uri = match &rp.base_uri {
3769 lsp::OneOf::Left(workspace_folder) => {
3770 &workspace_folder.uri
3771 }
3772 lsp::OneOf::Right(base_uri) => base_uri,
3773 };
3774 base_uri.to_file_path().ok().and_then(|file_path| {
3775 (file_path.to_str() == Some(abs_path))
3776 .then_some(rp.pattern.as_str())
3777 })
3778 }
3779 };
3780 if let Some(relative_glob_pattern) = relative_glob_pattern {
3781 let literal_prefix =
3782 glob_literal_prefix(&relative_glob_pattern);
3783 tree.as_local_mut()
3784 .unwrap()
3785 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3786 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3787 builders
3788 .entry(tree.id())
3789 .or_insert_with(|| GlobSetBuilder::new())
3790 .add(glob);
3791 }
3792 return true;
3793 }
3794 }
3795 false
3796 });
3797 if glob_is_inside_worktree {
3798 break;
3799 }
3800 }
3801 }
3802 }
3803
3804 watched_paths.clear();
3805 for (worktree_id, builder) in builders {
3806 if let Ok(globset) = builder.build() {
3807 watched_paths.insert(worktree_id, globset);
3808 }
3809 }
3810
3811 cx.notify();
3812 }
3813 }
3814
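    /// Applies a `workspace/applyEdit` request coming from a language server and
    /// records the resulting transaction so that a subsequent command response can
    /// report what was changed.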
3815 async fn on_lsp_workspace_edit(
3816 this: WeakModel<Self>,
3817 params: lsp::ApplyWorkspaceEditParams,
3818 server_id: LanguageServerId,
3819 adapter: Arc<CachedLspAdapter>,
3820 mut cx: AsyncAppContext,
3821 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3822 let this = this
3823 .upgrade()
            .ok_or_else(|| anyhow!("project closed"))?;
3825 let language_server = this
3826 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3827 .ok_or_else(|| anyhow!("language server not found"))?;
3828 let transaction = Self::deserialize_workspace_edit(
3829 this.clone(),
3830 params.edit,
3831 true,
3832 adapter.clone(),
3833 language_server.clone(),
3834 &mut cx,
3835 )
3836 .await
3837 .log_err();
3838 this.update(&mut cx, |this, _| {
3839 if let Some(transaction) = transaction {
3840 this.last_workspace_edits_by_language_server
3841 .insert(server_id, transaction);
3842 }
3843 })?;
3844 Ok(lsp::ApplyWorkspaceEditResponse {
3845 applied: true,
3846 failed_change: None,
3847 failure_reason: None,
3848 })
3849 }
3850
3851 pub fn language_server_statuses(
3852 &self,
3853 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3854 self.language_server_statuses.values()
3855 }
3856
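    /// Converts an LSP `textDocument/publishDiagnostics` notification into diagnostic
    /// entries, grouping each piece of related information with its primary diagnostic,
    /// and forwards the result to the affected worktree and open buffer.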
3857 pub fn update_diagnostics(
3858 &mut self,
3859 language_server_id: LanguageServerId,
3860 mut params: lsp::PublishDiagnosticsParams,
3861 disk_based_sources: &[String],
3862 cx: &mut ModelContext<Self>,
3863 ) -> Result<()> {
3864 let abs_path = params
3865 .uri
3866 .to_file_path()
3867 .map_err(|_| anyhow!("URI is not a file"))?;
3868 let mut diagnostics = Vec::default();
3869 let mut primary_diagnostic_group_ids = HashMap::default();
3870 let mut sources_by_group_id = HashMap::default();
3871 let mut supporting_diagnostics = HashMap::default();
3872
3873 // Ensure that primary diagnostics are always the most severe
3874 params.diagnostics.sort_by_key(|item| item.severity);
3875
        for diagnostic in &params.diagnostics {
3877 let source = diagnostic.source.as_ref();
3878 let code = diagnostic.code.as_ref().map(|code| match code {
3879 lsp::NumberOrString::Number(code) => code.to_string(),
3880 lsp::NumberOrString::String(code) => code.clone(),
3881 });
3882 let range = range_from_lsp(diagnostic.range);
3883 let is_supporting = diagnostic
3884 .related_information
3885 .as_ref()
3886 .map_or(false, |infos| {
3887 infos.iter().any(|info| {
3888 primary_diagnostic_group_ids.contains_key(&(
3889 source,
3890 code.clone(),
3891 range_from_lsp(info.location.range),
3892 ))
3893 })
3894 });
3895
3896 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3897 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3898 });
3899
3900 if is_supporting {
3901 supporting_diagnostics.insert(
3902 (source, code.clone(), range),
3903 (diagnostic.severity, is_unnecessary),
3904 );
3905 } else {
3906 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3907 let is_disk_based =
3908 source.map_or(false, |source| disk_based_sources.contains(source));
3909
3910 sources_by_group_id.insert(group_id, source);
3911 primary_diagnostic_group_ids
3912 .insert((source, code.clone(), range.clone()), group_id);
3913
3914 diagnostics.push(DiagnosticEntry {
3915 range,
3916 diagnostic: Diagnostic {
3917 source: diagnostic.source.clone(),
3918 code: code.clone(),
3919 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3920 message: diagnostic.message.clone(),
3921 group_id,
3922 is_primary: true,
3923 is_valid: true,
3924 is_disk_based,
3925 is_unnecessary,
3926 },
3927 });
3928 if let Some(infos) = &diagnostic.related_information {
3929 for info in infos {
3930 if info.location.uri == params.uri && !info.message.is_empty() {
3931 let range = range_from_lsp(info.location.range);
3932 diagnostics.push(DiagnosticEntry {
3933 range,
3934 diagnostic: Diagnostic {
3935 source: diagnostic.source.clone(),
3936 code: code.clone(),
3937 severity: DiagnosticSeverity::INFORMATION,
3938 message: info.message.clone(),
3939 group_id,
3940 is_primary: false,
3941 is_valid: true,
3942 is_disk_based,
3943 is_unnecessary: false,
3944 },
3945 });
3946 }
3947 }
3948 }
3949 }
3950 }
3951
3952 for entry in &mut diagnostics {
3953 let diagnostic = &mut entry.diagnostic;
3954 if !diagnostic.is_primary {
3955 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3956 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3957 source,
3958 diagnostic.code.clone(),
3959 entry.range.clone(),
3960 )) {
3961 if let Some(severity) = severity {
3962 diagnostic.severity = severity;
3963 }
3964 diagnostic.is_unnecessary = is_unnecessary;
3965 }
3966 }
3967 }
3968
3969 self.update_diagnostic_entries(
3970 language_server_id,
3971 abs_path,
3972 params.version,
3973 diagnostics,
3974 cx,
3975 )?;
3976 Ok(())
3977 }
3978
3979 pub fn update_diagnostic_entries(
3980 &mut self,
3981 server_id: LanguageServerId,
3982 abs_path: PathBuf,
3983 version: Option<i32>,
3984 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3985 cx: &mut ModelContext<Project>,
3986 ) -> Result<(), anyhow::Error> {
3987 let (worktree, relative_path) = self
3988 .find_local_worktree(&abs_path, cx)
3989 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3990
3991 let project_path = ProjectPath {
3992 worktree_id: worktree.read(cx).id(),
3993 path: relative_path.into(),
3994 };
3995
3996 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3997 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3998 }
3999
4000 let updated = worktree.update(cx, |worktree, cx| {
4001 worktree
4002 .as_local_mut()
4003 .ok_or_else(|| anyhow!("not a local worktree"))?
4004 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
4005 })?;
4006 if updated {
4007 cx.emit(Event::DiagnosticsUpdated {
4008 language_server_id: server_id,
4009 path: project_path,
4010 });
4011 }
4012 Ok(())
4013 }
4014
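    /// Clips the given diagnostics against the buffer snapshot that matches the reported
    /// LSP version, adjusting disk-based diagnostics for unsaved edits and expanding empty
    /// ranges, then stores the resulting diagnostic set on the buffer.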
4015 fn update_buffer_diagnostics(
4016 &mut self,
4017 buffer: &Model<Buffer>,
4018 server_id: LanguageServerId,
4019 version: Option<i32>,
4020 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4021 cx: &mut ModelContext<Self>,
4022 ) -> Result<()> {
4023 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4024 Ordering::Equal
4025 .then_with(|| b.is_primary.cmp(&a.is_primary))
4026 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4027 .then_with(|| a.severity.cmp(&b.severity))
4028 .then_with(|| a.message.cmp(&b.message))
4029 }
4030
4031 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4032
4033 diagnostics.sort_unstable_by(|a, b| {
4034 Ordering::Equal
4035 .then_with(|| a.range.start.cmp(&b.range.start))
4036 .then_with(|| b.range.end.cmp(&a.range.end))
4037 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4038 });
4039
4040 let mut sanitized_diagnostics = Vec::new();
4041 let edits_since_save = Patch::new(
4042 snapshot
4043 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4044 .collect(),
4045 );
4046 for entry in diagnostics {
4047 let start;
4048 let end;
4049 if entry.diagnostic.is_disk_based {
4050 // Some diagnostics are based on files on disk instead of buffers'
4051 // current contents. Adjust these diagnostics' ranges to reflect
4052 // any unsaved edits.
4053 start = edits_since_save.old_to_new(entry.range.start);
4054 end = edits_since_save.old_to_new(entry.range.end);
4055 } else {
4056 start = entry.range.start;
4057 end = entry.range.end;
4058 }
4059
4060 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4061 ..snapshot.clip_point_utf16(end, Bias::Right);
4062
4063 // Expand empty ranges by one codepoint
4064 if range.start == range.end {
                // This will go to the next boundary when being clipped
4066 range.end.column += 1;
4067 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4068 if range.start == range.end && range.end.column > 0 {
4069 range.start.column -= 1;
4070 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
4071 }
4072 }
4073
4074 sanitized_diagnostics.push(DiagnosticEntry {
4075 range,
4076 diagnostic: entry.diagnostic,
4077 });
4078 }
4079 drop(edits_since_save);
4080
4081 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4082 buffer.update(cx, |buffer, cx| {
4083 buffer.update_diagnostics(server_id, set, cx)
4084 });
4085 Ok(())
4086 }
4087
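    /// Reloads the dirty buffers among `buffers` from disk, asking the remote host to
    /// reload any non-local buffers, and returns the combined project transaction.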
4088 pub fn reload_buffers(
4089 &self,
4090 buffers: HashSet<Model<Buffer>>,
4091 push_to_history: bool,
4092 cx: &mut ModelContext<Self>,
4093 ) -> Task<Result<ProjectTransaction>> {
4094 let mut local_buffers = Vec::new();
4095 let mut remote_buffers = None;
4096 for buffer_handle in buffers {
4097 let buffer = buffer_handle.read(cx);
4098 if buffer.is_dirty() {
4099 if let Some(file) = File::from_dyn(buffer.file()) {
4100 if file.is_local() {
4101 local_buffers.push(buffer_handle);
4102 } else {
4103 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4104 }
4105 }
4106 }
4107 }
4108
4109 let remote_buffers = self.remote_id().zip(remote_buffers);
4110 let client = self.client.clone();
4111
4112 cx.spawn(move |this, mut cx| async move {
4113 let mut project_transaction = ProjectTransaction::default();
4114
4115 if let Some((project_id, remote_buffers)) = remote_buffers {
4116 let response = client
4117 .request(proto::ReloadBuffers {
4118 project_id,
4119 buffer_ids: remote_buffers
4120 .iter()
4121 .filter_map(|buffer| {
4122 buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
4123 })
4124 .collect(),
4125 })
4126 .await?
4127 .transaction
4128 .ok_or_else(|| anyhow!("missing transaction"))?;
4129 project_transaction = this
4130 .update(&mut cx, |this, cx| {
4131 this.deserialize_project_transaction(response, push_to_history, cx)
4132 })?
4133 .await?;
4134 }
4135
4136 for buffer in local_buffers {
4137 let transaction = buffer
4138 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4139 .await?;
4140 buffer.update(&mut cx, |buffer, cx| {
4141 if let Some(transaction) = transaction {
4142 if !push_to_history {
4143 buffer.forget_transaction(transaction.id);
4144 }
4145 project_transaction.0.insert(cx.handle(), transaction);
4146 }
4147 })?;
4148 }
4149
4150 Ok(project_transaction)
4151 })
4152 }
4153
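    /// Formats the given buffers according to their language settings: whitespace fixups
    /// are applied first, then a language server, external command, or prettier performs
    /// language-specific formatting, and both steps are grouped into one undo transaction.
    /// For remote projects the request is forwarded to the host.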
4154 pub fn format(
4155 &mut self,
4156 buffers: HashSet<Model<Buffer>>,
4157 push_to_history: bool,
4158 trigger: FormatTrigger,
4159 cx: &mut ModelContext<Project>,
4160 ) -> Task<anyhow::Result<ProjectTransaction>> {
4161 if self.is_local() {
4162 let mut buffers_with_paths_and_servers = buffers
4163 .into_iter()
4164 .filter_map(|buffer_handle| {
4165 let buffer = buffer_handle.read(cx);
4166 let file = File::from_dyn(buffer.file())?;
4167 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4168 let server = self
4169 .primary_language_server_for_buffer(buffer, cx)
4170 .map(|s| s.1.clone());
4171 Some((buffer_handle, buffer_abs_path, server))
4172 })
4173 .collect::<Vec<_>>();
4174
4175 cx.spawn(move |project, mut cx| async move {
4176 // Do not allow multiple concurrent formatting requests for the
4177 // same buffer.
4178 project.update(&mut cx, |this, cx| {
4179 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4180 this.buffers_being_formatted
4181 .insert(buffer.read(cx).remote_id())
4182 });
4183 })?;
4184
4185 let _cleanup = defer({
4186 let this = project.clone();
4187 let mut cx = cx.clone();
4188 let buffers = &buffers_with_paths_and_servers;
4189 move || {
4190 this.update(&mut cx, |this, cx| {
4191 for (buffer, _, _) in buffers {
4192 this.buffers_being_formatted
4193 .remove(&buffer.read(cx).remote_id());
4194 }
4195 })
4196 .ok();
4197 }
4198 });
4199
4200 let mut project_transaction = ProjectTransaction::default();
4201 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4202 let settings = buffer.update(&mut cx, |buffer, cx| {
4203 language_settings(buffer.language(), buffer.file(), cx).clone()
4204 })?;
4205
4206 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4207 let ensure_final_newline = settings.ensure_final_newline_on_save;
4208 let tab_size = settings.tab_size;
4209
                    // First, format the buffer's whitespace according to the settings.
4211 let trailing_whitespace_diff = if remove_trailing_whitespace {
4212 Some(
4213 buffer
4214 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4215 .await,
4216 )
4217 } else {
4218 None
4219 };
4220 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4221 buffer.finalize_last_transaction();
4222 buffer.start_transaction();
4223 if let Some(diff) = trailing_whitespace_diff {
4224 buffer.apply_diff(diff, cx);
4225 }
4226 if ensure_final_newline {
4227 buffer.ensure_final_newline(cx);
4228 }
4229 buffer.end_transaction(cx)
4230 })?;
4231
4232 // Apply language-specific formatting using either a language server
4233 // or external command.
4234 let mut format_operation = None;
4235 match (&settings.formatter, &settings.format_on_save) {
4236 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4237
4238 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4239 | (_, FormatOnSave::LanguageServer) => {
4240 if let Some((language_server, buffer_abs_path)) =
4241 language_server.as_ref().zip(buffer_abs_path.as_ref())
4242 {
4243 format_operation = Some(FormatOperation::Lsp(
4244 Self::format_via_lsp(
4245 &project,
4246 &buffer,
4247 buffer_abs_path,
4248 &language_server,
4249 tab_size,
4250 &mut cx,
4251 )
4252 .await
4253 .context("failed to format via language server")?,
4254 ));
4255 }
4256 }
4257
4258 (
4259 Formatter::External { command, arguments },
4260 FormatOnSave::On | FormatOnSave::Off,
4261 )
4262 | (_, FormatOnSave::External { command, arguments }) => {
4263 if let Some(buffer_abs_path) = buffer_abs_path {
4264 format_operation = Self::format_via_external_command(
4265 buffer,
4266 buffer_abs_path,
4267 &command,
4268 &arguments,
4269 &mut cx,
4270 )
4271 .await
4272 .context(format!(
4273 "failed to format via external command {:?}",
4274 command
4275 ))?
4276 .map(FormatOperation::External);
4277 }
4278 }
4279 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4280 if let Some(new_operation) =
4281 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4282 .await
4283 {
4284 format_operation = Some(new_operation);
4285 } else if let Some((language_server, buffer_abs_path)) =
4286 language_server.as_ref().zip(buffer_abs_path.as_ref())
4287 {
4288 format_operation = Some(FormatOperation::Lsp(
4289 Self::format_via_lsp(
4290 &project,
4291 &buffer,
4292 buffer_abs_path,
4293 &language_server,
4294 tab_size,
4295 &mut cx,
4296 )
4297 .await
4298 .context("failed to format via language server")?,
4299 ));
4300 }
4301 }
4302 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4303 if let Some(new_operation) =
4304 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4305 .await
4306 {
4307 format_operation = Some(new_operation);
4308 }
4309 }
4310 };
4311
4312 buffer.update(&mut cx, |b, cx| {
4313 // If the buffer had its whitespace formatted and was edited while the language-specific
4314 // formatting was being computed, avoid applying the language-specific formatting, because
4315 // it can't be grouped with the whitespace formatting in the undo history.
4316 if let Some(transaction_id) = whitespace_transaction_id {
4317 if b.peek_undo_stack()
4318 .map_or(true, |e| e.transaction_id() != transaction_id)
4319 {
4320 format_operation.take();
4321 }
4322 }
4323
4324 // Apply any language-specific formatting, and group the two formatting operations
4325 // in the buffer's undo history.
4326 if let Some(operation) = format_operation {
4327 match operation {
4328 FormatOperation::Lsp(edits) => {
4329 b.edit(edits, None, cx);
4330 }
4331 FormatOperation::External(diff) => {
4332 b.apply_diff(diff, cx);
4333 }
4334 FormatOperation::Prettier(diff) => {
4335 b.apply_diff(diff, cx);
4336 }
4337 }
4338
4339 if let Some(transaction_id) = whitespace_transaction_id {
4340 b.group_until_transaction(transaction_id);
4341 }
4342 }
4343
4344 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4345 if !push_to_history {
4346 b.forget_transaction(transaction.id);
4347 }
4348 project_transaction.0.insert(buffer.clone(), transaction);
4349 }
4350 })?;
4351 }
4352
4353 Ok(project_transaction)
4354 })
4355 } else {
4356 let remote_id = self.remote_id();
4357 let client = self.client.clone();
4358 cx.spawn(move |this, mut cx| async move {
4359 let mut project_transaction = ProjectTransaction::default();
4360 if let Some(project_id) = remote_id {
4361 let response = client
4362 .request(proto::FormatBuffers {
4363 project_id,
4364 trigger: trigger as i32,
4365 buffer_ids: buffers
4366 .iter()
4367 .map(|buffer| {
4368 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4369 })
4370 .collect::<Result<_>>()?,
4371 })
4372 .await?
4373 .transaction
4374 .ok_or_else(|| anyhow!("missing transaction"))?;
4375 project_transaction = this
4376 .update(&mut cx, |this, cx| {
4377 this.deserialize_project_transaction(response, push_to_history, cx)
4378 })?
4379 .await?;
4380 }
4381 Ok(project_transaction)
4382 })
4383 }
4384 }
4385
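    /// Requests whole-document formatting from the language server, falling back to a
    /// full-buffer range-formatting request when only range formatting is supported,
    /// and converts the returned LSP edits into buffer edits.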
4386 async fn format_via_lsp(
4387 this: &WeakModel<Self>,
4388 buffer: &Model<Buffer>,
4389 abs_path: &Path,
4390 language_server: &Arc<LanguageServer>,
4391 tab_size: NonZeroU32,
4392 cx: &mut AsyncAppContext,
4393 ) -> Result<Vec<(Range<Anchor>, String)>> {
4394 let uri = lsp::Url::from_file_path(abs_path)
4395 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4396 let text_document = lsp::TextDocumentIdentifier::new(uri);
4397 let capabilities = &language_server.capabilities();
4398
4399 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4400 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4401
4402 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4403 language_server
4404 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4405 text_document,
4406 options: lsp_command::lsp_formatting_options(tab_size.get()),
4407 work_done_progress_params: Default::default(),
4408 })
4409 .await?
4410 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4411 let buffer_start = lsp::Position::new(0, 0);
4412 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4413
4414 language_server
4415 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4416 text_document,
4417 range: lsp::Range::new(buffer_start, buffer_end),
4418 options: lsp_command::lsp_formatting_options(tab_size.get()),
4419 work_done_progress_params: Default::default(),
4420 })
4421 .await?
4422 } else {
4423 None
4424 };
4425
4426 if let Some(lsp_edits) = lsp_edits {
4427 this.update(cx, |this, cx| {
4428 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4429 })?
4430 .await
4431 } else {
4432 Ok(Vec::new())
4433 }
4434 }
4435
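    /// Runs the configured external formatter with the buffer's contents piped to stdin,
    /// substituting `{buffer_path}` in its arguments, and returns a diff of the buffer
    /// against the command's stdout.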
4436 async fn format_via_external_command(
4437 buffer: &Model<Buffer>,
4438 buffer_abs_path: &Path,
4439 command: &str,
4440 arguments: &[String],
4441 cx: &mut AsyncAppContext,
4442 ) -> Result<Option<Diff>> {
4443 let working_dir_path = buffer.update(cx, |buffer, cx| {
4444 let file = File::from_dyn(buffer.file())?;
4445 let worktree = file.worktree.read(cx).as_local()?;
4446 let mut worktree_path = worktree.abs_path().to_path_buf();
4447 if worktree.root_entry()?.is_file() {
4448 worktree_path.pop();
4449 }
4450 Some(worktree_path)
4451 })?;
4452
4453 if let Some(working_dir_path) = working_dir_path {
4454 let mut child =
4455 smol::process::Command::new(command)
4456 .args(arguments.iter().map(|arg| {
4457 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4458 }))
4459 .current_dir(&working_dir_path)
4460 .stdin(smol::process::Stdio::piped())
4461 .stdout(smol::process::Stdio::piped())
4462 .stderr(smol::process::Stdio::piped())
4463 .spawn()?;
4464 let stdin = child
4465 .stdin
4466 .as_mut()
4467 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4468 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4469 for chunk in text.chunks() {
4470 stdin.write_all(chunk.as_bytes()).await?;
4471 }
4472 stdin.flush().await?;
4473
4474 let output = child.output().await?;
4475 if !output.status.success() {
4476 return Err(anyhow!(
4477 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4478 output.status.code(),
4479 String::from_utf8_lossy(&output.stdout),
4480 String::from_utf8_lossy(&output.stderr),
4481 ));
4482 }
4483
4484 let stdout = String::from_utf8(output.stdout)?;
4485 Ok(Some(
4486 buffer
4487 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4488 .await,
4489 ))
4490 } else {
4491 Ok(None)
4492 }
4493 }
4494
4495 pub fn definition<T: ToPointUtf16>(
4496 &self,
4497 buffer: &Model<Buffer>,
4498 position: T,
4499 cx: &mut ModelContext<Self>,
4500 ) -> Task<Result<Vec<LocationLink>>> {
4501 let position = position.to_point_utf16(buffer.read(cx));
4502 self.request_lsp(
4503 buffer.clone(),
4504 LanguageServerToQuery::Primary,
4505 GetDefinition { position },
4506 cx,
4507 )
4508 }
4509
4510 pub fn type_definition<T: ToPointUtf16>(
4511 &self,
4512 buffer: &Model<Buffer>,
4513 position: T,
4514 cx: &mut ModelContext<Self>,
4515 ) -> Task<Result<Vec<LocationLink>>> {
4516 let position = position.to_point_utf16(buffer.read(cx));
4517 self.request_lsp(
4518 buffer.clone(),
4519 LanguageServerToQuery::Primary,
4520 GetTypeDefinition { position },
4521 cx,
4522 )
4523 }
4524
4525 pub fn references<T: ToPointUtf16>(
4526 &self,
4527 buffer: &Model<Buffer>,
4528 position: T,
4529 cx: &mut ModelContext<Self>,
4530 ) -> Task<Result<Vec<Location>>> {
4531 let position = position.to_point_utf16(buffer.read(cx));
4532 self.request_lsp(
4533 buffer.clone(),
4534 LanguageServerToQuery::Primary,
4535 GetReferences { position },
4536 cx,
4537 )
4538 }
4539
4540 pub fn document_highlights<T: ToPointUtf16>(
4541 &self,
4542 buffer: &Model<Buffer>,
4543 position: T,
4544 cx: &mut ModelContext<Self>,
4545 ) -> Task<Result<Vec<DocumentHighlight>>> {
4546 let position = position.to_point_utf16(buffer.read(cx));
4547 self.request_lsp(
4548 buffer.clone(),
4549 LanguageServerToQuery::Primary,
4550 GetDocumentHighlights { position },
4551 cx,
4552 )
4553 }
4554
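    /// Queries every running language server for workspace symbols matching `query` and
    /// resolves each result to a project path, language, and display label.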
4555 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4556 if self.is_local() {
4557 let mut requests = Vec::new();
4558 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4559 let worktree_id = *worktree_id;
4560 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4561 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4562 Some(worktree) => worktree,
4563 None => continue,
4564 };
4565 let worktree_abs_path = worktree.abs_path().clone();
4566
4567 let (adapter, language, server) = match self.language_servers.get(server_id) {
4568 Some(LanguageServerState::Running {
4569 adapter,
4570 language,
4571 server,
4572 ..
4573 }) => (adapter.clone(), language.clone(), server),
4574
4575 _ => continue,
4576 };
4577
4578 requests.push(
4579 server
4580 .request::<lsp::request::WorkspaceSymbolRequest>(
4581 lsp::WorkspaceSymbolParams {
4582 query: query.to_string(),
4583 ..Default::default()
4584 },
4585 )
4586 .log_err()
4587 .map(move |response| {
4588 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4589 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4590 flat_responses.into_iter().map(|lsp_symbol| {
4591 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4592 }).collect::<Vec<_>>()
4593 }
4594 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4595 nested_responses.into_iter().filter_map(|lsp_symbol| {
4596 let location = match lsp_symbol.location {
4597 OneOf::Left(location) => location,
4598 OneOf::Right(_) => {
4599 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4600 return None
4601 }
4602 };
4603 Some((lsp_symbol.name, lsp_symbol.kind, location))
4604 }).collect::<Vec<_>>()
4605 }
4606 }).unwrap_or_default();
4607
4608 (
4609 adapter,
4610 language,
4611 worktree_id,
4612 worktree_abs_path,
4613 lsp_symbols,
4614 )
4615 }),
4616 );
4617 }
4618
4619 cx.spawn(move |this, mut cx| async move {
4620 let responses = futures::future::join_all(requests).await;
4621 let this = match this.upgrade() {
4622 Some(this) => this,
4623 None => return Ok(Vec::new()),
4624 };
4625
4626 let symbols = this.update(&mut cx, |this, cx| {
4627 let mut symbols = Vec::new();
4628 for (
4629 adapter,
4630 adapter_language,
4631 source_worktree_id,
4632 worktree_abs_path,
4633 lsp_symbols,
4634 ) in responses
4635 {
4636 symbols.extend(lsp_symbols.into_iter().filter_map(
4637 |(symbol_name, symbol_kind, symbol_location)| {
4638 let abs_path = symbol_location.uri.to_file_path().ok()?;
4639 let mut worktree_id = source_worktree_id;
4640 let path;
4641 if let Some((worktree, rel_path)) =
4642 this.find_local_worktree(&abs_path, cx)
4643 {
4644 worktree_id = worktree.read(cx).id();
4645 path = rel_path;
4646 } else {
4647 path = relativize_path(&worktree_abs_path, &abs_path);
4648 }
4649
4650 let project_path = ProjectPath {
4651 worktree_id,
4652 path: path.into(),
4653 };
4654 let signature = this.symbol_signature(&project_path);
4655 let adapter_language = adapter_language.clone();
4656 let language = this
4657 .languages
4658 .language_for_file(&project_path.path, None)
4659 .unwrap_or_else(move |_| adapter_language);
4660 let language_server_name = adapter.name.clone();
4661 Some(async move {
4662 let language = language.await;
4663 let label =
4664 language.label_for_symbol(&symbol_name, symbol_kind).await;
4665
4666 Symbol {
4667 language_server_name,
4668 source_worktree_id,
4669 path: project_path,
4670 label: label.unwrap_or_else(|| {
4671 CodeLabel::plain(symbol_name.clone(), None)
4672 }),
4673 kind: symbol_kind,
4674 name: symbol_name,
4675 range: range_from_lsp(symbol_location.range),
4676 signature,
4677 }
4678 })
4679 },
4680 ));
4681 }
4682
4683 symbols
4684 })?;
4685
4686 Ok(futures::future::join_all(symbols).await)
4687 })
4688 } else if let Some(project_id) = self.remote_id() {
4689 let request = self.client.request(proto::GetProjectSymbols {
4690 project_id,
4691 query: query.to_string(),
4692 });
4693 cx.spawn(move |this, mut cx| async move {
4694 let response = request.await?;
4695 let mut symbols = Vec::new();
4696 if let Some(this) = this.upgrade() {
4697 let new_symbols = this.update(&mut cx, |this, _| {
4698 response
4699 .symbols
4700 .into_iter()
4701 .map(|symbol| this.deserialize_symbol(symbol))
4702 .collect::<Vec<_>>()
4703 })?;
4704 symbols = futures::future::join_all(new_symbols)
4705 .await
4706 .into_iter()
4707 .filter_map(|symbol| symbol.log_err())
4708 .collect::<Vec<_>>();
4709 }
4710 Ok(symbols)
4711 })
4712 } else {
4713 Task::ready(Ok(Default::default()))
4714 }
4715 }
4716
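    /// Opens the buffer containing the given workspace symbol via the language server
    /// that reported it, or via the host when the project is remote.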
4717 pub fn open_buffer_for_symbol(
4718 &mut self,
4719 symbol: &Symbol,
4720 cx: &mut ModelContext<Self>,
4721 ) -> Task<Result<Model<Buffer>>> {
4722 if self.is_local() {
4723 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4724 symbol.source_worktree_id,
4725 symbol.language_server_name.clone(),
4726 )) {
4727 *id
4728 } else {
4729 return Task::ready(Err(anyhow!(
4730 "language server for worktree and language not found"
4731 )));
4732 };
4733
4734 let worktree_abs_path = if let Some(worktree_abs_path) = self
4735 .worktree_for_id(symbol.path.worktree_id, cx)
4736 .and_then(|worktree| worktree.read(cx).as_local())
4737 .map(|local_worktree| local_worktree.abs_path())
4738 {
4739 worktree_abs_path
4740 } else {
4741 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4742 };
4743
4744 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
4745 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4746 uri
4747 } else {
4748 return Task::ready(Err(anyhow!("invalid symbol path")));
4749 };
4750
4751 self.open_local_buffer_via_lsp(
4752 symbol_uri,
4753 language_server_id,
4754 symbol.language_server_name.clone(),
4755 cx,
4756 )
4757 } else if let Some(project_id) = self.remote_id() {
4758 let request = self.client.request(proto::OpenBufferForSymbol {
4759 project_id,
4760 symbol: Some(serialize_symbol(symbol)),
4761 });
4762 cx.spawn(move |this, mut cx| async move {
4763 let response = request.await?;
4764 this.update(&mut cx, |this, cx| {
4765 this.wait_for_remote_buffer(response.buffer_id, cx)
4766 })?
4767 .await
4768 })
4769 } else {
4770 Task::ready(Err(anyhow!("project does not have a remote id")))
4771 }
4772 }
4773
4774 pub fn hover<T: ToPointUtf16>(
4775 &self,
4776 buffer: &Model<Buffer>,
4777 position: T,
4778 cx: &mut ModelContext<Self>,
4779 ) -> Task<Result<Option<Hover>>> {
4780 let position = position.to_point_utf16(buffer.read(cx));
4781 self.request_lsp(
4782 buffer.clone(),
4783 LanguageServerToQuery::Primary,
4784 GetHover { position },
4785 cx,
4786 )
4787 }
4788
4789 pub fn completions<T: ToOffset + ToPointUtf16>(
4790 &self,
4791 buffer: &Model<Buffer>,
4792 position: T,
4793 cx: &mut ModelContext<Self>,
4794 ) -> Task<Result<Vec<Completion>>> {
4795 let position = position.to_point_utf16(buffer.read(cx));
4796 if self.is_local() {
4797 let snapshot = buffer.read(cx).snapshot();
4798 let offset = position.to_offset(&snapshot);
4799 let scope = snapshot.language_scope_at(offset);
4800
4801 let server_ids: Vec<_> = self
4802 .language_servers_for_buffer(buffer.read(cx), cx)
4803 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4804 .filter(|(adapter, _)| {
4805 scope
4806 .as_ref()
4807 .map(|scope| scope.language_allowed(&adapter.name))
4808 .unwrap_or(true)
4809 })
4810 .map(|(_, server)| server.server_id())
4811 .collect();
4812
4813 let buffer = buffer.clone();
4814 cx.spawn(move |this, mut cx| async move {
4815 let mut tasks = Vec::with_capacity(server_ids.len());
4816 this.update(&mut cx, |this, cx| {
4817 for server_id in server_ids {
4818 tasks.push(this.request_lsp(
4819 buffer.clone(),
4820 LanguageServerToQuery::Other(server_id),
4821 GetCompletions { position },
4822 cx,
4823 ));
4824 }
4825 })?;
4826
4827 let mut completions = Vec::new();
4828 for task in tasks {
4829 if let Ok(new_completions) = task.await {
4830 completions.extend_from_slice(&new_completions);
4831 }
4832 }
4833
4834 Ok(completions)
4835 })
4836 } else if let Some(project_id) = self.remote_id() {
4837 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4838 } else {
4839 Task::ready(Ok(Default::default()))
4840 }
4841 }
4842
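    /// Applies a completion's additional text edits, resolving the completion item first
    /// when the server supports it, and skipping any edit that overlaps the primary
    /// completion range.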
4843 pub fn apply_additional_edits_for_completion(
4844 &self,
4845 buffer_handle: Model<Buffer>,
4846 completion: Completion,
4847 push_to_history: bool,
4848 cx: &mut ModelContext<Self>,
4849 ) -> Task<Result<Option<Transaction>>> {
4850 let buffer = buffer_handle.read(cx);
4851 let buffer_id = buffer.remote_id();
4852
4853 if self.is_local() {
4854 let server_id = completion.server_id;
4855 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4856 Some((_, server)) => server.clone(),
4857 _ => return Task::ready(Ok(Default::default())),
4858 };
4859
4860 cx.spawn(move |this, mut cx| async move {
4861 let can_resolve = lang_server
4862 .capabilities()
4863 .completion_provider
4864 .as_ref()
4865 .and_then(|options| options.resolve_provider)
4866 .unwrap_or(false);
4867 let additional_text_edits = if can_resolve {
4868 lang_server
4869 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4870 .await?
4871 .additional_text_edits
4872 } else {
4873 completion.lsp_completion.additional_text_edits
4874 };
4875 if let Some(edits) = additional_text_edits {
4876 let edits = this
4877 .update(&mut cx, |this, cx| {
4878 this.edits_from_lsp(
4879 &buffer_handle,
4880 edits,
4881 lang_server.server_id(),
4882 None,
4883 cx,
4884 )
4885 })?
4886 .await?;
4887
4888 buffer_handle.update(&mut cx, |buffer, cx| {
4889 buffer.finalize_last_transaction();
4890 buffer.start_transaction();
4891
4892 for (range, text) in edits {
4893 let primary = &completion.old_range;
4894 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4895 && primary.end.cmp(&range.start, buffer).is_ge();
4896 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4897 && range.end.cmp(&primary.end, buffer).is_ge();
4898
                            // Skip additional edits which overlap with the primary completion edit
                            // https://github.com/zed-industries/zed/pull/1871
4901 if !start_within && !end_within {
4902 buffer.edit([(range, text)], None, cx);
4903 }
4904 }
4905
4906 let transaction = if buffer.end_transaction(cx).is_some() {
4907 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4908 if !push_to_history {
4909 buffer.forget_transaction(transaction.id);
4910 }
4911 Some(transaction)
4912 } else {
4913 None
4914 };
4915 Ok(transaction)
4916 })?
4917 } else {
4918 Ok(None)
4919 }
4920 })
4921 } else if let Some(project_id) = self.remote_id() {
4922 let client = self.client.clone();
4923 cx.spawn(move |_, mut cx| async move {
4924 let response = client
4925 .request(proto::ApplyCompletionAdditionalEdits {
4926 project_id,
4927 buffer_id,
4928 completion: Some(language::proto::serialize_completion(&completion)),
4929 })
4930 .await?;
4931
4932 if let Some(transaction) = response.transaction {
4933 let transaction = language::proto::deserialize_transaction(transaction)?;
4934 buffer_handle
4935 .update(&mut cx, |buffer, _| {
4936 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4937 })?
4938 .await?;
4939 if push_to_history {
4940 buffer_handle.update(&mut cx, |buffer, _| {
4941 buffer.push_transaction(transaction.clone(), Instant::now());
4942 })?;
4943 }
4944 Ok(Some(transaction))
4945 } else {
4946 Ok(None)
4947 }
4948 })
4949 } else {
4950 Task::ready(Err(anyhow!("project does not have a remote id")))
4951 }
4952 }
4953
4954 pub fn code_actions<T: Clone + ToOffset>(
4955 &self,
4956 buffer_handle: &Model<Buffer>,
4957 range: Range<T>,
4958 cx: &mut ModelContext<Self>,
4959 ) -> Task<Result<Vec<CodeAction>>> {
4960 let buffer = buffer_handle.read(cx);
4961 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4962 self.request_lsp(
4963 buffer_handle.clone(),
4964 LanguageServerToQuery::Primary,
4965 GetCodeActions { range },
4966 cx,
4967 )
4968 }
4969
4970 pub fn apply_code_action(
4971 &self,
4972 buffer_handle: Model<Buffer>,
4973 mut action: CodeAction,
4974 push_to_history: bool,
4975 cx: &mut ModelContext<Self>,
4976 ) -> Task<Result<ProjectTransaction>> {
4977 if self.is_local() {
4978 let buffer = buffer_handle.read(cx);
4979 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4980 self.language_server_for_buffer(buffer, action.server_id, cx)
4981 {
4982 (adapter.clone(), server.clone())
4983 } else {
4984 return Task::ready(Ok(Default::default()));
4985 };
4986 let range = action.range.to_point_utf16(buffer);
4987
4988 cx.spawn(move |this, mut cx| async move {
4989 if let Some(lsp_range) = action
4990 .lsp_action
4991 .data
4992 .as_mut()
4993 .and_then(|d| d.get_mut("codeActionParams"))
4994 .and_then(|d| d.get_mut("range"))
4995 {
4996 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
4997 action.lsp_action = lang_server
4998 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
4999 .await?;
5000 } else {
5001 let actions = this
5002 .update(&mut cx, |this, cx| {
5003 this.code_actions(&buffer_handle, action.range, cx)
5004 })?
5005 .await?;
5006 action.lsp_action = actions
5007 .into_iter()
5008 .find(|a| a.lsp_action.title == action.lsp_action.title)
5009 .ok_or_else(|| anyhow!("code action is outdated"))?
5010 .lsp_action;
5011 }
5012
5013 if let Some(edit) = action.lsp_action.edit {
5014 if edit.changes.is_some() || edit.document_changes.is_some() {
5015 return Self::deserialize_workspace_edit(
5016 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5017 edit,
5018 push_to_history,
5019 lsp_adapter.clone(),
5020 lang_server.clone(),
5021 &mut cx,
5022 )
5023 .await;
5024 }
5025 }
5026
5027 if let Some(command) = action.lsp_action.command {
5028 this.update(&mut cx, |this, _| {
5029 this.last_workspace_edits_by_language_server
5030 .remove(&lang_server.server_id());
5031 })?;
5032
5033 let result = lang_server
5034 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5035 command: command.command,
5036 arguments: command.arguments.unwrap_or_default(),
5037 ..Default::default()
5038 })
5039 .await;
5040
5041 if let Err(err) = result {
5042 // TODO: LSP ERROR
5043 return Err(err);
5044 }
5045
5046 return Ok(this.update(&mut cx, |this, _| {
5047 this.last_workspace_edits_by_language_server
5048 .remove(&lang_server.server_id())
5049 .unwrap_or_default()
5050 })?);
5051 }
5052
5053 Ok(ProjectTransaction::default())
5054 })
5055 } else if let Some(project_id) = self.remote_id() {
5056 let client = self.client.clone();
5057 let request = proto::ApplyCodeAction {
5058 project_id,
5059 buffer_id: buffer_handle.read(cx).remote_id(),
5060 action: Some(language::proto::serialize_code_action(&action)),
5061 };
5062 cx.spawn(move |this, mut cx| async move {
5063 let response = client
5064 .request(request)
5065 .await?
5066 .transaction
5067 .ok_or_else(|| anyhow!("missing transaction"))?;
5068 this.update(&mut cx, |this, cx| {
5069 this.deserialize_project_transaction(response, push_to_history, cx)
5070 })?
5071 .await
5072 })
5073 } else {
5074 Task::ready(Err(anyhow!("project does not have a remote id")))
5075 }
5076 }
5077
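    /// Performs on-type formatting for the given trigger character, making sure only one
    /// formatting request runs per buffer at a time.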
5078 fn apply_on_type_formatting(
5079 &self,
5080 buffer: Model<Buffer>,
5081 position: Anchor,
5082 trigger: String,
5083 cx: &mut ModelContext<Self>,
5084 ) -> Task<Result<Option<Transaction>>> {
5085 if self.is_local() {
5086 cx.spawn(move |this, mut cx| async move {
5087 // Do not allow multiple concurrent formatting requests for the
5088 // same buffer.
5089 this.update(&mut cx, |this, cx| {
5090 this.buffers_being_formatted
5091 .insert(buffer.read(cx).remote_id())
5092 })?;
5093
5094 let _cleanup = defer({
5095 let this = this.clone();
5096 let mut cx = cx.clone();
5097 let closure_buffer = buffer.clone();
5098 move || {
5099 this.update(&mut cx, |this, cx| {
5100 this.buffers_being_formatted
5101 .remove(&closure_buffer.read(cx).remote_id());
5102 })
5103 .ok();
5104 }
5105 });
5106
5107 buffer
5108 .update(&mut cx, |buffer, _| {
5109 buffer.wait_for_edits(Some(position.timestamp))
5110 })?
5111 .await?;
5112 this.update(&mut cx, |this, cx| {
5113 let position = position.to_point_utf16(buffer.read(cx));
5114 this.on_type_format(buffer, position, trigger, false, cx)
5115 })?
5116 .await
5117 })
5118 } else if let Some(project_id) = self.remote_id() {
5119 let client = self.client.clone();
5120 let request = proto::OnTypeFormatting {
5121 project_id,
5122 buffer_id: buffer.read(cx).remote_id(),
5123 position: Some(serialize_anchor(&position)),
5124 trigger,
5125 version: serialize_version(&buffer.read(cx).version()),
5126 };
5127 cx.spawn(move |_, _| async move {
5128 client
5129 .request(request)
5130 .await?
5131 .transaction
5132 .map(language::proto::deserialize_transaction)
5133 .transpose()
5134 })
5135 } else {
5136 Task::ready(Err(anyhow!("project does not have a remote id")))
5137 }
5138 }
5139
5140 async fn deserialize_edits(
5141 this: Model<Self>,
5142 buffer_to_edit: Model<Buffer>,
5143 edits: Vec<lsp::TextEdit>,
5144 push_to_history: bool,
5145 _: Arc<CachedLspAdapter>,
5146 language_server: Arc<LanguageServer>,
5147 cx: &mut AsyncAppContext,
5148 ) -> Result<Option<Transaction>> {
5149 let edits = this
5150 .update(cx, |this, cx| {
5151 this.edits_from_lsp(
5152 &buffer_to_edit,
5153 edits,
5154 language_server.server_id(),
5155 None,
5156 cx,
5157 )
5158 })?
5159 .await?;
5160
5161 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5162 buffer.finalize_last_transaction();
5163 buffer.start_transaction();
5164 for (range, text) in edits {
5165 buffer.edit([(range, text)], None, cx);
5166 }
5167
5168 if buffer.end_transaction(cx).is_some() {
5169 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5170 if !push_to_history {
5171 buffer.forget_transaction(transaction.id);
5172 }
5173 Some(transaction)
5174 } else {
5175 None
5176 }
5177 })?;
5178
5179 Ok(transaction)
5180 }
5181
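    /// Applies an LSP workspace edit to the project: resource operations (create, rename,
    /// delete) go through the filesystem, text edits are applied to the affected buffers,
    /// and the per-buffer transactions are collected into a project transaction.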
5182 async fn deserialize_workspace_edit(
5183 this: Model<Self>,
5184 edit: lsp::WorkspaceEdit,
5185 push_to_history: bool,
5186 lsp_adapter: Arc<CachedLspAdapter>,
5187 language_server: Arc<LanguageServer>,
5188 cx: &mut AsyncAppContext,
5189 ) -> Result<ProjectTransaction> {
5190 let fs = this.update(cx, |this, _| this.fs.clone())?;
5191 let mut operations = Vec::new();
5192 if let Some(document_changes) = edit.document_changes {
5193 match document_changes {
5194 lsp::DocumentChanges::Edits(edits) => {
5195 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5196 }
5197 lsp::DocumentChanges::Operations(ops) => operations = ops,
5198 }
5199 } else if let Some(changes) = edit.changes {
5200 operations.extend(changes.into_iter().map(|(uri, edits)| {
5201 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5202 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5203 uri,
5204 version: None,
5205 },
5206 edits: edits.into_iter().map(OneOf::Left).collect(),
5207 })
5208 }));
5209 }
5210
5211 let mut project_transaction = ProjectTransaction::default();
5212 for operation in operations {
5213 match operation {
5214 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5215 let abs_path = op
5216 .uri
5217 .to_file_path()
5218 .map_err(|_| anyhow!("can't convert URI to path"))?;
5219
5220 if let Some(parent_path) = abs_path.parent() {
5221 fs.create_dir(parent_path).await?;
5222 }
5223 if abs_path.ends_with("/") {
5224 fs.create_dir(&abs_path).await?;
5225 } else {
5226 fs.create_file(
5227 &abs_path,
5228 op.options
5229 .map(|options| fs::CreateOptions {
5230 overwrite: options.overwrite.unwrap_or(false),
5231 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5232 })
5233 .unwrap_or_default(),
5234 )
5235 .await?;
5236 }
5237 }
5238
5239 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5240 let source_abs_path = op
5241 .old_uri
5242 .to_file_path()
5243 .map_err(|_| anyhow!("can't convert URI to path"))?;
5244 let target_abs_path = op
5245 .new_uri
5246 .to_file_path()
5247 .map_err(|_| anyhow!("can't convert URI to path"))?;
5248 fs.rename(
5249 &source_abs_path,
5250 &target_abs_path,
5251 op.options
5252 .map(|options| fs::RenameOptions {
5253 overwrite: options.overwrite.unwrap_or(false),
5254 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5255 })
5256 .unwrap_or_default(),
5257 )
5258 .await?;
5259 }
5260
5261 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5262 let abs_path = op
5263 .uri
5264 .to_file_path()
5265 .map_err(|_| anyhow!("can't convert URI to path"))?;
5266 let options = op
5267 .options
5268 .map(|options| fs::RemoveOptions {
5269 recursive: options.recursive.unwrap_or(false),
5270 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5271 })
5272 .unwrap_or_default();
5273 if abs_path.ends_with("/") {
5274 fs.remove_dir(&abs_path, options).await?;
5275 } else {
5276 fs.remove_file(&abs_path, options).await?;
5277 }
5278 }
5279
5280 lsp::DocumentChangeOperation::Edit(op) => {
5281 let buffer_to_edit = this
5282 .update(cx, |this, cx| {
5283 this.open_local_buffer_via_lsp(
5284 op.text_document.uri,
5285 language_server.server_id(),
5286 lsp_adapter.name.clone(),
5287 cx,
5288 )
5289 })?
5290 .await?;
5291
5292 let edits = this
5293 .update(cx, |this, cx| {
5294 let edits = op.edits.into_iter().map(|edit| match edit {
5295 OneOf::Left(edit) => edit,
5296 OneOf::Right(edit) => edit.text_edit,
5297 });
5298 this.edits_from_lsp(
5299 &buffer_to_edit,
5300 edits,
5301 language_server.server_id(),
5302 op.text_document.version,
5303 cx,
5304 )
5305 })?
5306 .await?;
5307
5308 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5309 buffer.finalize_last_transaction();
5310 buffer.start_transaction();
5311 for (range, text) in edits {
5312 buffer.edit([(range, text)], None, cx);
5313 }
5314 let transaction = if buffer.end_transaction(cx).is_some() {
5315 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5316 if !push_to_history {
5317 buffer.forget_transaction(transaction.id);
5318 }
5319 Some(transaction)
5320 } else {
5321 None
5322 };
5323
5324 transaction
5325 })?;
5326 if let Some(transaction) = transaction {
5327 project_transaction.0.insert(buffer_to_edit, transaction);
5328 }
5329 }
5330 }
5331 }
5332
5333 Ok(project_transaction)
5334 }
5335
5336 pub fn prepare_rename<T: ToPointUtf16>(
5337 &self,
5338 buffer: Model<Buffer>,
5339 position: T,
5340 cx: &mut ModelContext<Self>,
5341 ) -> Task<Result<Option<Range<Anchor>>>> {
5342 let position = position.to_point_utf16(buffer.read(cx));
5343 self.request_lsp(
5344 buffer,
5345 LanguageServerToQuery::Primary,
5346 PrepareRename { position },
5347 cx,
5348 )
5349 }
5350
5351 pub fn perform_rename<T: ToPointUtf16>(
5352 &self,
5353 buffer: Model<Buffer>,
5354 position: T,
5355 new_name: String,
5356 push_to_history: bool,
5357 cx: &mut ModelContext<Self>,
5358 ) -> Task<Result<ProjectTransaction>> {
5359 let position = position.to_point_utf16(buffer.read(cx));
5360 self.request_lsp(
5361 buffer,
5362 LanguageServerToQuery::Primary,
5363 PerformRename {
5364 position,
5365 new_name,
5366 push_to_history,
5367 },
5368 cx,
5369 )
5370 }
5371
5372 pub fn on_type_format<T: ToPointUtf16>(
5373 &self,
5374 buffer: Model<Buffer>,
5375 position: T,
5376 trigger: String,
5377 push_to_history: bool,
5378 cx: &mut ModelContext<Self>,
5379 ) -> Task<Result<Option<Transaction>>> {
5380 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5381 let position = position.to_point_utf16(buffer);
5382 (
5383 position,
5384 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5385 .tab_size,
5386 )
5387 });
5388 self.request_lsp(
5389 buffer.clone(),
5390 LanguageServerToQuery::Primary,
5391 OnTypeFormatting {
5392 position,
5393 trigger,
5394 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5395 push_to_history,
5396 },
5397 cx,
5398 )
5399 }
5400
5401 pub fn inlay_hints<T: ToOffset>(
5402 &self,
5403 buffer_handle: Model<Buffer>,
5404 range: Range<T>,
5405 cx: &mut ModelContext<Self>,
5406 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5407 let buffer = buffer_handle.read(cx);
5408 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5409 let range_start = range.start;
5410 let range_end = range.end;
5411 let buffer_id = buffer.remote_id();
5412 let buffer_version = buffer.version().clone();
5413 let lsp_request = InlayHints { range };
5414
5415 if self.is_local() {
5416 let lsp_request_task = self.request_lsp(
5417 buffer_handle.clone(),
5418 LanguageServerToQuery::Primary,
5419 lsp_request,
5420 cx,
5421 );
5422 cx.spawn(move |_, mut cx| async move {
5423 buffer_handle
5424 .update(&mut cx, |buffer, _| {
5425 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5426 })?
5427 .await
5428 .context("waiting for inlay hint request range edits")?;
5429 lsp_request_task.await.context("inlay hints LSP request")
5430 })
5431 } else if let Some(project_id) = self.remote_id() {
5432 let client = self.client.clone();
5433 let request = proto::InlayHints {
5434 project_id,
5435 buffer_id,
5436 start: Some(serialize_anchor(&range_start)),
5437 end: Some(serialize_anchor(&range_end)),
5438 version: serialize_version(&buffer_version),
5439 };
5440 cx.spawn(move |project, cx| async move {
5441 let response = client
5442 .request(request)
5443 .await
5444 .context("inlay hints proto request")?;
5445 let hints_request_result = LspCommand::response_from_proto(
5446 lsp_request,
5447 response,
5448 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5449 buffer_handle.clone(),
5450 cx,
5451 )
5452 .await;
5453
5454 hints_request_result.context("inlay hints proto response conversion")
5455 })
5456 } else {
5457 Task::ready(Err(anyhow!("project does not have a remote id")))
5458 }
5459 }
5460
5461 pub fn resolve_inlay_hint(
5462 &self,
5463 hint: InlayHint,
5464 buffer_handle: Model<Buffer>,
5465 server_id: LanguageServerId,
5466 cx: &mut ModelContext<Self>,
5467 ) -> Task<anyhow::Result<InlayHint>> {
5468 if self.is_local() {
5469 let buffer = buffer_handle.read(cx);
5470 let (_, lang_server) = if let Some((adapter, server)) =
5471 self.language_server_for_buffer(buffer, server_id, cx)
5472 {
5473 (adapter.clone(), server.clone())
5474 } else {
5475 return Task::ready(Ok(hint));
5476 };
5477 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5478 return Task::ready(Ok(hint));
5479 }
5480
5481 let buffer_snapshot = buffer.snapshot();
5482 cx.spawn(move |_, mut cx| async move {
5483 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5484 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5485 );
5486 let resolved_hint = resolve_task
5487 .await
5488 .context("inlay hint resolve LSP request")?;
5489 let resolved_hint = InlayHints::lsp_to_project_hint(
5490 resolved_hint,
5491 &buffer_handle,
5492 server_id,
5493 ResolveState::Resolved,
5494 false,
5495 &mut cx,
5496 )
5497 .await?;
5498 Ok(resolved_hint)
5499 })
5500 } else if let Some(project_id) = self.remote_id() {
5501 let client = self.client.clone();
5502 let request = proto::ResolveInlayHint {
5503 project_id,
5504 buffer_id: buffer_handle.read(cx).remote_id(),
5505 language_server_id: server_id.0 as u64,
5506 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5507 };
5508 cx.spawn(move |_, _| async move {
5509 let response = client
5510 .request(request)
5511 .await
5512 .context("inlay hints proto request")?;
5513 match response.hint {
5514 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5515 .context("inlay hints proto resolve response conversion"),
5516 None => Ok(hint),
5517 }
5518 })
5519 } else {
5520 Task::ready(Err(anyhow!("project does not have a remote id")))
5521 }
5522 }
5523
5524 #[allow(clippy::type_complexity)]
5525 pub fn search(
5526 &self,
5527 query: SearchQuery,
5528 cx: &mut ModelContext<Self>,
5529 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5530 if self.is_local() {
5531 self.search_local(query, cx)
5532 } else if let Some(project_id) = self.remote_id() {
5533 let (tx, rx) = smol::channel::unbounded();
5534 let request = self.client.request(query.to_proto(project_id));
5535 cx.spawn(move |this, mut cx| async move {
5536 let response = request.await?;
5537 let mut result = HashMap::default();
5538 for location in response.locations {
5539 let target_buffer = this
5540 .update(&mut cx, |this, cx| {
5541 this.wait_for_remote_buffer(location.buffer_id, cx)
5542 })?
5543 .await?;
5544 let start = location
5545 .start
5546 .and_then(deserialize_anchor)
5547 .ok_or_else(|| anyhow!("missing target start"))?;
5548 let end = location
5549 .end
5550 .and_then(deserialize_anchor)
5551 .ok_or_else(|| anyhow!("missing target end"))?;
5552 result
5553 .entry(target_buffer)
5554 .or_insert(Vec::new())
5555 .push(start..end)
5556 }
5557 for (buffer, ranges) in result {
5558 let _ = tx.send((buffer, ranges)).await;
5559 }
5560 Result::<(), anyhow::Error>::Ok(())
5561 })
5562 .detach_and_log_err(cx);
5563 rx
5564 } else {
5565 unimplemented!();
5566 }
5567 }
5568
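    /// Searches all local worktrees and open buffers for the given query, streaming the
    /// results sorted by buffer path. See the comments below for details of the two-pass
    /// approach.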
5569 pub fn search_local(
5570 &self,
5571 query: SearchQuery,
5572 cx: &mut ModelContext<Self>,
5573 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
        // Local search is split into several phases.
        // The TL;DR is that we do two passes: an initial pass to pick files that contain at least one match,
        // and a second pass that finds the positions of all the matches in those candidate files.
5577 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5578 //
5579 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5580 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5581 //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
        //    Then we go through each worktree and check for files that match a predicate. If a file has an opened version, we skip the scan
        //    of the FS version for that file altogether - after all, what we have in memory is more up-to-date than what's on disk.
5585 // 2. At this point, we have a list of all potentially matching buffers/files.
5586 // We sort that list by buffer path - this list is retained for later use.
5587 // We ensure that all buffers are now opened and available in project.
5588 // 3. We run a scan over all the candidate buffers on multiple background threads.
5589 // We cannot assume that there will even be a match - while at least one match
5590 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
        //    There is also an auxiliary background thread responsible for result gathering.
        //    This is where the sorted list of buffers comes into play to maintain sorted order: whenever this background thread receives a notification (buffer has/doesn't have matches),
        //    it stores it. It accepts results in unsorted order, but reports them in sorted order.
        //    As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
        //    entry - which might already be available thanks to out-of-order processing.
5596 //
        // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // However, this would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
        // Sadly, that isn't as straightforward as running an insertion sort, and it would also mean having to maintain a stable match index
        // in the face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since matches are already reported in sorted order.
5602 let snapshots = self
5603 .visible_worktrees(cx)
5604 .filter_map(|tree| {
5605 let tree = tree.read(cx).as_local()?;
5606 Some(tree.snapshot())
5607 })
5608 .collect::<Vec<_>>();
5609
5610 let background = cx.background_executor().clone();
5611 let path_count: usize = snapshots
5612 .iter()
5613 .map(|s| {
5614 if query.include_ignored() {
5615 s.file_count()
5616 } else {
5617 s.visible_file_count()
5618 }
5619 })
5620 .sum();
5621 if path_count == 0 {
5622 let (_, rx) = smol::channel::bounded(1024);
5623 return rx;
5624 }
5625 let workers = background.num_cpus().min(path_count);
5626 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5627 let mut unnamed_files = vec![];
5628 let opened_buffers = self
5629 .opened_buffers
5630 .iter()
5631 .filter_map(|(_, b)| {
5632 let buffer = b.upgrade()?;
5633 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5634 let is_ignored = buffer
5635 .project_path(cx)
5636 .and_then(|path| self.entry_for_path(&path, cx))
5637 .map_or(false, |entry| entry.is_ignored);
5638 (is_ignored, buffer.snapshot())
5639 });
5640 if is_ignored && !query.include_ignored() {
5641 return None;
5642 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5643 Some((path.clone(), (buffer, snapshot)))
5644 } else {
5645 unnamed_files.push(buffer);
5646 None
5647 }
5648 })
5649 .collect();
5650 cx.background_executor()
5651 .spawn(Self::background_search(
5652 unnamed_files,
5653 opened_buffers,
5654 cx.background_executor().clone(),
5655 self.fs.clone(),
5656 workers,
5657 query.clone(),
5658 path_count,
5659 snapshots,
5660 matching_paths_tx,
5661 ))
5662 .detach();
5663
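        // Sort the candidates by path, open a buffer for each of them, and stream (buffer, index) pairs
        // to the scanning workers spawned below.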
5664 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5665 let background = cx.background_executor().clone();
5666 let (result_tx, result_rx) = smol::channel::bounded(1024);
5667 cx.background_executor()
5668 .spawn(async move {
5669 let Ok(buffers) = buffers.await else {
5670 return;
5671 };
5672
5673 let buffers_len = buffers.len();
5674 if buffers_len == 0 {
5675 return;
5676 }
5677 let query = &query;
5678 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
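                // Fan the candidate buffers out to the scanning workers; a dedicated task below
                // re-assembles their results in sorted order.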
5679 background
5680 .scoped(|scope| {
5681 #[derive(Clone)]
5682 struct FinishedStatus {
5683 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
5684 buffer_index: SearchMatchCandidateIndex,
5685 }
5686
5687 for _ in 0..workers {
5688 let finished_tx = finished_tx.clone();
5689 let mut buffers_rx = buffers_rx.clone();
5690 scope.spawn(async move {
5691 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5692 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5693 {
5694 if query.file_matches(
5695 snapshot.file().map(|file| file.path().as_ref()),
5696 ) {
5697 query
5698 .search(&snapshot, None)
5699 .await
5700 .iter()
5701 .map(|range| {
5702 snapshot.anchor_before(range.start)
5703 ..snapshot.anchor_after(range.end)
5704 })
5705 .collect()
5706 } else {
5707 Vec::new()
5708 }
5709 } else {
5710 Vec::new()
5711 };
5712
5713 let status = if !buffer_matches.is_empty() {
5714 let entry = if let Some((buffer, _)) = entry.as_ref() {
5715 Some((buffer.clone(), buffer_matches))
5716 } else {
5717 None
5718 };
5719 FinishedStatus {
5720 entry,
5721 buffer_index,
5722 }
5723 } else {
5724 FinishedStatus {
5725 entry: None,
5726 buffer_index,
5727 }
5728 };
5729 if finished_tx.send(status).await.is_err() {
5730 break;
5731 }
5732 }
5733 });
5734 }
5735 // Report sorted matches
5736 scope.spawn(async move {
5737 let mut current_index = 0;
5738 let mut scratch = vec![None; buffers_len];
5739 while let Some(status) = finished_rx.next().await {
5740 debug_assert!(
5741 scratch[status.buffer_index].is_none(),
                                    "Got match status for position {} twice",
5743 status.buffer_index
5744 );
5745 let index = status.buffer_index;
5746 scratch[index] = Some(status);
5747 while current_index < buffers_len {
5748 let Some(current_entry) = scratch[current_index].take() else {
                                        // We intentionally **do not** increment `current_index` here. When the next element arrives
                                        // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
                                        // this time.
5752 break;
5753 };
5754 if let Some(entry) = current_entry.entry {
5755 result_tx.send(entry).await.log_err();
5756 }
5757 current_index += 1;
5758 }
5759 if current_index == buffers_len {
5760 break;
5761 }
5762 }
5763 });
5764 })
5765 .await;
5766 })
5767 .detach();
5768 result_rx
5769 }
5770
    /// Pick paths that might contain a match for the given search query.
5772 async fn background_search(
5773 unnamed_buffers: Vec<Model<Buffer>>,
5774 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
5775 executor: BackgroundExecutor,
5776 fs: Arc<dyn Fs>,
5777 workers: usize,
5778 query: SearchQuery,
5779 path_count: usize,
5780 snapshots: Vec<LocalSnapshot>,
5781 matching_paths_tx: Sender<SearchMatchCandidate>,
5782 ) {
5783 let fs = &fs;
5784 let query = &query;
5785 let matching_paths_tx = &matching_paths_tx;
5786 let snapshots = &snapshots;
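        // Ceiling division: spread the candidate paths as evenly as possible across the workers.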
5787 let paths_per_worker = (path_count + workers - 1) / workers;
5788 for buffer in unnamed_buffers {
5789 matching_paths_tx
5790 .send(SearchMatchCandidate::OpenBuffer {
5791 buffer: buffer.clone(),
5792 path: None,
5793 })
5794 .await
5795 .log_err();
5796 }
5797 for (path, (buffer, _)) in opened_buffers.iter() {
5798 matching_paths_tx
5799 .send(SearchMatchCandidate::OpenBuffer {
5800 buffer: buffer.clone(),
5801 path: Some(path.clone()),
5802 })
5803 .await
5804 .log_err();
5805 }
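        // Scan the worktree snapshots on background threads; each worker handles a contiguous slice
        // of the overall path range.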
5806 executor
5807 .scoped(|scope| {
5808 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5809
5810 for worker_ix in 0..workers {
5811 let worker_start_ix = worker_ix * paths_per_worker;
5812 let worker_end_ix = worker_start_ix + paths_per_worker;
                    let opened_buffers = opened_buffers.clone();
5814 let limiter = Arc::clone(&max_concurrent_workers);
5815 scope.spawn(async move {
5816 let _guard = limiter.acquire().await;
5817 let mut snapshot_start_ix = 0;
5818 let mut abs_path = PathBuf::new();
5819 for snapshot in snapshots {
5820 let snapshot_end_ix = snapshot_start_ix
5821 + if query.include_ignored() {
5822 snapshot.file_count()
5823 } else {
5824 snapshot.visible_file_count()
5825 };
5826 if worker_end_ix <= snapshot_start_ix {
5827 break;
5828 } else if worker_start_ix > snapshot_end_ix {
5829 snapshot_start_ix = snapshot_end_ix;
5830 continue;
5831 } else {
5832 let start_in_snapshot =
5833 worker_start_ix.saturating_sub(snapshot_start_ix);
5834 let end_in_snapshot =
5835 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5836
5837 for entry in snapshot
5838 .files(query.include_ignored(), start_in_snapshot)
5839 .take(end_in_snapshot - start_in_snapshot)
5840 {
5841 if matching_paths_tx.is_closed() {
5842 break;
5843 }
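                                    // Files that are already open in a buffer were sent above; skip their on-disk copies.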
                                    if opened_buffers.contains_key(&entry.path) {
5845 continue;
5846 }
5847 let matches = if query.file_matches(Some(&entry.path)) {
5848 abs_path.clear();
5849 abs_path.push(&snapshot.abs_path());
5850 abs_path.push(&entry.path);
5851 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5852 {
5853 query.detect(file).unwrap_or(false)
5854 } else {
5855 false
5856 }
5857 } else {
5858 false
5859 };
5860
5861 if matches {
5862 let project_path = SearchMatchCandidate::Path {
5863 worktree_id: snapshot.id(),
5864 path: entry.path.clone(),
5865 is_ignored: entry.is_ignored,
5866 };
5867 if matching_paths_tx.send(project_path).await.is_err() {
5868 break;
5869 }
5870 }
5871 }
5872
5873 snapshot_start_ix = snapshot_end_ix;
5874 }
5875 }
5876 });
5877 }
5878
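                // Ignored directories are not fully indexed in the worktree snapshot, so when the query
                // includes ignored files we walk those entries on the filesystem directly.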
5879 if query.include_ignored() {
5880 for snapshot in snapshots {
5881 for ignored_entry in snapshot
5882 .entries(query.include_ignored())
5883 .filter(|e| e.is_ignored)
5884 {
5885 let limiter = Arc::clone(&max_concurrent_workers);
5886 scope.spawn(async move {
5887 let _guard = limiter.acquire().await;
5888 let mut ignored_paths_to_process =
5889 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
5890 while let Some(ignored_abs_path) =
5891 ignored_paths_to_process.pop_front()
5892 {
5893 if let Some(fs_metadata) = fs
5894 .metadata(&ignored_abs_path)
5895 .await
5896 .with_context(|| {
5897 format!("fetching fs metadata for {ignored_abs_path:?}")
5898 })
5899 .log_err()
5900 .flatten()
5901 {
5902 if fs_metadata.is_dir {
5903 if let Some(mut subfiles) = fs
5904 .read_dir(&ignored_abs_path)
5905 .await
5906 .with_context(|| {
5907 format!(
5908 "listing ignored path {ignored_abs_path:?}"
5909 )
5910 })
5911 .log_err()
5912 {
5913 while let Some(subfile) = subfiles.next().await {
5914 if let Some(subfile) = subfile.log_err() {
5915 ignored_paths_to_process.push_back(subfile);
5916 }
5917 }
5918 }
5919 } else if !fs_metadata.is_symlink {
5920 if !query.file_matches(Some(&ignored_abs_path))
5921 || snapshot.is_path_excluded(
5922 ignored_entry.path.to_path_buf(),
5923 )
5924 {
5925 continue;
5926 }
5927 let matches = if let Some(file) = fs
5928 .open_sync(&ignored_abs_path)
5929 .await
5930 .with_context(|| {
5931 format!(
5932 "Opening ignored path {ignored_abs_path:?}"
5933 )
5934 })
5935 .log_err()
5936 {
5937 query.detect(file).unwrap_or(false)
5938 } else {
5939 false
5940 };
5941 if matches {
5942 let project_path = SearchMatchCandidate::Path {
5943 worktree_id: snapshot.id(),
5944 path: Arc::from(
5945 ignored_abs_path
5946 .strip_prefix(snapshot.abs_path())
5947 .expect(
5948 "scanning worktree-related files",
5949 ),
5950 ),
5951 is_ignored: true,
5952 };
5953 if matching_paths_tx
5954 .send(project_path)
5955 .await
5956 .is_err()
5957 {
5958 return;
5959 }
5960 }
5961 }
5962 }
5963 }
5964 });
5965 }
5966 }
5967 }
5968 })
5969 .await;
5970 }
5971
5972 pub fn request_lsp<R: LspCommand>(
5973 &self,
5974 buffer_handle: Model<Buffer>,
5975 server: LanguageServerToQuery,
5976 request: R,
5977 cx: &mut ModelContext<Self>,
5978 ) -> Task<Result<R::Response>>
5979 where
5980 <R::LspRequest as lsp::request::Request>::Result: Send,
5981 <R::LspRequest as lsp::request::Request>::Params: Send,
5982 {
5983 let buffer = buffer_handle.read(cx);
5984 if self.is_local() {
5985 let language_server = match server {
5986 LanguageServerToQuery::Primary => {
5987 match self.primary_language_server_for_buffer(buffer, cx) {
5988 Some((_, server)) => Some(Arc::clone(server)),
5989 None => return Task::ready(Ok(Default::default())),
5990 }
5991 }
5992 LanguageServerToQuery::Other(id) => self
5993 .language_server_for_buffer(buffer, id, cx)
5994 .map(|(_, server)| Arc::clone(server)),
5995 };
5996 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
5997 if let (Some(file), Some(language_server)) = (file, language_server) {
5998 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
5999 return cx.spawn(move |this, cx| async move {
6000 if !request.check_capabilities(language_server.capabilities()) {
6001 return Ok(Default::default());
6002 }
6003
6004 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6005 let response = match result {
6006 Ok(response) => response,
6007
6008 Err(err) => {
6009 log::warn!(
6010 "Generic lsp request to {} failed: {}",
6011 language_server.name(),
6012 err
6013 );
6014 return Err(err);
6015 }
6016 };
6017
6018 request
6019 .response_from_lsp(
6020 response,
6021 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6022 buffer_handle,
6023 language_server.server_id(),
6024 cx,
6025 )
6026 .await
6027 });
6028 }
6029 } else if let Some(project_id) = self.remote_id() {
6030 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6031 }
6032
6033 Task::ready(Ok(Default::default()))
6034 }
6035
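    /// Forward an LSP-backed request to the remote project host over RPC and decode the host's response.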
6036 fn send_lsp_proto_request<R: LspCommand>(
6037 &self,
6038 buffer: Model<Buffer>,
6039 project_id: u64,
6040 request: R,
6041 cx: &mut ModelContext<'_, Project>,
6042 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6043 let rpc = self.client.clone();
6044 let message = request.to_proto(project_id, buffer.read(cx));
6045 cx.spawn(move |this, mut cx| async move {
6046 // Ensure the project is still alive by the time the task
6047 // is scheduled.
6048 this.upgrade().context("project dropped")?;
6049 let response = rpc.request(message).await?;
6050 let this = this.upgrade().context("project dropped")?;
6051 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6052 Err(anyhow!("disconnected before completing request"))
6053 } else {
6054 request
6055 .response_from_proto(response, this, buffer, cx)
6056 .await
6057 }
6058 })
6059 }
6060
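    /// Collect all search candidates, sort them by path (ignored entries last), open a buffer for each
    /// one, and return both the sorted candidate list and a channel of (buffer, candidate index) pairs
    /// for the scanning workers.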
6061 fn sort_candidates_and_open_buffers(
6062 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6063 cx: &mut ModelContext<Self>,
6064 ) -> (
6065 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6066 Receiver<(
6067 Option<(Model<Buffer>, BufferSnapshot)>,
6068 SearchMatchCandidateIndex,
6069 )>,
6070 ) {
6071 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6072 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6073 cx.spawn(move |this, cx| async move {
6074 let mut buffers = Vec::new();
6075 let mut ignored_buffers = Vec::new();
6076 while let Some(entry) = matching_paths_rx.next().await {
6077 if matches!(
6078 entry,
6079 SearchMatchCandidate::Path {
6080 is_ignored: true,
6081 ..
6082 }
6083 ) {
6084 ignored_buffers.push(entry);
6085 } else {
6086 buffers.push(entry);
6087 }
6088 }
6089 buffers.sort_by_key(|candidate| candidate.path());
6090 ignored_buffers.sort_by_key(|candidate| candidate.path());
6091 buffers.extend(ignored_buffers);
6092 let matching_paths = buffers.clone();
6093 let _ = sorted_buffers_tx.send(buffers);
6094 for (index, candidate) in matching_paths.into_iter().enumerate() {
6095 if buffers_tx.is_closed() {
6096 break;
6097 }
6098 let this = this.clone();
6099 let buffers_tx = buffers_tx.clone();
6100 cx.spawn(move |mut cx| async move {
6101 let buffer = match candidate {
6102 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6103 SearchMatchCandidate::Path {
6104 worktree_id, path, ..
6105 } => this
6106 .update(&mut cx, |this, cx| {
6107 this.open_buffer((worktree_id, path), cx)
6108 })?
6109 .await
6110 .log_err(),
6111 };
6112 if let Some(buffer) = buffer {
6113 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6114 buffers_tx
6115 .send((Some((buffer, snapshot)), index))
6116 .await
6117 .log_err();
6118 } else {
6119 buffers_tx.send((None, index)).await.log_err();
6120 }
6121
6122 Ok::<_, anyhow::Error>(())
6123 })
6124 .detach();
6125 }
6126 })
6127 .detach();
6128 (sorted_buffers_rx, buffers_rx)
6129 }
6130
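    /// Return the local worktree containing `abs_path` along with the path relative to the worktree
    /// root, creating a new worktree (with an empty relative path) when no existing one contains it.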
6131 pub fn find_or_create_local_worktree(
6132 &mut self,
6133 abs_path: impl AsRef<Path>,
6134 visible: bool,
6135 cx: &mut ModelContext<Self>,
6136 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6137 let abs_path = abs_path.as_ref();
6138 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6139 Task::ready(Ok((tree, relative_path)))
6140 } else {
6141 let worktree = self.create_local_worktree(abs_path, visible, cx);
6142 cx.background_executor()
6143 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6144 }
6145 }
6146
6147 pub fn find_local_worktree(
6148 &self,
6149 abs_path: &Path,
6150 cx: &AppContext,
6151 ) -> Option<(Model<Worktree>, PathBuf)> {
6152 for tree in &self.worktrees {
6153 if let Some(tree) = tree.upgrade() {
6154 if let Some(relative_path) = tree
6155 .read(cx)
6156 .as_local()
6157 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6158 {
6159 return Some((tree.clone(), relative_path.into()));
6160 }
6161 }
6162 }
6163 None
6164 }
6165
    pub fn is_shared(&self) -> bool {
        matches!(&self.client_state, Some(ProjectClientState::Local { .. }))
    }
6172
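    /// Create a local worktree for `abs_path`, deduplicating concurrent requests for the same path
    /// through a shared loading task.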
6173 fn create_local_worktree(
6174 &mut self,
6175 abs_path: impl AsRef<Path>,
6176 visible: bool,
6177 cx: &mut ModelContext<Self>,
6178 ) -> Task<Result<Model<Worktree>>> {
6179 let fs = self.fs.clone();
6180 let client = self.client.clone();
6181 let next_entry_id = self.next_entry_id.clone();
6182 let path: Arc<Path> = abs_path.as_ref().into();
6183 let task = self
6184 .loading_local_worktrees
6185 .entry(path.clone())
6186 .or_insert_with(|| {
6187 cx.spawn(move |project, mut cx| {
6188 async move {
6189 let worktree = Worktree::local(
6190 client.clone(),
6191 path.clone(),
6192 visible,
6193 fs,
6194 next_entry_id,
6195 &mut cx,
6196 )
6197 .await;
6198
6199 project.update(&mut cx, |project, _| {
6200 project.loading_local_worktrees.remove(&path);
6201 })?;
6202
6203 let worktree = worktree?;
6204 project
6205 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6206 Ok(worktree)
6207 }
6208 .map_err(Arc::new)
6209 })
6210 .shared()
6211 })
6212 .clone();
6213 cx.background_executor().spawn(async move {
6214 match task.await {
6215 Ok(worktree) => Ok(worktree),
6216 Err(err) => Err(anyhow!("{}", err)),
6217 }
6218 })
6219 }
6220
6221 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6222 self.worktrees.retain(|worktree| {
6223 if let Some(worktree) = worktree.upgrade() {
6224 let id = worktree.read(cx).id();
6225 if id == id_to_remove {
6226 cx.emit(Event::WorktreeRemoved(id));
6227 false
6228 } else {
6229 true
6230 }
6231 } else {
6232 false
6233 }
6234 });
6235 self.metadata_changed(cx);
6236 }
6237
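    /// Register a worktree with the project: subscribe to its events, retain it strongly when the
    /// project is shared or the worktree is visible or remote (weakly otherwise), and clear its local
    /// settings when it is released.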
6238 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6239 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6240 if worktree.read(cx).is_local() {
6241 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6242 worktree::Event::UpdatedEntries(changes) => {
6243 this.update_local_worktree_buffers(&worktree, changes, cx);
6244 this.update_local_worktree_language_servers(&worktree, changes, cx);
6245 this.update_local_worktree_settings(&worktree, changes, cx);
6246 this.update_prettier_settings(&worktree, changes, cx);
6247 cx.emit(Event::WorktreeUpdatedEntries(
6248 worktree.read(cx).id(),
6249 changes.clone(),
6250 ));
6251 }
6252 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6253 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6254 }
6255 })
6256 .detach();
6257 }
6258
6259 let push_strong_handle = {
6260 let worktree = worktree.read(cx);
6261 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6262 };
6263 if push_strong_handle {
6264 self.worktrees
6265 .push(WorktreeHandle::Strong(worktree.clone()));
6266 } else {
6267 self.worktrees
6268 .push(WorktreeHandle::Weak(worktree.downgrade()));
6269 }
6270
6271 let handle_id = worktree.entity_id();
6272 cx.observe_release(worktree, move |this, worktree, cx| {
6273 let _ = this.remove_worktree(worktree.id(), cx);
6274 cx.update_global::<SettingsStore, _>(|store, cx| {
6275 store
6276 .clear_local_settings(handle_id.as_u64() as usize, cx)
6277 .log_err()
6278 });
6279 })
6280 .detach();
6281
6282 cx.emit(Event::WorktreeAdded);
6283 self.metadata_changed(cx);
6284 }
6285
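    /// Reconcile open buffers with entry changes in a local worktree, updating each affected buffer's
    /// file metadata and re-registering buffers whose files were renamed with the language servers.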
6286 fn update_local_worktree_buffers(
6287 &mut self,
6288 worktree_handle: &Model<Worktree>,
6289 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6290 cx: &mut ModelContext<Self>,
6291 ) {
6292 let snapshot = worktree_handle.read(cx).snapshot();
6293
6294 let mut renamed_buffers = Vec::new();
6295 for (path, entry_id, _) in changes {
6296 let worktree_id = worktree_handle.read(cx).id();
6297 let project_path = ProjectPath {
6298 worktree_id,
6299 path: path.clone(),
6300 };
6301
6302 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6303 Some(&buffer_id) => buffer_id,
6304 None => match self.local_buffer_ids_by_path.get(&project_path) {
6305 Some(&buffer_id) => buffer_id,
6306 None => {
6307 continue;
6308 }
6309 },
6310 };
6311
6312 let open_buffer = self.opened_buffers.get(&buffer_id);
6313 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6314 buffer
6315 } else {
6316 self.opened_buffers.remove(&buffer_id);
6317 self.local_buffer_ids_by_path.remove(&project_path);
6318 self.local_buffer_ids_by_entry_id.remove(entry_id);
6319 continue;
6320 };
6321
6322 buffer.update(cx, |buffer, cx| {
6323 if let Some(old_file) = File::from_dyn(buffer.file()) {
6324 if old_file.worktree != *worktree_handle {
6325 return;
6326 }
6327
6328 let new_file = if let Some(entry) = old_file
6329 .entry_id
6330 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6331 {
6332 File {
6333 is_local: true,
6334 entry_id: Some(entry.id),
6335 mtime: entry.mtime,
6336 path: entry.path.clone(),
6337 worktree: worktree_handle.clone(),
6338 is_deleted: false,
6339 }
6340 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6341 File {
6342 is_local: true,
6343 entry_id: Some(entry.id),
6344 mtime: entry.mtime,
6345 path: entry.path.clone(),
6346 worktree: worktree_handle.clone(),
6347 is_deleted: false,
6348 }
6349 } else {
6350 File {
6351 is_local: true,
6352 entry_id: old_file.entry_id,
6353 path: old_file.path().clone(),
6354 mtime: old_file.mtime(),
6355 worktree: worktree_handle.clone(),
6356 is_deleted: true,
6357 }
6358 };
6359
6360 let old_path = old_file.abs_path(cx);
6361 if new_file.abs_path(cx) != old_path {
6362 renamed_buffers.push((cx.handle(), old_file.clone()));
6363 self.local_buffer_ids_by_path.remove(&project_path);
6364 self.local_buffer_ids_by_path.insert(
6365 ProjectPath {
6366 worktree_id,
6367 path: path.clone(),
6368 },
6369 buffer_id,
6370 );
6371 }
6372
6373 if new_file.entry_id != Some(*entry_id) {
6374 self.local_buffer_ids_by_entry_id.remove(entry_id);
6375 if let Some(entry_id) = new_file.entry_id {
6376 self.local_buffer_ids_by_entry_id
6377 .insert(entry_id, buffer_id);
6378 }
6379 }
6380
6381 if new_file != *old_file {
6382 if let Some(project_id) = self.remote_id() {
6383 self.client
6384 .send(proto::UpdateBufferFile {
6385 project_id,
6386 buffer_id: buffer_id as u64,
6387 file: Some(new_file.to_proto()),
6388 })
6389 .log_err();
6390 }
6391
6392 buffer.file_updated(Arc::new(new_file), cx);
6393 }
6394 }
6395 });
6396 }
6397
6398 for (buffer, old_file) in renamed_buffers {
6399 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6400 self.detect_language_for_buffer(&buffer, cx);
6401 self.register_buffer_with_language_servers(&buffer, cx);
6402 }
6403 }
6404
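    /// Notify every language server attached to this worktree about changed files that match its
    /// registered watched-path globs.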
6405 fn update_local_worktree_language_servers(
6406 &mut self,
6407 worktree_handle: &Model<Worktree>,
6408 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6409 cx: &mut ModelContext<Self>,
6410 ) {
6411 if changes.is_empty() {
6412 return;
6413 }
6414
6415 let worktree_id = worktree_handle.read(cx).id();
6416 let mut language_server_ids = self
6417 .language_server_ids
6418 .iter()
6419 .filter_map(|((server_worktree_id, _), server_id)| {
6420 (*server_worktree_id == worktree_id).then_some(*server_id)
6421 })
6422 .collect::<Vec<_>>();
6423 language_server_ids.sort();
6424 language_server_ids.dedup();
6425
6426 let abs_path = worktree_handle.read(cx).abs_path();
6427 for server_id in &language_server_ids {
6428 if let Some(LanguageServerState::Running {
6429 server,
6430 watched_paths,
6431 ..
6432 }) = self.language_servers.get(server_id)
6433 {
6434 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
6435 let params = lsp::DidChangeWatchedFilesParams {
6436 changes: changes
6437 .iter()
6438 .filter_map(|(path, _, change)| {
6439 if !watched_paths.is_match(&path) {
6440 return None;
6441 }
6442 let typ = match change {
6443 PathChange::Loaded => return None,
6444 PathChange::Added => lsp::FileChangeType::CREATED,
6445 PathChange::Removed => lsp::FileChangeType::DELETED,
6446 PathChange::Updated => lsp::FileChangeType::CHANGED,
6447 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6448 };
6449 Some(lsp::FileEvent {
6450 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6451 typ,
6452 })
6453 })
6454 .collect(),
6455 };
6456
6457 if !params.changes.is_empty() {
6458 server
6459 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6460 .log_err();
6461 }
6462 }
6463 }
6464 }
6465 }
6466
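    /// Reload the git diff base of every open (or still loading) buffer whose containing repository
    /// changed, and forward the new diff bases to collaborators when the project is shared.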
6467 fn update_local_worktree_buffers_git_repos(
6468 &mut self,
6469 worktree_handle: Model<Worktree>,
6470 changed_repos: &UpdatedGitRepositoriesSet,
6471 cx: &mut ModelContext<Self>,
6472 ) {
6473 debug_assert!(worktree_handle.read(cx).is_local());
6474
        // Identify the loading buffers whose containing repository has changed.
6476 let future_buffers = self
6477 .loading_buffers_by_path
6478 .iter()
6479 .filter_map(|(project_path, receiver)| {
6480 if project_path.worktree_id != worktree_handle.read(cx).id() {
6481 return None;
6482 }
6483 let path = &project_path.path;
6484 changed_repos
6485 .iter()
6486 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6487 let receiver = receiver.clone();
6488 let path = path.clone();
6489 Some(async move {
6490 wait_for_loading_buffer(receiver)
6491 .await
6492 .ok()
6493 .map(|buffer| (buffer, path))
6494 })
6495 })
6496 .collect::<FuturesUnordered<_>>();
6497
6498 // Identify the current buffers whose containing repository has changed.
6499 let current_buffers = self
6500 .opened_buffers
6501 .values()
6502 .filter_map(|buffer| {
6503 let buffer = buffer.upgrade()?;
6504 let file = File::from_dyn(buffer.read(cx).file())?;
6505 if file.worktree != worktree_handle {
6506 return None;
6507 }
6508 let path = file.path();
6509 changed_repos
6510 .iter()
6511 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6512 Some((buffer, path.clone()))
6513 })
6514 .collect::<Vec<_>>();
6515
6516 if future_buffers.len() + current_buffers.len() == 0 {
6517 return;
6518 }
6519
6520 let remote_id = self.remote_id();
6521 let client = self.client.clone();
6522 cx.spawn(move |_, mut cx| async move {
6523 // Wait for all of the buffers to load.
6524 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6525
6526 // Reload the diff base for every buffer whose containing git repository has changed.
6527 let snapshot =
6528 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
6529 let diff_bases_by_buffer = cx
6530 .background_executor()
6531 .spawn(async move {
6532 future_buffers
6533 .into_iter()
                        .flatten()
6535 .chain(current_buffers)
6536 .filter_map(|(buffer, path)| {
6537 let (work_directory, repo) =
6538 snapshot.repository_and_work_directory_for_path(&path)?;
6539 let repo = snapshot.get_local_repo(&repo)?;
6540 let relative_path = path.strip_prefix(&work_directory).ok()?;
6541 let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
6542 Some((buffer, base_text))
6543 })
6544 .collect::<Vec<_>>()
6545 })
6546 .await;
6547
6548 // Assign the new diff bases on all of the buffers.
6549 for (buffer, diff_base) in diff_bases_by_buffer {
6550 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6551 buffer.set_diff_base(diff_base.clone(), cx);
6552 buffer.remote_id()
6553 })?;
6554 if let Some(project_id) = remote_id {
6555 client
6556 .send(proto::UpdateDiffBase {
6557 project_id,
6558 buffer_id,
6559 diff_base,
6560 })
6561 .log_err();
6562 }
6563 }
6564
6565 anyhow::Ok(())
6566 })
6567 .detach();
6568 }
6569
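    /// Reload local settings files that changed within the worktree, apply them to the global settings
    /// store, and forward the new contents to remote peers when the project is shared.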
6570 fn update_local_worktree_settings(
6571 &mut self,
6572 worktree: &Model<Worktree>,
6573 changes: &UpdatedEntriesSet,
6574 cx: &mut ModelContext<Self>,
6575 ) {
6576 let project_id = self.remote_id();
6577 let worktree_id = worktree.entity_id();
6578 let worktree = worktree.read(cx).as_local().unwrap();
6579 let remote_worktree_id = worktree.id();
6580
6581 let mut settings_contents = Vec::new();
6582 for (path, _, change) in changes.iter() {
6583 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6584 let settings_dir = Arc::from(
6585 path.ancestors()
6586 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6587 .unwrap(),
6588 );
6589 let fs = self.fs.clone();
6590 let removed = *change == PathChange::Removed;
6591 let abs_path = worktree.absolutize(path);
6592 settings_contents.push(async move {
6593 (
6594 settings_dir,
6595 if removed {
6596 None
6597 } else {
6598 Some(async move { fs.load(&abs_path?).await }.await)
6599 },
6600 )
6601 });
6602 }
6603 }
6604
6605 if settings_contents.is_empty() {
6606 return;
6607 }
6608
6609 let client = self.client.clone();
6610 cx.spawn(move |_, cx| async move {
6611 let settings_contents: Vec<(Arc<Path>, _)> =
6612 futures::future::join_all(settings_contents).await;
6613 cx.update(|cx| {
6614 cx.update_global::<SettingsStore, _>(|store, cx| {
6615 for (directory, file_content) in settings_contents {
6616 let file_content = file_content.and_then(|content| content.log_err());
6617 store
6618 .set_local_settings(
6619 worktree_id.as_u64() as usize,
6620 directory.clone(),
6621 file_content.as_ref().map(String::as_str),
6622 cx,
6623 )
6624 .log_err();
6625 if let Some(remote_id) = project_id {
6626 client
6627 .send(proto::UpdateWorktreeSettings {
6628 project_id: remote_id,
6629 worktree_id: remote_worktree_id.to_proto(),
6630 path: directory.to_string_lossy().into_owned(),
6631 content: file_content,
6632 })
6633 .log_err();
6634 }
6635 }
6636 });
6637 })
6638 .ok();
6639 })
6640 .detach();
6641 }
6642
6643 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6644 let new_active_entry = entry.and_then(|project_path| {
6645 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6646 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6647 Some(entry.id)
6648 });
6649 if new_active_entry != self.active_entry {
6650 self.active_entry = new_active_entry;
6651 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6652 }
6653 }
6654
6655 pub fn language_servers_running_disk_based_diagnostics(
6656 &self,
6657 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6658 self.language_server_statuses
6659 .iter()
6660 .filter_map(|(id, status)| {
6661 if status.has_pending_diagnostic_updates {
6662 Some(*id)
6663 } else {
6664 None
6665 }
6666 })
6667 }
6668
6669 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
6670 let mut summary = DiagnosticSummary::default();
6671 for (_, _, path_summary) in
6672 self.diagnostic_summaries(include_ignored, cx)
6673 .filter(|(path, _, _)| {
6674 let worktree = self.entry_for_path(&path, cx).map(|entry| entry.is_ignored);
6675 include_ignored || worktree == Some(false)
6676 })
6677 {
6678 summary.error_count += path_summary.error_count;
6679 summary.warning_count += path_summary.warning_count;
6680 }
6681 summary
6682 }
6683
6684 pub fn diagnostic_summaries<'a>(
6685 &'a self,
6686 include_ignored: bool,
6687 cx: &'a AppContext,
6688 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6689 self.visible_worktrees(cx)
6690 .flat_map(move |worktree| {
6691 let worktree = worktree.read(cx);
6692 let worktree_id = worktree.id();
6693 worktree
6694 .diagnostic_summaries()
6695 .map(move |(path, server_id, summary)| {
6696 (ProjectPath { worktree_id, path }, server_id, summary)
6697 })
6698 })
6699 .filter(move |(path, _, _)| {
6700 let worktree = self.entry_for_path(&path, cx).map(|entry| entry.is_ignored);
6701 include_ignored || worktree == Some(false)
6702 })
6703 }
6704
6705 pub fn disk_based_diagnostics_started(
6706 &mut self,
6707 language_server_id: LanguageServerId,
6708 cx: &mut ModelContext<Self>,
6709 ) {
6710 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6711 }
6712
6713 pub fn disk_based_diagnostics_finished(
6714 &mut self,
6715 language_server_id: LanguageServerId,
6716 cx: &mut ModelContext<Self>,
6717 ) {
6718 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6719 }
6720
6721 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6722 self.active_entry
6723 }
6724
6725 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6726 self.worktree_for_id(path.worktree_id, cx)?
6727 .read(cx)
6728 .entry_for_path(&path.path)
6729 .cloned()
6730 }
6731
6732 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6733 let worktree = self.worktree_for_entry(entry_id, cx)?;
6734 let worktree = worktree.read(cx);
6735 let worktree_id = worktree.id();
6736 let path = worktree.entry_for_id(entry_id)?.path.clone();
6737 Some(ProjectPath { worktree_id, path })
6738 }
6739
6740 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6741 let workspace_root = self
6742 .worktree_for_id(project_path.worktree_id, cx)?
6743 .read(cx)
6744 .abs_path();
6745 let project_path = project_path.path.as_ref();
6746
6747 Some(if project_path == Path::new("") {
6748 workspace_root.to_path_buf()
6749 } else {
6750 workspace_root.join(project_path)
6751 })
6752 }
6753
6754 // RPC message handlers
6755
6756 async fn handle_unshare_project(
6757 this: Model<Self>,
6758 _: TypedEnvelope<proto::UnshareProject>,
6759 _: Arc<Client>,
6760 mut cx: AsyncAppContext,
6761 ) -> Result<()> {
6762 this.update(&mut cx, |this, cx| {
6763 if this.is_local() {
6764 this.unshare(cx)?;
6765 } else {
6766 this.disconnected_from_host(cx);
6767 }
6768 Ok(())
6769 })?
6770 }
6771
6772 async fn handle_add_collaborator(
6773 this: Model<Self>,
6774 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6775 _: Arc<Client>,
6776 mut cx: AsyncAppContext,
6777 ) -> Result<()> {
6778 let collaborator = envelope
6779 .payload
6780 .collaborator
6781 .take()
6782 .ok_or_else(|| anyhow!("empty collaborator"))?;
6783
6784 let collaborator = Collaborator::from_proto(collaborator)?;
6785 this.update(&mut cx, |this, cx| {
6786 this.shared_buffers.remove(&collaborator.peer_id);
6787 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6788 this.collaborators
6789 .insert(collaborator.peer_id, collaborator);
6790 cx.notify();
6791 })?;
6792
6793 Ok(())
6794 }
6795
6796 async fn handle_update_project_collaborator(
6797 this: Model<Self>,
6798 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6799 _: Arc<Client>,
6800 mut cx: AsyncAppContext,
6801 ) -> Result<()> {
6802 let old_peer_id = envelope
6803 .payload
6804 .old_peer_id
6805 .ok_or_else(|| anyhow!("missing old peer id"))?;
6806 let new_peer_id = envelope
6807 .payload
6808 .new_peer_id
6809 .ok_or_else(|| anyhow!("missing new peer id"))?;
6810 this.update(&mut cx, |this, cx| {
6811 let collaborator = this
6812 .collaborators
6813 .remove(&old_peer_id)
6814 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6815 let is_host = collaborator.replica_id == 0;
6816 this.collaborators.insert(new_peer_id, collaborator);
6817
6818 let buffers = this.shared_buffers.remove(&old_peer_id);
6819 log::info!(
6820 "peer {} became {}. moving buffers {:?}",
6821 old_peer_id,
6822 new_peer_id,
6823 &buffers
6824 );
6825 if let Some(buffers) = buffers {
6826 this.shared_buffers.insert(new_peer_id, buffers);
6827 }
6828
6829 if is_host {
6830 this.opened_buffers
6831 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
6832 this.buffer_ordered_messages_tx
6833 .unbounded_send(BufferOrderedMessage::Resync)
6834 .unwrap();
6835 }
6836
6837 cx.emit(Event::CollaboratorUpdated {
6838 old_peer_id,
6839 new_peer_id,
6840 });
6841 cx.notify();
6842 Ok(())
6843 })?
6844 }
6845
6846 async fn handle_remove_collaborator(
6847 this: Model<Self>,
6848 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
6849 _: Arc<Client>,
6850 mut cx: AsyncAppContext,
6851 ) -> Result<()> {
6852 this.update(&mut cx, |this, cx| {
6853 let peer_id = envelope
6854 .payload
6855 .peer_id
6856 .ok_or_else(|| anyhow!("invalid peer id"))?;
6857 let replica_id = this
6858 .collaborators
6859 .remove(&peer_id)
6860 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
6861 .replica_id;
6862 for buffer in this.opened_buffers.values() {
6863 if let Some(buffer) = buffer.upgrade() {
6864 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
6865 }
6866 }
6867 this.shared_buffers.remove(&peer_id);
6868
6869 cx.emit(Event::CollaboratorLeft(peer_id));
6870 cx.notify();
6871 Ok(())
6872 })?
6873 }
6874
6875 async fn handle_update_project(
6876 this: Model<Self>,
6877 envelope: TypedEnvelope<proto::UpdateProject>,
6878 _: Arc<Client>,
6879 mut cx: AsyncAppContext,
6880 ) -> Result<()> {
6881 this.update(&mut cx, |this, cx| {
            // Don't handle messages that were sent before the response to our request to join the project
6883 if envelope.message_id > this.join_project_response_message_id {
6884 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6885 }
6886 Ok(())
6887 })?
6888 }
6889
6890 async fn handle_update_worktree(
6891 this: Model<Self>,
6892 envelope: TypedEnvelope<proto::UpdateWorktree>,
6893 _: Arc<Client>,
6894 mut cx: AsyncAppContext,
6895 ) -> Result<()> {
6896 this.update(&mut cx, |this, cx| {
6897 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6898 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6899 worktree.update(cx, |worktree, _| {
6900 let worktree = worktree.as_remote_mut().unwrap();
6901 worktree.update_from_remote(envelope.payload);
6902 });
6903 }
6904 Ok(())
6905 })?
6906 }
6907
6908 async fn handle_update_worktree_settings(
6909 this: Model<Self>,
6910 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
6911 _: Arc<Client>,
6912 mut cx: AsyncAppContext,
6913 ) -> Result<()> {
6914 this.update(&mut cx, |this, cx| {
6915 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6916 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6917 cx.update_global::<SettingsStore, _>(|store, cx| {
6918 store
6919 .set_local_settings(
6920 worktree.entity_id().as_u64() as usize,
6921 PathBuf::from(&envelope.payload.path).into(),
6922 envelope.payload.content.as_ref().map(String::as_str),
6923 cx,
6924 )
6925 .log_err();
6926 });
6927 }
6928 Ok(())
6929 })?
6930 }
6931
6932 async fn handle_create_project_entry(
6933 this: Model<Self>,
6934 envelope: TypedEnvelope<proto::CreateProjectEntry>,
6935 _: Arc<Client>,
6936 mut cx: AsyncAppContext,
6937 ) -> Result<proto::ProjectEntryResponse> {
6938 let worktree = this.update(&mut cx, |this, cx| {
6939 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6940 this.worktree_for_id(worktree_id, cx)
6941 .ok_or_else(|| anyhow!("worktree not found"))
6942 })??;
6943 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6944 let entry = worktree
6945 .update(&mut cx, |worktree, cx| {
6946 let worktree = worktree.as_local_mut().unwrap();
6947 let path = PathBuf::from(envelope.payload.path);
6948 worktree.create_entry(path, envelope.payload.is_directory, cx)
6949 })?
6950 .await?;
6951 Ok(proto::ProjectEntryResponse {
6952 entry: entry.as_ref().map(|e| e.into()),
6953 worktree_scan_id: worktree_scan_id as u64,
6954 })
6955 }
6956
6957 async fn handle_rename_project_entry(
6958 this: Model<Self>,
6959 envelope: TypedEnvelope<proto::RenameProjectEntry>,
6960 _: Arc<Client>,
6961 mut cx: AsyncAppContext,
6962 ) -> Result<proto::ProjectEntryResponse> {
6963 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6964 let worktree = this.update(&mut cx, |this, cx| {
6965 this.worktree_for_entry(entry_id, cx)
6966 .ok_or_else(|| anyhow!("worktree not found"))
6967 })??;
6968 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6969 let entry = worktree
6970 .update(&mut cx, |worktree, cx| {
6971 let new_path = PathBuf::from(envelope.payload.new_path);
6972 worktree
6973 .as_local_mut()
6974 .unwrap()
6975 .rename_entry(entry_id, new_path, cx)
6976 })?
6977 .await?;
6978 Ok(proto::ProjectEntryResponse {
6979 entry: entry.as_ref().map(|e| e.into()),
6980 worktree_scan_id: worktree_scan_id as u64,
6981 })
6982 }
6983
6984 async fn handle_copy_project_entry(
6985 this: Model<Self>,
6986 envelope: TypedEnvelope<proto::CopyProjectEntry>,
6987 _: Arc<Client>,
6988 mut cx: AsyncAppContext,
6989 ) -> Result<proto::ProjectEntryResponse> {
6990 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6991 let worktree = this.update(&mut cx, |this, cx| {
6992 this.worktree_for_entry(entry_id, cx)
6993 .ok_or_else(|| anyhow!("worktree not found"))
6994 })??;
6995 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6996 let entry = worktree
6997 .update(&mut cx, |worktree, cx| {
6998 let new_path = PathBuf::from(envelope.payload.new_path);
6999 worktree
7000 .as_local_mut()
7001 .unwrap()
7002 .copy_entry(entry_id, new_path, cx)
7003 })?
7004 .await?;
7005 Ok(proto::ProjectEntryResponse {
7006 entry: entry.as_ref().map(|e| e.into()),
7007 worktree_scan_id: worktree_scan_id as u64,
7008 })
7009 }
7010
7011 async fn handle_delete_project_entry(
7012 this: Model<Self>,
7013 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7014 _: Arc<Client>,
7015 mut cx: AsyncAppContext,
7016 ) -> Result<proto::ProjectEntryResponse> {
7017 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7018
7019 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7020
7021 let worktree = this.update(&mut cx, |this, cx| {
7022 this.worktree_for_entry(entry_id, cx)
7023 .ok_or_else(|| anyhow!("worktree not found"))
7024 })??;
7025 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7026 worktree
7027 .update(&mut cx, |worktree, cx| {
7028 worktree
7029 .as_local_mut()
7030 .unwrap()
7031 .delete_entry(entry_id, cx)
7032 .ok_or_else(|| anyhow!("invalid entry"))
7033 })??
7034 .await?;
7035 Ok(proto::ProjectEntryResponse {
7036 entry: None,
7037 worktree_scan_id: worktree_scan_id as u64,
7038 })
7039 }
7040
7041 async fn handle_expand_project_entry(
7042 this: Model<Self>,
7043 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7044 _: Arc<Client>,
7045 mut cx: AsyncAppContext,
7046 ) -> Result<proto::ExpandProjectEntryResponse> {
7047 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7048 let worktree = this
7049 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7050 .ok_or_else(|| anyhow!("invalid request"))?;
7051 worktree
7052 .update(&mut cx, |worktree, cx| {
7053 worktree
7054 .as_local_mut()
7055 .unwrap()
7056 .expand_entry(entry_id, cx)
7057 .ok_or_else(|| anyhow!("invalid entry"))
7058 })??
7059 .await?;
7060 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7061 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7062 }
7063
7064 async fn handle_update_diagnostic_summary(
7065 this: Model<Self>,
7066 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7067 _: Arc<Client>,
7068 mut cx: AsyncAppContext,
7069 ) -> Result<()> {
7070 this.update(&mut cx, |this, cx| {
7071 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7072 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7073 if let Some(summary) = envelope.payload.summary {
7074 let project_path = ProjectPath {
7075 worktree_id,
7076 path: Path::new(&summary.path).into(),
7077 };
7078 worktree.update(cx, |worktree, _| {
7079 worktree
7080 .as_remote_mut()
7081 .unwrap()
7082 .update_diagnostic_summary(project_path.path.clone(), &summary);
7083 });
7084 cx.emit(Event::DiagnosticsUpdated {
7085 language_server_id: LanguageServerId(summary.language_server_id as usize),
7086 path: project_path,
7087 });
7088 }
7089 }
7090 Ok(())
7091 })?
7092 }
7093
7094 async fn handle_start_language_server(
7095 this: Model<Self>,
7096 envelope: TypedEnvelope<proto::StartLanguageServer>,
7097 _: Arc<Client>,
7098 mut cx: AsyncAppContext,
7099 ) -> Result<()> {
7100 let server = envelope
7101 .payload
7102 .server
7103 .ok_or_else(|| anyhow!("invalid server"))?;
7104 this.update(&mut cx, |this, cx| {
7105 this.language_server_statuses.insert(
7106 LanguageServerId(server.id as usize),
7107 LanguageServerStatus {
7108 name: server.name,
7109 pending_work: Default::default(),
7110 has_pending_diagnostic_updates: false,
7111 progress_tokens: Default::default(),
7112 },
7113 );
7114 cx.notify();
7115 })?;
7116 Ok(())
7117 }
7118
7119 async fn handle_update_language_server(
7120 this: Model<Self>,
7121 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7122 _: Arc<Client>,
7123 mut cx: AsyncAppContext,
7124 ) -> Result<()> {
7125 this.update(&mut cx, |this, cx| {
7126 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7127
7128 match envelope
7129 .payload
7130 .variant
7131 .ok_or_else(|| anyhow!("invalid variant"))?
7132 {
7133 proto::update_language_server::Variant::WorkStart(payload) => {
7134 this.on_lsp_work_start(
7135 language_server_id,
7136 payload.token,
7137 LanguageServerProgress {
7138 message: payload.message,
7139 percentage: payload.percentage.map(|p| p as usize),
7140 last_update_at: Instant::now(),
7141 },
7142 cx,
7143 );
7144 }
7145
7146 proto::update_language_server::Variant::WorkProgress(payload) => {
7147 this.on_lsp_work_progress(
7148 language_server_id,
7149 payload.token,
7150 LanguageServerProgress {
7151 message: payload.message,
7152 percentage: payload.percentage.map(|p| p as usize),
7153 last_update_at: Instant::now(),
7154 },
7155 cx,
7156 );
7157 }
7158
7159 proto::update_language_server::Variant::WorkEnd(payload) => {
7160 this.on_lsp_work_end(language_server_id, payload.token, cx);
7161 }
7162
7163 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7164 this.disk_based_diagnostics_started(language_server_id, cx);
7165 }
7166
7167 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7168 this.disk_based_diagnostics_finished(language_server_id, cx)
7169 }
7170 }
7171
7172 Ok(())
7173 })?
7174 }
7175
7176 async fn handle_update_buffer(
7177 this: Model<Self>,
7178 envelope: TypedEnvelope<proto::UpdateBuffer>,
7179 _: Arc<Client>,
7180 mut cx: AsyncAppContext,
7181 ) -> Result<proto::Ack> {
7182 this.update(&mut cx, |this, cx| {
7183 let payload = envelope.payload.clone();
7184 let buffer_id = payload.buffer_id;
7185 let ops = payload
7186 .operations
7187 .into_iter()
7188 .map(language::proto::deserialize_operation)
7189 .collect::<Result<Vec<_>, _>>()?;
7190 let is_remote = this.is_remote();
7191 match this.opened_buffers.entry(buffer_id) {
7192 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7193 OpenBuffer::Strong(buffer) => {
7194 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7195 }
7196 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7197 OpenBuffer::Weak(_) => {}
7198 },
7199 hash_map::Entry::Vacant(e) => {
7200 assert!(
7201 is_remote,
7202 "received buffer update from {:?}",
7203 envelope.original_sender_id
7204 );
7205 e.insert(OpenBuffer::Operations(ops));
7206 }
7207 }
7208 Ok(proto::Ack {})
7209 })?
7210 }
7211
7212 async fn handle_create_buffer_for_peer(
7213 this: Model<Self>,
7214 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7215 _: Arc<Client>,
7216 mut cx: AsyncAppContext,
7217 ) -> Result<()> {
7218 this.update(&mut cx, |this, cx| {
7219 match envelope
7220 .payload
7221 .variant
7222 .ok_or_else(|| anyhow!("missing variant"))?
7223 {
7224 proto::create_buffer_for_peer::Variant::State(mut state) => {
7225 let mut buffer_file = None;
7226 if let Some(file) = state.file.take() {
7227 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7228 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7229 anyhow!("no worktree found for id {}", file.worktree_id)
7230 })?;
7231 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7232 as Arc<dyn language::File>);
7233 }
7234
7235 let buffer_id = state.id;
7236 let buffer = cx.new_model(|_| {
7237 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
7238 .unwrap()
7239 });
7240 this.incomplete_remote_buffers
7241 .insert(buffer_id, Some(buffer));
7242 }
7243 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7244 let buffer = this
7245 .incomplete_remote_buffers
7246 .get(&chunk.buffer_id)
7247 .cloned()
7248 .flatten()
7249 .ok_or_else(|| {
7250 anyhow!(
7251 "received chunk for buffer {} without initial state",
7252 chunk.buffer_id
7253 )
7254 })?;
7255 let operations = chunk
7256 .operations
7257 .into_iter()
7258 .map(language::proto::deserialize_operation)
7259 .collect::<Result<Vec<_>>>()?;
7260 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7261
7262 if chunk.is_last {
7263 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7264 this.register_buffer(&buffer, cx)?;
7265 }
7266 }
7267 }
7268
7269 Ok(())
7270 })?
7271 }
7272
7273 async fn handle_update_diff_base(
7274 this: Model<Self>,
7275 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7276 _: Arc<Client>,
7277 mut cx: AsyncAppContext,
7278 ) -> Result<()> {
7279 this.update(&mut cx, |this, cx| {
7280 let buffer_id = envelope.payload.buffer_id;
7281 let diff_base = envelope.payload.diff_base;
7282 if let Some(buffer) = this
7283 .opened_buffers
7284 .get_mut(&buffer_id)
7285 .and_then(|b| b.upgrade())
7286 .or_else(|| {
7287 this.incomplete_remote_buffers
7288 .get(&buffer_id)
7289 .cloned()
7290 .flatten()
7291 })
7292 {
7293 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7294 }
7295 Ok(())
7296 })?
7297 }
7298
7299 async fn handle_update_buffer_file(
7300 this: Model<Self>,
7301 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7302 _: Arc<Client>,
7303 mut cx: AsyncAppContext,
7304 ) -> Result<()> {
7305 let buffer_id = envelope.payload.buffer_id;
7306
7307 this.update(&mut cx, |this, cx| {
7308 let payload = envelope.payload.clone();
7309 if let Some(buffer) = this
7310 .opened_buffers
7311 .get(&buffer_id)
7312 .and_then(|b| b.upgrade())
7313 .or_else(|| {
7314 this.incomplete_remote_buffers
7315 .get(&buffer_id)
7316 .cloned()
7317 .flatten()
7318 })
7319 {
7320 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7321 let worktree = this
7322 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7323 .ok_or_else(|| anyhow!("no such worktree"))?;
7324 let file = File::from_proto(file, worktree, cx)?;
7325 buffer.update(cx, |buffer, cx| {
7326 buffer.file_updated(Arc::new(file), cx);
7327 });
7328 this.detect_language_for_buffer(&buffer, cx);
7329 }
7330 Ok(())
7331 })?
7332 }
7333
7334 async fn handle_save_buffer(
7335 this: Model<Self>,
7336 envelope: TypedEnvelope<proto::SaveBuffer>,
7337 _: Arc<Client>,
7338 mut cx: AsyncAppContext,
7339 ) -> Result<proto::BufferSaved> {
7340 let buffer_id = envelope.payload.buffer_id;
7341 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7342 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7343 let buffer = this
7344 .opened_buffers
7345 .get(&buffer_id)
7346 .and_then(|buffer| buffer.upgrade())
7347 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7348 anyhow::Ok((project_id, buffer))
7349 })??;
7350 buffer
7351 .update(&mut cx, |buffer, _| {
7352 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7353 })?
7354 .await?;
7355 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
7356
7357 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
7358 .await?;
7359 Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
7360 project_id,
7361 buffer_id,
7362 version: serialize_version(buffer.saved_version()),
7363 mtime: Some(buffer.saved_mtime().into()),
7364 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7365 })?)
7366 }
7367
7368 async fn handle_reload_buffers(
7369 this: Model<Self>,
7370 envelope: TypedEnvelope<proto::ReloadBuffers>,
7371 _: Arc<Client>,
7372 mut cx: AsyncAppContext,
7373 ) -> Result<proto::ReloadBuffersResponse> {
7374 let sender_id = envelope.original_sender_id()?;
7375 let reload = this.update(&mut cx, |this, cx| {
7376 let mut buffers = HashSet::default();
7377 for buffer_id in &envelope.payload.buffer_ids {
7378 buffers.insert(
7379 this.opened_buffers
7380 .get(buffer_id)
7381 .and_then(|buffer| buffer.upgrade())
7382 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7383 );
7384 }
7385 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7386 })??;
7387
7388 let project_transaction = reload.await?;
7389 let project_transaction = this.update(&mut cx, |this, cx| {
7390 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7391 })?;
7392 Ok(proto::ReloadBuffersResponse {
7393 transaction: Some(project_transaction),
7394 })
7395 }
7396
7397 async fn handle_synchronize_buffers(
7398 this: Model<Self>,
7399 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7400 _: Arc<Client>,
7401 mut cx: AsyncAppContext,
7402 ) -> Result<proto::SynchronizeBuffersResponse> {
7403 let project_id = envelope.payload.project_id;
7404 let mut response = proto::SynchronizeBuffersResponse {
7405 buffers: Default::default(),
7406 };
7407
7408 this.update(&mut cx, |this, cx| {
7409 let Some(guest_id) = envelope.original_sender_id else {
7410 error!("missing original_sender_id on SynchronizeBuffers request");
7411 return;
7412 };
7413
7414 this.shared_buffers.entry(guest_id).or_default().clear();
7415 for buffer in envelope.payload.buffers {
7416 let buffer_id = buffer.id;
7417 let remote_version = language::proto::deserialize_version(&buffer.version);
7418 if let Some(buffer) = this.buffer_for_id(buffer_id) {
7419 this.shared_buffers
7420 .entry(guest_id)
7421 .or_default()
7422 .insert(buffer_id);
7423
7424 let buffer = buffer.read(cx);
7425 response.buffers.push(proto::BufferVersion {
7426 id: buffer_id,
7427 version: language::proto::serialize_version(&buffer.version),
7428 });
7429
7430 let operations = buffer.serialize_ops(Some(remote_version), cx);
7431 let client = this.client.clone();
7432 if let Some(file) = buffer.file() {
7433 client
7434 .send(proto::UpdateBufferFile {
7435 project_id,
7436 buffer_id: buffer_id as u64,
7437 file: Some(file.to_proto()),
7438 })
7439 .log_err();
7440 }
7441
7442 client
7443 .send(proto::UpdateDiffBase {
7444 project_id,
7445 buffer_id: buffer_id as u64,
7446 diff_base: buffer.diff_base().map(Into::into),
7447 })
7448 .log_err();
7449
7450 client
7451 .send(proto::BufferReloaded {
7452 project_id,
7453 buffer_id,
7454 version: language::proto::serialize_version(buffer.saved_version()),
7455 mtime: Some(buffer.saved_mtime().into()),
7456 fingerprint: language::proto::serialize_fingerprint(
7457 buffer.saved_version_fingerprint(),
7458 ),
7459 line_ending: language::proto::serialize_line_ending(
7460 buffer.line_ending(),
7461 ) as i32,
7462 })
7463 .log_err();
7464
7465 cx.background_executor()
7466 .spawn(
7467 async move {
7468 let operations = operations.await;
7469 for chunk in split_operations(operations) {
7470 client
7471 .request(proto::UpdateBuffer {
7472 project_id,
7473 buffer_id,
7474 operations: chunk,
7475 })
7476 .await?;
7477 }
7478 anyhow::Ok(())
7479 }
7480 .log_err(),
7481 )
7482 .detach();
7483 }
7484 }
7485 })?;
7486
7487 Ok(response)
7488 }
7489
7490 async fn handle_format_buffers(
7491 this: Model<Self>,
7492 envelope: TypedEnvelope<proto::FormatBuffers>,
7493 _: Arc<Client>,
7494 mut cx: AsyncAppContext,
7495 ) -> Result<proto::FormatBuffersResponse> {
7496 let sender_id = envelope.original_sender_id()?;
7497 let format = this.update(&mut cx, |this, cx| {
7498 let mut buffers = HashSet::default();
7499 for buffer_id in &envelope.payload.buffer_ids {
7500 buffers.insert(
7501 this.opened_buffers
7502 .get(buffer_id)
7503 .and_then(|buffer| buffer.upgrade())
7504 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7505 );
7506 }
7507 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7508 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7509 })??;
7510
7511 let project_transaction = format.await?;
7512 let project_transaction = this.update(&mut cx, |this, cx| {
7513 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7514 })?;
7515 Ok(proto::FormatBuffersResponse {
7516 transaction: Some(project_transaction),
7517 })
7518 }
7519
7520 async fn handle_apply_additional_edits_for_completion(
7521 this: Model<Self>,
7522 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7523 _: Arc<Client>,
7524 mut cx: AsyncAppContext,
7525 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7526 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7527 let buffer = this
7528 .opened_buffers
7529 .get(&envelope.payload.buffer_id)
7530 .and_then(|buffer| buffer.upgrade())
7531 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7532 let language = buffer.read(cx).language();
7533 let completion = language::proto::deserialize_completion(
7534 envelope
7535 .payload
7536 .completion
7537 .ok_or_else(|| anyhow!("invalid completion"))?,
7538 language.cloned(),
7539 );
7540 Ok::<_, anyhow::Error>((buffer, completion))
7541 })??;
7542
7543 let completion = completion.await?;
7544
7545 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7546 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7547 })?;
7548
7549 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7550 transaction: apply_additional_edits
7551 .await?
7552 .as_ref()
7553 .map(language::proto::serialize_transaction),
7554 })
7555 }
7556
7557 async fn handle_apply_code_action(
7558 this: Model<Self>,
7559 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7560 _: Arc<Client>,
7561 mut cx: AsyncAppContext,
7562 ) -> Result<proto::ApplyCodeActionResponse> {
7563 let sender_id = envelope.original_sender_id()?;
7564 let action = language::proto::deserialize_code_action(
7565 envelope
7566 .payload
7567 .action
7568 .ok_or_else(|| anyhow!("invalid action"))?,
7569 )?;
7570 let apply_code_action = this.update(&mut cx, |this, cx| {
7571 let buffer = this
7572 .opened_buffers
7573 .get(&envelope.payload.buffer_id)
7574 .and_then(|buffer| buffer.upgrade())
7575 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7576 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7577 })??;
7578
7579 let project_transaction = apply_code_action.await?;
7580 let project_transaction = this.update(&mut cx, |this, cx| {
7581 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7582 })?;
7583 Ok(proto::ApplyCodeActionResponse {
7584 transaction: Some(project_transaction),
7585 })
7586 }
7587
7588 async fn handle_on_type_formatting(
7589 this: Model<Self>,
7590 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7591 _: Arc<Client>,
7592 mut cx: AsyncAppContext,
7593 ) -> Result<proto::OnTypeFormattingResponse> {
7594 let on_type_formatting = this.update(&mut cx, |this, cx| {
7595 let buffer = this
7596 .opened_buffers
7597 .get(&envelope.payload.buffer_id)
7598 .and_then(|buffer| buffer.upgrade())
7599 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7600 let position = envelope
7601 .payload
7602 .position
7603 .and_then(deserialize_anchor)
7604 .ok_or_else(|| anyhow!("invalid position"))?;
7605 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7606 buffer,
7607 position,
7608 envelope.payload.trigger.clone(),
7609 cx,
7610 ))
7611 })??;
7612
7613 let transaction = on_type_formatting
7614 .await?
7615 .as_ref()
7616 .map(language::proto::serialize_transaction);
7617 Ok(proto::OnTypeFormattingResponse { transaction })
7618 }
7619
7620 async fn handle_inlay_hints(
7621 this: Model<Self>,
7622 envelope: TypedEnvelope<proto::InlayHints>,
7623 _: Arc<Client>,
7624 mut cx: AsyncAppContext,
7625 ) -> Result<proto::InlayHintsResponse> {
7626 let sender_id = envelope.original_sender_id()?;
7627 let buffer = this.update(&mut cx, |this, _| {
7628 this.opened_buffers
7629 .get(&envelope.payload.buffer_id)
7630 .and_then(|buffer| buffer.upgrade())
7631 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7632 })??;
7633 let buffer_version = deserialize_version(&envelope.payload.version);
7634
7635 buffer
7636 .update(&mut cx, |buffer, _| {
7637 buffer.wait_for_version(buffer_version.clone())
7638 })?
7639 .await
7640 .with_context(|| {
7641 format!(
7642 "waiting for version {:?} for buffer {}",
7643 buffer_version,
7644 buffer.entity_id()
7645 )
7646 })?;
7647
7648 let start = envelope
7649 .payload
7650 .start
7651 .and_then(deserialize_anchor)
7652 .context("missing range start")?;
7653 let end = envelope
7654 .payload
7655 .end
7656 .and_then(deserialize_anchor)
7657 .context("missing range end")?;
7658 let buffer_hints = this
7659 .update(&mut cx, |project, cx| {
7660 project.inlay_hints(buffer, start..end, cx)
7661 })?
7662 .await
7663 .context("inlay hints fetch")?;
7664
7665 Ok(this.update(&mut cx, |project, cx| {
7666 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7667 })?)
7668 }
7669
7670 async fn handle_resolve_inlay_hint(
7671 this: Model<Self>,
7672 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7673 _: Arc<Client>,
7674 mut cx: AsyncAppContext,
7675 ) -> Result<proto::ResolveInlayHintResponse> {
        let proto_hint = envelope
            .payload
            .hint
            .ok_or_else(|| anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint"))?;
7680 let hint = InlayHints::proto_to_project_hint(proto_hint)
7681 .context("resolved proto inlay hint conversion")?;
7682 let buffer = this.update(&mut cx, |this, _cx| {
7683 this.opened_buffers
7684 .get(&envelope.payload.buffer_id)
7685 .and_then(|buffer| buffer.upgrade())
7686 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7687 })??;
7688 let response_hint = this
7689 .update(&mut cx, |project, cx| {
7690 project.resolve_inlay_hint(
7691 hint,
7692 buffer,
7693 LanguageServerId(envelope.payload.language_server_id as usize),
7694 cx,
7695 )
7696 })?
7697 .await
            .context("inlay hint resolution")?;
7699 Ok(proto::ResolveInlayHintResponse {
7700 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7701 })
7702 }
7703
7704 async fn handle_refresh_inlay_hints(
7705 this: Model<Self>,
7706 _: TypedEnvelope<proto::RefreshInlayHints>,
7707 _: Arc<Client>,
7708 mut cx: AsyncAppContext,
7709 ) -> Result<proto::Ack> {
7710 this.update(&mut cx, |_, cx| {
7711 cx.emit(Event::RefreshInlayHints);
7712 })?;
7713 Ok(proto::Ack {})
7714 }
7715
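    /// Generic handler for LSP-backed requests forwarded from remote peers: looks up the
    /// buffer, reconstructs the typed request via `T::from_proto`, runs it against the
    /// primary language server for that buffer, and serializes the response for the sender.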
7716 async fn handle_lsp_command<T: LspCommand>(
7717 this: Model<Self>,
7718 envelope: TypedEnvelope<T::ProtoRequest>,
7719 _: Arc<Client>,
7720 mut cx: AsyncAppContext,
7721 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7722 where
7723 <T::LspRequest as lsp::request::Request>::Params: Send,
7724 <T::LspRequest as lsp::request::Request>::Result: Send,
7725 {
7726 let sender_id = envelope.original_sender_id()?;
7727 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7728 let buffer_handle = this.update(&mut cx, |this, _cx| {
7729 this.opened_buffers
7730 .get(&buffer_id)
7731 .and_then(|buffer| buffer.upgrade())
7732 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7733 })??;
7734 let request = T::from_proto(
7735 envelope.payload,
7736 this.clone(),
7737 buffer_handle.clone(),
7738 cx.clone(),
7739 )
7740 .await?;
7741 let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
7742 let response = this
7743 .update(&mut cx, |this, cx| {
7744 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7745 })?
7746 .await?;
7747 this.update(&mut cx, |this, cx| {
7748 Ok(T::response_to_proto(
7749 response,
7750 this,
7751 sender_id,
7752 &buffer_version,
7753 cx,
7754 ))
7755 })?
7756 }
7757
7758 async fn handle_get_project_symbols(
7759 this: Model<Self>,
7760 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7761 _: Arc<Client>,
7762 mut cx: AsyncAppContext,
7763 ) -> Result<proto::GetProjectSymbolsResponse> {
7764 let symbols = this
7765 .update(&mut cx, |this, cx| {
7766 this.symbols(&envelope.payload.query, cx)
7767 })?
7768 .await?;
7769
7770 Ok(proto::GetProjectSymbolsResponse {
7771 symbols: symbols.iter().map(serialize_symbol).collect(),
7772 })
7773 }
7774
7775 async fn handle_search_project(
7776 this: Model<Self>,
7777 envelope: TypedEnvelope<proto::SearchProject>,
7778 _: Arc<Client>,
7779 mut cx: AsyncAppContext,
7780 ) -> Result<proto::SearchProjectResponse> {
7781 let peer_id = envelope.original_sender_id()?;
7782 let query = SearchQuery::from_proto(envelope.payload)?;
7783 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7784
7785 cx.spawn(move |mut cx| async move {
7786 let mut locations = Vec::new();
7787 while let Some((buffer, ranges)) = result.next().await {
7788 for range in ranges {
7789 let start = serialize_anchor(&range.start);
7790 let end = serialize_anchor(&range.end);
7791 let buffer_id = this.update(&mut cx, |this, cx| {
7792 this.create_buffer_for_peer(&buffer, peer_id, cx)
7793 })?;
7794 locations.push(proto::Location {
7795 buffer_id,
7796 start: Some(start),
7797 end: Some(end),
7798 });
7799 }
7800 }
7801 Ok(proto::SearchProjectResponse { locations })
7802 })
7803 .await
7804 }
7805
7806 async fn handle_open_buffer_for_symbol(
7807 this: Model<Self>,
7808 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7809 _: Arc<Client>,
7810 mut cx: AsyncAppContext,
7811 ) -> Result<proto::OpenBufferForSymbolResponse> {
7812 let peer_id = envelope.original_sender_id()?;
7813 let symbol = envelope
7814 .payload
7815 .symbol
7816 .ok_or_else(|| anyhow!("invalid symbol"))?;
7817 let symbol = this
7818 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
7819 .await?;
7820 let symbol = this.update(&mut cx, |this, _| {
7821 let signature = this.symbol_signature(&symbol.path);
7822 if signature == symbol.signature {
7823 Ok(symbol)
7824 } else {
7825 Err(anyhow!("invalid symbol signature"))
7826 }
7827 })??;
7828 let buffer = this
7829 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
7830 .await?;
7831
7832 Ok(proto::OpenBufferForSymbolResponse {
7833 buffer_id: this.update(&mut cx, |this, cx| {
7834 this.create_buffer_for_peer(&buffer, peer_id, cx)
7835 })?,
7836 })
7837 }
7838
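    /// Computes a SHA-256 digest over the symbol's worktree id, path, and this project's
    /// nonce. Peers echo this signature back (see `handle_open_buffer_for_symbol`), which
    /// lets the host reject symbol paths that it did not itself produce.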
7839 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
7840 let mut hasher = Sha256::new();
7841 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
7842 hasher.update(project_path.path.to_string_lossy().as_bytes());
7843 hasher.update(self.nonce.to_be_bytes());
7844 hasher.finalize().as_slice().try_into().unwrap()
7845 }
7846
7847 async fn handle_open_buffer_by_id(
7848 this: Model<Self>,
7849 envelope: TypedEnvelope<proto::OpenBufferById>,
7850 _: Arc<Client>,
7851 mut cx: AsyncAppContext,
7852 ) -> Result<proto::OpenBufferResponse> {
7853 let peer_id = envelope.original_sender_id()?;
7854 let buffer = this
7855 .update(&mut cx, |this, cx| {
7856 this.open_buffer_by_id(envelope.payload.id, cx)
7857 })?
7858 .await?;
7859 this.update(&mut cx, |this, cx| {
7860 Ok(proto::OpenBufferResponse {
7861 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7862 })
7863 })?
7864 }
7865
7866 async fn handle_open_buffer_by_path(
7867 this: Model<Self>,
7868 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7869 _: Arc<Client>,
7870 mut cx: AsyncAppContext,
7871 ) -> Result<proto::OpenBufferResponse> {
7872 let peer_id = envelope.original_sender_id()?;
7873 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7874 let open_buffer = this.update(&mut cx, |this, cx| {
7875 this.open_buffer(
7876 ProjectPath {
7877 worktree_id,
7878 path: PathBuf::from(envelope.payload.path).into(),
7879 },
7880 cx,
7881 )
7882 })?;
7883
7884 let buffer = open_buffer.await?;
7885 this.update(&mut cx, |this, cx| {
7886 Ok(proto::OpenBufferResponse {
7887 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7888 })
7889 })?
7890 }
7891
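    /// Converts a `ProjectTransaction` into its wire representation, making sure each
    /// affected buffer has been shared with the given peer so that the buffer ids in the
    /// message are meaningful on their side.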
7892 fn serialize_project_transaction_for_peer(
7893 &mut self,
7894 project_transaction: ProjectTransaction,
7895 peer_id: proto::PeerId,
7896 cx: &mut AppContext,
7897 ) -> proto::ProjectTransaction {
7898 let mut serialized_transaction = proto::ProjectTransaction {
7899 buffer_ids: Default::default(),
7900 transactions: Default::default(),
7901 };
7902 for (buffer, transaction) in project_transaction.0 {
7903 serialized_transaction
7904 .buffer_ids
7905 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7906 serialized_transaction
7907 .transactions
7908 .push(language::proto::serialize_transaction(&transaction));
7909 }
7910 serialized_transaction
7911 }
7912
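    /// Rebuilds a `ProjectTransaction` received from a peer: waits for each referenced
    /// remote buffer to arrive, waits for the edits each transaction mentions, and
    /// optionally pushes the transactions onto the buffers' undo histories.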
7913 fn deserialize_project_transaction(
7914 &mut self,
7915 message: proto::ProjectTransaction,
7916 push_to_history: bool,
7917 cx: &mut ModelContext<Self>,
7918 ) -> Task<Result<ProjectTransaction>> {
7919 cx.spawn(move |this, mut cx| async move {
7920 let mut project_transaction = ProjectTransaction::default();
7921 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
7922 {
7923 let buffer = this
7924 .update(&mut cx, |this, cx| {
7925 this.wait_for_remote_buffer(buffer_id, cx)
7926 })?
7927 .await?;
7928 let transaction = language::proto::deserialize_transaction(transaction)?;
7929 project_transaction.0.insert(buffer, transaction);
7930 }
7931
7932 for (buffer, transaction) in &project_transaction.0 {
7933 buffer
7934 .update(&mut cx, |buffer, _| {
7935 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
7936 })?
7937 .await?;
7938
7939 if push_to_history {
7940 buffer.update(&mut cx, |buffer, _| {
7941 buffer.push_transaction(transaction.clone(), Instant::now());
7942 })?;
7943 }
7944 }
7945
7946 Ok(project_transaction)
7947 })
7948 }
7949
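    /// Returns the buffer's remote id and, when this project is being shared (local client
    /// state), queues an update so the buffer's state is streamed to the given peer.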
7950 fn create_buffer_for_peer(
7951 &mut self,
7952 buffer: &Model<Buffer>,
7953 peer_id: proto::PeerId,
7954 cx: &mut AppContext,
7955 ) -> u64 {
7956 let buffer_id = buffer.read(cx).remote_id();
7957 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
7958 updates_tx
7959 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7960 .ok();
7961 }
7962 buffer_id
7963 }
7964
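    /// Waits until the buffer with the given remote id has been received from the host,
    /// registering it as an incomplete remote buffer in the meantime so that a
    /// resynchronization will re-request it. Fails if the project is dropped or
    /// disconnected first.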
7965 fn wait_for_remote_buffer(
7966 &mut self,
7967 id: u64,
7968 cx: &mut ModelContext<Self>,
7969 ) -> Task<Result<Model<Buffer>>> {
7970 let mut opened_buffer_rx = self.opened_buffer.1.clone();
7971
7972 cx.spawn(move |this, mut cx| async move {
7973 let buffer = loop {
7974 let Some(this) = this.upgrade() else {
7975 return Err(anyhow!("project dropped"));
7976 };
7977
7978 let buffer = this.update(&mut cx, |this, _cx| {
7979 this.opened_buffers
7980 .get(&id)
7981 .and_then(|buffer| buffer.upgrade())
7982 })?;
7983
7984 if let Some(buffer) = buffer {
7985 break buffer;
7986 } else if this.update(&mut cx, |this, _| this.is_disconnected())? {
7987 return Err(anyhow!("disconnected before buffer {} could be opened", id));
7988 }
7989
7990 this.update(&mut cx, |this, _| {
7991 this.incomplete_remote_buffers.entry(id).or_default();
7992 })?;
7993 drop(this);
7994
7995 opened_buffer_rx
7996 .next()
7997 .await
7998 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
7999 };
8000
8001 Ok(buffer)
8002 })
8003 }
8004
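    /// After (re)connecting to a host, reports the versions of all locally open buffers,
    /// sends the host any operations it is missing, and re-requests buffers that never
    /// finished opening so that futures waiting on them can make progress.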
8005 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8006 let project_id = match self.client_state.as_ref() {
8007 Some(ProjectClientState::Remote {
8008 sharing_has_stopped,
8009 remote_id,
8010 ..
8011 }) => {
8012 if *sharing_has_stopped {
8013 return Task::ready(Err(anyhow!(
8014 "can't synchronize remote buffers on a readonly project"
8015 )));
8016 } else {
8017 *remote_id
8018 }
8019 }
8020 Some(ProjectClientState::Local { .. }) | None => {
8021 return Task::ready(Err(anyhow!(
8022 "can't synchronize remote buffers on a local project"
8023 )))
8024 }
8025 };
8026
8027 let client = self.client.clone();
8028 cx.spawn(move |this, mut cx| async move {
8029 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8030 let buffers = this
8031 .opened_buffers
8032 .iter()
8033 .filter_map(|(id, buffer)| {
8034 let buffer = buffer.upgrade()?;
8035 Some(proto::BufferVersion {
8036 id: *id,
8037 version: language::proto::serialize_version(&buffer.read(cx).version),
8038 })
8039 })
8040 .collect();
8041 let incomplete_buffer_ids = this
8042 .incomplete_remote_buffers
8043 .keys()
8044 .copied()
8045 .collect::<Vec<_>>();
8046
8047 (buffers, incomplete_buffer_ids)
8048 })?;
8049 let response = client
8050 .request(proto::SynchronizeBuffers {
8051 project_id,
8052 buffers,
8053 })
8054 .await?;
8055
8056 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8057 response
8058 .buffers
8059 .into_iter()
8060 .map(|buffer| {
8061 let client = client.clone();
8062 let buffer_id = buffer.id;
8063 let remote_version = language::proto::deserialize_version(&buffer.version);
8064 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8065 let operations =
8066 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8067 cx.background_executor().spawn(async move {
8068 let operations = operations.await;
8069 for chunk in split_operations(operations) {
8070 client
8071 .request(proto::UpdateBuffer {
8072 project_id,
8073 buffer_id,
8074 operations: chunk,
8075 })
8076 .await?;
8077 }
8078 anyhow::Ok(())
8079 })
8080 } else {
8081 Task::ready(Ok(()))
8082 }
8083 })
8084 .collect::<Vec<_>>()
8085 })?;
8086
            // Any incomplete buffers have open requests waiting. Request that the host
            // creates these buffers for us again to unblock any waiting futures.
8089 for id in incomplete_buffer_ids {
8090 cx.background_executor()
8091 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8092 .detach();
8093 }
8094
8095 futures::future::join_all(send_updates_for_buffers)
8096 .await
8097 .into_iter()
8098 .collect()
8099 })
8100 }
8101
8102 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8103 self.worktrees()
8104 .map(|worktree| {
8105 let worktree = worktree.read(cx);
8106 proto::WorktreeMetadata {
8107 id: worktree.id().to_proto(),
8108 root_name: worktree.root_name().into(),
8109 visible: worktree.is_visible(),
8110 abs_path: worktree.abs_path().to_string_lossy().into(),
8111 }
8112 })
8113 .collect()
8114 }
8115
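    /// Replaces this project's worktrees with the metadata received from the host, reusing
    /// existing worktree handles whose ids still match, creating remote worktrees for new
    /// ids, and emitting `WorktreeRemoved` for any that are gone.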
8116 fn set_worktrees_from_proto(
8117 &mut self,
8118 worktrees: Vec<proto::WorktreeMetadata>,
8119 cx: &mut ModelContext<Project>,
8120 ) -> Result<()> {
8121 let replica_id = self.replica_id();
8122 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8123
8124 let mut old_worktrees_by_id = self
8125 .worktrees
8126 .drain(..)
8127 .filter_map(|worktree| {
8128 let worktree = worktree.upgrade()?;
8129 Some((worktree.read(cx).id(), worktree))
8130 })
8131 .collect::<HashMap<_, _>>();
8132
8133 for worktree in worktrees {
8134 if let Some(old_worktree) =
8135 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8136 {
8137 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8138 } else {
8139 let worktree =
8140 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8141 let _ = self.add_worktree(&worktree, cx);
8142 }
8143 }
8144
8145 self.metadata_changed(cx);
8146 for id in old_worktrees_by_id.keys() {
8147 cx.emit(Event::WorktreeRemoved(*id));
8148 }
8149
8150 Ok(())
8151 }
8152
8153 fn set_collaborators_from_proto(
8154 &mut self,
8155 messages: Vec<proto::Collaborator>,
8156 cx: &mut ModelContext<Self>,
8157 ) -> Result<()> {
8158 let mut collaborators = HashMap::default();
8159 for message in messages {
8160 let collaborator = Collaborator::from_proto(message)?;
8161 collaborators.insert(collaborator.peer_id, collaborator);
8162 }
8163 for old_peer_id in self.collaborators.keys() {
8164 if !collaborators.contains_key(old_peer_id) {
8165 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8166 }
8167 }
8168 self.collaborators = collaborators;
8169 Ok(())
8170 }
8171
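    /// Converts a `proto::Symbol` back into a `Symbol`, looking up a language for the
    /// symbol's path so that a styled label can be produced; falls back to a plain-text
    /// label when no language or label is available.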
8172 fn deserialize_symbol(
8173 &self,
8174 serialized_symbol: proto::Symbol,
8175 ) -> impl Future<Output = Result<Symbol>> {
8176 let languages = self.languages.clone();
8177 async move {
8178 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8179 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8180 let start = serialized_symbol
8181 .start
8182 .ok_or_else(|| anyhow!("invalid start"))?;
8183 let end = serialized_symbol
8184 .end
8185 .ok_or_else(|| anyhow!("invalid end"))?;
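            // The symbol kind travels over the wire as its raw integer representation
            // (see `serialize_symbol`), so it is reinterpreted here without validation.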
8186 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8187 let path = ProjectPath {
8188 worktree_id,
8189 path: PathBuf::from(serialized_symbol.path).into(),
8190 };
8191 let language = languages
8192 .language_for_file(&path.path, None)
8193 .await
8194 .log_err();
8195 Ok(Symbol {
8196 language_server_name: LanguageServerName(
8197 serialized_symbol.language_server_name.into(),
8198 ),
8199 source_worktree_id,
8200 path,
8201 label: {
8202 match language {
8203 Some(language) => {
8204 language
8205 .label_for_symbol(&serialized_symbol.name, kind)
8206 .await
8207 }
8208 None => None,
8209 }
8210 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8211 },
8212
8213 name: serialized_symbol.name,
8214 range: Unclipped(PointUtf16::new(start.row, start.column))
8215 ..Unclipped(PointUtf16::new(end.row, end.column)),
8216 kind,
8217 signature: serialized_symbol
8218 .signature
8219 .try_into()
8220 .map_err(|_| anyhow!("invalid signature"))?,
8221 })
8222 }
8223 }
8224
8225 async fn handle_buffer_saved(
8226 this: Model<Self>,
8227 envelope: TypedEnvelope<proto::BufferSaved>,
8228 _: Arc<Client>,
8229 mut cx: AsyncAppContext,
8230 ) -> Result<()> {
8231 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8232 let version = deserialize_version(&envelope.payload.version);
8233 let mtime = envelope
8234 .payload
8235 .mtime
8236 .ok_or_else(|| anyhow!("missing mtime"))?
8237 .into();
8238
8239 this.update(&mut cx, |this, cx| {
8240 let buffer = this
8241 .opened_buffers
8242 .get(&envelope.payload.buffer_id)
8243 .and_then(|buffer| buffer.upgrade())
8244 .or_else(|| {
8245 this.incomplete_remote_buffers
8246 .get(&envelope.payload.buffer_id)
8247 .and_then(|b| b.clone())
8248 });
8249 if let Some(buffer) = buffer {
8250 buffer.update(cx, |buffer, cx| {
8251 buffer.did_save(version, fingerprint, mtime, cx);
8252 });
8253 }
8254 Ok(())
8255 })?
8256 }
8257
8258 async fn handle_buffer_reloaded(
8259 this: Model<Self>,
8260 envelope: TypedEnvelope<proto::BufferReloaded>,
8261 _: Arc<Client>,
8262 mut cx: AsyncAppContext,
8263 ) -> Result<()> {
8264 let payload = envelope.payload;
8265 let version = deserialize_version(&payload.version);
8266 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8267 let line_ending = deserialize_line_ending(
8268 proto::LineEnding::from_i32(payload.line_ending)
8269 .ok_or_else(|| anyhow!("missing line ending"))?,
8270 );
8271 let mtime = payload
8272 .mtime
8273 .ok_or_else(|| anyhow!("missing mtime"))?
8274 .into();
8275 this.update(&mut cx, |this, cx| {
8276 let buffer = this
8277 .opened_buffers
8278 .get(&payload.buffer_id)
8279 .and_then(|buffer| buffer.upgrade())
8280 .or_else(|| {
8281 this.incomplete_remote_buffers
8282 .get(&payload.buffer_id)
8283 .cloned()
8284 .flatten()
8285 });
8286 if let Some(buffer) = buffer {
8287 buffer.update(cx, |buffer, cx| {
8288 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8289 });
8290 }
8291 Ok(())
8292 })?
8293 }
8294
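    /// Converts a batch of LSP `TextEdit`s into anchored buffer edits, resolved against the
    /// snapshot corresponding to the document version the server reported. Invalid ranges
    /// are clipped, adjacent or newline-separated edits are merged, and multi-line
    /// replacements are diffed against the old text so that anchors in unchanged regions
    /// are preserved.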
8295 #[allow(clippy::type_complexity)]
8296 fn edits_from_lsp(
8297 &mut self,
8298 buffer: &Model<Buffer>,
8299 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8300 server_id: LanguageServerId,
8301 version: Option<i32>,
8302 cx: &mut ModelContext<Self>,
8303 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8304 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8305 cx.background_executor().spawn(async move {
8306 let snapshot = snapshot?;
8307 let mut lsp_edits = lsp_edits
8308 .into_iter()
8309 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8310 .collect::<Vec<_>>();
8311 lsp_edits.sort_by_key(|(range, _)| range.start);
8312
8313 let mut lsp_edits = lsp_edits.into_iter().peekable();
8314 let mut edits = Vec::new();
8315 while let Some((range, mut new_text)) = lsp_edits.next() {
8316 // Clip invalid ranges provided by the language server.
8317 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8318 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8319
8320 // Combine any LSP edits that are adjacent.
8321 //
8322 // Also, combine LSP edits that are separated from each other by only
8323 // a newline. This is important because for some code actions,
8324 // Rust-analyzer rewrites the entire buffer via a series of edits that
8325 // are separated by unchanged newline characters.
8326 //
8327 // In order for the diffing logic below to work properly, any edits that
8328 // cancel each other out must be combined into one.
8329 while let Some((next_range, next_text)) = lsp_edits.peek() {
8330 if next_range.start.0 > range.end {
8331 if next_range.start.0.row > range.end.row + 1
8332 || next_range.start.0.column > 0
8333 || snapshot.clip_point_utf16(
8334 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8335 Bias::Left,
8336 ) > range.end
8337 {
8338 break;
8339 }
8340 new_text.push('\n');
8341 }
8342 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8343 new_text.push_str(next_text);
8344 lsp_edits.next();
8345 }
8346
8347 // For multiline edits, perform a diff of the old and new text so that
8348 // we can identify the changes more precisely, preserving the locations
8349 // of any anchors positioned in the unchanged regions.
8350 if range.end.row > range.start.row {
8351 let mut offset = range.start.to_offset(&snapshot);
8352 let old_text = snapshot.text_for_range(range).collect::<String>();
8353
8354 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8355 let mut moved_since_edit = true;
8356 for change in diff.iter_all_changes() {
8357 let tag = change.tag();
8358 let value = change.value();
8359 match tag {
8360 ChangeTag::Equal => {
8361 offset += value.len();
8362 moved_since_edit = true;
8363 }
8364 ChangeTag::Delete => {
8365 let start = snapshot.anchor_after(offset);
8366 let end = snapshot.anchor_before(offset + value.len());
8367 if moved_since_edit {
8368 edits.push((start..end, String::new()));
8369 } else {
8370 edits.last_mut().unwrap().0.end = end;
8371 }
8372 offset += value.len();
8373 moved_since_edit = false;
8374 }
8375 ChangeTag::Insert => {
8376 if moved_since_edit {
8377 let anchor = snapshot.anchor_after(offset);
8378 edits.push((anchor..anchor, value.to_string()));
8379 } else {
8380 edits.last_mut().unwrap().1.push_str(value);
8381 }
8382 moved_since_edit = false;
8383 }
8384 }
8385 }
8386 } else if range.end == range.start {
8387 let anchor = snapshot.anchor_after(range.start);
8388 edits.push((anchor..anchor, new_text));
8389 } else {
8390 let edit_start = snapshot.anchor_after(range.start);
8391 let edit_end = snapshot.anchor_before(range.end);
8392 edits.push((edit_start..edit_end, new_text));
8393 }
8394 }
8395
8396 Ok(edits)
8397 })
8398 }
8399
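    /// Returns the text snapshot that corresponds to the document version a language server
    /// referred to, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions old. When
    /// no version is given, the buffer's current text is used.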
8400 fn buffer_snapshot_for_lsp_version(
8401 &mut self,
8402 buffer: &Model<Buffer>,
8403 server_id: LanguageServerId,
8404 version: Option<i32>,
8405 cx: &AppContext,
8406 ) -> Result<TextBufferSnapshot> {
8407 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8408
8409 if let Some(version) = version {
8410 let buffer_id = buffer.read(cx).remote_id();
8411 let snapshots = self
8412 .buffer_snapshots
8413 .get_mut(&buffer_id)
8414 .and_then(|m| m.get_mut(&server_id))
8415 .ok_or_else(|| {
8416 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8417 })?;
8418
8419 let found_snapshot = snapshots
8420 .binary_search_by_key(&version, |e| e.version)
8421 .map(|ix| snapshots[ix].snapshot.clone())
8422 .map_err(|_| {
8423 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8424 })?;
8425
8426 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8427 Ok(found_snapshot)
8428 } else {
8429 Ok((buffer.read(cx)).text_snapshot())
8430 }
8431 }
8432
8433 pub fn language_servers(
8434 &self,
8435 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8436 self.language_server_ids
8437 .iter()
8438 .map(|((worktree_id, server_name), server_id)| {
8439 (*server_id, server_name.clone(), *worktree_id)
8440 })
8441 }
8442
8443 pub fn supplementary_language_servers(
8444 &self,
8445 ) -> impl '_
8446 + Iterator<
8447 Item = (
8448 &LanguageServerId,
8449 &(LanguageServerName, Arc<LanguageServer>),
8450 ),
8451 > {
8452 self.supplementary_language_servers.iter()
8453 }
8454
8455 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8456 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8457 Some(server.clone())
8458 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8459 Some(Arc::clone(server))
8460 } else {
8461 None
8462 }
8463 }
8464
8465 pub fn language_servers_for_buffer(
8466 &self,
8467 buffer: &Buffer,
8468 cx: &AppContext,
8469 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8470 self.language_server_ids_for_buffer(buffer, cx)
8471 .into_iter()
8472 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8473 LanguageServerState::Running {
8474 adapter, server, ..
8475 } => Some((adapter, server)),
8476 _ => None,
8477 })
8478 }
8479
8480 fn primary_language_server_for_buffer(
8481 &self,
8482 buffer: &Buffer,
8483 cx: &AppContext,
8484 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8485 self.language_servers_for_buffer(buffer, cx).next()
8486 }
8487
8488 pub fn language_server_for_buffer(
8489 &self,
8490 buffer: &Buffer,
8491 server_id: LanguageServerId,
8492 cx: &AppContext,
8493 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8494 self.language_servers_for_buffer(buffer, cx)
8495 .find(|(_, s)| s.server_id() == server_id)
8496 }
8497
8498 fn language_server_ids_for_buffer(
8499 &self,
8500 buffer: &Buffer,
8501 cx: &AppContext,
8502 ) -> Vec<LanguageServerId> {
8503 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8504 let worktree_id = file.worktree_id(cx);
8505 language
8506 .lsp_adapters()
8507 .iter()
8508 .flat_map(|adapter| {
8509 let key = (worktree_id, adapter.name.clone());
8510 self.language_server_ids.get(&key).copied()
8511 })
8512 .collect()
8513 } else {
8514 Vec::new()
8515 }
8516 }
8517}
8518
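/// Subscribes to Copilot events so that, once its language server has started, it is
/// registered as a supplementary language server and its log messages are re-emitted as
/// `LanguageServerLog` events.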
8519fn subscribe_for_copilot_events(
8520 copilot: &Model<Copilot>,
8521 cx: &mut ModelContext<'_, Project>,
8522) -> gpui::Subscription {
8523 cx.subscribe(
8524 copilot,
8525 |project, copilot, copilot_event, cx| match copilot_event {
8526 copilot::Event::CopilotLanguageServerStarted => {
8527 match copilot.read(cx).language_server() {
8528 Some((name, copilot_server)) => {
                        // Another event may try to re-add a server that was already added and subscribed to; avoid doing that again.
8530 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8531 let new_server_id = copilot_server.server_id();
8532 let weak_project = cx.weak_model();
8533 let copilot_log_subscription = copilot_server
8534 .on_notification::<copilot::request::LogMessage, _>(
8535 move |params, mut cx| {
8536 weak_project.update(&mut cx, |_, cx| {
8537 cx.emit(Event::LanguageServerLog(
8538 new_server_id,
8539 params.message,
8540 ));
8541 }).ok();
8542 },
8543 );
8544 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8545 project.copilot_log_subscription = Some(copilot_log_subscription);
8546 cx.emit(Event::LanguageServerAdded(new_server_id));
8547 }
8548 }
8549 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8550 }
8551 }
8552 },
8553 )
8554}
8555
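/// Returns the longest prefix of `glob` made up of whole path components that contain no
/// glob metacharacters. For example, with `/` as the path separator,
/// `glob_literal_prefix("foo/bar/*.rs")` returns `"foo/bar"`.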
fn glob_literal_prefix(glob: &str) -> &str {
8557 let mut literal_end = 0;
8558 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8559 if part.contains(&['*', '?', '{', '}']) {
8560 break;
8561 } else {
8562 if i > 0 {
                // Account for the separator prior to this part
8564 literal_end += path::MAIN_SEPARATOR.len_utf8();
8565 }
8566 literal_end += part.len();
8567 }
8568 }
8569 &glob[..literal_end]
8570}
8571
8572impl WorktreeHandle {
8573 pub fn upgrade(&self) -> Option<Model<Worktree>> {
8574 match self {
8575 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8576 WorktreeHandle::Weak(handle) => handle.upgrade(),
8577 }
8578 }
8579
8580 pub fn handle_id(&self) -> usize {
8581 match self {
8582 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8583 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8584 }
8585 }
8586}
8587
8588impl OpenBuffer {
8589 pub fn upgrade(&self) -> Option<Model<Buffer>> {
8590 match self {
8591 OpenBuffer::Strong(handle) => Some(handle.clone()),
8592 OpenBuffer::Weak(handle) => handle.upgrade(),
8593 OpenBuffer::Operations(_) => None,
8594 }
8595 }
8596}
8597
8598pub struct PathMatchCandidateSet {
8599 pub snapshot: Snapshot,
8600 pub include_ignored: bool,
8601 pub include_root_name: bool,
8602}
8603
8604impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8605 type Candidates = PathMatchCandidateSetIter<'a>;
8606
8607 fn id(&self) -> usize {
8608 self.snapshot.id().to_usize()
8609 }
8610
8611 fn len(&self) -> usize {
8612 if self.include_ignored {
8613 self.snapshot.file_count()
8614 } else {
8615 self.snapshot.visible_file_count()
8616 }
8617 }
8618
8619 fn prefix(&self) -> Arc<str> {
8620 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8621 self.snapshot.root_name().into()
8622 } else if self.include_root_name {
8623 format!("{}/", self.snapshot.root_name()).into()
8624 } else {
8625 "".into()
8626 }
8627 }
8628
8629 fn candidates(&'a self, start: usize) -> Self::Candidates {
8630 PathMatchCandidateSetIter {
8631 traversal: self.snapshot.files(self.include_ignored, start),
8632 }
8633 }
8634}
8635
8636pub struct PathMatchCandidateSetIter<'a> {
8637 traversal: Traversal<'a>,
8638}
8639
8640impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8641 type Item = fuzzy::PathMatchCandidate<'a>;
8642
8643 fn next(&mut self) -> Option<Self::Item> {
8644 self.traversal.next().map(|entry| {
8645 if let EntryKind::File(char_bag) = entry.kind {
8646 fuzzy::PathMatchCandidate {
8647 path: &entry.path,
8648 char_bag,
8649 }
8650 } else {
8651 unreachable!()
8652 }
8653 })
8654 }
8655}
8656
8657impl EventEmitter<Event> for Project {}
8658
8659impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8660 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8661 Self {
8662 worktree_id,
8663 path: path.as_ref().into(),
8664 }
8665 }
8666}
8667
8668impl ProjectLspAdapterDelegate {
8669 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8670 Arc::new(Self {
8671 project: cx.handle(),
8672 http_client: project.client.http_client(),
8673 })
8674 }
8675}
8676
8677impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8678 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8679 self.project
8680 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8681 }
8682
8683 fn http_client(&self) -> Arc<dyn HttpClient> {
8684 self.http_client.clone()
8685 }
8686}
8687
8688fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8689 proto::Symbol {
8690 language_server_name: symbol.language_server_name.0.to_string(),
8691 source_worktree_id: symbol.source_worktree_id.to_proto(),
8692 worktree_id: symbol.path.worktree_id.to_proto(),
8693 path: symbol.path.path.to_string_lossy().to_string(),
8694 name: symbol.name.clone(),
8695 kind: unsafe { mem::transmute(symbol.kind) },
8696 start: Some(proto::PointUtf16 {
8697 row: symbol.range.start.0.row,
8698 column: symbol.range.start.0.column,
8699 }),
8700 end: Some(proto::PointUtf16 {
8701 row: symbol.range.end.0.row,
8702 column: symbol.range.end.0.column,
8703 }),
8704 signature: symbol.signature.to_vec(),
8705 }
8706}
8707
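/// Computes `path` relative to `base` by stripping their common prefix and inserting `..`
/// components for the remaining components of `base`. For example, with Unix-style paths,
/// `relativize_path(Path::new("/a/b"), Path::new("/a/c/d"))` yields `../c/d`.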
8708fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8709 let mut path_components = path.components();
8710 let mut base_components = base.components();
8711 let mut components: Vec<Component> = Vec::new();
8712 loop {
8713 match (path_components.next(), base_components.next()) {
8714 (None, None) => break,
8715 (Some(a), None) => {
8716 components.push(a);
8717 components.extend(path_components.by_ref());
8718 break;
8719 }
8720 (None, _) => components.push(Component::ParentDir),
8721 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8722 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8723 (Some(a), Some(_)) => {
8724 components.push(Component::ParentDir);
8725 for _ in base_components {
8726 components.push(Component::ParentDir);
8727 }
8728 components.push(a);
8729 components.extend(path_components.by_ref());
8730 break;
8731 }
8732 }
8733 }
8734 components.iter().map(|c| c.as_os_str()).collect()
8735}
8736
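/// Joins `path` onto `base`, resolving `.` and `..` components lexically rather than
/// through the filesystem. For example, `resolve_path(Path::new("/a/b"), Path::new("../c"))`
/// yields `/a/c`.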
8737fn resolve_path(base: &Path, path: &Path) -> PathBuf {
8738 let mut result = base.to_path_buf();
8739 for component in path.components() {
8740 match component {
8741 Component::ParentDir => {
8742 result.pop();
8743 }
8744 Component::CurDir => (),
8745 _ => result.push(component),
8746 }
8747 }
8748 result
8749}
8750
8751impl Item for Buffer {
8752 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8753 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8754 }
8755
8756 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8757 File::from_dyn(self.file()).map(|file| ProjectPath {
8758 worktree_id: file.worktree_id(cx),
8759 path: file.path().clone(),
8760 })
8761 }
8762}
8763
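/// Waits for an in-flight buffer load (tracked via a watch channel) to finish, returning
/// either the loaded buffer or the shared error produced by the load.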
8764async fn wait_for_loading_buffer(
8765 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
8766) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
8767 loop {
8768 if let Some(result) = receiver.borrow().as_ref() {
8769 match result {
8770 Ok(buffer) => return Ok(buffer.to_owned()),
8771 Err(e) => return Err(e.to_owned()),
8772 }
8773 }
8774 receiver.next().await;
8775 }
8776}
8777
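/// Returns whether the language server's save options ask for the full document text to be
/// included in `textDocument/didSave` notifications.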
8778fn include_text(server: &lsp::LanguageServer) -> bool {
8779 server
8780 .capabilities()
8781 .text_document_sync
8782 .as_ref()
8783 .and_then(|sync| match sync {
8784 lsp::TextDocumentSyncCapability::Kind(_) => None,
8785 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8786 })
8787 .and_then(|save_options| match save_options {
8788 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
8789 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8790 })
8791 .unwrap_or(false)
8792}