1mod ignore;
2mod lsp_command;
3pub mod project_settings;
4pub mod search;
5pub mod terminals;
6pub mod worktree;
7
8#[cfg(test)]
9mod project_tests;
10#[cfg(test)]
11mod worktree_tests;
12
13use anyhow::{anyhow, Context, Result};
14use client::{proto, Client, TypedEnvelope, UserId, UserStore};
15use clock::ReplicaId;
16use collections::{hash_map, BTreeMap, HashMap, HashSet};
17use copilot::Copilot;
18use futures::{
19 channel::{
20 mpsc::{self, UnboundedReceiver},
21 oneshot,
22 },
23 future::{try_join_all, Shared},
24 stream::FuturesUnordered,
25 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
26};
27use globset::{Glob, GlobSet, GlobSetBuilder};
28use gpui::{
29 executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
30 ModelContext, ModelHandle, Task, WeakModelHandle,
31};
32use itertools::Itertools;
33use language::{
34 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
35 point_to_lsp,
36 proto::{
37 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
38 serialize_anchor, serialize_version, split_operations,
39 },
40 range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
41 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
42 File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
43 OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
44 ToOffset, ToPointUtf16, Transaction, Unclipped,
45};
46use log::error;
47use lsp::{
48 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
49 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
50};
51use lsp_command::*;
52use postage::watch;
53use project_settings::{LspSettings, ProjectSettings};
54use rand::prelude::*;
55use search::SearchQuery;
56use serde::Serialize;
57use settings::SettingsStore;
58use sha2::{Digest, Sha256};
59use similar::{ChangeTag, TextDiff};
60use smol::channel::{Receiver, Sender};
61use std::{
62 cmp::{self, Ordering},
63 convert::TryInto,
64 hash::Hash,
65 mem,
66 num::NonZeroU32,
67 ops::Range,
68 path::{self, Component, Path, PathBuf},
69 process::Stdio,
70 str,
71 sync::{
72 atomic::{AtomicUsize, Ordering::SeqCst},
73 Arc,
74 },
75 time::{Duration, Instant},
76};
77use terminals::Terminals;
78use text::Anchor;
79use util::{
80 debug_panic, defer, http::HttpClient, merge_json_value_into,
81 paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
82};
83
84pub use fs::*;
85pub use worktree::*;
86
/// An entity that can be associated with an entry and a path inside a project
/// (e.g. an editor item backed by a file).
pub trait Item {
    /// The project entry backing this item, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
    /// The worktree-relative path of this item, if it has one.
    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
}
91
92// Language server state is stored across 3 collections:
93// language_servers =>
94// a mapping from unique server id to LanguageServerState which can either be a task for a
95// server in the process of starting, or a running server with adapter and language server arcs
96// language_server_ids => a mapping from worktreeId and server name to the unique server id
97// language_server_statuses => a mapping from unique server id to the current server status
98//
99// Multiple worktrees can map to the same language server for example when you jump to the definition
100// of a file in the standard library. So language_server_ids is used to look up which server is active
101// for a given worktree and language server name
102//
103// When starting a language server, first the id map is checked to make sure a server isn't already available
// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and
105// the Starting variant of LanguageServerState is stored in the language_servers map.
/// A collection of worktrees together with the buffer, language-server, and
/// collaboration state needed to edit them, either locally or as a remote guest.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
    languages: Arc<LanguageRegistry>,
    // See the comment above `Project` for how these three server collections relate.
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
    language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
    last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    join_project_response_message_id: u32,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    // `None` for an unshared local project (see `Project::local`); remote
    // projects always carry `ProjectClientState::Remote`.
    client_state: Option<ProjectClientState>,
    collaborators: HashMap<proto::PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    next_buffer_id: u64,
    // Signals watchers each time a buffer is opened.
    opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
    shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
    #[allow(clippy::type_complexity)]
    loading_buffers_by_path: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    #[allow(clippy::type_complexity)]
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect
    incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
    buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
    buffers_being_formatted: HashSet<u64>,
    buffers_needing_diff: HashSet<WeakModelHandle<Buffer>>,
    git_diff_debouncer: DelayedDebounced,
    // Random per-instance value, generated from entropy at construction.
    nonce: u128,
    _maintain_buffer_languages: Task<()>,
    _maintain_workspace_config: Task<()>,
    terminals: Terminals,
    copilot_enabled: bool,
    // Snapshot of the LSP settings last observed, used by `on_settings_changed`
    // to detect which servers need restarting.
    current_lsp_settings: HashMap<Arc<str>, LspSettings>,
}
153
/// Debounces a recurring unit of work: each `fire_new` call cancels any
/// still-pending run and schedules a fresh one after a delay.
struct DelayedDebounced {
    // The currently scheduled (possibly still delayed) task, if any.
    task: Option<Task<()>>,
    // Sending on this cancels the corresponding pending delayed run.
    cancel_channel: Option<oneshot::Sender<()>>,
}
158
/// Selects which language server a request should be routed to: the primary
/// server for a buffer, or a specific server by id.
enum LanguageServerToQuery {
    Primary,
    Other(LanguageServerId),
}
163
164impl DelayedDebounced {
165 fn new() -> DelayedDebounced {
166 DelayedDebounced {
167 task: None,
168 cancel_channel: None,
169 }
170 }
171
172 fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
173 where
174 F: 'static + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
175 {
176 if let Some(channel) = self.cancel_channel.take() {
177 _ = channel.send(());
178 }
179
180 let (sender, mut receiver) = oneshot::channel::<()>();
181 self.cancel_channel = Some(sender);
182
183 let previous_task = self.task.take();
184 self.task = Some(cx.spawn(|workspace, mut cx| async move {
185 let mut timer = cx.background().timer(delay).fuse();
186 if let Some(previous_task) = previous_task {
187 previous_task.await;
188 }
189
190 futures::select_biased! {
191 _ = receiver => return,
192 _ = timer => {}
193 }
194
195 workspace
196 .update(&mut cx, |workspace, cx| (func)(workspace, cx))
197 .await;
198 }));
199 }
200}
201
/// A text snapshot of a buffer paired with the document version number that
/// was reported to a language server for it.
struct LspBufferSnapshot {
    version: i32,
    snapshot: TextBufferSnapshot,
}
206
/// Message ordered with respect to buffer operations
enum BufferOrderedMessage {
    /// A CRDT operation to apply to a specific buffer.
    Operation {
        buffer_id: u64,
        operation: proto::Operation,
    },
    /// A status/progress update originating from a language server.
    LanguageServerUpdate {
        language_server_id: LanguageServerId,
        message: proto::update_language_server::Variant,
    },
    /// Request to resynchronize state with the server.
    Resync,
}
219
/// Updates queued by a shared local project to be pushed to remote peers.
enum LocalProjectUpdate {
    WorktreesChanged,
    CreateBufferForPeer {
        peer_id: proto::PeerId,
        buffer_id: u64,
    },
}
227
/// The project's handle on an open buffer.
enum OpenBuffer {
    // Keeps the buffer model alive.
    Strong(ModelHandle<Buffer>),
    // Lets the buffer model drop when no one else holds it.
    Weak(WeakModelHandle<Buffer>),
    // Operations buffered for a buffer that isn't loaded yet.
    Operations(Vec<Operation>),
}
233
/// A strong or weak handle on a worktree model.
#[derive(Clone)]
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
239
/// Networking state of a project that is shared or joined over RPC.
enum ProjectClientState {
    /// This instance is the host: it owns the data and streams updates out.
    Local {
        remote_id: u64,
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<()>,
    },
    /// This instance is a guest replica of a project hosted elsewhere.
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
252
/// A peer collaborating on this project, identified by connection, replica,
/// and user ids.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub peer_id: proto::PeerId,
    pub replica_id: ReplicaId,
    pub user_id: UserId,
}
259
/// Events emitted by a [`Project`] model for its observers.
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    LanguageServerAdded(LanguageServerId),
    LanguageServerRemoved(LanguageServerId),
    LanguageServerLog(LanguageServerId, String),
    Notification(String),
    ActiveEntryChanged(Option<ProjectEntryId>),
    ActivateProjectPanel,
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    DiskBasedDiagnosticsStarted {
        language_server_id: LanguageServerId,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: LanguageServerId,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: LanguageServerId,
    },
    RemoteIdChanged(Option<u64>),
    DisconnectedFromHost,
    Closed,
    DeletedEntry(ProjectEntryId),
    // A collaborator reconnected under a new peer id.
    CollaboratorUpdated {
        old_peer_id: proto::PeerId,
        new_peer_id: proto::PeerId,
    },
    CollaboratorJoined(proto::PeerId),
    CollaboratorLeft(proto::PeerId),
    RefreshInlayHints,
}
293
/// Lifecycle state of a language server (see the comment above `Project`).
pub enum LanguageServerState {
    /// The server is still being started; the task resolves to the server once
    /// startup finishes (or `None` on failure).
    Starting(Task<Option<Arc<LanguageServer>>>),

    /// The server is up and serving requests.
    Running {
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server: Arc<LanguageServer>,
        // Per-worktree globs registered via `workspace/didChangeWatchedFiles`.
        watched_paths: HashMap<WorktreeId, GlobSet>,
        simulate_disk_based_diagnostics_completion: Option<Task<()>>,
    },
}
305
/// User-visible status of a language server, mirrored to remote peers.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    // In-flight work items keyed by their LSP progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}
313
/// A single progress report from a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    // Not serialized: `Instant` is only meaningful within this process.
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
321
/// A path to a file or directory, qualified by the worktree it lives in.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
327
/// Counts of primary error and warning diagnostics for a path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
333
/// An anchored range within a specific buffer.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
339
/// An inlay hint anchored to a buffer position, converted from the LSP
/// `InlayHint` type.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub position: language::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    pub padding_left: bool,
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    /// Whether the hint still needs (or is undergoing) an `inlayHint/resolve` round-trip.
    pub resolve_state: ResolveState,
}
350
/// Resolution state of an inlay hint's lazily-fetched details.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    /// Resolvable via the given server, carrying the server-provided resolve data.
    CanResolve(LanguageServerId, Option<lsp::LSPAny>),
    Resolving,
}
357
358impl InlayHint {
359 pub fn text(&self) -> String {
360 match &self.label {
361 InlayHintLabel::String(s) => s.to_owned(),
362 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
363 }
364 }
365}
366
/// The label of an inlay hint: either one plain string, or a sequence of
/// parts that can carry their own tooltips and locations.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
    String(String),
    LabelParts(Vec<InlayHintLabelPart>),
}
372
/// One segment of a multi-part inlay hint label.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
    pub value: String,
    pub tooltip: Option<InlayHintLabelPartTooltip>,
    /// A target location, tagged with the server that produced it.
    pub location: Option<(LanguageServerId, lsp::Location)>,
}
379
/// Tooltip for a whole inlay hint: plain text or markup.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
    String(String),
    MarkupContent(MarkupContent),
}
385
/// Tooltip for a single inlay hint label part: plain text or markup.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
    String(String),
    MarkupContent(MarkupContent),
}
391
/// A string with an associated markup kind (plain text, Markdown, or code).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
    pub kind: HoverBlockKind,
    pub value: String,
}
397
/// A navigation target, optionally paired with the origin range it was
/// triggered from.
#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}
403
/// A highlighted range produced by an LSP document-highlight request.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
409
/// A workspace symbol returned by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    pub language_server_name: LanguageServerName,
    /// The worktree whose server produced this symbol.
    pub source_worktree_id: WorktreeId,
    pub path: ProjectPath,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<Unclipped<PointUtf16>>,
    /// 32-byte digest used to validate the symbol when it crosses the network.
    pub signature: [u8; 32],
}
421
/// One block of hover content, with the markup kind it should render as.
#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub kind: HoverBlockKind,
}
427
/// How a block of hover text should be rendered.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
    PlainText,
    Markdown,
    /// A fenced code block in the named language.
    Code { language: String },
}
434
/// The result of a hover request: content blocks, the optional range they
/// apply to, and the buffer's language for rendering.
#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
    pub language: Option<Arc<Language>>,
}
441
442impl Hover {
443 pub fn is_empty(&self) -> bool {
444 self.contents.iter().all(|block| block.text.is_empty())
445 }
446}
447
/// A set of buffer transactions produced by one logical operation (e.g. a
/// workspace edit), keyed by the buffer they apply to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
450
451impl DiagnosticSummary {
452 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
453 let mut this = Self {
454 error_count: 0,
455 warning_count: 0,
456 };
457
458 for entry in diagnostics {
459 if entry.diagnostic.is_primary {
460 match entry.diagnostic.severity {
461 DiagnosticSeverity::ERROR => this.error_count += 1,
462 DiagnosticSeverity::WARNING => this.warning_count += 1,
463 _ => {}
464 }
465 }
466 }
467
468 this
469 }
470
471 pub fn is_empty(&self) -> bool {
472 self.error_count == 0 && self.warning_count == 0
473 }
474
475 pub fn to_proto(
476 &self,
477 language_server_id: LanguageServerId,
478 path: &Path,
479 ) -> proto::DiagnosticSummary {
480 proto::DiagnosticSummary {
481 path: path.to_string_lossy().to_string(),
482 language_server_id: language_server_id.0 as u64,
483 error_count: self.error_count as u32,
484 warning_count: self.warning_count as u32,
485 }
486 }
487}
488
/// A unique identifier for an entry (file or directory) within the project.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);
491
492impl ProjectEntryId {
493 pub const MAX: Self = Self(usize::MAX);
494
495 pub fn new(counter: &AtomicUsize) -> Self {
496 Self(counter.fetch_add(1, SeqCst))
497 }
498
499 pub fn from_proto(id: u64) -> Self {
500 Self(id as usize)
501 }
502
503 pub fn to_proto(&self) -> u64 {
504 self.0 as u64
505 }
506
507 pub fn to_usize(&self) -> usize {
508 self.0
509 }
510}
511
/// What caused a formatting request: an explicit save or a manual command.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatTrigger {
    Save,
    Manual,
}
517
/// Delegate handed to LSP adapters, giving them access to the project and an
/// HTTP client (e.g. for downloading server binaries).
struct ProjectLspAdapterDelegate {
    project: ModelHandle<Project>,
    http_client: Arc<dyn HttpClient>,
}
522
523impl FormatTrigger {
524 fn from_proto(value: i32) -> FormatTrigger {
525 match value {
526 0 => FormatTrigger::Save,
527 1 => FormatTrigger::Manual,
528 _ => FormatTrigger::Save,
529 }
530 }
531}
/// A candidate location for a project-wide search: either an already-open
/// buffer or an on-disk path that still needs to be loaded.
#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
    OpenBuffer {
        buffer: ModelHandle<Buffer>,
        // This might be an unnamed file without representation on filesystem
        path: Option<Arc<Path>>,
    },
    Path {
        worktree_id: WorktreeId,
        path: Arc<Path>,
    },
}
544
545type SearchMatchCandidateIndex = usize;
546impl SearchMatchCandidate {
547 fn path(&self) -> Option<Arc<Path>> {
548 match self {
549 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
550 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
551 }
552 }
553}
554
555impl Project {
    /// Registers this crate's settings types with the global settings store.
    pub fn init_settings(cx: &mut AppContext) {
        settings::register::<ProjectSettings>(cx);
    }
559
    /// Registers settings and all RPC message/request handlers that route
    /// incoming collaboration traffic to `Project` models.
    pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
        Self::init_settings(cx);

        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_update_project_collaborator);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_create_buffer_for_peer);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_request_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_message_handler(Self::handle_update_worktree_settings);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_on_type_formatting);
        client.add_model_request_handler(Self::handle_inlay_hints);
        client.add_model_request_handler(Self::handle_resolve_inlay_hint);
        client.add_model_request_handler(Self::handle_refresh_inlay_hints);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_synchronize_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        // Generic LSP request forwarding, one handler per request type.
        client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
        client.add_model_message_handler(Self::handle_update_diff_base);
    }
609
    /// Creates a new, unshared local project with no worktrees.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            // Drain buffer-ordered messages on a detached background task for
            // the lifetime of the project.
            let (tx, rx) = mpsc::unbounded();
            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            Self {
                worktrees: Default::default(),
                buffer_ordered_messages_tx: tx,
                collaborators: Default::default(),
                next_buffer_id: 0,
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_buffers_by_path: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                buffer_snapshots: Default::default(),
                join_project_response_message_id: 0,
                // `None` client state marks this project as local and unshared.
                client_state: None,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![
                    cx.observe_global::<SettingsStore, _>(Self::on_settings_changed)
                ],
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_enabled: Copilot::global(cx).is_some(),
                current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
            }
        })
    }
666
    /// Joins the remote project with the given id: authenticates, sends a
    /// `JoinProject` request, and builds a guest-replica `Project` populated
    /// from the response (worktrees, language-server statuses, collaborators).
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        // Subscribe before joining so no updates are missed in between.
        let subscription = client.subscribe_to_entity(remote_id)?;
        let response = client
            .request_envelope(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.add_model(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            let mut worktrees = Vec::new();
            for worktree in response.payload.worktrees {
                let worktree = cx.update(|cx| {
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
                });
                worktrees.push(worktree);
            }

            let (tx, rx) = mpsc::unbounded();
            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let mut this = Self {
                // Worktrees are added below via `add_worktree` so that each one
                // is wired up through the normal registration path.
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                loading_buffers_by_path: Default::default(),
                next_buffer_id: 0,
                opened_buffer: watch::channel(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                join_project_response_message_id: response.message_id,
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: Default::default(),
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: Some(ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                }),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                // Seed server statuses from the host's snapshot in the response.
                language_server_statuses: response
                    .payload
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            LanguageServerId(server.id as usize),
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                opened_buffers: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_enabled: Copilot::global(cx).is_some(),
                current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
            };
            for worktree in worktrees {
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        });
        let subscription = subscription.set_model(&this, &mut cx);

        // Fetch user records for all current collaborators before wiring them in.
        let user_ids = response
            .payload
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
            this.client_subscriptions.push(subscription);
            anyhow::Ok(())
        })?;

        Ok(this)
    }
782
    /// Test helper: builds a local project over `fs` with a fake HTTP client,
    /// creates a worktree for each root path, and waits for the initial scans
    /// to complete.
    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let mut languages = LanguageRegistry::test();
        languages.set_executor(cx.background());
        let http_client = util::http::FakeHttpClient::with_404_response();
        let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project =
            cx.update(|cx| Project::local(client, user_store, Arc::new(languages), fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            // Block until the worktree's initial filesystem scan finishes.
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
808
    /// Reacts to a global settings change: starts language servers that became
    /// enabled, stops ones that became disabled, restarts ones whose LSP
    /// settings changed, and registers buffers with Copilot if it was just
    /// enabled.
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        // Collect (worktree, language) pairs for every open buffer whose
        // settings now enable a language server.
        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = buffer.file().zip(buffer.language()) {
                    let settings = language_settings(Some(language), Some(file), cx);
                    if settings.enable_language_server {
                        if let Some(file) = File::from_dyn(Some(file)) {
                            language_servers_to_start
                                .push((file.worktree.clone(), language.clone()));
                        }
                    }
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

        // Diff the previous LSP settings against the new ones for every
        // currently running server.
        let new_lsp_settings = settings::get::<ProjectSettings>(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            // Find the language and adapter that this running server belongs to.
            let language = languages.iter().find_map(|l| {
                let adapter = l
                    .lsp_adapters()
                    .iter()
                    .find(|adapter| &adapter.name == started_lsp_name)?;
                Some((l, adapter))
            });
            if let Some((language, adapter)) = language {
                let worktree = self.worktree_for_id(*worktree_id, cx);
                let file = worktree.as_ref().and_then(|tree| {
                    tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
                });
                if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
                    language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                } else if let Some(worktree) = worktree {
                    // Server stays enabled: restart it if its settings entry
                    // appeared, disappeared, or changed.
                    let server_name = &adapter.name.0;
                    match (
                        current_lsp_settings.get(server_name),
                        new_lsp_settings.get(server_name),
                    ) {
                        (None, None) => {}
                        (Some(_), None) | (None, Some(_)) => {
                            language_servers_to_restart.push((worktree, Arc::clone(language)));
                        }
                        (Some(current_lsp_settings), Some(new_lsp_settings)) => {
                            if current_lsp_settings != new_lsp_settings {
                                language_servers_to_restart.push((worktree, Arc::clone(language)));
                            }
                        }
                    }
                }
            }
        }
        self.current_lsp_settings = new_lsp_settings;

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree, language) in language_servers_to_start {
            let worktree_path = worktree.read(cx).abs_path();
            self.start_language_servers(&worktree, worktree_path, language, cx);
        }

        // Restart all language servers with changed initialization options.
        for (worktree, language) in language_servers_to_restart {
            self.restart_language_servers(worktree, language, cx);
        }

        // If Copilot just became available, register every open buffer with it.
        if !self.copilot_enabled && Copilot::global(cx).is_some() {
            self.copilot_enabled = true;
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    self.register_buffer_with_copilot(&buffer, cx);
                }
            }
        }

        cx.notify();
    }
896
897 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
898 self.opened_buffers
899 .get(&remote_id)
900 .and_then(|buffer| buffer.upgrade(cx))
901 }
902
    /// The project's language registry.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
906
907 pub fn client(&self) -> Arc<Client> {
908 self.client.clone()
909 }
910
    /// A handle on the user store shared with this project.
    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }
914
915 pub fn opened_buffers(&self, cx: &AppContext) -> Vec<ModelHandle<Buffer>> {
916 self.opened_buffers
917 .values()
918 .filter_map(|b| b.upgrade(cx))
919 .collect()
920 }
921
922 #[cfg(any(test, feature = "test-support"))]
923 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
924 let path = path.into();
925 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
926 self.opened_buffers.iter().any(|(_, buffer)| {
927 if let Some(buffer) = buffer.upgrade(cx) {
928 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
929 if file.worktree == worktree && file.path() == &path.path {
930 return true;
931 }
932 }
933 }
934 false
935 })
936 } else {
937 false
938 }
939 }
940
    /// The filesystem implementation backing this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
944
945 pub fn remote_id(&self) -> Option<u64> {
946 match self.client_state.as_ref()? {
947 ProjectClientState::Local { remote_id, .. }
948 | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
949 }
950 }
951
952 pub fn replica_id(&self) -> ReplicaId {
953 match &self.client_state {
954 Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
955 _ => 0,
956 }
957 }
958
    /// Notifies observers of a metadata change and, when hosting a shared
    /// project, queues a `WorktreesChanged` update for remote peers.
    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
            updates_tx
                .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                .ok();
        }
        cx.notify();
    }
967
    /// The current collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
        &self.collaborators
    }
971
    /// Collect all worktrees, including ones that don't appear in the project panel
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
        // Weak handles whose models have dropped are silently skipped.
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
981
982 /// Collect all user-visible worktrees, the ones that appear in the project panel
983 pub fn visible_worktrees<'a>(
984 &'a self,
985 cx: &'a AppContext,
986 ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
987 self.worktrees.iter().filter_map(|worktree| {
988 worktree.upgrade(cx).and_then(|worktree| {
989 if worktree.read(cx).is_visible() {
990 Some(worktree)
991 } else {
992 None
993 }
994 })
995 })
996 }
997
998 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
999 self.visible_worktrees(cx)
1000 .map(|tree| tree.read(cx).root_name())
1001 }
1002
1003 pub fn worktree_for_id(
1004 &self,
1005 id: WorktreeId,
1006 cx: &AppContext,
1007 ) -> Option<ModelHandle<Worktree>> {
1008 self.worktrees(cx)
1009 .find(|worktree| worktree.read(cx).id() == id)
1010 }
1011
1012 pub fn worktree_for_entry(
1013 &self,
1014 entry_id: ProjectEntryId,
1015 cx: &AppContext,
1016 ) -> Option<ModelHandle<Worktree>> {
1017 self.worktrees(cx)
1018 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1019 }
1020
1021 pub fn worktree_id_for_entry(
1022 &self,
1023 entry_id: ProjectEntryId,
1024 cx: &AppContext,
1025 ) -> Option<WorktreeId> {
1026 self.worktree_for_entry(entry_id, cx)
1027 .map(|worktree| worktree.read(cx).id())
1028 }
1029
1030 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1031 paths.iter().all(|path| self.contains_path(path, cx))
1032 }
1033
1034 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1035 for worktree in self.worktrees(cx) {
1036 let worktree = worktree.read(cx).as_local();
1037 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1038 return true;
1039 }
1040 }
1041 false
1042 }
1043
    /// Create a file (or directory, when `is_directory`) at `project_path`.
    ///
    /// Returns `None` when the target worktree no longer exists. Locally the
    /// operation is delegated to the worktree; on a guest it is forwarded to
    /// the host over RPC and the returned entry is then inserted into this
    /// client's replica of the worktree.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            // A non-local project always has a remote id.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.to_string_lossy().into(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Splice the host's entry into our replica; this waits until
                // our snapshot has caught up to the host's scan id.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1086
    /// Copy the entry identified by `entry_id` to `new_path` within its
    /// worktree.
    ///
    /// Returns `None` when no live worktree contains the entry. Locally this
    /// delegates to the worktree; on a guest it goes through the host via RPC
    /// and the resulting entry is inserted into the local replica.
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // A non-local project always has a remote id.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Wait for our replica to reach the host's scan id, then
                // insert the new entry.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1129
    /// Rename (move) the entry identified by `entry_id` to `new_path` within
    /// its worktree.
    ///
    /// Returns `None` when no live worktree contains the entry. Mirrors
    /// `copy_entry`: local projects act directly, guests round-trip through
    /// the host and insert the renamed entry into the local replica.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // A non-local project always has a remote id.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Wait for our replica to reach the host's scan id, then
                // insert the renamed entry.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1172
    /// Delete the entry identified by `entry_id` from its worktree.
    ///
    /// Returns `None` when no live worktree contains the entry.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;

        // NOTE(review): the event fires before the deletion has actually
        // completed (and even if it later fails) — confirm subscribers are
        // tolerant of that.
        cx.emit(Event::DeletedEntry(entry_id));

        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            // A non-local project always has a remote id.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                // Apply the deletion to our replica once it has caught up to
                // the host's scan id.
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }
1208
    /// Ensure the directory entry's children are loaded (e.g. when expanding
    /// a folder in the project panel).
    ///
    /// Returns `None` when the worktree no longer exists. On a guest this
    /// asks the host to expand, then waits for the local replica to reach the
    /// host's scan id so the children become visible.
    pub fn expand_entry(
        &mut self,
        worktree_id: WorktreeId,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_id(worktree_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
            })
        } else {
            // Hold only a weak handle across the await so the worktree can be
            // dropped while the request is in flight.
            let worktree = worktree.downgrade();
            let request = self.client.request(proto::ExpandProjectEntry {
                project_id: self.remote_id().unwrap(),
                entry_id: entry_id.to_proto(),
            });
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = request.await?;
                if let Some(worktree) = worktree.upgrade(&cx) {
                    worktree
                        .update(&mut cx, |worktree, _| {
                            worktree
                                .as_remote_mut()
                                .unwrap()
                                .wait_for_snapshot(response.worktree_scan_id as usize)
                        })
                        .await?;
                }
                Ok(())
            }))
        }
    }
1242
    /// Begin sharing this (local) project with collaborators under the given
    /// server-assigned `project_id`.
    ///
    /// Upgrades weak buffer/worktree handles to strong ones so shared state
    /// stays alive, replays current language-server status and local settings
    /// to the server, and spawns the long-running task that streams project
    /// updates (worktree changes, buffer creation for peers) to collaborators.
    ///
    /// # Errors
    /// Fails if the project is already shared (or is itself remote), or if
    /// subscribing to the remote entity fails.
    pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
        if self.client_state.is_some() {
            return Err(anyhow!("project was already shared"));
        }
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
                .set_model(&cx.handle(), &mut cx.to_async()),
        );

        // While shared, keep all open buffers alive by holding strong handles.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // Operations-only entries exist only on guests; a project
                // being shared is the host.
                OpenBuffer::Operations(_) => unreachable!(),
            }
        }

        // Likewise, pin all worktrees for the duration of the share.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        // Tell the server about language servers that were already running
        // before the share began, so guests see their status.
        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: server_id.0 as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        // Replay per-worktree local settings to the server.
        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees(cx) {
            let worktree_id = worktree.read(cx).id().to_proto();
            // `worktree.id()` here is the model handle's id, which is how the
            // settings store keys local settings (see `rejoined`).
            for (path, content) in store.local_settings(worktree.id()) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
                        worktree_id,
                        path: path.to_string_lossy().into(),
                        content: Some(content),
                    })
                    .log_err();
            }
        }

        let (updates_tx, mut updates_rx) = mpsc::unbounded();
        let client = self.client.clone();
        self.client_state = Some(ProjectClientState::Local {
            remote_id: project_id,
            updates_tx,
            // Background task that serializes all outgoing share updates.
            _send_updates: cx.spawn_weak(move |this, mut cx| async move {
                while let Some(update) = updates_rx.next().await {
                    // Stop when the project has been dropped.
                    let Some(this) = this.upgrade(&cx) else { break };

                    match update {
                        LocalProjectUpdate::WorktreesChanged => {
                            let worktrees = this
                                .read_with(&cx, |this, cx| this.worktrees(cx).collect::<Vec<_>>());
                            let update_project = this
                                .read_with(&cx, |this, cx| {
                                    this.client.request(proto::UpdateProject {
                                        project_id,
                                        worktrees: this.worktree_metadata_protos(cx),
                                    })
                                })
                                .await;
                            // Only start sharing worktree contents once the
                            // server has acknowledged the metadata update.
                            if update_project.is_ok() {
                                for worktree in worktrees {
                                    worktree.update(&mut cx, |worktree, cx| {
                                        let worktree = worktree.as_local_mut().unwrap();
                                        worktree.share(project_id, cx).detach_and_log_err(cx)
                                    });
                                }
                            }
                        }
                        LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
                            let buffer = this.update(&mut cx, |this, _| {
                                let buffer = this.opened_buffers.get(&buffer_id).unwrap();
                                let shared_buffers =
                                    this.shared_buffers.entry(peer_id).or_default();
                                // Send the buffer only once per peer.
                                if shared_buffers.insert(buffer_id) {
                                    if let OpenBuffer::Strong(buffer) = buffer {
                                        Some(buffer.clone())
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            });

                            let Some(buffer) = buffer else { continue };
                            let operations =
                                buffer.read_with(&cx, |b, cx| b.serialize_ops(None, cx));
                            let operations = operations.await;
                            let state = buffer.read_with(&cx, |buffer, _| buffer.to_proto());

                            // First message carries the buffer state; the
                            // follow-up chunks stream its operation history.
                            let initial_state = proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
                            };
                            if client.send(initial_state).log_err().is_some() {
                                let client = client.clone();
                                cx.background()
                                    .spawn(async move {
                                        let mut chunks = split_operations(operations).peekable();
                                        while let Some(chunk) = chunks.next() {
                                            let is_last = chunks.peek().is_none();
                                            client.send(proto::CreateBufferForPeer {
                                                project_id,
                                                peer_id: Some(peer_id),
                                                variant: Some(
                                                    proto::create_buffer_for_peer::Variant::Chunk(
                                                        proto::BufferChunk {
                                                            buffer_id,
                                                            operations: chunk,
                                                            is_last,
                                                        },
                                                    ),
                                                ),
                                            })?;
                                        }
                                        anyhow::Ok(())
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                    }
                }
            }),
        });

        self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();
        Ok(())
    }
1397
    /// Handle the host's project being re-shared after a reconnection:
    /// drop per-peer buffer bookkeeping (guests will re-request buffers) and
    /// refresh the collaborator list from the server's message.
    pub fn reshared(
        &mut self,
        message: proto::ResharedProject,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.shared_buffers.clear();
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.metadata_changed(cx);
        Ok(())
    }
1408
    /// Handle this guest rejoining a project after a reconnection: reset
    /// local settings and worktree/collaborator/language-server state from
    /// the server's snapshot, then trigger a buffer resync with the host.
    pub fn rejoined(
        &mut self,
        message: proto::RejoinedProject,
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Local settings will be re-sent by the host; clear stale ones keyed
        // by each worktree's model handle id.
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
                    .log_err();
            }
        });

        self.join_project_response_message_id = message_id;
        self.set_worktrees_from_proto(message.worktrees, cx)?;
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        // Rebuild language-server status from scratch; pending work and
        // diagnostics will stream in again.
        self.language_server_statuses = message
            .language_servers
            .into_iter()
            .map(|server| {
                (
                    LanguageServerId(server.id as usize),
                    LanguageServerStatus {
                        name: server.name,
                        pending_work: Default::default(),
                        has_pending_diagnostic_updates: false,
                        progress_tokens: Default::default(),
                    },
                )
            })
            .collect();
        // Ask the buffer-message loop to resynchronize remote buffers.
        self.buffer_ordered_messages_tx
            .unbounded_send(BufferOrderedMessage::Resync)
            .unwrap();
        cx.notify();
        Ok(())
    }
1447
    /// Stop sharing this project with collaborators and notify observers.
    ///
    /// # Errors
    /// Fails when the project is remote or wasn't shared (see
    /// `unshare_internal`).
    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
        self.unshare_internal(cx)?;
        self.metadata_changed(cx);
        cx.notify();
        Ok(())
    }
1454
    /// Tear down share state: drop collaborators and subscriptions, demote
    /// strong worktree/buffer handles back to weak ones, and tell the server
    /// the project is no longer shared.
    fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
        if self.is_remote() {
            return Err(anyhow!("attempted to unshare a remote project"));
        }

        if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
            self.collaborators.clear();
            self.shared_buffers.clear();
            self.client_subscriptions.clear();

            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    // Only visible worktrees are kept alive once unshared;
                    // invisible ones may be dropped.
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            self.client.send(proto::UnshareProject {
                project_id: remote_id,
            })?;

            Ok(())
        } else {
            Err(anyhow!("attempted to unshare an unshared project"))
        }
    }
1497
    /// React to losing the connection to the host of this remote project and
    /// notify observers.
    pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
        self.disconnected_from_host_internal(cx);
        cx.emit(Event::DisconnectedFromHost);
        cx.notify();
    }
1503
    /// Mark this remote project read-only after losing the host: flag the
    /// share as stopped, disconnect worktrees, release buffer waiters, and
    /// demote strong buffer handles to weak ones.
    fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
        if let Some(ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        }) = &mut self.client_state
        {
            // `is_read_only` keys off this flag.
            *sharing_has_stopped = true;

            self.collaborators.clear();

            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            *self.opened_buffer.0.borrow_mut() = ();
        }
    }
1540
    /// Announce that this project is being closed; subscribers perform the
    /// actual teardown.
    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }
1544
1545 pub fn is_read_only(&self) -> bool {
1546 match &self.client_state {
1547 Some(ProjectClientState::Remote {
1548 sharing_has_stopped,
1549 ..
1550 }) => *sharing_has_stopped,
1551 _ => false,
1552 }
1553 }
1554
1555 pub fn is_local(&self) -> bool {
1556 match &self.client_state {
1557 Some(ProjectClientState::Remote { .. }) => false,
1558 _ => true,
1559 }
1560 }
1561
1562 pub fn is_remote(&self) -> bool {
1563 !self.is_local()
1564 }
1565
    /// Create a new in-memory buffer with the given initial `text` and
    /// optional `language` (falling back to plain text), registering it with
    /// this project.
    ///
    /// # Errors
    /// Fails on remote (guest) projects, or if registration fails.
    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }
        let id = post_inc(&mut self.next_buffer_id);
        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), id, text)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
1583
    /// Open the buffer at `path`, returning its project entry id together
    /// with a type-erased handle to the buffer model.
    ///
    /// Fails if the opened buffer has no backing project entry (e.g. the file
    /// was deleted while opening).
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;

            // Erase the concrete model type for the caller.
            let buffer: &AnyModelHandle = &buffer;
            Ok((project_entry_id, buffer.clone()))
        })
    }
1602
1603 pub fn open_local_buffer(
1604 &mut self,
1605 abs_path: impl AsRef<Path>,
1606 cx: &mut ModelContext<Self>,
1607 ) -> Task<Result<ModelHandle<Buffer>>> {
1608 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1609 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1610 } else {
1611 Task::ready(Err(anyhow!("no such path")))
1612 }
1613 }
1614
    /// Open (or return the already-open) buffer for the given project path.
    ///
    /// Concurrent opens of the same path are deduplicated through
    /// `loading_buffers_by_path`: the first caller kicks off the load and all
    /// callers await the same watch channel.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    // Publish the result (success or error) to every waiter.
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers_by_path.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            wait_for_loading_buffer(loading_watch)
                .await
                // The shared error is an Arc; flatten it back into anyhow.
                .map_err(|error| anyhow!("{}", error))
        })
    }
1669
    /// Load a buffer from disk via the local worktree, assign it the next
    /// buffer id, and register it with this project.
    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let buffer_id = post_inc(&mut self.next_buffer_id);
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(buffer_id, path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }
1687
    /// Ask the host to open the buffer at `path` in `worktree`, then wait for
    /// the corresponding buffer state to arrive over the wire.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        // A non-local project always has a remote id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            // The host streams the buffer separately; block until it lands.
            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(response.buffer_id, cx)
            })
            .await
        })
    }
1713
    /// Open a buffer for a `file://` URI reported by a language server (e.g.
    /// a go-to-definition target outside the current worktrees). If the path
    /// is not in any worktree, an invisible worktree is created for it and
    /// associated with the originating language server.
    ///
    /// LanguageServerName is owned, because it is inserted into a map
    pub fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        language_server_id: LanguageServerId,
        language_server_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No worktree contains this path: create a hidden one rooted
                // at the file itself.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                // Reuse the requesting language server for the new worktree.
                this.update(&mut cx, |this, cx| {
                    this.language_server_ids.insert(
                        (worktree.read(cx).id(), language_server_name),
                        language_server_id,
                    );
                });
                // The file is the worktree root, so its relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
1753
    /// Open a buffer by its numeric id: return it if already open; otherwise,
    /// on a remote project, request it from the host. On a local project an
    /// unknown id is an error.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer_id = request.await?.buffer_id;
                this.update(&mut cx, |this, cx| {
                    this.wait_for_remote_buffer(buffer_id, cx)
                })
                .await
            })
        } else {
            // Remote project whose connection has been torn down.
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1778
    /// Save every buffer in `buffers` concurrently, failing on the first
    /// error.
    pub fn save_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        cx.spawn(|this, mut cx| async move {
            let save_tasks = buffers
                .into_iter()
                .map(|buffer| this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx)));
            try_join_all(save_tasks).await?;
            Ok(())
        })
    }
1792
    /// Save `buffer` back to its existing file, delegating to the local or
    /// remote worktree that owns it. Fails immediately for file-less buffers.
    pub fn save_buffer(
        &self,
        buffer: ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
        };
        let worktree = file.worktree.clone();
        let path = file.path.clone();
        worktree.update(cx, |worktree, cx| match worktree {
            Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
            Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
        })
    }
1808
1809 pub fn save_buffer_as(
1810 &mut self,
1811 buffer: ModelHandle<Buffer>,
1812 abs_path: PathBuf,
1813 cx: &mut ModelContext<Self>,
1814 ) -> Task<Result<()>> {
1815 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1816 let old_file = File::from_dyn(buffer.read(cx).file())
1817 .filter(|f| f.is_local())
1818 .cloned();
1819 cx.spawn(|this, mut cx| async move {
1820 if let Some(old_file) = &old_file {
1821 this.update(&mut cx, |this, cx| {
1822 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1823 });
1824 }
1825 let (worktree, path) = worktree_task.await?;
1826 worktree
1827 .update(&mut cx, |worktree, cx| match worktree {
1828 Worktree::Local(worktree) => {
1829 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1830 }
1831 Worktree::Remote(_) => panic!("cannot remote buffers as new files"),
1832 })
1833 .await?;
1834 this.update(&mut cx, |this, cx| {
1835 this.detect_language_for_buffer(&buffer, cx);
1836 this.register_buffer_with_language_servers(&buffer, cx);
1837 });
1838 Ok(())
1839 })
1840 }
1841
1842 pub fn get_open_buffer(
1843 &mut self,
1844 path: &ProjectPath,
1845 cx: &mut ModelContext<Self>,
1846 ) -> Option<ModelHandle<Buffer>> {
1847 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1848 self.opened_buffers.values().find_map(|buffer| {
1849 let buffer = buffer.upgrade(cx)?;
1850 let file = File::from_dyn(buffer.read(cx).file())?;
1851 if file.worktree == worktree && file.path() == &path.path {
1852 Some(buffer)
1853 } else {
1854 None
1855 }
1856 })
1857 }
1858
    /// Register a newly opened buffer with this project: track it in
    /// `opened_buffers`, wire up event/release subscriptions, record its path
    /// mappings, and introduce it to language servers and Copilot.
    ///
    /// # Errors
    /// Fails if queued remote operations can't be applied, or (on a local
    /// project) if the buffer id was already registered.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.request_buffer_diff_recalculation(buffer, cx);
        buffer.update(cx, |buffer, _| {
            buffer.set_language_registry(self.languages.clone())
        });

        let remote_id = buffer.read(cx).remote_id();
        let is_remote = self.is_remote();
        // Shared/remote projects must keep buffers alive for collaborators;
        // otherwise let the buffer drop when its last editor closes.
        let open_buffer = if is_remote || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.entry(remote_id) {
            hash_map::Entry::Vacant(entry) => {
                entry.insert(open_buffer);
            }
            hash_map::Entry::Occupied(mut entry) => {
                if let OpenBuffer::Operations(operations) = entry.get_mut() {
                    // Operations arrived before the buffer itself; replay them.
                    buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
                } else if entry.get().upgrade(cx).is_some() {
                    if is_remote {
                        // The host can legitimately re-send a buffer.
                        return Ok(());
                    } else {
                        debug_panic!("buffer {} was already registered", remote_id);
                        Err(anyhow!("buffer {} was already registered", remote_id))?;
                    }
                }
                entry.insert(open_buffer);
            }
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
            if file.is_local {
                // Maintain reverse lookups from path / entry id to buffer id.
                self.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: file.worktree_id(cx),
                        path: file.path.clone(),
                    },
                    remote_id,
                );

                self.local_buffer_ids_by_entry_id
                    .insert(file.entry_id, remote_id);
            }
        }

        self.detect_language_for_buffer(buffer, cx);
        self.register_buffer_with_language_servers(buffer, cx);
        self.register_buffer_with_copilot(buffer, cx);
        // When the buffer model is dropped, tell language servers the
        // document was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    for server in this.language_servers_for_buffer(buffer, cx) {
                        server
                            .1
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        // Wake any futures waiting for a buffer to be opened.
        *self.opened_buffer.0.borrow_mut() = ();
        Ok(())
    }
1940
    /// Introduce a (local) buffer to every running language server that
    /// handles its language: replay stored diagnostics, send `didOpen`, set
    /// completion triggers, and start tracking per-server snapshots for
    /// incremental sync.
    fn register_buffer_with_language_servers(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if let Some(file) = File::from_dyn(buffer.file()) {
            // Language servers only ever see local files.
            if !file.is_local() {
                return;
            }

            let abs_path = file.abs_path(cx);
            let uri = lsp::Url::from_file_path(&abs_path)
                .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
            let initial_snapshot = buffer.text_snapshot();
            let language = buffer.language().cloned();
            let worktree_id = file.worktree_id(cx);

            // Diagnostics may have been reported for this path before the
            // buffer was opened; apply them now.
            if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
                    self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
                        .log_err();
                }
            }

            if let Some(language) = language {
                for adapter in language.lsp_adapters() {
                    let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
                    // Only consider servers that are fully running for this
                    // (worktree, adapter) pair.
                    let server = self
                        .language_server_ids
                        .get(&(worktree_id, adapter.name.clone()))
                        .and_then(|id| self.language_servers.get(id))
                        .and_then(|server_state| {
                            if let LanguageServerState::Running { server, .. } = server_state {
                                Some(server.clone())
                            } else {
                                None
                            }
                        });
                    let server = match server {
                        Some(server) => server,
                        None => continue,
                    };

                    // Document version starts at 0 and is bumped on each edit
                    // sent to the server.
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri.clone(),
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            },
                        )
                        .log_err();

                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or_default(),
                            cx,
                        );
                    });

                    // Remember the snapshot that corresponds to version 0 so
                    // later edits can be diffed against it per server.
                    let snapshot = LspBufferSnapshot {
                        version: 0,
                        snapshot: initial_snapshot.clone(),
                    };
                    self.buffer_snapshots
                        .entry(buffer_id)
                        .or_default()
                        .insert(server.server_id(), vec![snapshot]);
                }
            }
        }
    }
2024
    /// Undo `register_buffer_with_language_servers` for a buffer that is
    /// changing identity (e.g. "save as"): clear its diagnostics, drop its
    /// per-server snapshots, and send `didClose` for the old path.
    fn unregister_buffer_from_language_servers(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_file: &File,
        cx: &mut ModelContext<Self>,
    ) {
        // Only local files were ever registered with language servers.
        let old_path = match old_file.as_local() {
            Some(local) => local.abs_path(cx),
            None => return,
        };

        buffer.update(cx, |buffer, cx| {
            let worktree_id = old_file.worktree_id(cx);
            let ids = &self.language_server_ids;

            // Clear diagnostics from every server associated with the
            // buffer's language in its old worktree.
            let language = buffer.language().cloned();
            let adapters = language.iter().flat_map(|language| language.lsp_adapters());
            for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
                buffer.update_diagnostics(server_id, Default::default(), cx);
            }

            self.buffer_snapshots.remove(&buffer.remote_id());
            let file_url = lsp::Url::from_file_path(old_path).unwrap();
            for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
                        },
                    )
                    .log_err();
            }
        });
    }
2059
    /// Register the buffer with Copilot, if Copilot is enabled globally.
    fn register_buffer_with_copilot(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(copilot) = Copilot::global(cx) {
            copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
        }
    }
2069
2070 async fn send_buffer_ordered_messages(
2071 this: WeakModelHandle<Self>,
2072 rx: UnboundedReceiver<BufferOrderedMessage>,
2073 mut cx: AsyncAppContext,
2074 ) -> Option<()> {
2075 const MAX_BATCH_SIZE: usize = 128;
2076
2077 let mut operations_by_buffer_id = HashMap::default();
2078 async fn flush_operations(
2079 this: &ModelHandle<Project>,
2080 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2081 needs_resync_with_host: &mut bool,
2082 is_local: bool,
2083 cx: &AsyncAppContext,
2084 ) {
2085 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2086 let request = this.read_with(cx, |this, _| {
2087 let project_id = this.remote_id()?;
2088 Some(this.client.request(proto::UpdateBuffer {
2089 buffer_id,
2090 project_id,
2091 operations,
2092 }))
2093 });
2094 if let Some(request) = request {
2095 if request.await.is_err() && !is_local {
2096 *needs_resync_with_host = true;
2097 break;
2098 }
2099 }
2100 }
2101 }
2102
2103 let mut needs_resync_with_host = false;
2104 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2105
2106 while let Some(changes) = changes.next().await {
2107 let this = this.upgrade(&mut cx)?;
2108 let is_local = this.read_with(&cx, |this, _| this.is_local());
2109
2110 for change in changes {
2111 match change {
2112 BufferOrderedMessage::Operation {
2113 buffer_id,
2114 operation,
2115 } => {
2116 if needs_resync_with_host {
2117 continue;
2118 }
2119
2120 operations_by_buffer_id
2121 .entry(buffer_id)
2122 .or_insert(Vec::new())
2123 .push(operation);
2124 }
2125
2126 BufferOrderedMessage::Resync => {
2127 operations_by_buffer_id.clear();
2128 if this
2129 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
2130 .await
2131 .is_ok()
2132 {
2133 needs_resync_with_host = false;
2134 }
2135 }
2136
2137 BufferOrderedMessage::LanguageServerUpdate {
2138 language_server_id,
2139 message,
2140 } => {
2141 flush_operations(
2142 &this,
2143 &mut operations_by_buffer_id,
2144 &mut needs_resync_with_host,
2145 is_local,
2146 &cx,
2147 )
2148 .await;
2149
2150 this.read_with(&cx, |this, _| {
2151 if let Some(project_id) = this.remote_id() {
2152 this.client
2153 .send(proto::UpdateLanguageServer {
2154 project_id,
2155 language_server_id: language_server_id.0 as u64,
2156 variant: Some(message),
2157 })
2158 .log_err();
2159 }
2160 });
2161 }
2162 }
2163 }
2164
2165 flush_operations(
2166 &this,
2167 &mut operations_by_buffer_id,
2168 &mut needs_resync_with_host,
2169 is_local,
2170 &cx,
2171 )
2172 .await;
2173 }
2174
2175 None
2176 }
2177
2178 fn on_buffer_event(
2179 &mut self,
2180 buffer: ModelHandle<Buffer>,
2181 event: &BufferEvent,
2182 cx: &mut ModelContext<Self>,
2183 ) -> Option<()> {
2184 if matches!(
2185 event,
2186 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2187 ) {
2188 self.request_buffer_diff_recalculation(&buffer, cx);
2189 }
2190
2191 match event {
2192 BufferEvent::Operation(operation) => {
2193 self.buffer_ordered_messages_tx
2194 .unbounded_send(BufferOrderedMessage::Operation {
2195 buffer_id: buffer.read(cx).remote_id(),
2196 operation: language::proto::serialize_operation(operation),
2197 })
2198 .ok();
2199 }
2200
2201 BufferEvent::Edited { .. } => {
2202 let buffer = buffer.read(cx);
2203 let file = File::from_dyn(buffer.file())?;
2204 let abs_path = file.as_local()?.abs_path(cx);
2205 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2206 let next_snapshot = buffer.text_snapshot();
2207
2208 let language_servers: Vec<_> = self
2209 .language_servers_for_buffer(buffer, cx)
2210 .map(|i| i.1.clone())
2211 .collect();
2212
2213 for language_server in language_servers {
2214 let language_server = language_server.clone();
2215
2216 let buffer_snapshots = self
2217 .buffer_snapshots
2218 .get_mut(&buffer.remote_id())
2219 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2220 let previous_snapshot = buffer_snapshots.last()?;
2221 let next_version = previous_snapshot.version + 1;
2222
2223 let content_changes = buffer
2224 .edits_since::<(PointUtf16, usize)>(previous_snapshot.snapshot.version())
2225 .map(|edit| {
2226 let edit_start = edit.new.start.0;
2227 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2228 let new_text = next_snapshot
2229 .text_for_range(edit.new.start.1..edit.new.end.1)
2230 .collect();
2231 lsp::TextDocumentContentChangeEvent {
2232 range: Some(lsp::Range::new(
2233 point_to_lsp(edit_start),
2234 point_to_lsp(edit_end),
2235 )),
2236 range_length: None,
2237 text: new_text,
2238 }
2239 })
2240 .collect();
2241
2242 buffer_snapshots.push(LspBufferSnapshot {
2243 version: next_version,
2244 snapshot: next_snapshot.clone(),
2245 });
2246
2247 language_server
2248 .notify::<lsp::notification::DidChangeTextDocument>(
2249 lsp::DidChangeTextDocumentParams {
2250 text_document: lsp::VersionedTextDocumentIdentifier::new(
2251 uri.clone(),
2252 next_version,
2253 ),
2254 content_changes,
2255 },
2256 )
2257 .log_err();
2258 }
2259 }
2260
2261 BufferEvent::Saved => {
2262 let file = File::from_dyn(buffer.read(cx).file())?;
2263 let worktree_id = file.worktree_id(cx);
2264 let abs_path = file.as_local()?.abs_path(cx);
2265 let text_document = lsp::TextDocumentIdentifier {
2266 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2267 };
2268
2269 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2270 server
2271 .notify::<lsp::notification::DidSaveTextDocument>(
2272 lsp::DidSaveTextDocumentParams {
2273 text_document: text_document.clone(),
2274 text: None,
2275 },
2276 )
2277 .log_err();
2278 }
2279
2280 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2281 for language_server_id in language_server_ids {
2282 if let Some(LanguageServerState::Running {
2283 adapter,
2284 simulate_disk_based_diagnostics_completion,
2285 ..
2286 }) = self.language_servers.get_mut(&language_server_id)
2287 {
2288 // After saving a buffer using a language server that doesn't provide
2289 // a disk-based progress token, kick off a timer that will reset every
2290 // time the buffer is saved. If the timer eventually fires, simulate
2291 // disk-based diagnostics being finished so that other pieces of UI
2292 // (e.g., project diagnostics view, diagnostic status bar) can update.
2293 // We don't emit an event right away because the language server might take
2294 // some time to publish diagnostics.
2295 if adapter.disk_based_diagnostics_progress_token.is_none() {
2296 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2297 Duration::from_secs(1);
2298
2299 let task = cx.spawn_weak(|this, mut cx| async move {
2300 cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2301 if let Some(this) = this.upgrade(&cx) {
2302 this.update(&mut cx, |this, cx| {
2303 this.disk_based_diagnostics_finished(
2304 language_server_id,
2305 cx,
2306 );
2307 this.buffer_ordered_messages_tx
2308 .unbounded_send(
2309 BufferOrderedMessage::LanguageServerUpdate {
2310 language_server_id,
2311 message:proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2312 },
2313 )
2314 .ok();
2315 });
2316 }
2317 });
2318 *simulate_disk_based_diagnostics_completion = Some(task);
2319 }
2320 }
2321 }
2322 }
2323
2324 _ => {}
2325 }
2326
2327 None
2328 }
2329
2330 fn request_buffer_diff_recalculation(
2331 &mut self,
2332 buffer: &ModelHandle<Buffer>,
2333 cx: &mut ModelContext<Self>,
2334 ) {
2335 self.buffers_needing_diff.insert(buffer.downgrade());
2336 let first_insertion = self.buffers_needing_diff.len() == 1;
2337
2338 let settings = settings::get::<ProjectSettings>(cx);
2339 let delay = if let Some(delay) = settings.git.gutter_debounce {
2340 delay
2341 } else {
2342 if first_insertion {
2343 let this = cx.weak_handle();
2344 cx.defer(move |cx| {
2345 if let Some(this) = this.upgrade(cx) {
2346 this.update(cx, |this, cx| {
2347 this.recalculate_buffer_diffs(cx).detach();
2348 });
2349 }
2350 });
2351 }
2352 return;
2353 };
2354
2355 const MIN_DELAY: u64 = 50;
2356 let delay = delay.max(MIN_DELAY);
2357 let duration = Duration::from_millis(delay);
2358
2359 self.git_diff_debouncer
2360 .fire_new(duration, cx, move |this, cx| {
2361 this.recalculate_buffer_diffs(cx)
2362 });
2363 }
2364
2365 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2366 cx.spawn(|this, mut cx| async move {
2367 let buffers: Vec<_> = this.update(&mut cx, |this, _| {
2368 this.buffers_needing_diff.drain().collect()
2369 });
2370
2371 let tasks: Vec<_> = this.update(&mut cx, |_, cx| {
2372 buffers
2373 .iter()
2374 .filter_map(|buffer| {
2375 let buffer = buffer.upgrade(cx)?;
2376 buffer.update(cx, |buffer, cx| buffer.git_diff_recalc(cx))
2377 })
2378 .collect()
2379 });
2380
2381 futures::future::join_all(tasks).await;
2382
2383 this.update(&mut cx, |this, cx| {
2384 if !this.buffers_needing_diff.is_empty() {
2385 this.recalculate_buffer_diffs(cx).detach();
2386 } else {
2387 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2388 for buffer in buffers {
2389 if let Some(buffer) = buffer.upgrade(cx) {
2390 buffer.update(cx, |_, cx| cx.notify());
2391 }
2392 }
2393 }
2394 });
2395 })
2396 }
2397
2398 fn language_servers_for_worktree(
2399 &self,
2400 worktree_id: WorktreeId,
2401 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2402 self.language_server_ids
2403 .iter()
2404 .filter_map(move |((language_server_worktree_id, _), id)| {
2405 if *language_server_worktree_id == worktree_id {
2406 if let Some(LanguageServerState::Running {
2407 adapter,
2408 language,
2409 server,
2410 ..
2411 }) = self.language_servers.get(id)
2412 {
2413 return Some((adapter, language, server));
2414 }
2415 }
2416 None
2417 })
2418 }
2419
    /// Keeps buffer languages up to date as the language registry changes.
    ///
    /// On every registry notification: if the registry was reloaded, first
    /// detach all open buffers from their languages and language servers;
    /// then (re)detect the language for plain-text buffers and reparse any
    /// buffer containing injections that previously had no matching grammar.
    fn maintain_buffer_languages(
        languages: Arc<LanguageRegistry>,
        cx: &mut ModelContext<Project>,
    ) -> Task<()> {
        let mut subscription = languages.subscribe();
        let mut prev_reload_count = languages.reload_count();
        cx.spawn_weak(|project, mut cx| async move {
            while let Some(()) = subscription.next().await {
                if let Some(project) = project.upgrade(&cx) {
                    // If the language registry has been reloaded, then remove and
                    // re-assign the languages on all open buffers.
                    let reload_count = languages.reload_count();
                    if reload_count > prev_reload_count {
                        prev_reload_count = reload_count;
                        project.update(&mut cx, |this, cx| {
                            // Collect first so we can mutate `this` while iterating.
                            let buffers = this
                                .opened_buffers
                                .values()
                                .filter_map(|b| b.upgrade(cx))
                                .collect::<Vec<_>>();
                            for buffer in buffers {
                                if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() {
                                    this.unregister_buffer_from_language_servers(&buffer, &f, cx);
                                    buffer.update(cx, |buffer, cx| buffer.set_language(None, cx));
                                }
                            }
                        });
                    }

                    project.update(&mut cx, |project, cx| {
                        // Buffers with no language (or plain text) may now match a
                        // newly-registered language; buffers with unknown injections
                        // may now be able to resolve them.
                        let mut plain_text_buffers = Vec::new();
                        let mut buffers_with_unknown_injections = Vec::new();
                        for buffer in project.opened_buffers.values() {
                            if let Some(handle) = buffer.upgrade(cx) {
                                let buffer = &handle.read(cx);
                                if buffer.language().is_none()
                                    || buffer.language() == Some(&*language::PLAIN_TEXT)
                                {
                                    plain_text_buffers.push(handle);
                                } else if buffer.contains_unknown_injections() {
                                    buffers_with_unknown_injections.push(handle);
                                }
                            }
                        }

                        for buffer in plain_text_buffers {
                            project.detect_language_for_buffer(&buffer, cx);
                            project.register_buffer_with_language_servers(&buffer, cx);
                        }

                        for buffer in buffers_with_unknown_injections {
                            buffer.update(cx, |buffer, cx| buffer.reparse(cx));
                        }
                    });
                }
            }
        })
    }
2478
2479 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<()> {
2480 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2481 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2482
2483 let settings_observation = cx.observe_global::<SettingsStore, _>(move |_, _| {
2484 *settings_changed_tx.borrow_mut() = ();
2485 });
2486
2487 cx.spawn_weak(|this, mut cx| async move {
2488 while let Some(_) = settings_changed_rx.next().await {
2489 let Some(this) = this.upgrade(&cx) else {
2490 break;
2491 };
2492
2493 let servers: Vec<_> = this.read_with(&cx, |this, _| {
2494 this.language_servers
2495 .values()
2496 .filter_map(|state| match state {
2497 LanguageServerState::Starting(_) => None,
2498 LanguageServerState::Running {
2499 adapter, server, ..
2500 } => Some((adapter.clone(), server.clone())),
2501 })
2502 .collect()
2503 });
2504
2505 for (adapter, server) in servers {
2506 let workspace_config =
2507 cx.update(|cx| adapter.workspace_configuration(cx)).await;
2508 server
2509 .notify::<lsp::notification::DidChangeConfiguration>(
2510 lsp::DidChangeConfigurationParams {
2511 settings: workspace_config.clone(),
2512 },
2513 )
2514 .ok();
2515 }
2516 }
2517
2518 drop(settings_observation);
2519 })
2520 }
2521
2522 fn detect_language_for_buffer(
2523 &mut self,
2524 buffer_handle: &ModelHandle<Buffer>,
2525 cx: &mut ModelContext<Self>,
2526 ) -> Option<()> {
2527 // If the buffer has a language, set it and start the language server if we haven't already.
2528 let buffer = buffer_handle.read(cx);
2529 let full_path = buffer.file()?.full_path(cx);
2530 let content = buffer.as_rope();
2531 let new_language = self
2532 .languages
2533 .language_for_file(&full_path, Some(content))
2534 .now_or_never()?
2535 .ok()?;
2536 self.set_language_for_buffer(buffer_handle, new_language, cx);
2537 None
2538 }
2539
2540 pub fn set_language_for_buffer(
2541 &mut self,
2542 buffer: &ModelHandle<Buffer>,
2543 new_language: Arc<Language>,
2544 cx: &mut ModelContext<Self>,
2545 ) {
2546 buffer.update(cx, |buffer, cx| {
2547 if buffer.language().map_or(true, |old_language| {
2548 !Arc::ptr_eq(old_language, &new_language)
2549 }) {
2550 buffer.set_language(Some(new_language.clone()), cx);
2551 }
2552 });
2553
2554 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2555 let worktree = file.worktree.clone();
2556 if let Some(tree) = worktree.read(cx).as_local() {
2557 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2558 }
2559 }
2560 }
2561
2562 fn start_language_servers(
2563 &mut self,
2564 worktree: &ModelHandle<Worktree>,
2565 worktree_path: Arc<Path>,
2566 language: Arc<Language>,
2567 cx: &mut ModelContext<Self>,
2568 ) {
2569 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2570 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2571 if !settings.enable_language_server {
2572 return;
2573 }
2574
2575 let worktree_id = worktree.read(cx).id();
2576 for adapter in language.lsp_adapters() {
2577 self.start_language_server(
2578 worktree_id,
2579 worktree_path.clone(),
2580 adapter.clone(),
2581 language.clone(),
2582 cx,
2583 );
2584 }
2585 }
2586
2587 fn start_language_server(
2588 &mut self,
2589 worktree_id: WorktreeId,
2590 worktree_path: Arc<Path>,
2591 adapter: Arc<CachedLspAdapter>,
2592 language: Arc<Language>,
2593 cx: &mut ModelContext<Self>,
2594 ) {
2595 let key = (worktree_id, adapter.name.clone());
2596 if self.language_server_ids.contains_key(&key) {
2597 return;
2598 }
2599
2600 let pending_server = match self.languages.create_pending_language_server(
2601 language.clone(),
2602 adapter.clone(),
2603 worktree_path,
2604 ProjectLspAdapterDelegate::new(self, cx),
2605 cx,
2606 ) {
2607 Some(pending_server) => pending_server,
2608 None => return,
2609 };
2610
2611 let project_settings = settings::get::<ProjectSettings>(cx);
2612 let lsp = project_settings.lsp.get(&adapter.name.0);
2613 let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
2614
2615 let mut initialization_options = adapter.initialization_options.clone();
2616 match (&mut initialization_options, override_options) {
2617 (Some(initialization_options), Some(override_options)) => {
2618 merge_json_value_into(override_options, initialization_options);
2619 }
2620 (None, override_options) => initialization_options = override_options,
2621 _ => {}
2622 }
2623
2624 let server_id = pending_server.server_id;
2625 let container_dir = pending_server.container_dir.clone();
2626 let state = LanguageServerState::Starting({
2627 let adapter = adapter.clone();
2628 let server_name = adapter.name.0.clone();
2629 let language = language.clone();
2630 let key = key.clone();
2631
2632 cx.spawn_weak(|this, mut cx| async move {
2633 let result = Self::setup_and_insert_language_server(
2634 this,
2635 initialization_options,
2636 pending_server,
2637 adapter.clone(),
2638 language.clone(),
2639 server_id,
2640 key,
2641 &mut cx,
2642 )
2643 .await;
2644
2645 match result {
2646 Ok(server) => server,
2647
2648 Err(err) => {
2649 log::error!("failed to start language server {:?}: {}", server_name, err);
2650
2651 if let Some(this) = this.upgrade(&cx) {
2652 if let Some(container_dir) = container_dir {
2653 let installation_test_binary = adapter
2654 .installation_test_binary(container_dir.to_path_buf())
2655 .await;
2656
2657 this.update(&mut cx, |_, cx| {
2658 Self::check_errored_server(
2659 language,
2660 adapter,
2661 server_id,
2662 installation_test_binary,
2663 cx,
2664 )
2665 });
2666 }
2667 }
2668
2669 None
2670 }
2671 }
2672 })
2673 });
2674
2675 self.language_servers.insert(server_id, state);
2676 self.language_server_ids.insert(key, server_id);
2677 }
2678
    /// Tears down an existing language server, deletes its installation
    /// container, and starts fresh instances for every worktree.
    ///
    /// Returns a task that performs the shutdown, deletion, and restart
    /// sequence. The server's id mappings are removed synchronously, before
    /// the task runs, so no new work is routed to the dying server.
    fn reinstall_language_server(
        &mut self,
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        log::info!("beginning to reinstall server");

        // Take the server out of the running set; a `Starting` server is
        // simply dropped (no graceful shutdown possible yet).
        let existing_server = match self.language_servers.remove(&server_id) {
            Some(LanguageServerState::Running { server, .. }) => Some(server),
            _ => None,
        };

        // Unbind this adapter from every worktree so `start_language_server`
        // below won't be skipped by the "already running" check.
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade(cx) {
                let key = (worktree.read(cx).id(), adapter.name.clone());
                self.language_server_ids.remove(&key);
            }
        }

        Some(cx.spawn(move |this, mut cx| async move {
            if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
                log::info!("shutting down existing server");
                task.await;
            }

            // TODO: This is race-safe with regards to preventing new instances from
            // starting while deleting, but existing instances in other projects are going
            // to be very confused and messed up
            this.update(&mut cx, |this, cx| {
                this.languages.delete_server_container(adapter.clone(), cx)
            })
            .await;

            this.update(&mut cx, |this, mut cx| {
                let worktrees = this.worktrees.clone();
                for worktree in worktrees {
                    let worktree = match worktree.upgrade(cx) {
                        Some(worktree) => worktree.read(cx),
                        None => continue,
                    };
                    let worktree_id = worktree.id();
                    let root_path = worktree.abs_path();

                    this.start_language_server(
                        worktree_id,
                        root_path,
                        adapter.clone(),
                        language.clone(),
                        &mut cx,
                    );
                }
            })
        }))
    }
2735
2736 async fn setup_and_insert_language_server(
2737 this: WeakModelHandle<Self>,
2738 initialization_options: Option<serde_json::Value>,
2739 pending_server: PendingLanguageServer,
2740 adapter: Arc<CachedLspAdapter>,
2741 language: Arc<Language>,
2742 server_id: LanguageServerId,
2743 key: (WorktreeId, LanguageServerName),
2744 cx: &mut AsyncAppContext,
2745 ) -> Result<Option<Arc<LanguageServer>>> {
2746 let setup = Self::setup_pending_language_server(
2747 this,
2748 initialization_options,
2749 pending_server,
2750 adapter.clone(),
2751 server_id,
2752 cx,
2753 );
2754
2755 let language_server = match setup.await? {
2756 Some(language_server) => language_server,
2757 None => return Ok(None),
2758 };
2759 let this = match this.upgrade(cx) {
2760 Some(this) => this,
2761 None => return Err(anyhow!("failed to upgrade project handle")),
2762 };
2763
2764 this.update(cx, |this, cx| {
2765 this.insert_newly_running_language_server(
2766 language,
2767 adapter,
2768 language_server.clone(),
2769 server_id,
2770 key,
2771 cx,
2772 )
2773 })?;
2774
2775 Ok(Some(language_server))
2776 }
2777
    /// Waits for a pending language server binary, wires up its request and
    /// notification handlers, initializes it, and pushes the initial
    /// workspace configuration.
    ///
    /// Returns `Ok(None)` when the pending-server task resolved to no server
    /// (e.g. the binary was unavailable).
    async fn setup_pending_language_server(
        this: WeakModelHandle<Self>,
        initialization_options: Option<serde_json::Value>,
        pending_server: PendingLanguageServer,
        adapter: Arc<CachedLspAdapter>,
        server_id: LanguageServerId,
        cx: &mut AsyncAppContext,
    ) -> Result<Option<Arc<LanguageServer>>> {
        let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await;
        let language_server = match pending_server.task.await? {
            Some(server) => server,
            None => return Ok(None),
        };

        // Forward published diagnostics into the project, letting the adapter
        // pre-process them first.
        language_server
            .on_notification::<lsp::notification::PublishDiagnostics, _>({
                let adapter = adapter.clone();
                move |mut params, mut cx| {
                    let this = this;
                    let adapter = adapter.clone();
                    adapter.process_diagnostics(&mut params);
                    if let Some(this) = this.upgrade(&cx) {
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(
                                server_id,
                                params,
                                &adapter.disk_based_diagnostic_sources,
                                cx,
                            )
                            .log_err();
                        });
                    }
                }
            })
            .detach();

        // Answer `workspace/configuration` requests from the adapter's
        // current workspace configuration, honoring the requested section.
        language_server
            .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
                let adapter = adapter.clone();
                move |params, mut cx| {
                    let adapter = adapter.clone();
                    async move {
                        let workspace_config =
                            cx.update(|cx| adapter.workspace_configuration(cx)).await;
                        Ok(params
                            .items
                            .into_iter()
                            .map(|item| {
                                if let Some(section) = &item.section {
                                    workspace_config
                                        .get(section)
                                        .cloned()
                                        .unwrap_or(serde_json::Value::Null)
                                } else {
                                    workspace_config.clone()
                                }
                            })
                            .collect())
                    }
                }
            })
            .detach();

        // Even though we don't have handling for these requests, respond to them to
        // avoid stalling any language server like `gopls` which waits for a response
        // to these requests when initializing.
        language_server
            .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(
                move |params, mut cx| async move {
                    if let Some(this) = this.upgrade(&cx) {
                        this.update(&mut cx, |this, _| {
                            // Remember string progress tokens so later progress
                            // notifications can be attributed to this server.
                            if let Some(status) = this.language_server_statuses.get_mut(&server_id)
                            {
                                if let lsp::NumberOrString::String(token) = params.token {
                                    status.progress_tokens.insert(token);
                                }
                            }
                        });
                    }
                    Ok(())
                },
            )
            .detach();
        // Honor dynamic registration of file watching; all other dynamic
        // capability registrations are ignored.
        language_server
            .on_request::<lsp::request::RegisterCapability, _, _>({
                move |params, mut cx| async move {
                    let this = this
                        .upgrade(&cx)
                        .ok_or_else(|| anyhow!("project dropped"))?;
                    for reg in params.registrations {
                        if reg.method == "workspace/didChangeWatchedFiles" {
                            if let Some(options) = reg.register_options {
                                let options = serde_json::from_value(options)?;
                                this.update(&mut cx, |this, cx| {
                                    this.on_lsp_did_change_watched_files(server_id, options, cx);
                                });
                            }
                        }
                    }
                    Ok(())
                }
            })
            .detach();

        language_server
            .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
                let adapter = adapter.clone();
                move |params, cx| {
                    Self::on_lsp_workspace_edit(this, params, server_id, adapter.clone(), cx)
                }
            })
            .detach();

        // Propagate inlay-hint refresh requests to the UI and to remote peers.
        language_server
            .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
                move |(), mut cx| async move {
                    let this = this
                        .upgrade(&cx)
                        .ok_or_else(|| anyhow!("project dropped"))?;
                    this.update(&mut cx, |project, cx| {
                        cx.emit(Event::RefreshInlayHints);
                        project.remote_id().map(|project_id| {
                            project.client.send(proto::RefreshInlayHints { project_id })
                        })
                    })
                    .transpose()?;
                    Ok(())
                }
            })
            .detach();

        let disk_based_diagnostics_progress_token =
            adapter.disk_based_diagnostics_progress_token.clone();

        // Track progress notifications (used to surface disk-based
        // diagnostics status in the UI).
        language_server
            .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
                if let Some(this) = this.upgrade(&cx) {
                    this.update(&mut cx, |this, cx| {
                        this.on_lsp_progress(
                            params,
                            server_id,
                            disk_based_diagnostics_progress_token.clone(),
                            cx,
                        );
                    });
                }
            })
            .detach();

        let language_server = language_server.initialize(initialization_options).await?;

        // Send the initial configuration; a failure here is non-fatal.
        language_server
            .notify::<lsp::notification::DidChangeConfiguration>(
                lsp::DidChangeConfigurationParams {
                    settings: workspace_config,
                },
            )
            .ok();

        Ok(Some(language_server))
    }
2939
    /// Transitions a language server from `Starting` to `Running`, records
    /// its status, announces it to remote collaborators, and sends
    /// `textDocument/didOpen` for every already-open buffer matching the
    /// server's worktree and language.
    fn insert_newly_running_language_server(
        &mut self,
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        language_server: Arc<LanguageServer>,
        server_id: LanguageServerId,
        key: (WorktreeId, LanguageServerName),
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // If the language server for this key doesn't match the server id, don't store the
        // server. Which will cause it to be dropped, killing the process
        if self
            .language_server_ids
            .get(&key)
            .map(|id| id != &server_id)
            .unwrap_or(false)
        {
            return Ok(());
        }

        // Update language_servers collection with Running variant of LanguageServerState
        // indicating that the server is up and running and ready
        self.language_servers.insert(
            server_id,
            LanguageServerState::Running {
                adapter: adapter.clone(),
                language: language.clone(),
                watched_paths: Default::default(),
                server: language_server.clone(),
                simulate_disk_based_diagnostics_completion: None,
            },
        );

        self.language_server_statuses.insert(
            server_id,
            LanguageServerStatus {
                name: language_server.name().to_string(),
                pending_work: Default::default(),
                has_pending_diagnostic_updates: false,
                progress_tokens: Default::default(),
            },
        );

        cx.emit(Event::LanguageServerAdded(server_id));

        // Let remote collaborators know about the new server.
        if let Some(project_id) = self.remote_id() {
            self.client.send(proto::StartLanguageServer {
                project_id,
                server: Some(proto::LanguageServer {
                    id: server_id.0 as u64,
                    name: language_server.name().to_string(),
                }),
            })?;
        }

        // Tell the language server about every open buffer in the worktree that matches the language.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer_handle) = buffer.upgrade(cx) {
                let buffer = buffer_handle.read(cx);
                let file = match File::from_dyn(buffer.file()) {
                    Some(file) => file,
                    None => continue,
                };
                let language = match buffer.language() {
                    Some(language) => language,
                    None => continue,
                };

                // Only local buffers in this server's worktree, whose language
                // is served by this adapter, are relevant.
                if file.worktree.read(cx).id() != key.0
                    || !language.lsp_adapters().iter().any(|a| a.name == key.1)
                {
                    continue;
                }

                let file = match file.as_local() {
                    Some(file) => file,
                    None => continue,
                };

                // Seed the snapshot history used for incremental didChange
                // notifications, starting at version 0.
                let versions = self
                    .buffer_snapshots
                    .entry(buffer.remote_id())
                    .or_default()
                    .entry(server_id)
                    .or_insert_with(|| {
                        vec![LspBufferSnapshot {
                            version: 0,
                            snapshot: buffer.text_snapshot(),
                        }]
                    });

                let snapshot = versions.last().unwrap();
                let version = snapshot.version;
                let initial_snapshot = &snapshot.snapshot;
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                language_server.notify::<lsp::notification::DidOpenTextDocument>(
                    lsp::DidOpenTextDocumentParams {
                        text_document: lsp::TextDocumentItem::new(
                            uri,
                            // Use the adapter's language id when it declares one.
                            adapter
                                .language_ids
                                .get(language.name().as_ref())
                                .cloned()
                                .unwrap_or_default(),
                            version,
                            initial_snapshot.text(),
                        ),
                    },
                )?;

                // Let the buffer trigger completions on the characters this
                // server advertises.
                buffer_handle.update(cx, |buffer, cx| {
                    buffer.set_completion_triggers(
                        language_server
                            .capabilities()
                            .completion_provider
                            .as_ref()
                            .and_then(|provider| provider.trigger_characters.clone())
                            .unwrap_or_default(),
                        cx,
                    )
                });
            }
        }

        cx.notify();
        Ok(())
    }
3067
    // Returns a list of all of the worktrees which no longer have a language server and the root path
    // for the stopped server
    /// Stops the language server for the given worktree/adapter pair,
    /// removing all of its id mappings, diagnostics, and status entries, and
    /// shutting the server process down asynchronously.
    fn stop_language_server(
        &mut self,
        worktree_id: WorktreeId,
        adapter_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
        let key = (worktree_id, adapter_name);
        if let Some(server_id) = self.language_server_ids.remove(&key) {
            log::info!("stopping language server {}", key.1 .0);

            // Remove other entries for this language server as well
            let mut orphaned_worktrees = vec![worktree_id];
            let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
            for other_key in other_keys {
                if self.language_server_ids.get(&other_key) == Some(&server_id) {
                    self.language_server_ids.remove(&other_key);
                    orphaned_worktrees.push(other_key.0);
                }
            }

            // Clear diagnostics that the stopped server had published, both
            // on open buffers and on the local worktrees.
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(server_id, Default::default(), cx);
                    });
                }
            }
            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, cx| {
                        if let Some(worktree) = worktree.as_local_mut() {
                            worktree.clear_diagnostics_for_language_server(server_id, cx);
                        }
                    });
                }
            }

            self.language_server_statuses.remove(&server_id);
            cx.notify();

            let server_state = self.language_servers.remove(&server_id);
            cx.emit(Event::LanguageServerRemoved(server_id));
            cx.spawn_weak(|this, mut cx| async move {
                let mut root_path = None;

                // A `Starting` server must finish starting before it can be
                // shut down cleanly.
                let server = match server_state {
                    Some(LanguageServerState::Starting(task)) => task.await,
                    Some(LanguageServerState::Running { server, .. }) => Some(server),
                    None => None,
                };

                if let Some(server) = server {
                    root_path = Some(server.root_path().clone());
                    if let Some(shutdown) = server.shutdown() {
                        shutdown.await;
                    }
                }

                if let Some(this) = this.upgrade(&cx) {
                    this.update(&mut cx, |this, cx| {
                        this.language_server_statuses.remove(&server_id);
                        cx.notify();
                    });
                }

                (root_path, orphaned_worktrees)
            })
        } else {
            Task::ready((None, Vec::new()))
        }
    }
3141
3142 pub fn restart_language_servers_for_buffers(
3143 &mut self,
3144 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
3145 cx: &mut ModelContext<Self>,
3146 ) -> Option<()> {
3147 let language_server_lookup_info: HashSet<(ModelHandle<Worktree>, Arc<Language>)> = buffers
3148 .into_iter()
3149 .filter_map(|buffer| {
3150 let buffer = buffer.read(cx);
3151 let file = File::from_dyn(buffer.file())?;
3152 let full_path = file.full_path(cx);
3153 let language = self
3154 .languages
3155 .language_for_file(&full_path, Some(buffer.as_rope()))
3156 .now_or_never()?
3157 .ok()?;
3158 Some((file.worktree.clone(), language))
3159 })
3160 .collect();
3161 for (worktree, language) in language_server_lookup_info {
3162 self.restart_language_servers(worktree, language, cx);
3163 }
3164
3165 None
3166 }
3167
    // TODO This will break in the case where the adapter's root paths and worktrees are not equal
    /// Stops every adapter's server for `language` in the given worktree and
    /// starts fresh ones, preferring the stopped server's original root path
    /// over the worktree path. Worktrees that shared the stopped servers are
    /// re-bound to the newly started server ids.
    fn restart_language_servers(
        &mut self,
        worktree: ModelHandle<Worktree>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let worktree_id = worktree.read(cx).id();
        let fallback_path = worktree.read(cx).abs_path();

        let mut stops = Vec::new();
        for adapter in language.lsp_adapters() {
            stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
        }

        if stops.is_empty() {
            return;
        }
        let mut stops = stops.into_iter();

        cx.spawn_weak(|this, mut cx| async move {
            // Wait for all the stops to finish, accumulating every worktree
            // that lost a server in the process.
            let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
            for stop in stops {
                let (_, worktrees) = stop.await;
                orphaned_worktrees.extend_from_slice(&worktrees);
            }

            let this = match this.upgrade(&cx) {
                Some(this) => this,
                None => return,
            };

            this.update(&mut cx, |this, cx| {
                // Attempt to restart using original server path. Fallback to passed in
                // path if we could not retrieve the root path
                let root_path = original_root_path
                    .map(|path_buf| Arc::from(path_buf.as_path()))
                    .unwrap_or(fallback_path);

                this.start_language_servers(&worktree, root_path, language.clone(), cx);

                // Lookup new server ids and set them for each of the orphaned worktrees
                for adapter in language.lsp_adapters() {
                    if let Some(new_server_id) = this
                        .language_server_ids
                        .get(&(worktree_id, adapter.name.clone()))
                        .cloned()
                    {
                        for &orphaned_worktree in &orphaned_worktrees {
                            this.language_server_ids
                                .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
                        }
                    }
                }
            });
        })
        .detach();
    }
3226
3227 fn check_errored_server(
3228 language: Arc<Language>,
3229 adapter: Arc<CachedLspAdapter>,
3230 server_id: LanguageServerId,
3231 installation_test_binary: Option<LanguageServerBinary>,
3232 cx: &mut ModelContext<Self>,
3233 ) {
3234 if !adapter.can_be_reinstalled() {
3235 log::info!(
3236 "Validation check requested for {:?} but it cannot be reinstalled",
3237 adapter.name.0
3238 );
3239 return;
3240 }
3241
3242 cx.spawn(|this, mut cx| async move {
3243 log::info!("About to spawn test binary");
3244
3245 // A lack of test binary counts as a failure
3246 let process = installation_test_binary.and_then(|binary| {
3247 smol::process::Command::new(&binary.path)
3248 .current_dir(&binary.path)
3249 .args(binary.arguments)
3250 .stdin(Stdio::piped())
3251 .stdout(Stdio::piped())
3252 .stderr(Stdio::inherit())
3253 .kill_on_drop(true)
3254 .spawn()
3255 .ok()
3256 });
3257
3258 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3259 let mut timeout = cx.background().timer(PROCESS_TIMEOUT).fuse();
3260
3261 let mut errored = false;
3262 if let Some(mut process) = process {
3263 futures::select! {
3264 status = process.status().fuse() => match status {
3265 Ok(status) => errored = !status.success(),
3266 Err(_) => errored = true,
3267 },
3268
3269 _ = timeout => {
3270 log::info!("test binary time-ed out, this counts as a success");
3271 _ = process.kill();
3272 }
3273 }
3274 } else {
3275 log::warn!("test binary failed to launch");
3276 errored = true;
3277 }
3278
3279 if errored {
3280 log::warn!("test binary check failed");
3281 let task = this.update(&mut cx, move |this, mut cx| {
3282 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3283 });
3284
3285 if let Some(task) = task {
3286 task.await;
3287 }
3288 }
3289 })
3290 .detach();
3291 }
3292
    /// Routes an LSP `$/progress` notification: disk-based diagnostics
    /// progress toggles the server's pending-diagnostics state, while all
    /// other work progress is recorded locally and forwarded to remote
    /// collaborators via `buffer_ordered_messages_tx`.
    ///
    /// Numeric tokens are ignored, as are tokens not present in the server's
    /// `progress_tokens` set.
    fn on_lsp_progress(
        &mut self,
        progress: lsp::ProgressParams,
        language_server_id: LanguageServerId,
        disk_based_diagnostics_progress_token: Option<String>,
        cx: &mut ModelContext<Self>,
    ) {
        let token = match progress.token {
            lsp::NumberOrString::String(token) => token,
            lsp::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        // `WorkDone` is the only variant, so this destructure is irrefutable.
        let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                return;
            };

        // Only handle tokens this server is known to use.
        if !language_server_status.progress_tokens.contains(&token) {
            return;
        }

        // A token that starts with the adapter-provided prefix identifies the
        // server's disk-based diagnostics pass.
        let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
            .as_ref()
            .map_or(false, |disk_based_token| {
                token.starts_with(disk_based_token)
            });

        match progress {
            lsp::WorkDoneProgress::Begin(report) => {
                if is_disk_based_diagnostics_progress {
                    // Mark diagnostics as in-flight and broadcast the start.
                    language_server_status.has_pending_diagnostic_updates = true;
                    self.disk_based_diagnostics_started(language_server_id, cx);
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
                        })
                        .ok();
                } else {
                    // Generic work item: track it locally and mirror to peers.
                    self.on_lsp_work_start(
                        language_server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::WorkStart(
                                proto::LspWorkStart {
                                    token,
                                    message: report.message,
                                    percentage: report.percentage.map(|p| p as u32),
                                },
                            ),
                        })
                        .ok();
                }
            }
            lsp::WorkDoneProgress::Report(report) => {
                // Intermediate reports are only tracked for generic work;
                // the diagnostics pass is just started/finished.
                if !is_disk_based_diagnostics_progress {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::WorkProgress(
                                proto::LspWorkProgress {
                                    token,
                                    message: report.message,
                                    percentage: report.percentage.map(|p| p as u32),
                                },
                            ),
                        })
                        .ok();
                }
            }
            lsp::WorkDoneProgress::End(_) => {
                // The token is single-use; forget it once the work ends.
                language_server_status.progress_tokens.remove(&token);

                if is_disk_based_diagnostics_progress {
                    language_server_status.has_pending_diagnostic_updates = false;
                    self.disk_based_diagnostics_finished(language_server_id, cx);
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message:
                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                    Default::default(),
                                ),
                        })
                        .ok();
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::WorkEnd(
                                proto::LspWorkEnd { token },
                            ),
                        })
                        .ok();
                }
            }
        }
    }
3416
3417 fn on_lsp_work_start(
3418 &mut self,
3419 language_server_id: LanguageServerId,
3420 token: String,
3421 progress: LanguageServerProgress,
3422 cx: &mut ModelContext<Self>,
3423 ) {
3424 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3425 status.pending_work.insert(token, progress);
3426 cx.notify();
3427 }
3428 }
3429
3430 fn on_lsp_work_progress(
3431 &mut self,
3432 language_server_id: LanguageServerId,
3433 token: String,
3434 progress: LanguageServerProgress,
3435 cx: &mut ModelContext<Self>,
3436 ) {
3437 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3438 let entry = status
3439 .pending_work
3440 .entry(token)
3441 .or_insert(LanguageServerProgress {
3442 message: Default::default(),
3443 percentage: Default::default(),
3444 last_update_at: progress.last_update_at,
3445 });
3446 if progress.message.is_some() {
3447 entry.message = progress.message;
3448 }
3449 if progress.percentage.is_some() {
3450 entry.percentage = progress.percentage;
3451 }
3452 entry.last_update_at = progress.last_update_at;
3453 cx.notify();
3454 }
3455 }
3456
3457 fn on_lsp_work_end(
3458 &mut self,
3459 language_server_id: LanguageServerId,
3460 token: String,
3461 cx: &mut ModelContext<Self>,
3462 ) {
3463 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3464 cx.emit(Event::RefreshInlayHints);
3465 status.pending_work.remove(&token);
3466 cx.notify();
3467 }
3468 }
3469
    /// Handles a `workspace/didChangeWatchedFiles` registration from a running
    /// language server by rebuilding its per-worktree set of watched-path
    /// glob matchers.
    ///
    /// Each watcher glob is matched against every worktree: absolute string
    /// patterns must be prefixed by the worktree root, and relative patterns
    /// must have the worktree root as their base URI. For each accepted glob,
    /// its literal prefix is handed to the worktree scanner via
    /// `add_path_prefix_to_scan`.
    fn on_lsp_did_change_watched_files(
        &mut self,
        language_server_id: LanguageServerId,
        params: DidChangeWatchedFilesRegistrationOptions,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(LanguageServerState::Running { watched_paths, .. }) =
            self.language_servers.get_mut(&language_server_id)
        {
            // Accumulate one GlobSetBuilder per worktree that owns at least
            // one of the requested globs.
            let mut builders = HashMap::default();
            for watcher in params.watchers {
                for worktree in &self.worktrees {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
                            if let Some(abs_path) = tree.abs_path().to_str() {
                                // Reduce the watcher's pattern to a path
                                // relative to this worktree, or bail if it
                                // doesn't belong here.
                                let relative_glob_pattern = match &watcher.glob_pattern {
                                    lsp::GlobPattern::String(s) => s
                                        .strip_prefix(abs_path)
                                        .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
                                    lsp::GlobPattern::Relative(rp) => {
                                        let base_uri = match &rp.base_uri {
                                            lsp::OneOf::Left(workspace_folder) => {
                                                &workspace_folder.uri
                                            }
                                            lsp::OneOf::Right(base_uri) => base_uri,
                                        };
                                        // Relative patterns only apply when
                                        // their base URI is this worktree root.
                                        base_uri.to_file_path().ok().and_then(|file_path| {
                                            (file_path.to_str() == Some(abs_path))
                                                .then_some(rp.pattern.as_str())
                                        })
                                    }
                                };
                                if let Some(relative_glob_pattern) = relative_glob_pattern {
                                    let literal_prefix =
                                        glob_literal_prefix(&relative_glob_pattern);
                                    tree.as_local_mut()
                                        .unwrap()
                                        .add_path_prefix_to_scan(Path::new(literal_prefix).into());
                                    if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
                                        builders
                                            .entry(tree.id())
                                            .or_insert_with(|| GlobSetBuilder::new())
                                            .add(glob);
                                    }
                                    return true;
                                }
                            }
                            false
                        });
                        // A glob belongs to at most one worktree; stop looking
                        // once it is claimed.
                        if glob_is_inside_worktree {
                            break;
                        }
                    }
                }
            }

            // Replace the server's previous watch set wholesale; the LSP
            // registration supersedes any earlier one.
            watched_paths.clear();
            for (worktree_id, builder) in builders {
                if let Ok(globset) = builder.build() {
                    watched_paths.insert(worktree_id, globset);
                }
            }

            cx.notify();
        }
    }
3536
3537 async fn on_lsp_workspace_edit(
3538 this: WeakModelHandle<Self>,
3539 params: lsp::ApplyWorkspaceEditParams,
3540 server_id: LanguageServerId,
3541 adapter: Arc<CachedLspAdapter>,
3542 mut cx: AsyncAppContext,
3543 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3544 let this = this
3545 .upgrade(&cx)
3546 .ok_or_else(|| anyhow!("project project closed"))?;
3547 let language_server = this
3548 .read_with(&cx, |this, _| this.language_server_for_id(server_id))
3549 .ok_or_else(|| anyhow!("language server not found"))?;
3550 let transaction = Self::deserialize_workspace_edit(
3551 this.clone(),
3552 params.edit,
3553 true,
3554 adapter.clone(),
3555 language_server.clone(),
3556 &mut cx,
3557 )
3558 .await
3559 .log_err();
3560 this.update(&mut cx, |this, _| {
3561 if let Some(transaction) = transaction {
3562 this.last_workspace_edits_by_language_server
3563 .insert(server_id, transaction);
3564 }
3565 });
3566 Ok(lsp::ApplyWorkspaceEditResponse {
3567 applied: true,
3568 failed_change: None,
3569 failure_reason: None,
3570 })
3571 }
3572
    /// Returns an iterator over the status of every language server tracked
    /// by this project.
    pub fn language_server_statuses(
        &self,
    ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
        self.language_server_statuses.values()
    }
3578
3579 pub fn update_diagnostics(
3580 &mut self,
3581 language_server_id: LanguageServerId,
3582 mut params: lsp::PublishDiagnosticsParams,
3583 disk_based_sources: &[String],
3584 cx: &mut ModelContext<Self>,
3585 ) -> Result<()> {
3586 let abs_path = params
3587 .uri
3588 .to_file_path()
3589 .map_err(|_| anyhow!("URI is not a file"))?;
3590 let mut diagnostics = Vec::default();
3591 let mut primary_diagnostic_group_ids = HashMap::default();
3592 let mut sources_by_group_id = HashMap::default();
3593 let mut supporting_diagnostics = HashMap::default();
3594
3595 // Ensure that primary diagnostics are always the most severe
3596 params.diagnostics.sort_by_key(|item| item.severity);
3597
3598 for diagnostic in ¶ms.diagnostics {
3599 let source = diagnostic.source.as_ref();
3600 let code = diagnostic.code.as_ref().map(|code| match code {
3601 lsp::NumberOrString::Number(code) => code.to_string(),
3602 lsp::NumberOrString::String(code) => code.clone(),
3603 });
3604 let range = range_from_lsp(diagnostic.range);
3605 let is_supporting = diagnostic
3606 .related_information
3607 .as_ref()
3608 .map_or(false, |infos| {
3609 infos.iter().any(|info| {
3610 primary_diagnostic_group_ids.contains_key(&(
3611 source,
3612 code.clone(),
3613 range_from_lsp(info.location.range),
3614 ))
3615 })
3616 });
3617
3618 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3619 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3620 });
3621
3622 if is_supporting {
3623 supporting_diagnostics.insert(
3624 (source, code.clone(), range),
3625 (diagnostic.severity, is_unnecessary),
3626 );
3627 } else {
3628 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3629 let is_disk_based =
3630 source.map_or(false, |source| disk_based_sources.contains(source));
3631
3632 sources_by_group_id.insert(group_id, source);
3633 primary_diagnostic_group_ids
3634 .insert((source, code.clone(), range.clone()), group_id);
3635
3636 diagnostics.push(DiagnosticEntry {
3637 range,
3638 diagnostic: Diagnostic {
3639 source: diagnostic.source.clone(),
3640 code: code.clone(),
3641 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3642 message: diagnostic.message.clone(),
3643 group_id,
3644 is_primary: true,
3645 is_valid: true,
3646 is_disk_based,
3647 is_unnecessary,
3648 },
3649 });
3650 if let Some(infos) = &diagnostic.related_information {
3651 for info in infos {
3652 if info.location.uri == params.uri && !info.message.is_empty() {
3653 let range = range_from_lsp(info.location.range);
3654 diagnostics.push(DiagnosticEntry {
3655 range,
3656 diagnostic: Diagnostic {
3657 source: diagnostic.source.clone(),
3658 code: code.clone(),
3659 severity: DiagnosticSeverity::INFORMATION,
3660 message: info.message.clone(),
3661 group_id,
3662 is_primary: false,
3663 is_valid: true,
3664 is_disk_based,
3665 is_unnecessary: false,
3666 },
3667 });
3668 }
3669 }
3670 }
3671 }
3672 }
3673
3674 for entry in &mut diagnostics {
3675 let diagnostic = &mut entry.diagnostic;
3676 if !diagnostic.is_primary {
3677 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3678 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3679 source,
3680 diagnostic.code.clone(),
3681 entry.range.clone(),
3682 )) {
3683 if let Some(severity) = severity {
3684 diagnostic.severity = severity;
3685 }
3686 diagnostic.is_unnecessary = is_unnecessary;
3687 }
3688 }
3689 }
3690
3691 self.update_diagnostic_entries(
3692 language_server_id,
3693 abs_path,
3694 params.version,
3695 diagnostics,
3696 cx,
3697 )?;
3698 Ok(())
3699 }
3700
    /// Stores freshly-received diagnostics for the file at `abs_path`:
    /// updates the open buffer's diagnostic set (when the file is open) and
    /// the owning local worktree's diagnostics, emitting
    /// `Event::DiagnosticsUpdated` when anything changed.
    ///
    /// Errors if the path lies outside every local worktree, or if the
    /// worktree containing it is not local.
    pub fn update_diagnostic_entries(
        &mut self,
        server_id: LanguageServerId,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // Keep the in-memory buffer's diagnostics in sync if it is open.
        if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
            self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
        }

        let updated = worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
        })?;
        if updated {
            cx.emit(Event::DiagnosticsUpdated {
                language_server_id: server_id,
                path: project_path,
            });
        }
        Ok(())
    }
3736
    /// Applies a new set of LSP diagnostics to an open buffer: resolves the
    /// snapshot matching the server-reported document `version`, sorts the
    /// entries, remaps disk-based ranges across unsaved edits, clips every
    /// range to valid buffer positions, and installs the resulting
    /// `DiagnosticSet` on the buffer.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        server_id: LanguageServerId,
        version: Option<i32>,
        mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break diagnostics at the same position: primary entries first,
        // then by disk-based flag, severity, and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;

        // Order by start ascending, then end descending (wider ranges first).
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one codepoint
            if range.start == range.end {
                // This will go to the next boundary when clipped.
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    // NOTE(review): this re-clips `range.end` rather than the
                    // just-decremented `range.start` — confirm that is intended.
                    range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(server_id, set, cx)
        });
        Ok(())
    }
3809
    /// Reloads the given buffers from disk, collecting the resulting edits
    /// into a `ProjectTransaction`.
    ///
    /// Only dirty buffers are considered. Remote buffers are reloaded with a
    /// single `ReloadBuffers` request to the host; local buffers are reloaded
    /// directly. When `push_to_history` is false, each buffer's reload
    /// transaction is forgotten so it cannot be undone locally.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition dirty buffers into local and remote; clean buffers and
        // buffers without a project `File` are skipped entirely.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote reloads require a shared project id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
3873
    /// Formats the given buffers according to their language settings,
    /// returning one undoable transaction per formatted buffer.
    ///
    /// Locally, each buffer gets whitespace fixups (trailing whitespace
    /// removal, final newline) followed by language-server or
    /// external-command formatting, with both grouped into a single undo
    /// entry. Remotely, the request is forwarded to the host via
    /// `FormatBuffers`.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        trigger: FormatTrigger,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            // Pair each buffer with its absolute path (for local files) and
            // its primary language server, if any.
            let mut buffers_with_paths_and_servers = buffers
                .into_iter()
                .filter_map(|buffer_handle| {
                    let buffer = buffer_handle.read(cx);
                    let file = File::from_dyn(buffer.file())?;
                    let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
                    let server = self
                        .primary_language_server_for_buffer(buffer, cx)
                        .map(|s| s.1.clone());
                    Some((buffer_handle, buffer_abs_path, server))
                })
                .collect::<Vec<_>>();

            cx.spawn(|this, mut cx| async move {
                // Do not allow multiple concurrent formatting requests for the
                // same buffer.
                this.update(&mut cx, |this, cx| {
                    buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
                        this.buffers_being_formatted
                            .insert(buffer.read(cx).remote_id())
                    });
                });

                // Clear the "being formatted" markers even if formatting
                // errors out or this task is dropped.
                let _cleanup = defer({
                    let this = this.clone();
                    let mut cx = cx.clone();
                    let buffers = &buffers_with_paths_and_servers;
                    move || {
                        this.update(&mut cx, |this, cx| {
                            for (buffer, _, _) in buffers {
                                this.buffers_being_formatted
                                    .remove(&buffer.read(cx).remote_id());
                            }
                        });
                    }
                });

                let mut project_transaction = ProjectTransaction::default();
                for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
                    let settings = buffer.read_with(&cx, |buffer, cx| {
                        language_settings(buffer.language(), buffer.file(), cx).clone()
                    });

                    let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
                    let ensure_final_newline = settings.ensure_final_newline_on_save;
                    let format_on_save = settings.format_on_save.clone();
                    let formatter = settings.formatter.clone();
                    let tab_size = settings.tab_size;

                    // First, format buffer's whitespace according to the settings.
                    let trailing_whitespace_diff = if remove_trailing_whitespace {
                        Some(
                            buffer
                                .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
                                .await,
                        )
                    } else {
                        None
                    };
                    // Apply the whitespace fixups as one transaction so they
                    // can later be grouped with the language-specific edits.
                    let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        if let Some(diff) = trailing_whitespace_diff {
                            buffer.apply_diff(diff, cx);
                        }
                        if ensure_final_newline {
                            buffer.ensure_final_newline(cx);
                        }
                        buffer.end_transaction(cx)
                    });

                    // Currently, formatting operations are represented differently depending on
                    // whether they come from a language server or an external command.
                    enum FormatOperation {
                        Lsp(Vec<(Range<Anchor>, String)>),
                        External(Diff),
                    }

                    // Apply language-specific formatting using either a language server
                    // or external command.
                    let mut format_operation = None;
                    match (formatter, format_on_save) {
                        (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}

                        (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
                        | (_, FormatOnSave::LanguageServer) => {
                            if let Some((language_server, buffer_abs_path)) =
                                language_server.as_ref().zip(buffer_abs_path.as_ref())
                            {
                                format_operation = Some(FormatOperation::Lsp(
                                    Self::format_via_lsp(
                                        &this,
                                        &buffer,
                                        buffer_abs_path,
                                        &language_server,
                                        tab_size,
                                        &mut cx,
                                    )
                                    .await
                                    .context("failed to format via language server")?,
                                ));
                            }
                        }

                        (
                            Formatter::External { command, arguments },
                            FormatOnSave::On | FormatOnSave::Off,
                        )
                        | (_, FormatOnSave::External { command, arguments }) => {
                            if let Some(buffer_abs_path) = buffer_abs_path {
                                format_operation = Self::format_via_external_command(
                                    &buffer,
                                    &buffer_abs_path,
                                    &command,
                                    &arguments,
                                    &mut cx,
                                )
                                .await
                                .context(format!(
                                    "failed to format via external command {:?}",
                                    command
                                ))?
                                .map(FormatOperation::External);
                            }
                        }
                    };

                    buffer.update(&mut cx, |b, cx| {
                        // If the buffer had its whitespace formatted and was edited while the language-specific
                        // formatting was being computed, avoid applying the language-specific formatting, because
                        // it can't be grouped with the whitespace formatting in the undo history.
                        if let Some(transaction_id) = whitespace_transaction_id {
                            if b.peek_undo_stack()
                                .map_or(true, |e| e.transaction_id() != transaction_id)
                            {
                                format_operation.take();
                            }
                        }

                        // Apply any language-specific formatting, and group the two formatting operations
                        // in the buffer's undo history.
                        if let Some(operation) = format_operation {
                            match operation {
                                FormatOperation::Lsp(edits) => {
                                    b.edit(edits, None, cx);
                                }
                                FormatOperation::External(diff) => {
                                    b.apply_diff(diff, cx);
                                }
                            }

                            if let Some(transaction_id) = whitespace_transaction_id {
                                b.group_until_transaction(transaction_id);
                            }
                        }

                        if let Some(transaction) = b.finalize_last_transaction().cloned() {
                            if !push_to_history {
                                b.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(buffer.clone(), transaction);
                        }
                    });
                }

                Ok(project_transaction)
            })
        } else {
            // Remote project: ask the host to format and deserialize the
            // resulting project transaction.
            let remote_id = self.remote_id();
            let client = self.client.clone();
            cx.spawn(|this, mut cx| async move {
                let mut project_transaction = ProjectTransaction::default();
                if let Some(project_id) = remote_id {
                    let response = client
                        .request(proto::FormatBuffers {
                            project_id,
                            trigger: trigger as i32,
                            buffer_ids: buffers
                                .iter()
                                .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                                .collect(),
                        })
                        .await?
                        .transaction
                        .ok_or_else(|| anyhow!("missing transaction"))?;
                    project_transaction = this
                        .update(&mut cx, |this, cx| {
                            this.deserialize_project_transaction(response, push_to_history, cx)
                        })
                        .await?;
                }
                Ok(project_transaction)
            })
        }
    }
4077
    /// Requests whole-document formatting edits from `language_server`,
    /// preferring `textDocument/formatting` and falling back to
    /// `textDocument/rangeFormatting` over the full document when only range
    /// formatting is advertised.
    ///
    /// Returns the edits converted to buffer anchors, or an empty vector if
    /// the server supports neither request.
    async fn format_via_lsp(
        this: &ModelHandle<Self>,
        buffer: &ModelHandle<Buffer>,
        abs_path: &Path,
        language_server: &Arc<LanguageServer>,
        tab_size: NonZeroU32,
        cx: &mut AsyncAppContext,
    ) -> Result<Vec<(Range<Anchor>, String)>> {
        let uri = lsp::Url::from_file_path(abs_path)
            .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
        let text_document = lsp::TextDocumentIdentifier::new(uri);
        let capabilities = &language_server.capabilities();

        let formatting_provider = capabilities.document_formatting_provider.as_ref();
        let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();

        // `OneOf::Left(false)` means the capability is explicitly disabled.
        let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
            language_server
                .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                    text_document,
                    options: lsp_command::lsp_formatting_options(tab_size.get()),
                    work_done_progress_params: Default::default(),
                })
                .await?
        } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
            // Emulate full-document formatting by range-formatting from the
            // start of the buffer to its maximum point.
            let buffer_start = lsp::Position::new(0, 0);
            let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));

            language_server
                .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
                    text_document,
                    range: lsp::Range::new(buffer_start, buffer_end),
                    options: lsp_command::lsp_formatting_options(tab_size.get()),
                    work_done_progress_params: Default::default(),
                })
                .await?
        } else {
            None
        };

        if let Some(lsp_edits) = lsp_edits {
            this.update(cx, |this, cx| {
                this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
            })
            .await
        } else {
            Ok(Vec::new())
        }
    }
4127
    /// Formats `buffer` by piping its contents through an external `command`
    /// and diffing the command's stdout against the current buffer text.
    ///
    /// `{buffer_path}` occurrences in `arguments` are substituted with the
    /// buffer's absolute path. Returns `Ok(None)` when the buffer has no
    /// local worktree to use as the working directory; errors if the command
    /// exits non-zero or prints non-UTF-8 output.
    async fn format_via_external_command(
        buffer: &ModelHandle<Buffer>,
        buffer_abs_path: &Path,
        command: &str,
        arguments: &[String],
        cx: &mut AsyncAppContext,
    ) -> Result<Option<Diff>> {
        let working_dir_path = buffer.read_with(cx, |buffer, cx| {
            let file = File::from_dyn(buffer.file())?;
            let worktree = file.worktree.read(cx).as_local()?;
            let mut worktree_path = worktree.abs_path().to_path_buf();
            // For single-file worktrees, run from the containing directory.
            if worktree.root_entry()?.is_file() {
                worktree_path.pop();
            }
            Some(worktree_path)
        });

        if let Some(working_dir_path) = working_dir_path {
            let mut child =
                smol::process::Command::new(command)
                    .args(arguments.iter().map(|arg| {
                        arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
                    }))
                    .current_dir(&working_dir_path)
                    .stdin(smol::process::Stdio::piped())
                    .stdout(smol::process::Stdio::piped())
                    .stderr(smol::process::Stdio::piped())
                    .spawn()?;
            // Stream the buffer's current contents to the formatter's stdin.
            let stdin = child
                .stdin
                .as_mut()
                .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
            let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
            for chunk in text.chunks() {
                stdin.write_all(chunk.as_bytes()).await?;
            }
            stdin.flush().await?;

            let output = child.output().await?;
            if !output.status.success() {
                return Err(anyhow!(
                    "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
                    output.status.code(),
                    String::from_utf8_lossy(&output.stdout),
                    String::from_utf8_lossy(&output.stderr),
                ));
            }

            // The formatter's stdout is the full formatted document.
            let stdout = String::from_utf8(output.stdout)?;
            Ok(Some(
                buffer
                    .read_with(cx, |buffer, cx| buffer.diff(stdout, cx))
                    .await,
            ))
        } else {
            Ok(None)
        }
    }
4186
4187 pub fn definition<T: ToPointUtf16>(
4188 &self,
4189 buffer: &ModelHandle<Buffer>,
4190 position: T,
4191 cx: &mut ModelContext<Self>,
4192 ) -> Task<Result<Vec<LocationLink>>> {
4193 let position = position.to_point_utf16(buffer.read(cx));
4194 self.request_lsp(
4195 buffer.clone(),
4196 LanguageServerToQuery::Primary,
4197 GetDefinition { position },
4198 cx,
4199 )
4200 }
4201
4202 pub fn type_definition<T: ToPointUtf16>(
4203 &self,
4204 buffer: &ModelHandle<Buffer>,
4205 position: T,
4206 cx: &mut ModelContext<Self>,
4207 ) -> Task<Result<Vec<LocationLink>>> {
4208 let position = position.to_point_utf16(buffer.read(cx));
4209 self.request_lsp(
4210 buffer.clone(),
4211 LanguageServerToQuery::Primary,
4212 GetTypeDefinition { position },
4213 cx,
4214 )
4215 }
4216
4217 pub fn references<T: ToPointUtf16>(
4218 &self,
4219 buffer: &ModelHandle<Buffer>,
4220 position: T,
4221 cx: &mut ModelContext<Self>,
4222 ) -> Task<Result<Vec<Location>>> {
4223 let position = position.to_point_utf16(buffer.read(cx));
4224 self.request_lsp(
4225 buffer.clone(),
4226 LanguageServerToQuery::Primary,
4227 GetReferences { position },
4228 cx,
4229 )
4230 }
4231
4232 pub fn document_highlights<T: ToPointUtf16>(
4233 &self,
4234 buffer: &ModelHandle<Buffer>,
4235 position: T,
4236 cx: &mut ModelContext<Self>,
4237 ) -> Task<Result<Vec<DocumentHighlight>>> {
4238 let position = position.to_point_utf16(buffer.read(cx));
4239 self.request_lsp(
4240 buffer.clone(),
4241 LanguageServerToQuery::Primary,
4242 GetDocumentHighlights { position },
4243 cx,
4244 )
4245 }
4246
    /// Searches for workspace symbols matching `query` (the LSP
    /// `workspace/symbol` request).
    ///
    /// Locally, every running language server across all local worktrees is
    /// queried concurrently and the results are merged. Remotely, the query is
    /// forwarded to the host via `GetProjectSymbols`. With neither, an empty
    /// list is returned.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut requests = Vec::new();
            for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
                let worktree_id = *worktree_id;
                let worktree_handle = self.worktree_for_id(worktree_id, cx);
                // Skip servers whose worktree is gone or not local.
                let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
                    Some(worktree) => worktree,
                    None => continue,
                };
                let worktree_abs_path = worktree.abs_path().clone();

                // Only fully-initialized, running servers can be queried.
                let (adapter, language, server) = match self.language_servers.get(server_id) {
                    Some(LanguageServerState::Running {
                        adapter,
                        language,
                        server,
                        ..
                    }) => (adapter.clone(), language.clone(), server),

                    _ => continue,
                };

                requests.push(
                    server
                        .request::<lsp::request::WorkspaceSymbolRequest>(
                            lsp::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            },
                        )
                        // Errors from individual servers are logged and
                        // treated as an empty result, not a failure.
                        .log_err()
                        .map(move |response| {
                            // Normalize both the flat and nested response
                            // shapes into (name, kind, location) triples.
                            let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
                                lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
                                    flat_responses.into_iter().map(|lsp_symbol| {
                                        (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
                                    }).collect::<Vec<_>>()
                                }
                                lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
                                    nested_responses.into_iter().filter_map(|lsp_symbol| {
                                        let location = match lsp_symbol.location {
                                            OneOf::Left(location) => location,
                                            OneOf::Right(_) => {
                                                error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
                                                return None
                                            }
                                        };
                                        Some((lsp_symbol.name, lsp_symbol.kind, location))
                                    }).collect::<Vec<_>>()
                                }
                            }).unwrap_or_default();

                            (
                                adapter,
                                language,
                                worktree_id,
                                worktree_abs_path,
                                lsp_symbols,
                            )
                        }),
                );
            }

            cx.spawn_weak(|this, cx| async move {
                // Await all servers' responses before merging.
                let responses = futures::future::join_all(requests).await;
                let this = match this.upgrade(&cx) {
                    Some(this) => this,
                    None => return Ok(Vec::new()),
                };

                let symbols = this.read_with(&cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (
                        adapter,
                        adapter_language,
                        source_worktree_id,
                        worktree_abs_path,
                        lsp_symbols,
                    ) in responses
                    {
                        symbols.extend(lsp_symbols.into_iter().filter_map(
                            |(symbol_name, symbol_kind, symbol_location)| {
                                let abs_path = symbol_location.uri.to_file_path().ok()?;
                                let mut worktree_id = source_worktree_id;
                                let path;
                                // Prefer resolving the symbol's file to a known
                                // local worktree; otherwise fall back to a path
                                // relative to the server's own worktree root.
                                if let Some((worktree, rel_path)) =
                                    this.find_local_worktree(&abs_path, cx)
                                {
                                    worktree_id = worktree.read(cx).id();
                                    path = rel_path;
                                } else {
                                    path = relativize_path(&worktree_abs_path, &abs_path);
                                }

                                let project_path = ProjectPath {
                                    worktree_id,
                                    path: path.into(),
                                };
                                let signature = this.symbol_signature(&project_path);
                                let adapter_language = adapter_language.clone();
                                // Fall back to the adapter's language when the
                                // file's language can't be determined.
                                let language = this
                                    .languages
                                    .language_for_file(&project_path.path, None)
                                    .unwrap_or_else(move |_| adapter_language);
                                let language_server_name = adapter.name.clone();
                                Some(async move {
                                    let language = language.await;
                                    // Ask the language for a styled label; plain
                                    // text is used when none is provided.
                                    let label =
                                        language.label_for_symbol(&symbol_name, symbol_kind).await;

                                    Symbol {
                                        language_server_name,
                                        source_worktree_id,
                                        path: project_path,
                                        label: label.unwrap_or_else(|| {
                                            CodeLabel::plain(symbol_name.clone(), None)
                                        }),
                                        kind: symbol_kind,
                                        name: symbol_name,
                                        range: range_from_lsp(symbol_location.range),
                                        signature,
                                    }
                                })
                            },
                        ));
                    }

                    symbols
                });

                Ok(futures::future::join_all(symbols).await)
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    let new_symbols = this.read_with(&cx, |this, _| {
                        response
                            .symbols
                            .into_iter()
                            .map(|symbol| this.deserialize_symbol(symbol))
                            .collect::<Vec<_>>()
                    });
                    // Symbols that fail to deserialize are logged and dropped.
                    symbols = futures::future::join_all(new_symbols)
                        .await
                        .into_iter()
                        .filter_map(|symbol| symbol.log_err())
                        .collect::<Vec<_>>();
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
4408
4409 pub fn open_buffer_for_symbol(
4410 &mut self,
4411 symbol: &Symbol,
4412 cx: &mut ModelContext<Self>,
4413 ) -> Task<Result<ModelHandle<Buffer>>> {
4414 if self.is_local() {
4415 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4416 symbol.source_worktree_id,
4417 symbol.language_server_name.clone(),
4418 )) {
4419 *id
4420 } else {
4421 return Task::ready(Err(anyhow!(
4422 "language server for worktree and language not found"
4423 )));
4424 };
4425
4426 let worktree_abs_path = if let Some(worktree_abs_path) = self
4427 .worktree_for_id(symbol.path.worktree_id, cx)
4428 .and_then(|worktree| worktree.read(cx).as_local())
4429 .map(|local_worktree| local_worktree.abs_path())
4430 {
4431 worktree_abs_path
4432 } else {
4433 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4434 };
4435 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
4436 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4437 uri
4438 } else {
4439 return Task::ready(Err(anyhow!("invalid symbol path")));
4440 };
4441
4442 self.open_local_buffer_via_lsp(
4443 symbol_uri,
4444 language_server_id,
4445 symbol.language_server_name.clone(),
4446 cx,
4447 )
4448 } else if let Some(project_id) = self.remote_id() {
4449 let request = self.client.request(proto::OpenBufferForSymbol {
4450 project_id,
4451 symbol: Some(serialize_symbol(symbol)),
4452 });
4453 cx.spawn(|this, mut cx| async move {
4454 let response = request.await?;
4455 this.update(&mut cx, |this, cx| {
4456 this.wait_for_remote_buffer(response.buffer_id, cx)
4457 })
4458 .await
4459 })
4460 } else {
4461 Task::ready(Err(anyhow!("project does not have a remote id")))
4462 }
4463 }
4464
4465 pub fn hover<T: ToPointUtf16>(
4466 &self,
4467 buffer: &ModelHandle<Buffer>,
4468 position: T,
4469 cx: &mut ModelContext<Self>,
4470 ) -> Task<Result<Option<Hover>>> {
4471 let position = position.to_point_utf16(buffer.read(cx));
4472 self.request_lsp(
4473 buffer.clone(),
4474 LanguageServerToQuery::Primary,
4475 GetHover { position },
4476 cx,
4477 )
4478 }
4479
4480 pub fn completions<T: ToOffset + ToPointUtf16>(
4481 &self,
4482 buffer: &ModelHandle<Buffer>,
4483 position: T,
4484 cx: &mut ModelContext<Self>,
4485 ) -> Task<Result<Vec<Completion>>> {
4486 let position = position.to_point_utf16(buffer.read(cx));
4487 if self.is_local() {
4488 let snapshot = buffer.read(cx).snapshot();
4489 let offset = position.to_offset(&snapshot);
4490 let scope = snapshot.language_scope_at(offset);
4491
4492 let server_ids: Vec<_> = self
4493 .language_servers_for_buffer(buffer.read(cx), cx)
4494 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4495 .filter(|(adapter, _)| {
4496 scope
4497 .as_ref()
4498 .map(|scope| scope.language_allowed(&adapter.name))
4499 .unwrap_or(true)
4500 })
4501 .map(|(_, server)| server.server_id())
4502 .collect();
4503
4504 let buffer = buffer.clone();
4505 cx.spawn(|this, mut cx| async move {
4506 let mut tasks = Vec::with_capacity(server_ids.len());
4507 this.update(&mut cx, |this, cx| {
4508 for server_id in server_ids {
4509 tasks.push(this.request_lsp(
4510 buffer.clone(),
4511 LanguageServerToQuery::Other(server_id),
4512 GetCompletions { position },
4513 cx,
4514 ));
4515 }
4516 });
4517
4518 let mut completions = Vec::new();
4519 for task in tasks {
4520 if let Ok(new_completions) = task.await {
4521 completions.extend_from_slice(&new_completions);
4522 }
4523 }
4524
4525 Ok(completions)
4526 })
4527 } else if let Some(project_id) = self.remote_id() {
4528 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4529 } else {
4530 Task::ready(Ok(Default::default()))
4531 }
4532 }
4533
    /// Applies a completion's additional text edits (e.g. auto-imports) to
    /// the buffer, returning the resulting transaction, if any.
    ///
    /// Locally, the completion item is first resolved via
    /// `completionItem/resolve` when the server supports it; the resulting
    /// edits are applied in a single transaction, skipping any edit that
    /// overlaps the primary completion edit. Remotely, the work is delegated
    /// to the host. When `push_to_history` is false, the transaction is
    /// forgotten so it doesn't appear in the undo stack.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let server_id = completion.server_id;
            // Server may have shut down since the completion was produced.
            let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
                Some((_, server)) => server.clone(),
                _ => return Task::ready(Ok(Default::default())),
            };

            cx.spawn(|this, mut cx| async move {
                // Only ask the server to resolve when it declares
                // `completionProvider.resolveProvider`; otherwise use the
                // edits already attached to the item.
                let can_resolve = lang_server
                    .capabilities()
                    .completion_provider
                    .as_ref()
                    .and_then(|options| options.resolve_provider)
                    .unwrap_or(false);
                let additional_text_edits = if can_resolve {
                    lang_server
                        .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                        .await?
                        .additional_text_edits
                } else {
                    completion.lsp_completion.additional_text_edits
                };
                if let Some(edits) = additional_text_edits {
                    let edits = this
                        .update(&mut cx, |this, cx| {
                            this.edits_from_lsp(
                                &buffer_handle,
                                edits,
                                lang_server.server_id(),
                                None,
                                cx,
                            )
                        })
                        .await?;

                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits into one transaction,
                        // separate from whatever was committed before.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();

                        for (range, text) in edits {
                            let primary = &completion.old_range;
                            let start_within = primary.start.cmp(&range.start, buffer).is_le()
                                && primary.end.cmp(&range.start, buffer).is_ge();
                            let end_within = range.start.cmp(&primary.end, buffer).is_le()
                                && range.end.cmp(&primary.end, buffer).is_ge();

                            //Skip additional edits which overlap with the primary completion edit
                            //https://github.com/zed-industries/zed/pull/1871
                            if !start_within && !end_within {
                                buffer.edit([(range, text)], None, cx);
                            }
                        }

                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                // Keep the edits but drop them from undo.
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait until the host's edits have replicated locally
                    // before exposing the transaction to the caller.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await?;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
4644
4645 pub fn code_actions<T: Clone + ToOffset>(
4646 &self,
4647 buffer_handle: &ModelHandle<Buffer>,
4648 range: Range<T>,
4649 cx: &mut ModelContext<Self>,
4650 ) -> Task<Result<Vec<CodeAction>>> {
4651 let buffer = buffer_handle.read(cx);
4652 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4653 self.request_lsp(
4654 buffer_handle.clone(),
4655 LanguageServerToQuery::Primary,
4656 GetCodeActions { range },
4657 cx,
4658 )
4659 }
4660
    /// Applies `action` to the project, returning the set of buffer
    /// transactions it produced.
    ///
    /// Locally, the action is first resolved (either via
    /// `codeAction/resolve` when it carries resolve `data`, or by re-querying
    /// code actions and matching by title), then its workspace edit is
    /// applied; if it instead carries a command, that command is executed and
    /// any workspace edits the server sent back are collected. Remotely, the
    /// action is forwarded to the host.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            // The server that produced the action may have shut down.
            let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
                self.language_server_for_buffer(buffer, action.server_id, cx)
            {
                (adapter.clone(), server.clone())
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // Refresh the embedded range (the buffer may have changed
                    // since the action was produced) and resolve the action.
                    *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-query and match by title to get a
                    // fresh, fully-populated action.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    if edit.changes.is_some() || edit.document_changes.is_some() {
                        return Self::deserialize_workspace_edit(
                            this,
                            edit,
                            push_to_history,
                            lsp_adapter.clone(),
                            lang_server.clone(),
                            &mut cx,
                        )
                        .await;
                    }
                }

                if let Some(command) = action.lsp_action.command {
                    // Clear any edits recorded for this server so that only
                    // edits triggered by this command are reported below.
                    this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id());
                    });

                    let result = lang_server
                        .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
                            command: command.command,
                            arguments: command.arguments.unwrap_or_default(),
                            ..Default::default()
                        })
                        .await;

                    if let Err(err) = result {
                        // TODO: LSP ERROR
                        return Err(err);
                    }

                    // The server applies edits via `workspace/applyEdit`
                    // while the command runs; collect whatever accumulated.
                    return Ok(this.update(&mut cx, |this, _| {
                        this.last_workspace_edits_by_language_server
                            .remove(&lang_server.server_id())
                            .unwrap_or_default()
                    }));
                }

                Ok(ProjectTransaction::default())
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
4768
    /// Performs on-type formatting for `buffer` after `trigger` was typed at
    /// `position`, returning the resulting transaction, if any.
    ///
    /// Locally, the buffer id is registered in `buffers_being_formatted` for
    /// the duration of the request to prevent concurrent formatting of the
    /// same buffer; remotely the request is forwarded to the host.
    fn apply_on_type_formatting(
        &self,
        buffer: ModelHandle<Buffer>,
        position: Anchor,
        trigger: String,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        if self.is_local() {
            cx.spawn(|this, mut cx| async move {
                // Do not allow multiple concurrent formatting requests for the
                // same buffer.
                this.update(&mut cx, |this, cx| {
                    this.buffers_being_formatted
                        .insert(buffer.read(cx).remote_id())
                });

                // Deregister the buffer on every exit path (including `?`
                // early returns) via this scope guard.
                let _cleanup = defer({
                    let this = this.clone();
                    let mut cx = cx.clone();
                    let closure_buffer = buffer.clone();
                    move || {
                        this.update(&mut cx, |this, cx| {
                            this.buffers_being_formatted
                                .remove(&closure_buffer.read(cx).remote_id());
                        });
                    }
                });

                // Ensure the anchor's edit has been applied before resolving
                // it to a concrete position.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(Some(position.timestamp))
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    let position = position.to_point_utf16(buffer.read(cx));
                    this.on_type_format(buffer, position, trigger, false, cx)
                })
                .await
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::OnTypeFormatting {
                project_id,
                buffer_id: buffer.read(cx).remote_id(),
                position: Some(serialize_anchor(&position)),
                trigger,
                version: serialize_version(&buffer.read(cx).version()),
            };
            cx.spawn(|_, _| async move {
                client
                    .request(request)
                    .await?
                    .transaction
                    .map(language::proto::deserialize_transaction)
                    .transpose()
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
4829
    /// Applies a batch of LSP text edits to `buffer_to_edit` as a single
    /// transaction, returning that transaction if any edit took effect.
    ///
    /// When `push_to_history` is false, the transaction is forgotten so the
    /// edits don't appear in the buffer's undo stack.
    async fn deserialize_edits(
        this: ModelHandle<Self>,
        buffer_to_edit: ModelHandle<Buffer>,
        edits: Vec<lsp::TextEdit>,
        push_to_history: bool,
        _: Arc<CachedLspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<Option<Transaction>> {
        // Convert LSP (position, text) edits into buffer-anchored edits.
        let edits = this
            .update(cx, |this, cx| {
                this.edits_from_lsp(
                    &buffer_to_edit,
                    edits,
                    language_server.server_id(),
                    None,
                    cx,
                )
            })
            .await?;

        let transaction = buffer_to_edit.update(cx, |buffer, cx| {
            // Commit any pending transaction, then group all of these edits
            // into a fresh one.
            buffer.finalize_last_transaction();
            buffer.start_transaction();
            for (range, text) in edits {
                buffer.edit([(range, text)], None, cx);
            }

            if buffer.end_transaction(cx).is_some() {
                let transaction = buffer.finalize_last_transaction().unwrap().clone();
                if !push_to_history {
                    buffer.forget_transaction(transaction.id);
                }
                Some(transaction)
            } else {
                // No edit changed the buffer; nothing to report.
                None
            }
        });

        Ok(transaction)
    }
4871
    /// Applies an LSP `WorkspaceEdit` to the project: file create/rename/
    /// delete operations are executed against the filesystem, and document
    /// edits are applied to (possibly newly opened) buffers.
    ///
    /// Returns a `ProjectTransaction` mapping each edited buffer to the
    /// transaction produced for it.
    async fn deserialize_workspace_edit(
        this: ModelHandle<Self>,
        edit: lsp::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<CachedLspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.read_with(cx, |this, _| this.fs.clone());
        let mut operations = Vec::new();
        // `document_changes` takes precedence over `changes`, per the LSP
        // spec; plain `changes` are normalized into edit operations.
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
                }
                lsp::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
                    text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        // Operations are applied strictly in order, as required by the spec.
        for operation in operations {
            match operation {
                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with("/")` compares path
                    // components, not a trailing slash in the original URI —
                    // confirm this actually detects directory URIs.
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
                }

                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options.map(Into::into).unwrap_or_default(),
                    )
                    .await?;
                }

                lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op.options.map(Into::into).unwrap_or_default();
                    // NOTE(review): same component-wise `ends_with("/")`
                    // caveat as in the Create branch above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }

                lsp::DocumentChangeOperation::Edit(op) => {
                    // Open (or re-use) the buffer for the target document via
                    // the same server so edits can be applied to it.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                language_server.server_id(),
                                lsp_adapter.name.clone(),
                                cx,
                            )
                        })
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            // Annotated edits carry the same text edit inside;
                            // the annotation itself is not applied here.
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                OneOf::Left(edit) => edit,
                                OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                language_server.server_id(),
                                op.text_document.version,
                                cx,
                            )
                        })
                        .await?;

                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        // Group this document's edits into one transaction,
                        // optionally keeping it out of the undo history.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], None, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    });
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
5006
5007 pub fn prepare_rename<T: ToPointUtf16>(
5008 &self,
5009 buffer: ModelHandle<Buffer>,
5010 position: T,
5011 cx: &mut ModelContext<Self>,
5012 ) -> Task<Result<Option<Range<Anchor>>>> {
5013 let position = position.to_point_utf16(buffer.read(cx));
5014 self.request_lsp(
5015 buffer,
5016 LanguageServerToQuery::Primary,
5017 PrepareRename { position },
5018 cx,
5019 )
5020 }
5021
5022 pub fn perform_rename<T: ToPointUtf16>(
5023 &self,
5024 buffer: ModelHandle<Buffer>,
5025 position: T,
5026 new_name: String,
5027 push_to_history: bool,
5028 cx: &mut ModelContext<Self>,
5029 ) -> Task<Result<ProjectTransaction>> {
5030 let position = position.to_point_utf16(buffer.read(cx));
5031 self.request_lsp(
5032 buffer,
5033 LanguageServerToQuery::Primary,
5034 PerformRename {
5035 position,
5036 new_name,
5037 push_to_history,
5038 },
5039 cx,
5040 )
5041 }
5042
5043 pub fn on_type_format<T: ToPointUtf16>(
5044 &self,
5045 buffer: ModelHandle<Buffer>,
5046 position: T,
5047 trigger: String,
5048 push_to_history: bool,
5049 cx: &mut ModelContext<Self>,
5050 ) -> Task<Result<Option<Transaction>>> {
5051 let (position, tab_size) = buffer.read_with(cx, |buffer, cx| {
5052 let position = position.to_point_utf16(buffer);
5053 (
5054 position,
5055 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5056 .tab_size,
5057 )
5058 });
5059 self.request_lsp(
5060 buffer.clone(),
5061 LanguageServerToQuery::Primary,
5062 OnTypeFormatting {
5063 position,
5064 trigger,
5065 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5066 push_to_history,
5067 },
5068 cx,
5069 )
5070 }
5071
5072 pub fn inlay_hints<T: ToOffset>(
5073 &self,
5074 buffer_handle: ModelHandle<Buffer>,
5075 range: Range<T>,
5076 cx: &mut ModelContext<Self>,
5077 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5078 let buffer = buffer_handle.read(cx);
5079 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5080 let range_start = range.start;
5081 let range_end = range.end;
5082 let buffer_id = buffer.remote_id();
5083 let buffer_version = buffer.version().clone();
5084 let lsp_request = InlayHints { range };
5085
5086 if self.is_local() {
5087 let lsp_request_task = self.request_lsp(
5088 buffer_handle.clone(),
5089 LanguageServerToQuery::Primary,
5090 lsp_request,
5091 cx,
5092 );
5093 cx.spawn(|_, mut cx| async move {
5094 buffer_handle
5095 .update(&mut cx, |buffer, _| {
5096 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5097 })
5098 .await
5099 .context("waiting for inlay hint request range edits")?;
5100 lsp_request_task.await.context("inlay hints LSP request")
5101 })
5102 } else if let Some(project_id) = self.remote_id() {
5103 let client = self.client.clone();
5104 let request = proto::InlayHints {
5105 project_id,
5106 buffer_id,
5107 start: Some(serialize_anchor(&range_start)),
5108 end: Some(serialize_anchor(&range_end)),
5109 version: serialize_version(&buffer_version),
5110 };
5111 cx.spawn(|project, cx| async move {
5112 let response = client
5113 .request(request)
5114 .await
5115 .context("inlay hints proto request")?;
5116 let hints_request_result = LspCommand::response_from_proto(
5117 lsp_request,
5118 response,
5119 project,
5120 buffer_handle.clone(),
5121 cx,
5122 )
5123 .await;
5124
5125 hints_request_result.context("inlay hints proto response conversion")
5126 })
5127 } else {
5128 Task::ready(Err(anyhow!("project does not have a remote id")))
5129 }
5130 }
5131
5132 pub fn resolve_inlay_hint(
5133 &self,
5134 hint: InlayHint,
5135 buffer_handle: ModelHandle<Buffer>,
5136 server_id: LanguageServerId,
5137 cx: &mut ModelContext<Self>,
5138 ) -> Task<anyhow::Result<InlayHint>> {
5139 if self.is_local() {
5140 let buffer = buffer_handle.read(cx);
5141 let (_, lang_server) = if let Some((adapter, server)) =
5142 self.language_server_for_buffer(buffer, server_id, cx)
5143 {
5144 (adapter.clone(), server.clone())
5145 } else {
5146 return Task::ready(Ok(hint));
5147 };
5148 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5149 return Task::ready(Ok(hint));
5150 }
5151
5152 let buffer_snapshot = buffer.snapshot();
5153 cx.spawn(|_, mut cx| async move {
5154 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5155 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5156 );
5157 let resolved_hint = resolve_task
5158 .await
5159 .context("inlay hint resolve LSP request")?;
5160 let resolved_hint = InlayHints::lsp_to_project_hint(
5161 resolved_hint,
5162 &buffer_handle,
5163 server_id,
5164 ResolveState::Resolved,
5165 false,
5166 &mut cx,
5167 )
5168 .await?;
5169 Ok(resolved_hint)
5170 })
5171 } else if let Some(project_id) = self.remote_id() {
5172 let client = self.client.clone();
5173 let request = proto::ResolveInlayHint {
5174 project_id,
5175 buffer_id: buffer_handle.read(cx).remote_id(),
5176 language_server_id: server_id.0 as u64,
5177 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5178 };
5179 cx.spawn(|_, _| async move {
5180 let response = client
5181 .request(request)
5182 .await
5183 .context("inlay hints proto request")?;
5184 match response.hint {
5185 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5186 .context("inlay hints proto resolve response conversion"),
5187 None => Ok(hint),
5188 }
5189 })
5190 } else {
5191 Task::ready(Err(anyhow!("project does not have a remote id")))
5192 }
5193 }
5194
5195 #[allow(clippy::type_complexity)]
5196 pub fn search(
5197 &self,
5198 query: SearchQuery,
5199 cx: &mut ModelContext<Self>,
5200 ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
5201 if self.is_local() {
5202 self.search_local(query, cx)
5203 } else if let Some(project_id) = self.remote_id() {
5204 let (tx, rx) = smol::channel::unbounded();
5205 let request = self.client.request(query.to_proto(project_id));
5206 cx.spawn(|this, mut cx| async move {
5207 let response = request.await?;
5208 let mut result = HashMap::default();
5209 for location in response.locations {
5210 let target_buffer = this
5211 .update(&mut cx, |this, cx| {
5212 this.wait_for_remote_buffer(location.buffer_id, cx)
5213 })
5214 .await?;
5215 let start = location
5216 .start
5217 .and_then(deserialize_anchor)
5218 .ok_or_else(|| anyhow!("missing target start"))?;
5219 let end = location
5220 .end
5221 .and_then(deserialize_anchor)
5222 .ok_or_else(|| anyhow!("missing target end"))?;
5223 result
5224 .entry(target_buffer)
5225 .or_insert(Vec::new())
5226 .push(start..end)
5227 }
5228 for (buffer, ranges) in result {
5229 let _ = tx.send((buffer, ranges)).await;
5230 }
5231 Result::<(), anyhow::Error>::Ok(())
5232 })
5233 .detach_and_log_err(cx);
5234 rx
5235 } else {
5236 unimplemented!();
5237 }
5238 }
5239
5240 pub fn search_local(
5241 &self,
5242 query: SearchQuery,
5243 cx: &mut ModelContext<Self>,
5244 ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
5245 // Local search is split into several phases.
5246 // TL;DR is that we do 2 passes; initial pass to pick files which contain at least one match
5247 // and the second phase that finds positions of all the matches found in the candidate files.
5248 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5249 //
5250 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5251 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5252 //
5253 // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
5254 // Then, we go through a worktree and check for files that do match a predicate. If the file had an opened version, we skip the scan
5255 // of FS version for that file altogether - after all, what we have in memory is more up-to-date than what's in FS.
5256 // 2. At this point, we have a list of all potentially matching buffers/files.
5257 // We sort that list by buffer path - this list is retained for later use.
5258 // We ensure that all buffers are now opened and available in project.
5259 // 3. We run a scan over all the candidate buffers on multiple background threads.
5260 // We cannot assume that there will even be a match - while at least one match
5261 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
5262 // There is also an auxilliary background thread responsible for result gathering.
5263 // This is where the sorted list of buffers comes into play to maintain sorted order; Whenever this background thread receives a notification (buffer has/doesn't have matches),
5264 // it keeps it around. It reports matches in sorted order, though it accepts them in unsorted order as well.
5265 // As soon as the match info on next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
5266 // entry - which might already be available thanks to out-of-order processing.
5267 //
5268 // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
5269 // This however would mean that project search (that is the main user of this function) would have to do the sorting itself, on the go.
5270 // This isn't as straightforward as running an insertion sort sadly, and would also mean that it would have to care about maintaining match index
5271 // in face of constantly updating list of sorted matches.
5272 // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
5273 let snapshots = self
5274 .visible_worktrees(cx)
5275 .filter_map(|tree| {
5276 let tree = tree.read(cx).as_local()?;
5277 Some(tree.snapshot())
5278 })
5279 .collect::<Vec<_>>();
5280
5281 let background = cx.background().clone();
5282 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
5283 if path_count == 0 {
5284 let (_, rx) = smol::channel::bounded(1024);
5285 return rx;
5286 }
5287 let workers = background.num_cpus().min(path_count);
5288 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5289 let mut unnamed_files = vec![];
5290 let opened_buffers = self
5291 .opened_buffers
5292 .iter()
5293 .filter_map(|(_, b)| {
5294 let buffer = b.upgrade(cx)?;
5295 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
5296 if let Some(path) = snapshot.file().map(|file| file.path()) {
5297 Some((path.clone(), (buffer, snapshot)))
5298 } else {
5299 unnamed_files.push(buffer);
5300 None
5301 }
5302 })
5303 .collect();
5304 cx.background()
5305 .spawn(Self::background_search(
5306 unnamed_files,
5307 opened_buffers,
5308 cx.background().clone(),
5309 self.fs.clone(),
5310 workers,
5311 query.clone(),
5312 path_count,
5313 snapshots,
5314 matching_paths_tx,
5315 ))
5316 .detach();
5317
5318 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5319 let background = cx.background().clone();
5320 let (result_tx, result_rx) = smol::channel::bounded(1024);
5321 cx.background()
5322 .spawn(async move {
5323 let Ok(buffers) = buffers.await else {
5324 return;
5325 };
5326
5327 let buffers_len = buffers.len();
5328 if buffers_len == 0 {
5329 return;
5330 }
5331 let query = &query;
5332 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5333 background
5334 .scoped(|scope| {
5335 #[derive(Clone)]
5336 struct FinishedStatus {
5337 entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
5338 buffer_index: SearchMatchCandidateIndex,
5339 }
5340
5341 for _ in 0..workers {
5342 let finished_tx = finished_tx.clone();
5343 let mut buffers_rx = buffers_rx.clone();
5344 scope.spawn(async move {
5345 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5346 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5347 {
5348 if query.file_matches(
5349 snapshot.file().map(|file| file.path().as_ref()),
5350 ) {
5351 query
5352 .search(&snapshot, None)
5353 .await
5354 .iter()
5355 .map(|range| {
5356 snapshot.anchor_before(range.start)
5357 ..snapshot.anchor_after(range.end)
5358 })
5359 .collect()
5360 } else {
5361 Vec::new()
5362 }
5363 } else {
5364 Vec::new()
5365 };
5366
5367 let status = if !buffer_matches.is_empty() {
5368 let entry = if let Some((buffer, _)) = entry.as_ref() {
5369 Some((buffer.clone(), buffer_matches))
5370 } else {
5371 None
5372 };
5373 FinishedStatus {
5374 entry,
5375 buffer_index,
5376 }
5377 } else {
5378 FinishedStatus {
5379 entry: None,
5380 buffer_index,
5381 }
5382 };
5383 if finished_tx.send(status).await.is_err() {
5384 break;
5385 }
5386 }
5387 });
5388 }
5389 // Report sorted matches
5390 scope.spawn(async move {
5391 let mut current_index = 0;
5392 let mut scratch = vec![None; buffers_len];
5393 while let Some(status) = finished_rx.next().await {
5394 debug_assert!(
5395 scratch[status.buffer_index].is_none(),
5396 "Got match status of position {} twice",
5397 status.buffer_index
5398 );
5399 let index = status.buffer_index;
5400 scratch[index] = Some(status);
5401 while current_index < buffers_len {
5402 let Some(current_entry) = scratch[current_index].take() else {
5403 // We intentionally **do not** increment `current_index` here. When next element arrives
5404 // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
5405 // this time.
5406 break;
5407 };
5408 if let Some(entry) = current_entry.entry {
5409 result_tx.send(entry).await.log_err();
5410 }
5411 current_index += 1;
5412 }
5413 if current_index == buffers_len {
5414 break;
5415 }
5416 }
5417 });
5418 })
5419 .await;
5420 })
5421 .detach();
5422 result_rx
5423 }
    /// Pick paths that might potentially contain a match of a given search query.
    ///
    /// Open buffers (unnamed and named) are sent down `matching_paths_tx` first and
    /// unconditionally, since their in-memory contents may differ from disk; on-disk
    /// files are then scanned in parallel and sent only when `query.detect` reports
    /// a possible match. Files that correspond to an already-open buffer are skipped
    /// so each candidate is reported exactly once.
    async fn background_search(
        unnamed_buffers: Vec<ModelHandle<Buffer>>,
        opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
        background: Arc<Background>,
        fs: Arc<dyn Fs>,
        workers: usize,
        query: SearchQuery,
        path_count: usize,
        snapshots: Vec<LocalSnapshot>,
        matching_paths_tx: Sender<SearchMatchCandidate>,
    ) {
        let fs = &fs;
        let query = &query;
        let matching_paths_tx = &matching_paths_tx;
        let snapshots = &snapshots;
        // Ceiling division: each worker is assigned an equal-sized slice of the
        // global path index space (the last slice may extend past `path_count`).
        let paths_per_worker = (path_count + workers - 1) / workers;
        // Buffers without a backing file can never be found by the FS scan below,
        // so report them as candidates up front.
        for buffer in unnamed_buffers {
            matching_paths_tx
                .send(SearchMatchCandidate::OpenBuffer {
                    buffer: buffer.clone(),
                    path: None,
                })
                .await
                .log_err();
        }
        for (path, (buffer, _)) in opened_buffers.iter() {
            matching_paths_tx
                .send(SearchMatchCandidate::OpenBuffer {
                    buffer: buffer.clone(),
                    path: Some(path.clone()),
                })
                .await
                .log_err();
        }
        background
            .scoped(|scope| {
                for worker_ix in 0..workers {
                    // Each worker owns the half-open range
                    // [worker_start_ix, worker_end_ix) of the global file index,
                    // which spans all worktree snapshots concatenated in order.
                    let worker_start_ix = worker_ix * paths_per_worker;
                    let worker_end_ix = worker_start_ix + paths_per_worker;
                    let unnamed_buffers = opened_buffers.clone();
                    scope.spawn(async move {
                        let mut snapshot_start_ix = 0;
                        let mut abs_path = PathBuf::new();
                        for snapshot in snapshots {
                            let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
                            if worker_end_ix <= snapshot_start_ix {
                                // All remaining snapshots lie past this worker's range.
                                break;
                            } else if worker_start_ix > snapshot_end_ix {
                                // This snapshot lies entirely before the worker's range.
                                snapshot_start_ix = snapshot_end_ix;
                                continue;
                            } else {
                                // Translate the worker's global range into this
                                // snapshot's local file indices.
                                let start_in_snapshot =
                                    worker_start_ix.saturating_sub(snapshot_start_ix);
                                let end_in_snapshot =
                                    cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;

                                for entry in snapshot
                                    .files(false, start_in_snapshot)
                                    .take(end_in_snapshot - start_in_snapshot)
                                {
                                    if matching_paths_tx.is_closed() {
                                        // The consumer is gone; stop scanning.
                                        break;
                                    }
                                    // Already reported above as an open buffer
                                    // (note: despite the name, this map holds the
                                    // *named* open buffers).
                                    if unnamed_buffers.contains_key(&entry.path) {
                                        continue;
                                    }
                                    let matches = if query.file_matches(Some(&entry.path)) {
                                        // Reuse one PathBuf allocation per worker.
                                        abs_path.clear();
                                        abs_path.push(&snapshot.abs_path());
                                        abs_path.push(&entry.path);
                                        if let Some(file) = fs.open_sync(&abs_path).await.log_err()
                                        {
                                            query.detect(file).unwrap_or(false)
                                        } else {
                                            false
                                        }
                                    } else {
                                        false
                                    };

                                    if matches {
                                        let project_path = SearchMatchCandidate::Path {
                                            worktree_id: snapshot.id(),
                                            path: entry.path.clone(),
                                        };
                                        if matching_paths_tx.send(project_path).await.is_err() {
                                            break;
                                        }
                                    }
                                }

                                snapshot_start_ix = snapshot_end_ix;
                            }
                        }
                    });
                }
            })
            .await;
    }
5524
    /// Issue an LSP request for `buffer_handle`, routed either to a local
    /// language server or, for remote projects, over RPC to the host.
    ///
    /// Returns `R::Response::default()` when no suitable server exists, when the
    /// server lacks the required capability, or when the project is neither
    /// local nor remote.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        server: LanguageServerToQuery,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let language_server = match server {
                LanguageServerToQuery::Primary => {
                    match self.primary_language_server_for_buffer(buffer, cx) {
                        Some((_, server)) => Some(Arc::clone(server)),
                        // No primary server for this buffer: succeed with an
                        // empty/default response rather than erroring.
                        None => return Task::ready(Ok(Default::default())),
                    }
                }
                LanguageServerToQuery::Other(id) => self
                    .language_server_for_buffer(buffer, id, cx)
                    .map(|(_, server)| Arc::clone(server)),
            };
            // Only buffers backed by a local file can be addressed via an LSP URI.
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let (Some(file), Some(language_server)) = (file, language_server) {
                let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
                return cx.spawn(|this, cx| async move {
                    // Servers that don't advertise the needed capability get a
                    // default response instead of an "unsupported method" error.
                    if !request.check_capabilities(language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let result = language_server.request::<R::LspRequest>(lsp_params).await;
                    let response = match result {
                        Ok(response) => response,

                        Err(err) => {
                            log::warn!(
                                "Generic lsp request to {} failed: {}",
                                language_server.name(),
                                err
                            );
                            return Err(err);
                        }
                    };

                    request
                        .response_from_lsp(
                            response,
                            this,
                            buffer_handle,
                            language_server.server_id(),
                            cx,
                        )
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            // Guest in a collaborative project: forward the request to the host.
            return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
        }

        Task::ready(Ok(Default::default()))
    }
5587
5588 fn send_lsp_proto_request<R: LspCommand>(
5589 &self,
5590 buffer: ModelHandle<Buffer>,
5591 project_id: u64,
5592 request: R,
5593 cx: &mut ModelContext<'_, Project>,
5594 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
5595 let rpc = self.client.clone();
5596 let message = request.to_proto(project_id, buffer.read(cx));
5597 cx.spawn_weak(|this, cx| async move {
5598 // Ensure the project is still alive by the time the task
5599 // is scheduled.
5600 this.upgrade(&cx)
5601 .ok_or_else(|| anyhow!("project dropped"))?;
5602 let response = rpc.request(message).await?;
5603 let this = this
5604 .upgrade(&cx)
5605 .ok_or_else(|| anyhow!("project dropped"))?;
5606 if this.read_with(&cx, |this, _| this.is_read_only()) {
5607 Err(anyhow!("disconnected before completing request"))
5608 } else {
5609 request
5610 .response_from_proto(response, this, buffer, cx)
5611 .await
5612 }
5613 })
5614 }
5615
    /// Collect every candidate from `matching_paths_rx`, sort them by path, and
    /// begin opening a buffer for each one.
    ///
    /// Returns two channels:
    /// - a oneshot that yields the full, path-sorted candidate list once the
    ///   candidate stream is exhausted;
    /// - a channel of `(buffer + snapshot, index)` pairs, where `index` is the
    ///   candidate's position in the sorted list. Buffers are opened
    ///   concurrently, so pairs may arrive out of order; `None` means the buffer
    ///   could not be opened.
    fn sort_candidates_and_open_buffers(
        mut matching_paths_rx: Receiver<SearchMatchCandidate>,
        cx: &mut ModelContext<Self>,
    ) -> (
        futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
        Receiver<(
            Option<(ModelHandle<Buffer>, BufferSnapshot)>,
            SearchMatchCandidateIndex,
        )>,
    ) {
        let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
        let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
        cx.spawn(|this, cx| async move {
            // Drain the entire candidate stream before sorting; the order can
            // only be established once every candidate is known.
            let mut buffers = vec![];
            while let Some(entry) = matching_paths_rx.next().await {
                buffers.push(entry);
            }
            buffers.sort_by_key(|candidate| candidate.path());
            let matching_paths = buffers.clone();
            let _ = sorted_buffers_tx.send(buffers);
            for (index, candidate) in matching_paths.into_iter().enumerate() {
                if buffers_tx.is_closed() {
                    break;
                }
                let this = this.clone();
                let buffers_tx = buffers_tx.clone();
                // Open buffers concurrently; each task reports its result tagged
                // with the candidate's index in the sorted order.
                cx.spawn(|mut cx| async move {
                    let buffer = match candidate {
                        SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
                        SearchMatchCandidate::Path { worktree_id, path } => this
                            .update(&mut cx, |this, cx| {
                                this.open_buffer((worktree_id, path), cx)
                            })
                            .await
                            .log_err(),
                    };
                    if let Some(buffer) = buffer {
                        let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                        buffers_tx
                            .send((Some((buffer, snapshot)), index))
                            .await
                            .log_err();
                    } else {
                        // Report failures too, so the index stream stays gap-free.
                        buffers_tx.send((None, index)).await.log_err();
                    }

                    Ok::<_, anyhow::Error>(())
                })
                .detach();
            }
        })
        .detach();
        (sorted_buffers_rx, buffers_rx)
    }
5670
5671 pub fn find_or_create_local_worktree(
5672 &mut self,
5673 abs_path: impl AsRef<Path>,
5674 visible: bool,
5675 cx: &mut ModelContext<Self>,
5676 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
5677 let abs_path = abs_path.as_ref();
5678 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
5679 Task::ready(Ok((tree, relative_path)))
5680 } else {
5681 let worktree = self.create_local_worktree(abs_path, visible, cx);
5682 cx.foreground()
5683 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
5684 }
5685 }
5686
5687 pub fn find_local_worktree(
5688 &self,
5689 abs_path: &Path,
5690 cx: &AppContext,
5691 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
5692 for tree in &self.worktrees {
5693 if let Some(tree) = tree.upgrade(cx) {
5694 if let Some(relative_path) = tree
5695 .read(cx)
5696 .as_local()
5697 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
5698 {
5699 return Some((tree.clone(), relative_path.into()));
5700 }
5701 }
5702 }
5703 None
5704 }
5705
5706 pub fn is_shared(&self) -> bool {
5707 match &self.client_state {
5708 Some(ProjectClientState::Local { .. }) => true,
5709 _ => false,
5710 }
5711 }
5712
    /// Start loading a local worktree rooted at `abs_path`.
    ///
    /// Concurrent requests for the same path are deduplicated through
    /// `loading_local_worktrees`: every caller awaits a shared clone of a single
    /// loading task. On completion the worktree is registered via `add_worktree`
    /// and the pending entry is removed.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(|project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;

                        // Clear the pending entry before inspecting the result,
                        // so that a failed load can be retried later.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });

                        let worktree = worktree?;
                        project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
                        Ok(worktree)
                    }
                    // Wrap the error in `Arc` so the shared future's output can
                    // be cloned to every waiter.
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();
        cx.foreground().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                // Convert the shared `Arc` error back into a plain anyhow error.
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
5759
5760 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
5761 self.worktrees.retain(|worktree| {
5762 if let Some(worktree) = worktree.upgrade(cx) {
5763 let id = worktree.read(cx).id();
5764 if id == id_to_remove {
5765 cx.emit(Event::WorktreeRemoved(id));
5766 false
5767 } else {
5768 true
5769 }
5770 } else {
5771 false
5772 }
5773 });
5774 self.metadata_changed(cx);
5775 }
5776
    /// Register a newly created or joined worktree with the project: wire up
    /// observation and event subscriptions, store a handle, and announce it.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        // Re-notify our own observers whenever the worktree changes.
        cx.observe(worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            cx.subscribe(worktree, |this, worktree, event, cx| match event {
                worktree::Event::UpdatedEntries(changes) => {
                    this.update_local_worktree_buffers(&worktree, changes, cx);
                    this.update_local_worktree_language_servers(&worktree, changes, cx);
                    this.update_local_worktree_settings(&worktree, changes, cx);
                    cx.emit(Event::WorktreeUpdatedEntries(
                        worktree.read(cx).id(),
                        changes.clone(),
                    ));
                }
                worktree::Event::UpdatedGitRepositories(updated_repos) => {
                    this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
                }
            })
            .detach();
        }

        // Invisible, unshared local worktrees are held weakly so they can be
        // released when nothing else keeps them alive.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }

        // When the worktree model is released, unregister it and clear any local
        // settings that were keyed on its handle id.
        let handle_id = worktree.id();
        cx.observe_release(worktree, move |this, worktree, cx| {
            let _ = this.remove_worktree(worktree.id(), cx);
            cx.update_global::<SettingsStore, _, _>(|store, cx| {
                store.clear_local_settings(handle_id, cx).log_err()
            });
        })
        .detach();

        cx.emit(Event::WorktreeAdded);
        self.metadata_changed(cx);
    }
5821
    /// Reconcile open buffers with filesystem changes reported by a local
    /// worktree: refresh each affected buffer's `File` (entry id, path, mtime,
    /// deletion state), update the path/entry-id indices, notify remote
    /// collaborators, and re-register renamed buffers with language servers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: &ModelHandle<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();

        let mut renamed_buffers = Vec::new();
        for (path, entry_id, _) in changes {
            let worktree_id = worktree_handle.read(cx).id();
            let project_path = ProjectPath {
                worktree_id,
                path: path.clone(),
            };

            // Find the open buffer for this change, first by entry id and then
            // by path (the entry id changes when a file is recreated).
            let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
                Some(&buffer_id) => buffer_id,
                None => match self.local_buffer_ids_by_path.get(&project_path) {
                    Some(&buffer_id) => buffer_id,
                    None => continue,
                },
            };

            let open_buffer = self.opened_buffers.get(&buffer_id);
            let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
                buffer
            } else {
                // The buffer was dropped; clean up every index that referred to it.
                self.opened_buffers.remove(&buffer_id);
                self.local_buffer_ids_by_path.remove(&project_path);
                self.local_buffer_ids_by_entry_id.remove(entry_id);
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                if let Some(old_file) = File::from_dyn(buffer.file()) {
                    if old_file.worktree != *worktree_handle {
                        return;
                    }

                    // Build the buffer's new `File`: prefer lookup by entry id,
                    // fall back to lookup by path, and finally mark the file as
                    // deleted when the entry is gone from the snapshot.
                    let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
                        File {
                            is_local: true,
                            entry_id: entry.id,
                            mtime: entry.mtime,
                            path: entry.path.clone(),
                            worktree: worktree_handle.clone(),
                            is_deleted: false,
                        }
                    } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
                        File {
                            is_local: true,
                            entry_id: entry.id,
                            mtime: entry.mtime,
                            path: entry.path.clone(),
                            worktree: worktree_handle.clone(),
                            is_deleted: false,
                        }
                    } else {
                        File {
                            is_local: true,
                            entry_id: old_file.entry_id,
                            path: old_file.path().clone(),
                            mtime: old_file.mtime(),
                            worktree: worktree_handle.clone(),
                            is_deleted: true,
                        }
                    };

                    // A changed absolute path means the buffer was renamed.
                    let old_path = old_file.abs_path(cx);
                    if new_file.abs_path(cx) != old_path {
                        renamed_buffers.push((cx.handle(), old_file.clone()));
                        self.local_buffer_ids_by_path.remove(&project_path);
                        self.local_buffer_ids_by_path.insert(
                            ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            },
                            buffer_id,
                        );
                    }

                    if new_file.entry_id != *entry_id {
                        self.local_buffer_ids_by_entry_id.remove(entry_id);
                        self.local_buffer_ids_by_entry_id
                            .insert(new_file.entry_id, buffer_id);
                    }

                    if new_file != *old_file {
                        // Keep collaborators' view of the buffer's file in sync.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }

                        buffer.file_updated(Arc::new(new_file), cx).detach();
                    }
                }
            });
        }

        // A rename may move a buffer into a different language's purview, so
        // re-run language detection and language-server registration.
        for (buffer, old_file) in renamed_buffers {
            self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
            self.detect_language_for_buffer(&buffer, cx);
            self.register_buffer_with_language_servers(&buffer, cx);
        }
    }
5933
    /// Translate worktree file changes into LSP `workspace/didChangeWatchedFiles`
    /// notifications for every language server attached to the worktree,
    /// filtered through each server's registered watch patterns.
    fn update_local_worktree_language_servers(
        &mut self,
        worktree_handle: &ModelHandle<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut ModelContext<Self>,
    ) {
        if changes.is_empty() {
            return;
        }

        let worktree_id = worktree_handle.read(cx).id();
        // Several (worktree, language) pairs can map to the same server; dedupe
        // the ids so each server is notified exactly once.
        let mut language_server_ids = self
            .language_server_ids
            .iter()
            .filter_map(|((server_worktree_id, _), server_id)| {
                (*server_worktree_id == worktree_id).then_some(*server_id)
            })
            .collect::<Vec<_>>();
        language_server_ids.sort();
        language_server_ids.dedup();

        let abs_path = worktree_handle.read(cx).abs_path();
        for server_id in &language_server_ids {
            if let Some(LanguageServerState::Running {
                server,
                watched_paths,
                ..
            }) = self.language_servers.get(server_id)
            {
                if let Some(watched_paths) = watched_paths.get(&worktree_id) {
                    let params = lsp::DidChangeWatchedFilesParams {
                        changes: changes
                            .iter()
                            .filter_map(|(path, _, change)| {
                                // Skip paths the server didn't ask to watch.
                                if !watched_paths.is_match(&path) {
                                    return None;
                                }
                                let typ = match change {
                                    // `Loaded` entries come from the initial scan,
                                    // not from a filesystem change; don't forward.
                                    PathChange::Loaded => return None,
                                    PathChange::Added => lsp::FileChangeType::CREATED,
                                    PathChange::Removed => lsp::FileChangeType::DELETED,
                                    PathChange::Updated => lsp::FileChangeType::CHANGED,
                                    PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
                                };
                                Some(lsp::FileEvent {
                                    uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
                                    typ,
                                })
                            })
                            .collect(),
                    };

                    if !params.changes.is_empty() {
                        server
                            .notify::<lsp::notification::DidChangeWatchedFiles>(params)
                            .log_err();
                    }
                }
            }
        }
    }
5995
    /// Recompute git diff bases for buffers whose containing repository changed.
    ///
    /// Both still-loading and already-open buffers are considered; the index
    /// text is loaded on a background thread, then each buffer's diff base is
    /// updated and broadcast to collaborators.
    fn update_local_worktree_buffers_git_repos(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut ModelContext<Self>,
    ) {
        debug_assert!(worktree_handle.read(cx).is_local());

        // Identify the loading buffers whose containing repository has changed.
        let future_buffers = self
            .loading_buffers_by_path
            .iter()
            .filter_map(|(project_path, receiver)| {
                if project_path.worktree_id != worktree_handle.read(cx).id() {
                    return None;
                }
                let path = &project_path.path;
                changed_repos
                    .iter()
                    .find(|(work_dir, _)| path.starts_with(work_dir))?;
                let receiver = receiver.clone();
                let path = path.clone();
                Some(async move {
                    wait_for_loading_buffer(receiver)
                        .await
                        .ok()
                        .map(|buffer| (buffer, path))
                })
            })
            .collect::<FuturesUnordered<_>>();

        // Identify the current buffers whose containing repository has changed.
        let current_buffers = self
            .opened_buffers
            .values()
            .filter_map(|buffer| {
                let buffer = buffer.upgrade(cx)?;
                let file = File::from_dyn(buffer.read(cx).file())?;
                if file.worktree != worktree_handle {
                    return None;
                }
                let path = file.path();
                changed_repos
                    .iter()
                    .find(|(work_dir, _)| path.starts_with(work_dir))?;
                Some((buffer, path.clone()))
            })
            .collect::<Vec<_>>();

        if future_buffers.len() + current_buffers.len() == 0 {
            return;
        }

        let remote_id = self.remote_id();
        let client = self.client.clone();
        cx.spawn_weak(move |_, mut cx| async move {
            // Wait for all of the buffers to load.
            let future_buffers = future_buffers.collect::<Vec<_>>().await;

            // Reload the diff base for every buffer whose containing git repository has changed.
            let snapshot =
                worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
            let diff_bases_by_buffer = cx
                .background()
                .spawn(async move {
                    future_buffers
                        .into_iter()
                        .filter_map(|e| e)
                        .chain(current_buffers)
                        .filter_map(|(buffer, path)| {
                            let (work_directory, repo) =
                                snapshot.repository_and_work_directory_for_path(&path)?;
                            let repo = snapshot.get_local_repo(&repo)?;
                            let relative_path = path.strip_prefix(&work_directory).ok()?;
                            // Load the staged (index) version of the file — the
                            // text the buffer's diff is computed against.
                            let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
                            Some((buffer, base_text))
                        })
                        .collect::<Vec<_>>()
                })
                .await;

            // Assign the new diff bases on all of the buffers.
            for (buffer, diff_base) in diff_bases_by_buffer {
                let buffer_id = buffer.update(&mut cx, |buffer, cx| {
                    buffer.set_diff_base(diff_base.clone(), cx);
                    buffer.remote_id()
                });
                if let Some(project_id) = remote_id {
                    client
                        .send(proto::UpdateDiffBase {
                            project_id,
                            buffer_id,
                            diff_base,
                        })
                        .log_err();
                }
            }
        })
        .detach();
    }
6096
    /// React to changes of directory-local settings files inside a local
    /// worktree: reload their contents asynchronously, apply them to the global
    /// `SettingsStore`, and forward them to collaborators.
    fn update_local_worktree_settings(
        &mut self,
        worktree: &ModelHandle<Worktree>,
        changes: &UpdatedEntriesSet,
        cx: &mut ModelContext<Self>,
    ) {
        let project_id = self.remote_id();
        // NOTE: `worktree.id()` here is the model handle's id — the same key
        // used with `clear_local_settings` in `add_worktree` — while
        // `remote_worktree_id` below is the worktree's own id used on the wire.
        let worktree_id = worktree.id();
        let worktree = worktree.read(cx).as_local().unwrap();
        let remote_worktree_id = worktree.id();

        let mut settings_contents = Vec::new();
        for (path, _, change) in changes.iter() {
            if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
                // The settings apply to the directory containing the settings
                // file, i.e. the path with the relative settings suffix removed.
                let settings_dir = Arc::from(
                    path.ancestors()
                        .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
                        .unwrap(),
                );
                let fs = self.fs.clone();
                let removed = *change == PathChange::Removed;
                let abs_path = worktree.absolutize(path);
                settings_contents.push(async move {
                    // A removed settings file yields `None`, which clears the
                    // directory's local settings below.
                    (settings_dir, (!removed).then_some(fs.load(&abs_path).await))
                });
            }
        }

        if settings_contents.is_empty() {
            return;
        }

        let client = self.client.clone();
        cx.spawn_weak(move |_, mut cx| async move {
            let settings_contents: Vec<(Arc<Path>, _)> =
                futures::future::join_all(settings_contents).await;
            cx.update(|cx| {
                cx.update_global::<SettingsStore, _, _>(|store, cx| {
                    for (directory, file_content) in settings_contents {
                        let file_content = file_content.and_then(|content| content.log_err());
                        store
                            .set_local_settings(
                                worktree_id,
                                directory.clone(),
                                file_content.as_ref().map(String::as_str),
                                cx,
                            )
                            .log_err();
                        if let Some(remote_id) = project_id {
                            client
                                .send(proto::UpdateWorktreeSettings {
                                    project_id: remote_id,
                                    worktree_id: remote_worktree_id.to_proto(),
                                    path: directory.to_string_lossy().into_owned(),
                                    content: file_content,
                                })
                                .log_err();
                        }
                    }
                });
            });
        })
        .detach();
    }
6161
6162 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6163 let new_active_entry = entry.and_then(|project_path| {
6164 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6165 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6166 Some(entry.id)
6167 });
6168 if new_active_entry != self.active_entry {
6169 self.active_entry = new_active_entry;
6170 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6171 }
6172 }
6173
6174 pub fn language_servers_running_disk_based_diagnostics(
6175 &self,
6176 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6177 self.language_server_statuses
6178 .iter()
6179 .filter_map(|(id, status)| {
6180 if status.has_pending_diagnostic_updates {
6181 Some(*id)
6182 } else {
6183 None
6184 }
6185 })
6186 }
6187
6188 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
6189 let mut summary = DiagnosticSummary::default();
6190 for (_, _, path_summary) in self.diagnostic_summaries(cx) {
6191 summary.error_count += path_summary.error_count;
6192 summary.warning_count += path_summary.warning_count;
6193 }
6194 summary
6195 }
6196
6197 pub fn diagnostic_summaries<'a>(
6198 &'a self,
6199 cx: &'a AppContext,
6200 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6201 self.visible_worktrees(cx).flat_map(move |worktree| {
6202 let worktree = worktree.read(cx);
6203 let worktree_id = worktree.id();
6204 worktree
6205 .diagnostic_summaries()
6206 .map(move |(path, server_id, summary)| {
6207 (ProjectPath { worktree_id, path }, server_id, summary)
6208 })
6209 })
6210 }
6211
    /// Emit `DiskBasedDiagnosticsStarted` for the given language server.
    pub fn disk_based_diagnostics_started(
        &mut self,
        language_server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) {
        cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
    }
6219
    /// Emit `DiskBasedDiagnosticsFinished` for the given language server.
    pub fn disk_based_diagnostics_finished(
        &mut self,
        language_server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) {
        cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
    }
6227
    /// The id of the currently active project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
6231
6232 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6233 self.worktree_for_id(path.worktree_id, cx)?
6234 .read(cx)
6235 .entry_for_path(&path.path)
6236 .cloned()
6237 }
6238
6239 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6240 let worktree = self.worktree_for_entry(entry_id, cx)?;
6241 let worktree = worktree.read(cx);
6242 let worktree_id = worktree.id();
6243 let path = worktree.entry_for_id(entry_id)?.path.clone();
6244 Some(ProjectPath { worktree_id, path })
6245 }
6246
6247 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6248 let workspace_root = self
6249 .worktree_for_id(project_path.worktree_id, cx)?
6250 .read(cx)
6251 .abs_path();
6252 let project_path = project_path.path.as_ref();
6253
6254 Some(if project_path == Path::new("") {
6255 workspace_root.to_path_buf()
6256 } else {
6257 workspace_root.join(project_path)
6258 })
6259 }
6260
6261 // RPC message handlers
6262
6263 async fn handle_unshare_project(
6264 this: ModelHandle<Self>,
6265 _: TypedEnvelope<proto::UnshareProject>,
6266 _: Arc<Client>,
6267 mut cx: AsyncAppContext,
6268 ) -> Result<()> {
6269 this.update(&mut cx, |this, cx| {
6270 if this.is_local() {
6271 this.unshare(cx)?;
6272 } else {
6273 this.disconnected_from_host(cx);
6274 }
6275 Ok(())
6276 })
6277 }
6278
6279 async fn handle_add_collaborator(
6280 this: ModelHandle<Self>,
6281 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6282 _: Arc<Client>,
6283 mut cx: AsyncAppContext,
6284 ) -> Result<()> {
6285 let collaborator = envelope
6286 .payload
6287 .collaborator
6288 .take()
6289 .ok_or_else(|| anyhow!("empty collaborator"))?;
6290
6291 let collaborator = Collaborator::from_proto(collaborator)?;
6292 this.update(&mut cx, |this, cx| {
6293 this.shared_buffers.remove(&collaborator.peer_id);
6294 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6295 this.collaborators
6296 .insert(collaborator.peer_id, collaborator);
6297 cx.notify();
6298 });
6299
6300 Ok(())
6301 }
6302
    /// RPC: a collaborator reconnected under a new peer id. Move their
    /// collaborator record and shared-buffer state from the old id to the new
    /// one, and trigger a buffer resync when the rejoining peer is the host.
    async fn handle_update_project_collaborator(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let old_peer_id = envelope
            .payload
            .old_peer_id
            .ok_or_else(|| anyhow!("missing old peer id"))?;
        let new_peer_id = envelope
            .payload
            .new_peer_id
            .ok_or_else(|| anyhow!("missing new peer id"))?;
        this.update(&mut cx, |this, cx| {
            let collaborator = this
                .collaborators
                .remove(&old_peer_id)
                .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
            // Replica 0 identifies the host of the project.
            let is_host = collaborator.replica_id == 0;
            this.collaborators.insert(new_peer_id, collaborator);

            let buffers = this.shared_buffers.remove(&old_peer_id);
            log::info!(
                "peer {} became {}. moving buffers {:?}",
                old_peer_id,
                new_peer_id,
                &buffers
            );
            if let Some(buffers) = buffers {
                this.shared_buffers.insert(new_peer_id, buffers);
            }

            if is_host {
                // The host rejoined: drop queued operations for buffers that
                // were never opened and request a full resync.
                this.opened_buffers
                    .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
                this.buffer_ordered_messages_tx
                    .unbounded_send(BufferOrderedMessage::Resync)
                    .unwrap();
            }

            cx.emit(Event::CollaboratorUpdated {
                old_peer_id,
                new_peer_id,
            });
            cx.notify();
            Ok(())
        })
    }
6352
6353 async fn handle_remove_collaborator(
6354 this: ModelHandle<Self>,
6355 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
6356 _: Arc<Client>,
6357 mut cx: AsyncAppContext,
6358 ) -> Result<()> {
6359 this.update(&mut cx, |this, cx| {
6360 let peer_id = envelope
6361 .payload
6362 .peer_id
6363 .ok_or_else(|| anyhow!("invalid peer id"))?;
6364 let replica_id = this
6365 .collaborators
6366 .remove(&peer_id)
6367 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
6368 .replica_id;
6369 for buffer in this.opened_buffers.values() {
6370 if let Some(buffer) = buffer.upgrade(cx) {
6371 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
6372 }
6373 }
6374 this.shared_buffers.remove(&peer_id);
6375
6376 cx.emit(Event::CollaboratorLeft(peer_id));
6377 cx.notify();
6378 Ok(())
6379 })
6380 }
6381
6382 async fn handle_update_project(
6383 this: ModelHandle<Self>,
6384 envelope: TypedEnvelope<proto::UpdateProject>,
6385 _: Arc<Client>,
6386 mut cx: AsyncAppContext,
6387 ) -> Result<()> {
6388 this.update(&mut cx, |this, cx| {
6389 // Don't handle messages that were sent before the response to us joining the project
6390 if envelope.message_id > this.join_project_response_message_id {
6391 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6392 }
6393 Ok(())
6394 })
6395 }
6396
6397 async fn handle_update_worktree(
6398 this: ModelHandle<Self>,
6399 envelope: TypedEnvelope<proto::UpdateWorktree>,
6400 _: Arc<Client>,
6401 mut cx: AsyncAppContext,
6402 ) -> Result<()> {
6403 this.update(&mut cx, |this, cx| {
6404 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6405 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6406 worktree.update(cx, |worktree, _| {
6407 let worktree = worktree.as_remote_mut().unwrap();
6408 worktree.update_from_remote(envelope.payload);
6409 });
6410 }
6411 Ok(())
6412 })
6413 }
6414
6415 async fn handle_update_worktree_settings(
6416 this: ModelHandle<Self>,
6417 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
6418 _: Arc<Client>,
6419 mut cx: AsyncAppContext,
6420 ) -> Result<()> {
6421 this.update(&mut cx, |this, cx| {
6422 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6423 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6424 cx.update_global::<SettingsStore, _, _>(|store, cx| {
6425 store
6426 .set_local_settings(
6427 worktree.id(),
6428 PathBuf::from(&envelope.payload.path).into(),
6429 envelope.payload.content.as_ref().map(String::as_str),
6430 cx,
6431 )
6432 .log_err();
6433 });
6434 }
6435 Ok(())
6436 })
6437 }
6438
6439 async fn handle_create_project_entry(
6440 this: ModelHandle<Self>,
6441 envelope: TypedEnvelope<proto::CreateProjectEntry>,
6442 _: Arc<Client>,
6443 mut cx: AsyncAppContext,
6444 ) -> Result<proto::ProjectEntryResponse> {
6445 let worktree = this.update(&mut cx, |this, cx| {
6446 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6447 this.worktree_for_id(worktree_id, cx)
6448 .ok_or_else(|| anyhow!("worktree not found"))
6449 })?;
6450 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6451 let entry = worktree
6452 .update(&mut cx, |worktree, cx| {
6453 let worktree = worktree.as_local_mut().unwrap();
6454 let path = PathBuf::from(envelope.payload.path);
6455 worktree.create_entry(path, envelope.payload.is_directory, cx)
6456 })
6457 .await?;
6458 Ok(proto::ProjectEntryResponse {
6459 entry: Some((&entry).into()),
6460 worktree_scan_id: worktree_scan_id as u64,
6461 })
6462 }
6463
6464 async fn handle_rename_project_entry(
6465 this: ModelHandle<Self>,
6466 envelope: TypedEnvelope<proto::RenameProjectEntry>,
6467 _: Arc<Client>,
6468 mut cx: AsyncAppContext,
6469 ) -> Result<proto::ProjectEntryResponse> {
6470 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6471 let worktree = this.read_with(&cx, |this, cx| {
6472 this.worktree_for_entry(entry_id, cx)
6473 .ok_or_else(|| anyhow!("worktree not found"))
6474 })?;
6475 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6476 let entry = worktree
6477 .update(&mut cx, |worktree, cx| {
6478 let new_path = PathBuf::from(envelope.payload.new_path);
6479 worktree
6480 .as_local_mut()
6481 .unwrap()
6482 .rename_entry(entry_id, new_path, cx)
6483 .ok_or_else(|| anyhow!("invalid entry"))
6484 })?
6485 .await?;
6486 Ok(proto::ProjectEntryResponse {
6487 entry: Some((&entry).into()),
6488 worktree_scan_id: worktree_scan_id as u64,
6489 })
6490 }
6491
6492 async fn handle_copy_project_entry(
6493 this: ModelHandle<Self>,
6494 envelope: TypedEnvelope<proto::CopyProjectEntry>,
6495 _: Arc<Client>,
6496 mut cx: AsyncAppContext,
6497 ) -> Result<proto::ProjectEntryResponse> {
6498 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6499 let worktree = this.read_with(&cx, |this, cx| {
6500 this.worktree_for_entry(entry_id, cx)
6501 .ok_or_else(|| anyhow!("worktree not found"))
6502 })?;
6503 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6504 let entry = worktree
6505 .update(&mut cx, |worktree, cx| {
6506 let new_path = PathBuf::from(envelope.payload.new_path);
6507 worktree
6508 .as_local_mut()
6509 .unwrap()
6510 .copy_entry(entry_id, new_path, cx)
6511 .ok_or_else(|| anyhow!("invalid entry"))
6512 })?
6513 .await?;
6514 Ok(proto::ProjectEntryResponse {
6515 entry: Some((&entry).into()),
6516 worktree_scan_id: worktree_scan_id as u64,
6517 })
6518 }
6519
6520 async fn handle_delete_project_entry(
6521 this: ModelHandle<Self>,
6522 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
6523 _: Arc<Client>,
6524 mut cx: AsyncAppContext,
6525 ) -> Result<proto::ProjectEntryResponse> {
6526 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6527
6528 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
6529
6530 let worktree = this.read_with(&cx, |this, cx| {
6531 this.worktree_for_entry(entry_id, cx)
6532 .ok_or_else(|| anyhow!("worktree not found"))
6533 })?;
6534 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6535 worktree
6536 .update(&mut cx, |worktree, cx| {
6537 worktree
6538 .as_local_mut()
6539 .unwrap()
6540 .delete_entry(entry_id, cx)
6541 .ok_or_else(|| anyhow!("invalid entry"))
6542 })?
6543 .await?;
6544 Ok(proto::ProjectEntryResponse {
6545 entry: None,
6546 worktree_scan_id: worktree_scan_id as u64,
6547 })
6548 }
6549
6550 async fn handle_expand_project_entry(
6551 this: ModelHandle<Self>,
6552 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
6553 _: Arc<Client>,
6554 mut cx: AsyncAppContext,
6555 ) -> Result<proto::ExpandProjectEntryResponse> {
6556 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6557 let worktree = this
6558 .read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
6559 .ok_or_else(|| anyhow!("invalid request"))?;
6560 worktree
6561 .update(&mut cx, |worktree, cx| {
6562 worktree
6563 .as_local_mut()
6564 .unwrap()
6565 .expand_entry(entry_id, cx)
6566 .ok_or_else(|| anyhow!("invalid entry"))
6567 })?
6568 .await?;
6569 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
6570 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
6571 }
6572
6573 async fn handle_update_diagnostic_summary(
6574 this: ModelHandle<Self>,
6575 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
6576 _: Arc<Client>,
6577 mut cx: AsyncAppContext,
6578 ) -> Result<()> {
6579 this.update(&mut cx, |this, cx| {
6580 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6581 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6582 if let Some(summary) = envelope.payload.summary {
6583 let project_path = ProjectPath {
6584 worktree_id,
6585 path: Path::new(&summary.path).into(),
6586 };
6587 worktree.update(cx, |worktree, _| {
6588 worktree
6589 .as_remote_mut()
6590 .unwrap()
6591 .update_diagnostic_summary(project_path.path.clone(), &summary);
6592 });
6593 cx.emit(Event::DiagnosticsUpdated {
6594 language_server_id: LanguageServerId(summary.language_server_id as usize),
6595 path: project_path,
6596 });
6597 }
6598 }
6599 Ok(())
6600 })
6601 }
6602
6603 async fn handle_start_language_server(
6604 this: ModelHandle<Self>,
6605 envelope: TypedEnvelope<proto::StartLanguageServer>,
6606 _: Arc<Client>,
6607 mut cx: AsyncAppContext,
6608 ) -> Result<()> {
6609 let server = envelope
6610 .payload
6611 .server
6612 .ok_or_else(|| anyhow!("invalid server"))?;
6613 this.update(&mut cx, |this, cx| {
6614 this.language_server_statuses.insert(
6615 LanguageServerId(server.id as usize),
6616 LanguageServerStatus {
6617 name: server.name,
6618 pending_work: Default::default(),
6619 has_pending_diagnostic_updates: false,
6620 progress_tokens: Default::default(),
6621 },
6622 );
6623 cx.notify();
6624 });
6625 Ok(())
6626 }
6627
    /// Mirrors language-server status events from the host into this
    /// project's local state (work progress and disk-based diagnostics).
    async fn handle_update_language_server(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);

            match envelope
                .payload
                .variant
                .ok_or_else(|| anyhow!("invalid variant"))?
            {
                // A long-running server operation began; start tracking it
                // under its progress token.
                proto::update_language_server::Variant::WorkStart(payload) => {
                    this.on_lsp_work_start(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                }

                proto::update_language_server::Variant::WorkProgress(payload) => {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                }

                proto::update_language_server::Variant::WorkEnd(payload) => {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                }

                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                    this.disk_based_diagnostics_started(language_server_id, cx);
                }

                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                    this.disk_based_diagnostics_finished(language_server_id, cx)
                }
            }

            Ok(())
        })
    }
6684
    /// Applies buffer operations received from a peer.
    ///
    /// If the buffer isn't open locally yet, the operations are queued in an
    /// `OpenBuffer::Operations` entry so they can be applied once the buffer's
    /// state arrives — that situation is only valid on a guest.
    async fn handle_update_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        this.update(&mut cx, |this, cx| {
            let payload = envelope.payload.clone();
            let buffer_id = payload.buffer_id;
            let ops = payload
                .operations
                .into_iter()
                .map(language::proto::deserialize_operation)
                .collect::<Result<Vec<_>, _>>()?;
            let is_remote = this.is_remote();
            match this.opened_buffers.entry(buffer_id) {
                hash_map::Entry::Occupied(mut e) => match e.get_mut() {
                    OpenBuffer::Strong(buffer) => {
                        buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
                    }
                    // Buffer state hasn't arrived yet; queue the operations.
                    OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
                    // The buffer was dropped locally; ignore the update.
                    OpenBuffer::Weak(_) => {}
                },
                hash_map::Entry::Vacant(e) => {
                    // Receiving operations before the buffer itself is only
                    // legal on a guest; on the host it indicates a protocol
                    // violation by the sender.
                    assert!(
                        is_remote,
                        "received buffer update from {:?}",
                        envelope.original_sender_id
                    );
                    e.insert(OpenBuffer::Operations(ops));
                }
            }
            Ok(proto::Ack {})
        })
    }
6720
    /// Receives a buffer being streamed from the host, either as the initial
    /// `State` message or as a follow-up `Chunk` of serialized operations.
    ///
    /// The buffer lives in `incomplete_remote_buffers` until the last chunk
    /// arrives, at which point it is registered as a regular open buffer.
    async fn handle_create_buffer_for_peer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            match envelope
                .payload
                .variant
                .ok_or_else(|| anyhow!("missing variant"))?
            {
                proto::create_buffer_for_peer::Variant::State(mut state) => {
                    // Resolve the buffer's file against our local worktrees
                    // before constructing the buffer model.
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = WorktreeId::from_proto(file.worktree_id);
                        let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                            anyhow!("no worktree found for id {}", file.worktree_id)
                        })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language::File>);
                    }

                    let buffer_id = state.id;
                    let buffer = cx.add_model(|_| {
                        Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
                    });
                    this.incomplete_remote_buffers
                        .insert(buffer_id, Some(buffer));
                }
                proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                    // Chunks are only valid after a `State` message for the
                    // same buffer id has been received.
                    let buffer = this
                        .incomplete_remote_buffers
                        .get(&chunk.buffer_id)
                        .cloned()
                        .flatten()
                        .ok_or_else(|| {
                            anyhow!(
                                "received chunk for buffer {} without initial state",
                                chunk.buffer_id
                            )
                        })?;
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;

                    // The final chunk completes the stream; promote the buffer
                    // to a regular open buffer.
                    if chunk.is_last {
                        this.incomplete_remote_buffers.remove(&chunk.buffer_id);
                        this.register_buffer(&buffer, cx)?;
                    }
                }
            }

            Ok(())
        })
    }
6780
6781 async fn handle_update_diff_base(
6782 this: ModelHandle<Self>,
6783 envelope: TypedEnvelope<proto::UpdateDiffBase>,
6784 _: Arc<Client>,
6785 mut cx: AsyncAppContext,
6786 ) -> Result<()> {
6787 this.update(&mut cx, |this, cx| {
6788 let buffer_id = envelope.payload.buffer_id;
6789 let diff_base = envelope.payload.diff_base;
6790 if let Some(buffer) = this
6791 .opened_buffers
6792 .get_mut(&buffer_id)
6793 .and_then(|b| b.upgrade(cx))
6794 .or_else(|| {
6795 this.incomplete_remote_buffers
6796 .get(&buffer_id)
6797 .cloned()
6798 .flatten()
6799 })
6800 {
6801 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
6802 }
6803 Ok(())
6804 })
6805 }
6806
6807 async fn handle_update_buffer_file(
6808 this: ModelHandle<Self>,
6809 envelope: TypedEnvelope<proto::UpdateBufferFile>,
6810 _: Arc<Client>,
6811 mut cx: AsyncAppContext,
6812 ) -> Result<()> {
6813 let buffer_id = envelope.payload.buffer_id;
6814
6815 this.update(&mut cx, |this, cx| {
6816 let payload = envelope.payload.clone();
6817 if let Some(buffer) = this
6818 .opened_buffers
6819 .get(&buffer_id)
6820 .and_then(|b| b.upgrade(cx))
6821 .or_else(|| {
6822 this.incomplete_remote_buffers
6823 .get(&buffer_id)
6824 .cloned()
6825 .flatten()
6826 })
6827 {
6828 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
6829 let worktree = this
6830 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
6831 .ok_or_else(|| anyhow!("no such worktree"))?;
6832 let file = File::from_proto(file, worktree, cx)?;
6833 buffer.update(cx, |buffer, cx| {
6834 buffer.file_updated(Arc::new(file), cx).detach();
6835 });
6836 this.detect_language_for_buffer(&buffer, cx);
6837 }
6838 Ok(())
6839 })
6840 }
6841
    /// Saves a buffer on behalf of a remote peer, first waiting until the
    /// local replica has caught up to the version the peer asked to save.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            anyhow::Ok((project_id, buffer))
        })?;
        // Don't save until this replica has seen everything the requester saw
        // at the moment they requested the save.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
            })
            .await?;
        // Re-read the id from the buffer itself (shadowing the payload value).
        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());

        this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
            .await?;
        // Report the version/mtime/fingerprint recorded at save time.
        Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(buffer.saved_version()),
            mtime: Some(buffer.saved_mtime().into()),
            fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
        }))
    }
6875
6876 async fn handle_reload_buffers(
6877 this: ModelHandle<Self>,
6878 envelope: TypedEnvelope<proto::ReloadBuffers>,
6879 _: Arc<Client>,
6880 mut cx: AsyncAppContext,
6881 ) -> Result<proto::ReloadBuffersResponse> {
6882 let sender_id = envelope.original_sender_id()?;
6883 let reload = this.update(&mut cx, |this, cx| {
6884 let mut buffers = HashSet::default();
6885 for buffer_id in &envelope.payload.buffer_ids {
6886 buffers.insert(
6887 this.opened_buffers
6888 .get(buffer_id)
6889 .and_then(|buffer| buffer.upgrade(cx))
6890 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
6891 );
6892 }
6893 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
6894 })?;
6895
6896 let project_transaction = reload.await?;
6897 let project_transaction = this.update(&mut cx, |this, cx| {
6898 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
6899 });
6900 Ok(proto::ReloadBuffersResponse {
6901 transaction: Some(project_transaction),
6902 })
6903 }
6904
    /// Re-synchronizes a reconnecting guest's buffers with our state.
    ///
    /// For each buffer the guest reports, responds with our version and sends
    /// the guest its file, diff base, saved state, and any operations it is
    /// missing relative to the version it reported.
    async fn handle_synchronize_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };

        this.update(&mut cx, |this, cx| {
            let Some(guest_id) = envelope.original_sender_id else {
                error!("missing original_sender_id on SynchronizeBuffers request");
                return;
            };

            // Rebuild the set of buffers shared with this guest from scratch.
            this.shared_buffers.entry(guest_id).or_default().clear();
            for buffer in envelope.payload.buffers {
                let buffer_id = buffer.id;
                let remote_version = language::proto::deserialize_version(&buffer.version);
                if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
                    this.shared_buffers
                        .entry(guest_id)
                        .or_default()
                        .insert(buffer_id);

                    let buffer = buffer.read(cx);
                    response.buffers.push(proto::BufferVersion {
                        id: buffer_id,
                        version: language::proto::serialize_version(&buffer.version),
                    });

                    // Operations the guest is missing, relative to the
                    // version it reported.
                    let operations = buffer.serialize_ops(Some(remote_version), cx);
                    let client = this.client.clone();
                    if let Some(file) = buffer.file() {
                        client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id as u64,
                                file: Some(file.to_proto()),
                            })
                            .log_err();
                    }

                    client
                        .send(proto::UpdateDiffBase {
                            project_id,
                            buffer_id: buffer_id as u64,
                            diff_base: buffer.diff_base().map(Into::into),
                        })
                        .log_err();

                    client
                        .send(proto::BufferReloaded {
                            project_id,
                            buffer_id,
                            version: language::proto::serialize_version(buffer.saved_version()),
                            mtime: Some(buffer.saved_mtime().into()),
                            fingerprint: language::proto::serialize_fingerprint(
                                buffer.saved_version_fingerprint(),
                            ),
                            line_ending: language::proto::serialize_line_ending(
                                buffer.line_ending(),
                            ) as i32,
                        })
                        .log_err();

                    // Stream the missing operations in the background, in
                    // chunks, so large histories don't block this handler.
                    cx.background()
                        .spawn(
                            async move {
                                let operations = operations.await;
                                for chunk in split_operations(operations) {
                                    client
                                        .request(proto::UpdateBuffer {
                                            project_id,
                                            buffer_id,
                                            operations: chunk,
                                        })
                                        .await?;
                                }
                                anyhow::Ok(())
                            }
                            .log_err(),
                        )
                        .detach();
                }
            }
        });

        Ok(response)
    }
6997
6998 async fn handle_format_buffers(
6999 this: ModelHandle<Self>,
7000 envelope: TypedEnvelope<proto::FormatBuffers>,
7001 _: Arc<Client>,
7002 mut cx: AsyncAppContext,
7003 ) -> Result<proto::FormatBuffersResponse> {
7004 let sender_id = envelope.original_sender_id()?;
7005 let format = this.update(&mut cx, |this, cx| {
7006 let mut buffers = HashSet::default();
7007 for buffer_id in &envelope.payload.buffer_ids {
7008 buffers.insert(
7009 this.opened_buffers
7010 .get(buffer_id)
7011 .and_then(|buffer| buffer.upgrade(cx))
7012 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7013 );
7014 }
7015 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7016 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7017 })?;
7018
7019 let project_transaction = format.await?;
7020 let project_transaction = this.update(&mut cx, |this, cx| {
7021 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7022 });
7023 Ok(proto::FormatBuffersResponse {
7024 transaction: Some(project_transaction),
7025 })
7026 }
7027
7028 async fn handle_apply_additional_edits_for_completion(
7029 this: ModelHandle<Self>,
7030 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7031 _: Arc<Client>,
7032 mut cx: AsyncAppContext,
7033 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7034 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7035 let buffer = this
7036 .opened_buffers
7037 .get(&envelope.payload.buffer_id)
7038 .and_then(|buffer| buffer.upgrade(cx))
7039 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7040 let language = buffer.read(cx).language();
7041 let completion = language::proto::deserialize_completion(
7042 envelope
7043 .payload
7044 .completion
7045 .ok_or_else(|| anyhow!("invalid completion"))?,
7046 language.cloned(),
7047 );
7048 Ok::<_, anyhow::Error>((buffer, completion))
7049 })?;
7050
7051 let completion = completion.await?;
7052
7053 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7054 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7055 });
7056
7057 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7058 transaction: apply_additional_edits
7059 .await?
7060 .as_ref()
7061 .map(language::proto::serialize_transaction),
7062 })
7063 }
7064
7065 async fn handle_apply_code_action(
7066 this: ModelHandle<Self>,
7067 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7068 _: Arc<Client>,
7069 mut cx: AsyncAppContext,
7070 ) -> Result<proto::ApplyCodeActionResponse> {
7071 let sender_id = envelope.original_sender_id()?;
7072 let action = language::proto::deserialize_code_action(
7073 envelope
7074 .payload
7075 .action
7076 .ok_or_else(|| anyhow!("invalid action"))?,
7077 )?;
7078 let apply_code_action = this.update(&mut cx, |this, cx| {
7079 let buffer = this
7080 .opened_buffers
7081 .get(&envelope.payload.buffer_id)
7082 .and_then(|buffer| buffer.upgrade(cx))
7083 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7084 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7085 })?;
7086
7087 let project_transaction = apply_code_action.await?;
7088 let project_transaction = this.update(&mut cx, |this, cx| {
7089 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7090 });
7091 Ok(proto::ApplyCodeActionResponse {
7092 transaction: Some(project_transaction),
7093 })
7094 }
7095
7096 async fn handle_on_type_formatting(
7097 this: ModelHandle<Self>,
7098 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7099 _: Arc<Client>,
7100 mut cx: AsyncAppContext,
7101 ) -> Result<proto::OnTypeFormattingResponse> {
7102 let on_type_formatting = this.update(&mut cx, |this, cx| {
7103 let buffer = this
7104 .opened_buffers
7105 .get(&envelope.payload.buffer_id)
7106 .and_then(|buffer| buffer.upgrade(cx))
7107 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7108 let position = envelope
7109 .payload
7110 .position
7111 .and_then(deserialize_anchor)
7112 .ok_or_else(|| anyhow!("invalid position"))?;
7113 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7114 buffer,
7115 position,
7116 envelope.payload.trigger.clone(),
7117 cx,
7118 ))
7119 })?;
7120
7121 let transaction = on_type_formatting
7122 .await?
7123 .as_ref()
7124 .map(language::proto::serialize_transaction);
7125 Ok(proto::OnTypeFormattingResponse { transaction })
7126 }
7127
    /// Computes inlay hints for a buffer range on behalf of a remote peer,
    /// after waiting for the local replica to catch up to the version the
    /// peer's anchors were created against.
    async fn handle_inlay_hints(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::InlayHints>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::InlayHintsResponse> {
        let sender_id = envelope.original_sender_id()?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade(cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        let buffer_version = deserialize_version(&envelope.payload.version);

        // The peer serialized its anchors against this version; wait until we
        // have seen it before deserializing them.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(buffer_version.clone())
            })
            .await
            .with_context(|| {
                format!(
                    "waiting for version {:?} for buffer {}",
                    buffer_version,
                    buffer.id()
                )
            })?;

        let start = envelope
            .payload
            .start
            .and_then(deserialize_anchor)
            .context("missing range start")?;
        let end = envelope
            .payload
            .end
            .and_then(deserialize_anchor)
            .context("missing range end")?;
        let buffer_hints = this
            .update(&mut cx, |project, cx| {
                project.inlay_hints(buffer, start..end, cx)
            })
            .await
            .context("inlay hints fetch")?;

        Ok(this.update(&mut cx, |project, cx| {
            InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
        }))
    }
7177
7178 async fn handle_resolve_inlay_hint(
7179 this: ModelHandle<Self>,
7180 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7181 _: Arc<Client>,
7182 mut cx: AsyncAppContext,
7183 ) -> Result<proto::ResolveInlayHintResponse> {
7184 let proto_hint = envelope
7185 .payload
7186 .hint
7187 .expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
7188 let hint = InlayHints::proto_to_project_hint(proto_hint)
7189 .context("resolved proto inlay hint conversion")?;
7190 let buffer = this.update(&mut cx, |this, cx| {
7191 this.opened_buffers
7192 .get(&envelope.payload.buffer_id)
7193 .and_then(|buffer| buffer.upgrade(cx))
7194 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7195 })?;
7196 let response_hint = this
7197 .update(&mut cx, |project, cx| {
7198 project.resolve_inlay_hint(
7199 hint,
7200 buffer,
7201 LanguageServerId(envelope.payload.language_server_id as usize),
7202 cx,
7203 )
7204 })
7205 .await
7206 .context("inlay hints fetch")?;
7207 Ok(proto::ResolveInlayHintResponse {
7208 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7209 })
7210 }
7211
7212 async fn handle_refresh_inlay_hints(
7213 this: ModelHandle<Self>,
7214 _: TypedEnvelope<proto::RefreshInlayHints>,
7215 _: Arc<Client>,
7216 mut cx: AsyncAppContext,
7217 ) -> Result<proto::Ack> {
7218 this.update(&mut cx, |_, cx| {
7219 cx.emit(Event::RefreshInlayHints);
7220 });
7221 Ok(proto::Ack {})
7222 }
7223
    /// Generic handler for LSP requests proxied from a guest: deserializes
    /// the request, runs it against the buffer's primary language server, and
    /// serializes the response back for the requesting peer.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.read_with(&cx, |this, _| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade(&cx))
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })?;
        // `from_proto` is async — it may need to wait for the buffer to reach
        // the version the request refers to.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version the response will be serialized relative to.
        let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
            })
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
7264
7265 async fn handle_get_project_symbols(
7266 this: ModelHandle<Self>,
7267 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7268 _: Arc<Client>,
7269 mut cx: AsyncAppContext,
7270 ) -> Result<proto::GetProjectSymbolsResponse> {
7271 let symbols = this
7272 .update(&mut cx, |this, cx| {
7273 this.symbols(&envelope.payload.query, cx)
7274 })
7275 .await?;
7276
7277 Ok(proto::GetProjectSymbolsResponse {
7278 symbols: symbols.iter().map(serialize_symbol).collect(),
7279 })
7280 }
7281
    /// Runs a project-wide search on behalf of a remote peer, ensuring each
    /// matching buffer is shared with the peer and returning the match
    /// locations as anchors into those buffers.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));

        cx.spawn(|mut cx| async move {
            let mut locations = Vec::new();
            // Drain the search stream; each item is a buffer plus the ranges
            // that matched within it.
            while let Some((buffer, ranges)) = result.next().await {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Share the buffer with the peer (if not already shared)
                    // so the returned anchors are resolvable on their side.
                    let buffer_id = this.update(&mut cx, |this, cx| {
                        this.create_buffer_for_peer(&buffer, peer_id, cx)
                    });
                    locations.push(proto::Location {
                        buffer_id,
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
        .await
    }
7312
    /// Opens the buffer containing a symbol on behalf of a remote peer.
    ///
    /// The symbol carries a signature that this host computed when it handed
    /// the symbol out (see `symbol_signature`); requests whose signature
    /// doesn't match are rejected so peers can't forge arbitrary paths.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this
            .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
            .await?;
        // Verify the signature before acting on the symbol's path.
        let symbol = this.read_with(&cx, |this, _| {
            let signature = this.symbol_signature(&symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer_id: this.update(&mut cx, |this, cx| {
                this.create_buffer_for_peer(&buffer, peer_id, cx)
            }),
        })
    }
7345
    /// Computes an unforgeable signature for a symbol's path by hashing the
    /// worktree id, the path, and this project's private nonce. Used to
    /// verify that `OpenBufferForSymbol` requests refer to symbols this host
    /// actually handed out.
    fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
        let mut hasher = Sha256::new();
        hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
        hasher.update(project_path.path.to_string_lossy().as_bytes());
        hasher.update(self.nonce.to_be_bytes());
        hasher.finalize().as_slice().try_into().unwrap()
    }
7353
7354 async fn handle_open_buffer_by_id(
7355 this: ModelHandle<Self>,
7356 envelope: TypedEnvelope<proto::OpenBufferById>,
7357 _: Arc<Client>,
7358 mut cx: AsyncAppContext,
7359 ) -> Result<proto::OpenBufferResponse> {
7360 let peer_id = envelope.original_sender_id()?;
7361 let buffer = this
7362 .update(&mut cx, |this, cx| {
7363 this.open_buffer_by_id(envelope.payload.id, cx)
7364 })
7365 .await?;
7366 this.update(&mut cx, |this, cx| {
7367 Ok(proto::OpenBufferResponse {
7368 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7369 })
7370 })
7371 }
7372
7373 async fn handle_open_buffer_by_path(
7374 this: ModelHandle<Self>,
7375 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7376 _: Arc<Client>,
7377 mut cx: AsyncAppContext,
7378 ) -> Result<proto::OpenBufferResponse> {
7379 let peer_id = envelope.original_sender_id()?;
7380 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7381 let open_buffer = this.update(&mut cx, |this, cx| {
7382 this.open_buffer(
7383 ProjectPath {
7384 worktree_id,
7385 path: PathBuf::from(envelope.payload.path).into(),
7386 },
7387 cx,
7388 )
7389 });
7390
7391 let buffer = open_buffer.await?;
7392 this.update(&mut cx, |this, cx| {
7393 Ok(proto::OpenBufferResponse {
7394 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7395 })
7396 })
7397 }
7398
7399 fn serialize_project_transaction_for_peer(
7400 &mut self,
7401 project_transaction: ProjectTransaction,
7402 peer_id: proto::PeerId,
7403 cx: &mut AppContext,
7404 ) -> proto::ProjectTransaction {
7405 let mut serialized_transaction = proto::ProjectTransaction {
7406 buffer_ids: Default::default(),
7407 transactions: Default::default(),
7408 };
7409 for (buffer, transaction) in project_transaction.0 {
7410 serialized_transaction
7411 .buffer_ids
7412 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7413 serialized_transaction
7414 .transactions
7415 .push(language::proto::serialize_transaction(&transaction));
7416 }
7417 serialized_transaction
7418 }
7419
    /// Resolves a wire-format project transaction into local buffer handles,
    /// waiting for each referenced buffer — and every edit inside each
    /// transaction — to arrive from the host before returning.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffer_ids` and `transactions` are parallel arrays in the wire
            // format; pair them back up.
            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
            {
                let buffer = this
                    .update(&mut cx, |this, cx| {
                        this.wait_for_remote_buffer(buffer_id, cx)
                    })
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Don't return until every edit named by the transaction has
                // actually been applied to the local replica of the buffer.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await?;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
7456
7457 fn create_buffer_for_peer(
7458 &mut self,
7459 buffer: &ModelHandle<Buffer>,
7460 peer_id: proto::PeerId,
7461 cx: &mut AppContext,
7462 ) -> u64 {
7463 let buffer_id = buffer.read(cx).remote_id();
7464 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
7465 updates_tx
7466 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7467 .ok();
7468 }
7469 buffer_id
7470 }
7471
    /// Returns a task that resolves once the buffer with the given remote id
    /// has been fully opened on this client. Marks the id as an incomplete
    /// remote buffer so state arriving from the host is retained while we wait.
    fn wait_for_remote_buffer(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let mut opened_buffer_rx = self.opened_buffer.1.clone();

        cx.spawn_weak(|this, mut cx| async move {
            let buffer = loop {
                let Some(this) = this.upgrade(&cx) else {
                    return Err(anyhow!("project dropped"));
                };

                // Check whether the buffer has finished opening since the
                // last wakeup.
                let buffer = this.read_with(&cx, |this, cx| {
                    this.opened_buffers
                        .get(&id)
                        .and_then(|buffer| buffer.upgrade(cx))
                });

                if let Some(buffer) = buffer {
                    break buffer;
                } else if this.read_with(&cx, |this, _| this.is_read_only()) {
                    return Err(anyhow!("disconnected before buffer {} could be opened", id));
                }

                // Record that this buffer is still being assembled so that
                // incoming chunks for it are kept.
                this.update(&mut cx, |this, _| {
                    this.incomplete_remote_buffers.entry(id).or_default();
                });
                // Release our strong handle before suspending, so the project
                // can be dropped while we wait.
                drop(this);

                // Wake whenever any buffer finishes opening, then re-check.
                opened_buffer_rx
                    .next()
                    .await
                    .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
            };

            Ok(buffer)
        })
    }
7511
    /// After (re)connecting to a host, brings every locally-open buffer back
    /// in sync: reports our buffer versions, sends the host any operations it
    /// is missing, and re-requests buffers that were only partially received.
    fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        // Only a connected guest (remote) project can synchronize with a host.
        let project_id = match self.client_state.as_ref() {
            Some(ProjectClientState::Remote {
                sharing_has_stopped,
                remote_id,
                ..
            }) => {
                if *sharing_has_stopped {
                    return Task::ready(Err(anyhow!(
                        "can't synchronize remote buffers on a readonly project"
                    )));
                } else {
                    *remote_id
                }
            }
            Some(ProjectClientState::Local { .. }) | None => {
                return Task::ready(Err(anyhow!(
                    "can't synchronize remote buffers on a local project"
                )))
            }
        };

        let client = self.client.clone();
        cx.spawn(|this, cx| async move {
            // Snapshot the current version vector of every live buffer, plus
            // the ids of buffers that never finished opening.
            let (buffers, incomplete_buffer_ids) = this.read_with(&cx, |this, cx| {
                let buffers = this
                    .opened_buffers
                    .iter()
                    .filter_map(|(id, buffer)| {
                        let buffer = buffer.upgrade(cx)?;
                        Some(proto::BufferVersion {
                            id: *id,
                            version: language::proto::serialize_version(&buffer.read(cx).version),
                        })
                    })
                    .collect();
                let incomplete_buffer_ids = this
                    .incomplete_remote_buffers
                    .keys()
                    .copied()
                    .collect::<Vec<_>>();

                (buffers, incomplete_buffer_ids)
            });
            let response = client
                .request(proto::SynchronizeBuffers {
                    project_id,
                    buffers,
                })
                .await?;

            // For each buffer in the response, send the host whatever
            // operations it is missing relative to the version it reported.
            let send_updates_for_buffers = response.buffers.into_iter().map(|buffer| {
                let client = client.clone();
                let buffer_id = buffer.id;
                let remote_version = language::proto::deserialize_version(&buffer.version);
                this.read_with(&cx, |this, cx| {
                    if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
                        let operations = buffer.read(cx).serialize_ops(Some(remote_version), cx);
                        cx.background().spawn(async move {
                            let operations = operations.await;
                            // Operations are chunked to respect message size
                            // limits.
                            for chunk in split_operations(operations) {
                                client
                                    .request(proto::UpdateBuffer {
                                        project_id,
                                        buffer_id,
                                        operations: chunk,
                                    })
                                    .await?;
                            }
                            anyhow::Ok(())
                        })
                    } else {
                        Task::ready(Ok(()))
                    }
                })
            });

            // Any incomplete buffers have open requests waiting. Ask the host
            // to create these buffers for us again, to unblock any waiting
            // futures.
            for id in incomplete_buffer_ids {
                cx.background()
                    .spawn(client.request(proto::OpenBufferById { project_id, id }))
                    .detach();
            }

            // Collapse all the update sends into a single result, failing if
            // any of them failed.
            futures::future::join_all(send_updates_for_buffers)
                .await
                .into_iter()
                .collect()
        })
    }
7603
7604 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
7605 self.worktrees(cx)
7606 .map(|worktree| {
7607 let worktree = worktree.read(cx);
7608 proto::WorktreeMetadata {
7609 id: worktree.id().to_proto(),
7610 root_name: worktree.root_name().into(),
7611 visible: worktree.is_visible(),
7612 abs_path: worktree.abs_path().to_string_lossy().into(),
7613 }
7614 })
7615 .collect()
7616 }
7617
    /// Replaces this project's worktrees with the set described by the host,
    /// reusing any existing worktree models whose ids still match, and
    /// emitting `WorktreeRemoved` for the ones that disappeared.
    fn set_worktrees_from_proto(
        &mut self,
        worktrees: Vec<proto::WorktreeMetadata>,
        cx: &mut ModelContext<Project>,
    ) -> Result<()> {
        let replica_id = self.replica_id();
        let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

        // Index the still-alive worktree models by id so they can be reused.
        let mut old_worktrees_by_id = self
            .worktrees
            .drain(..)
            .filter_map(|worktree| {
                let worktree = worktree.upgrade(cx)?;
                Some((worktree.read(cx).id(), worktree))
            })
            .collect::<HashMap<_, _>>();

        for worktree in worktrees {
            if let Some(old_worktree) =
                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
            {
                // Keep the existing model, holding it strongly again.
                self.worktrees.push(WorktreeHandle::Strong(old_worktree));
            } else {
                let worktree =
                    Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
                let _ = self.add_worktree(&worktree, cx);
            }
        }

        self.metadata_changed(cx);
        // Anything left in the map was not mentioned by the host: it's gone.
        for id in old_worktrees_by_id.keys() {
            cx.emit(Event::WorktreeRemoved(*id));
        }

        Ok(())
    }
7654
7655 fn set_collaborators_from_proto(
7656 &mut self,
7657 messages: Vec<proto::Collaborator>,
7658 cx: &mut ModelContext<Self>,
7659 ) -> Result<()> {
7660 let mut collaborators = HashMap::default();
7661 for message in messages {
7662 let collaborator = Collaborator::from_proto(message)?;
7663 collaborators.insert(collaborator.peer_id, collaborator);
7664 }
7665 for old_peer_id in self.collaborators.keys() {
7666 if !collaborators.contains_key(old_peer_id) {
7667 cx.emit(Event::CollaboratorLeft(*old_peer_id));
7668 }
7669 }
7670 self.collaborators = collaborators;
7671 Ok(())
7672 }
7673
    /// Reconstructs a `Symbol` from its wire representation, resolving the
    /// language for its path so the symbol's label can be rendered with
    /// language-specific highlighting.
    fn deserialize_symbol(
        &self,
        serialized_symbol: proto::Symbol,
    ) -> impl Future<Output = Result<Symbol>> {
        let languages = self.languages.clone();
        async move {
            let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
            let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
            let start = serialized_symbol
                .start
                .ok_or_else(|| anyhow!("invalid start"))?;
            let end = serialized_symbol
                .end
                .ok_or_else(|| anyhow!("invalid end"))?;
            // SAFETY-NOTE(review): transmutes an untrusted i32 received from a
            // peer into the symbol-kind type. If a peer sends an out-of-range
            // value this is undefined behavior — a checked conversion would be
            // safer. TODO confirm the target type's representation.
            let kind = unsafe { mem::transmute(serialized_symbol.kind) };
            let path = ProjectPath {
                worktree_id,
                path: PathBuf::from(serialized_symbol.path).into(),
            };
            let language = languages
                .language_for_file(&path.path, None)
                .await
                .log_err();
            Ok(Symbol {
                language_server_name: LanguageServerName(
                    serialized_symbol.language_server_name.into(),
                ),
                source_worktree_id,
                path,
                label: {
                    // Prefer a language-specific label; fall back to plain
                    // text when no language (or no label) is available.
                    match language {
                        Some(language) => {
                            language
                                .label_for_symbol(&serialized_symbol.name, kind)
                                .await
                        }
                        None => None,
                    }
                    .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
                },

                name: serialized_symbol.name,
                range: Unclipped(PointUtf16::new(start.row, start.column))
                    ..Unclipped(PointUtf16::new(end.row, end.column)),
                kind,
                signature: serialized_symbol
                    .signature
                    .try_into()
                    .map_err(|_| anyhow!("invalid signature"))?,
            })
        }
    }
7726
7727 async fn handle_buffer_saved(
7728 this: ModelHandle<Self>,
7729 envelope: TypedEnvelope<proto::BufferSaved>,
7730 _: Arc<Client>,
7731 mut cx: AsyncAppContext,
7732 ) -> Result<()> {
7733 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
7734 let version = deserialize_version(&envelope.payload.version);
7735 let mtime = envelope
7736 .payload
7737 .mtime
7738 .ok_or_else(|| anyhow!("missing mtime"))?
7739 .into();
7740
7741 this.update(&mut cx, |this, cx| {
7742 let buffer = this
7743 .opened_buffers
7744 .get(&envelope.payload.buffer_id)
7745 .and_then(|buffer| buffer.upgrade(cx))
7746 .or_else(|| {
7747 this.incomplete_remote_buffers
7748 .get(&envelope.payload.buffer_id)
7749 .and_then(|b| b.clone())
7750 });
7751 if let Some(buffer) = buffer {
7752 buffer.update(cx, |buffer, cx| {
7753 buffer.did_save(version, fingerprint, mtime, cx);
7754 });
7755 }
7756 Ok(())
7757 })
7758 }
7759
7760 async fn handle_buffer_reloaded(
7761 this: ModelHandle<Self>,
7762 envelope: TypedEnvelope<proto::BufferReloaded>,
7763 _: Arc<Client>,
7764 mut cx: AsyncAppContext,
7765 ) -> Result<()> {
7766 let payload = envelope.payload;
7767 let version = deserialize_version(&payload.version);
7768 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
7769 let line_ending = deserialize_line_ending(
7770 proto::LineEnding::from_i32(payload.line_ending)
7771 .ok_or_else(|| anyhow!("missing line ending"))?,
7772 );
7773 let mtime = payload
7774 .mtime
7775 .ok_or_else(|| anyhow!("missing mtime"))?
7776 .into();
7777 this.update(&mut cx, |this, cx| {
7778 let buffer = this
7779 .opened_buffers
7780 .get(&payload.buffer_id)
7781 .and_then(|buffer| buffer.upgrade(cx))
7782 .or_else(|| {
7783 this.incomplete_remote_buffers
7784 .get(&payload.buffer_id)
7785 .cloned()
7786 .flatten()
7787 });
7788 if let Some(buffer) = buffer {
7789 buffer.update(cx, |buffer, cx| {
7790 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
7791 });
7792 }
7793 Ok(())
7794 })
7795 }
7796
    /// Converts a batch of LSP text edits into anchor-range edits against the
    /// buffer snapshot matching the given LSP document `version`. Adjacent or
    /// newline-separated edits are merged, and multiline replacements are
    /// diffed so anchors in unchanged regions keep their positions.
    #[allow(clippy::type_complexity)]
    fn edits_from_lsp(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
        server_id: LanguageServerId,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
        cx.background().spawn(async move {
            let snapshot = snapshot?;
            // Edits may arrive in any order; sort by start so that the
            // merging pass below can work on consecutive pairs.
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .collect::<Vec<_>>();
            lsp_edits.sort_by_key(|(range, _)| range.start);

            let mut lsp_edits = lsp_edits.into_iter().peekable();
            let mut edits = Vec::new();
            while let Some((range, mut new_text)) = lsp_edits.next() {
                // Clip invalid ranges provided by the language server.
                let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
                    ..snapshot.clip_point_utf16(range.end, Bias::Left);

                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start.0 > range.end {
                        if next_range.start.0.row > range.end.row + 1
                            || next_range.start.0.column > 0
                            || snapshot.clip_point_utf16(
                                Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
                    new_text.push_str(next_text);
                    lsp_edits.next();
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    // Tracks whether we've passed an unchanged region since
                    // the last emitted edit, i.e. whether consecutive
                    // delete/insert hunks can be merged into one edit.
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Extend the previous edit's deleted range
                                    // rather than starting a new edit.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
7901
    /// Returns the buffer snapshot that corresponds to the LSP document
    /// `version` an edit was computed against; `None` means the current text.
    /// As a side effect, prunes snapshots more than `OLD_VERSIONS_TO_RETAIN`
    /// versions older than the requested one.
    fn buffer_snapshot_for_lsp_version(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        server_id: LanguageServerId,
        version: Option<i32>,
        cx: &AppContext,
    ) -> Result<TextBufferSnapshot> {
        const OLD_VERSIONS_TO_RETAIN: i32 = 10;

        if let Some(version) = version {
            let buffer_id = buffer.read(cx).remote_id();
            let snapshots = self
                .buffer_snapshots
                .get_mut(&buffer_id)
                .and_then(|m| m.get_mut(&server_id))
                .ok_or_else(|| {
                    anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
                })?;

            // Snapshots are kept in ascending version order, so we can
            // binary-search by version.
            let found_snapshot = snapshots
                .binary_search_by_key(&version, |e| e.version)
                .map(|ix| snapshots[ix].snapshot.clone())
                .map_err(|_| {
                    anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
                })?;

            // Drop snapshots old enough that no in-flight request should
            // still reference them.
            snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
            Ok(found_snapshot)
        } else {
            Ok((buffer.read(cx)).text_snapshot())
        }
    }
7934
7935 pub fn language_servers(
7936 &self,
7937 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
7938 self.language_server_ids
7939 .iter()
7940 .map(|((worktree_id, server_name), server_id)| {
7941 (*server_id, server_name.clone(), *worktree_id)
7942 })
7943 }
7944
7945 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
7946 if let LanguageServerState::Running { server, .. } = self.language_servers.get(&id)? {
7947 Some(server.clone())
7948 } else {
7949 None
7950 }
7951 }
7952
7953 pub fn language_servers_for_buffer(
7954 &self,
7955 buffer: &Buffer,
7956 cx: &AppContext,
7957 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7958 self.language_server_ids_for_buffer(buffer, cx)
7959 .into_iter()
7960 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
7961 LanguageServerState::Running {
7962 adapter, server, ..
7963 } => Some((adapter, server)),
7964 _ => None,
7965 })
7966 }
7967
7968 fn primary_language_server_for_buffer(
7969 &self,
7970 buffer: &Buffer,
7971 cx: &AppContext,
7972 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7973 self.language_servers_for_buffer(buffer, cx).next()
7974 }
7975
7976 pub fn language_server_for_buffer(
7977 &self,
7978 buffer: &Buffer,
7979 server_id: LanguageServerId,
7980 cx: &AppContext,
7981 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7982 self.language_servers_for_buffer(buffer, cx)
7983 .find(|(_, s)| s.server_id() == server_id)
7984 }
7985
7986 fn language_server_ids_for_buffer(
7987 &self,
7988 buffer: &Buffer,
7989 cx: &AppContext,
7990 ) -> Vec<LanguageServerId> {
7991 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
7992 let worktree_id = file.worktree_id(cx);
7993 language
7994 .lsp_adapters()
7995 .iter()
7996 .flat_map(|adapter| {
7997 let key = (worktree_id, adapter.name.clone());
7998 self.language_server_ids.get(&key).copied()
7999 })
8000 .collect()
8001 } else {
8002 Vec::new()
8003 }
8004 }
8005}
8006
/// Returns the longest leading run of path components in `glob` that contain
/// no glob metacharacters (`*`, `?`, `{`, `}`) — i.e. the literal directory
/// prefix that can be matched against the filesystem without glob expansion.
///
/// For example, `src/foo/**/*.rs` yields `src/foo`, and `*.rs` yields `""`.
fn glob_literal_prefix(glob: &str) -> &str {
    let mut literal_end = 0;
    for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
        if part.contains(&['*', '?', '{', '}']) {
            break;
        }
        if i > 0 {
            // Account for the separator preceding this component.
            literal_end += path::MAIN_SEPARATOR.len_utf8();
        }
        literal_end += part.len();
    }
    &glob[..literal_end]
}
8022
8023impl WorktreeHandle {
8024 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
8025 match self {
8026 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8027 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
8028 }
8029 }
8030
8031 pub fn handle_id(&self) -> usize {
8032 match self {
8033 WorktreeHandle::Strong(handle) => handle.id(),
8034 WorktreeHandle::Weak(handle) => handle.id(),
8035 }
8036 }
8037}
8038
8039impl OpenBuffer {
8040 pub fn upgrade(&self, cx: &impl BorrowAppContext) -> Option<ModelHandle<Buffer>> {
8041 match self {
8042 OpenBuffer::Strong(handle) => Some(handle.clone()),
8043 OpenBuffer::Weak(handle) => handle.upgrade(cx),
8044 OpenBuffer::Operations(_) => None,
8045 }
8046 }
8047}
8048
/// The candidate pool for fuzzy path matching: the files of a single worktree
/// snapshot.
pub struct PathMatchCandidateSet {
    /// Immutable snapshot of the worktree whose files are the candidates.
    pub snapshot: Snapshot,
    /// Whether ignored files are included (selects `file_count` vs
    /// `visible_file_count` and is forwarded to `Snapshot::files`).
    pub include_ignored: bool,
    /// Whether non-file matches should be prefixed with the worktree's root
    /// name (see `prefix` in the `fuzzy::PathMatchCandidateSet` impl).
    pub include_root_name: bool,
}
8054
8055impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8056 type Candidates = PathMatchCandidateSetIter<'a>;
8057
8058 fn id(&self) -> usize {
8059 self.snapshot.id().to_usize()
8060 }
8061
8062 fn len(&self) -> usize {
8063 if self.include_ignored {
8064 self.snapshot.file_count()
8065 } else {
8066 self.snapshot.visible_file_count()
8067 }
8068 }
8069
8070 fn prefix(&self) -> Arc<str> {
8071 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8072 self.snapshot.root_name().into()
8073 } else if self.include_root_name {
8074 format!("{}/", self.snapshot.root_name()).into()
8075 } else {
8076 "".into()
8077 }
8078 }
8079
8080 fn candidates(&'a self, start: usize) -> Self::Candidates {
8081 PathMatchCandidateSetIter {
8082 traversal: self.snapshot.files(self.include_ignored, start),
8083 }
8084 }
8085}
8086
/// Iterator over the file entries of a worktree snapshot, yielding fuzzy-match
/// candidates. Produced by `PathMatchCandidateSet::candidates`.
pub struct PathMatchCandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
8090
8091impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8092 type Item = fuzzy::PathMatchCandidate<'a>;
8093
8094 fn next(&mut self) -> Option<Self::Item> {
8095 self.traversal.next().map(|entry| {
8096 if let EntryKind::File(char_bag) = entry.kind {
8097 fuzzy::PathMatchCandidate {
8098 path: &entry.path,
8099 char_bag,
8100 }
8101 } else {
8102 unreachable!()
8103 }
8104 })
8105 }
8106}
8107
impl Entity for Project {
    type Event = Event;

    // Called when the last handle to the project is dropped.
    fn release(&mut self, cx: &mut gpui::AppContext) {
        match &self.client_state {
            Some(ProjectClientState::Local { .. }) => {
                // We were hosting: stop sharing with guests.
                let _ = self.unshare_internal(cx);
            }
            Some(ProjectClientState::Remote { remote_id, .. }) => {
                // We were a guest: tell the host we're leaving.
                let _ = self.client.send(proto::LeaveProject {
                    project_id: *remote_id,
                });
                self.disconnected_from_host_internal(cx);
            }
            _ => {}
        }
    }

    // Gives every language server a chance to shut down cleanly before the
    // application exits.
    fn app_will_quit(
        &mut self,
        _: &mut AppContext,
    ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .map(|(_, server_state)| async {
                use LanguageServerState::*;
                match server_state {
                    Running { server, .. } => server.shutdown()?.await,
                    // A server that is still starting must finish starting
                    // before it can be asked to shut down.
                    Starting(task) => task.await?.shutdown()?.await,
                }
            })
            .collect::<Vec<_>>();

        Some(
            async move {
                futures::future::join_all(shutdown_futures).await;
            }
            .boxed(),
        )
    }
}
8150
8151impl Collaborator {
8152 fn from_proto(message: proto::Collaborator) -> Result<Self> {
8153 Ok(Self {
8154 peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
8155 replica_id: message.replica_id as ReplicaId,
8156 user_id: message.user_id as UserId,
8157 })
8158 }
8159}
8160
8161impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8162 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8163 Self {
8164 worktree_id,
8165 path: path.as_ref().into(),
8166 }
8167 }
8168}
8169
8170impl ProjectLspAdapterDelegate {
8171 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8172 Arc::new(Self {
8173 project: cx.handle(),
8174 http_client: project.client.http_client(),
8175 })
8176 }
8177}
8178
8179impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8180 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8181 self.project
8182 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8183 }
8184
8185 fn http_client(&self) -> Arc<dyn HttpClient> {
8186 self.http_client.clone()
8187 }
8188}
8189
/// Converts a `Symbol` into its wire representation for sending to peers.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        language_server_name: symbol.language_server_name.0.to_string(),
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.path.worktree_id.to_proto(),
        path: symbol.path.path.to_string_lossy().to_string(),
        name: symbol.name.clone(),
        // NOTE(review): transmutes the symbol kind into the proto's integer
        // field; this presumably relies on the two types sharing a
        // representation — a checked conversion would be safer. TODO confirm.
        kind: unsafe { mem::transmute(symbol.kind) },
        start: Some(proto::PointUtf16 {
            row: symbol.range.start.0.row,
            column: symbol.range.start.0.column,
        }),
        end: Some(proto::PointUtf16 {
            row: symbol.range.end.0.row,
            column: symbol.range.end.0.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
8209
/// Computes the path of `path` relative to `base`, inserting `..` components
/// wherever the two diverge.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_iter = path.components();
    let mut base_iter = base.components();
    let mut result: Vec<Component> = Vec::new();
    loop {
        match (path_iter.next(), base_iter.next()) {
            // Both exhausted: done.
            (None, None) => break,
            // Base exhausted: the rest of `path` is appended verbatim.
            (Some(component), None) => {
                result.push(component);
                result.extend(path_iter.by_ref());
                break;
            }
            // Path exhausted: walk up once per remaining base component.
            (None, _) => result.push(Component::ParentDir),
            // Shared prefix: skip matching components.
            (Some(a), Some(b)) if result.is_empty() && a == b => {}
            // `.` in base contributes nothing; keep the path component.
            (Some(component), Some(b)) if b == Component::CurDir => result.push(component),
            // First divergence: back out of the rest of `base`, then append
            // the rest of `path`.
            (Some(component), Some(_)) => {
                result.push(Component::ParentDir);
                result.extend(base_iter.by_ref().map(|_| Component::ParentDir));
                result.push(component);
                result.extend(path_iter.by_ref());
                break;
            }
        }
    }
    result.iter().map(|component| component.as_os_str()).collect()
}
8238
8239impl Item for Buffer {
8240 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8241 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8242 }
8243
8244 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8245 File::from_dyn(self.file()).map(|file| ProjectPath {
8246 worktree_id: file.worktree_id(cx),
8247 path: file.path().clone(),
8248 })
8249 }
8250}
8251
8252async fn wait_for_loading_buffer(
8253 mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
8254) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
8255 loop {
8256 if let Some(result) = receiver.borrow().as_ref() {
8257 match result {
8258 Ok(buffer) => return Ok(buffer.to_owned()),
8259 Err(e) => return Err(e.to_owned()),
8260 }
8261 }
8262 receiver.next().await;
8263 }
8264}