mod ignore;
mod lsp_command;
pub mod project_settings;
pub mod search;
pub mod terminals;
pub mod worktree;

#[cfg(test)]
mod project_tests;
#[cfg(test)]
mod worktree_tests;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, TypedEnvelope, UserId, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use copilot::Copilot;
use futures::{
    channel::{
        mpsc::{self, UnboundedReceiver},
        oneshot,
    },
    future::{try_join_all, Shared},
    stream::FuturesUnordered,
    AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
    executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
    ModelContext, ModelHandle, Task, WeakModelHandle,
};
use itertools::Itertools;
use language::{
    language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
    point_to_lsp,
    proto::{
        deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
        serialize_anchor, serialize_version, split_operations,
    },
    range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
    CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
    File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
    OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
    ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
    DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
};
use lsp_command::*;
use postage::watch;
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::SettingsStore;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use smol::channel::{Receiver, Sender};
use std::{
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    num::NonZeroU32,
    ops::Range,
    path::{self, Component, Path, PathBuf},
    process::Stdio,
    str,
    sync::{
        atomic::{AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::{Duration, Instant},
};
use terminals::Terminals;
use text::Anchor;
use util::{
    debug_panic, defer, http::HttpClient, merge_json_value_into,
    paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
};

pub use fs::*;
pub use worktree::*;

pub trait Item {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
}

// Language server state is stored across 3 collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState, which can either be a task for a
//         server in the process of starting, or a running server with adapter and language server arcs
//     language_server_ids => a mapping from WorktreeId and server name to the unique server id
//     language_server_statuses => a mapping from unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, first the id map is checked to make sure a server isn't already available
// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and
// the Starting variant of LanguageServerState is stored in the language_servers map.
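//
// For example (illustrative values only, not real data):
//     language_server_ids:      {(worktree 1, "rust-analyzer") => LanguageServerId(0)}
//     language_servers:         {LanguageServerId(0) => LanguageServerState::Running { .. }}
//     language_server_statuses: {LanguageServerId(0) => LanguageServerStatus { name: "rust-analyzer", .. }}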
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
    language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
    last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    join_project_response_message_id: u32,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: Option<ProjectClientState>,
    collaborators: HashMap<proto::PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    next_buffer_id: u64,
    opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
    shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
    #[allow(clippy::type_complexity)]
    loading_buffers_by_path: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    #[allow(clippy::type_complexity)]
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A mapping from a buffer ID to `None` means that we've started waiting for the buffer but haven't finished loading it yet.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
    incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
    buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
    buffers_being_formatted: HashSet<u64>,
    buffers_needing_diff: HashSet<WeakModelHandle<Buffer>>,
    git_diff_debouncer: DelayedDebounced,
    nonce: u128,
    _maintain_buffer_languages: Task<()>,
    _maintain_workspace_config: Task<()>,
    terminals: Terminals,
    copilot_enabled: bool,
    current_lsp_settings: HashMap<Arc<str>, LspSettings>,
}

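/// Debounce helper: `fire_new` cancels any previously scheduled run and schedules the given
/// closure to run once the prior task (if any) has finished and `delay` has elapsed.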
struct DelayedDebounced {
    task: Option<Task<()>>,
    cancel_channel: Option<oneshot::Sender<()>>,
}

enum LanguageServerToQuery {
    Primary,
    Other(LanguageServerId),
}

impl DelayedDebounced {
    fn new() -> DelayedDebounced {
        DelayedDebounced {
            task: None,
            cancel_channel: None,
        }
    }

    fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
    where
        F: 'static + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
    {
        if let Some(channel) = self.cancel_channel.take() {
            _ = channel.send(());
        }

        let (sender, mut receiver) = oneshot::channel::<()>();
        self.cancel_channel = Some(sender);

        let previous_task = self.task.take();
        self.task = Some(cx.spawn(|workspace, mut cx| async move {
            let mut timer = cx.background().timer(delay).fuse();
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }

            futures::select_biased! {
                _ = receiver => return,
                _ = timer => {}
            }

            workspace
                .update(&mut cx, |workspace, cx| (func)(workspace, cx))
                .await;
        }));
    }
}

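/// A snapshot of a buffer's text, paired with the document version that was reported to a
/// language server for that snapshot.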
struct LspBufferSnapshot {
    version: i32,
    snapshot: TextBufferSnapshot,
}

/// Message ordered with respect to buffer operations
enum BufferOrderedMessage {
    Operation {
        buffer_id: u64,
        operation: proto::Operation,
    },
    LanguageServerUpdate {
        language_server_id: LanguageServerId,
        message: proto::update_language_server::Variant,
    },
    Resync,
}

enum LocalProjectUpdate {
    WorktreesChanged,
    CreateBufferForPeer {
        peer_id: proto::PeerId,
        buffer_id: u64,
    },
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Operations(Vec<Operation>),
}

#[derive(Clone)]
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        remote_id: u64,
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<()>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub peer_id: proto::PeerId,
    pub replica_id: ReplicaId,
    pub user_id: UserId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    LanguageServerAdded(LanguageServerId),
    LanguageServerRemoved(LanguageServerId),
    LanguageServerLog(LanguageServerId, String),
    Notification(String),
    ActiveEntryChanged(Option<ProjectEntryId>),
    ActivateProjectPanel,
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    DiskBasedDiagnosticsStarted {
        language_server_id: LanguageServerId,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: LanguageServerId,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: LanguageServerId,
    },
    RemoteIdChanged(Option<u64>),
    DisconnectedFromHost,
    Closed,
    DeletedEntry(ProjectEntryId),
    CollaboratorUpdated {
        old_peer_id: proto::PeerId,
        new_peer_id: proto::PeerId,
    },
    CollaboratorJoined(proto::PeerId),
    CollaboratorLeft(proto::PeerId),
    RefreshInlayHints,
}

pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),

    Running {
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server: Arc<LanguageServer>,
        watched_paths: HashMap<WorktreeId, GlobSet>,
        simulate_disk_based_diagnostics_completion: Option<Task<()>>,
    },
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub position: language::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    pub padding_left: bool,
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    pub resolve_state: ResolveState,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    CanResolve(LanguageServerId, Option<lsp::LSPAny>),
    Resolving,
}

impl InlayHint {
    pub fn text(&self) -> String {
        match &self.label {
            InlayHintLabel::String(s) => s.to_owned(),
            InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
    String(String),
    LabelParts(Vec<InlayHintLabelPart>),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
    pub value: String,
    pub tooltip: Option<InlayHintLabelPartTooltip>,
    pub location: Option<(LanguageServerId, lsp::Location)>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
    pub kind: HoverBlockKind,
    pub value: String,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub language_server_name: LanguageServerName,
    pub source_worktree_id: WorktreeId,
    pub path: ProjectPath,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<Unclipped<PointUtf16>>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub kind: HoverBlockKind,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
    PlainText,
    Markdown,
    Code { language: String },
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
    pub language: Option<Arc<Language>>,
}

impl Hover {
    pub fn is_empty(&self) -> bool {
        self.contents.iter().all(|block| block.text.is_empty())
    }
}

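/// A set of transactions, one per affected buffer, produced by a project-wide operation such as
/// a language server's workspace edit.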
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(
        &self,
        language_server_id: LanguageServerId,
        path: &Path,
    ) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: language_server_id.0 as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

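/// What initiated a format operation: an explicit save, or a manually invoked format command.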
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatTrigger {
    Save,
    Manual,
}

struct ProjectLspAdapterDelegate {
    project: ModelHandle<Project>,
    http_client: Arc<dyn HttpClient>,
}

impl FormatTrigger {
    fn from_proto(value: i32) -> FormatTrigger {
        match value {
            0 => FormatTrigger::Save,
            1 => FormatTrigger::Manual,
            _ => FormatTrigger::Save,
        }
    }
}
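
/// A candidate match for a project search: either an already-open buffer, or a path on disk
/// whose contents still need to be loaded.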
#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
    OpenBuffer {
        buffer: ModelHandle<Buffer>,
        // This might be an unnamed file without a representation on the filesystem
        path: Option<Arc<Path>>,
    },
    Path {
        worktree_id: WorktreeId,
        path: Arc<Path>,
    },
}

type SearchMatchCandidateIndex = usize;
impl SearchMatchCandidate {
    fn path(&self) -> Option<Arc<Path>> {
        match self {
            SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
            SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
        }
    }
}

impl Project {
    pub fn init_settings(cx: &mut AppContext) {
        settings::register::<ProjectSettings>(cx);
    }

    pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
        Self::init_settings(cx);

        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_update_project_collaborator);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_create_buffer_for_peer);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_request_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_message_handler(Self::handle_update_worktree_settings);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_on_type_formatting);
        client.add_model_request_handler(Self::handle_inlay_hints);
        client.add_model_request_handler(Self::handle_resolve_inlay_hint);
        client.add_model_request_handler(Self::handle_refresh_inlay_hints);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_synchronize_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
        client.add_model_message_handler(Self::handle_update_diff_base);
    }

    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (tx, rx) = mpsc::unbounded();
            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            Self {
                worktrees: Default::default(),
                buffer_ordered_messages_tx: tx,
                collaborators: Default::default(),
                next_buffer_id: 0,
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_buffers_by_path: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                buffer_snapshots: Default::default(),
                join_project_response_message_id: 0,
                client_state: None,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![
                    cx.observe_global::<SettingsStore, _>(Self::on_settings_changed)
                ],
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_enabled: Copilot::global(cx).is_some(),
                current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let subscription = client.subscribe_to_entity(remote_id)?;
        let response = client
            .request_envelope(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.add_model(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            let mut worktrees = Vec::new();
            for worktree in response.payload.worktrees {
                let worktree = cx.update(|cx| {
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
                });
                worktrees.push(worktree);
            }

            let (tx, rx) = mpsc::unbounded();
            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let mut this = Self {
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                loading_buffers_by_path: Default::default(),
                next_buffer_id: 0,
                opened_buffer: watch::channel(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                join_project_response_message_id: response.message_id,
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: Default::default(),
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: Some(ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                }),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: response
                    .payload
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            LanguageServerId(server.id as usize),
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                opened_buffers: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_enabled: Copilot::global(cx).is_some(),
                current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
            };
            for worktree in worktrees {
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        });
        let subscription = subscription.set_model(&this, &mut cx);

        let user_ids = response
            .payload
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
            this.client_subscriptions.push(subscription);
            anyhow::Ok(())
        })?;

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let mut languages = LanguageRegistry::test();
        languages.set_executor(cx.background());
        let http_client = util::http::FakeHttpClient::with_404_response();
        let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project =
            cx.update(|cx| Project::local(client, user_store, Arc::new(languages), fs, cx));
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

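    /// Reconcile running language servers with the current settings: stop servers for languages
    /// that have disabled them, start servers for newly-enabled languages, restart servers whose
    /// LSP settings changed, and register open buffers with Copilot once it becomes available.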
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = buffer.file().zip(buffer.language()) {
                    let settings = language_settings(Some(language), Some(file), cx);
                    if settings.enable_language_server {
                        if let Some(file) = File::from_dyn(Some(file)) {
                            language_servers_to_start
                                .push((file.worktree.clone(), language.clone()));
                        }
                    }
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

        let new_lsp_settings = settings::get::<ProjectSettings>(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            let language = languages.iter().find_map(|l| {
                let adapter = l
                    .lsp_adapters()
                    .iter()
                    .find(|adapter| &adapter.name == started_lsp_name)?;
                Some((l, adapter))
            });
            if let Some((language, adapter)) = language {
                let worktree = self.worktree_for_id(*worktree_id, cx);
                let file = worktree.as_ref().and_then(|tree| {
                    tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
                });
                if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
                    language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                } else if let Some(worktree) = worktree {
                    let server_name = &adapter.name.0;
                    match (
                        current_lsp_settings.get(server_name),
                        new_lsp_settings.get(server_name),
                    ) {
                        (None, None) => {}
                        (Some(_), None) | (None, Some(_)) => {
                            language_servers_to_restart.push((worktree, Arc::clone(language)));
                        }
                        (Some(current_lsp_settings), Some(new_lsp_settings)) => {
                            if current_lsp_settings != new_lsp_settings {
                                language_servers_to_restart.push((worktree, Arc::clone(language)));
                            }
                        }
                    }
                }
            }
        }
        self.current_lsp_settings = new_lsp_settings;

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree, language) in language_servers_to_start {
            let worktree_path = worktree.read(cx).abs_path();
            self.start_language_servers(&worktree, worktree_path, language, cx);
        }

        // Restart all language servers with changed initialization options.
        for (worktree, language) in language_servers_to_restart {
            self.restart_language_servers(worktree, language, cx);
        }

        if !self.copilot_enabled && Copilot::global(cx).is_some() {
            self.copilot_enabled = true;
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    self.register_buffer_with_copilot(&buffer, cx);
                }
            }
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    pub fn opened_buffers(&self, cx: &AppContext) -> Vec<ModelHandle<Buffer>> {
        self.opened_buffers
            .values()
            .filter_map(|b| b.upgrade(cx))
            .collect()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn remote_id(&self) -> Option<u64> {
        match self.client_state.as_ref()? {
            ProjectClientState::Local { remote_id, .. }
            | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
            _ => 0,
        }
    }

    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
            updates_tx
                .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                .ok();
        }
        cx.notify();
    }

    pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
        &self.collaborators
    }

    /// Collect all worktrees, including ones that don't appear in the project panel
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    /// Collect all user-visible worktrees, the ones that appear in the project panel
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees(cx) {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.to_string_lossy().into(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;

        cx.emit(Event::DeletedEntry(entry_id));

        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn expand_entry(
        &mut self,
        worktree_id: WorktreeId,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_id(worktree_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
            })
        } else {
            let worktree = worktree.downgrade();
            let request = self.client.request(proto::ExpandProjectEntry {
                project_id: self.remote_id().unwrap(),
                entry_id: entry_id.to_proto(),
            });
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = request.await?;
                if let Some(worktree) = worktree.upgrade(&cx) {
                    worktree
                        .update(&mut cx, |worktree, _| {
                            worktree
                                .as_remote_mut()
                                .unwrap()
                                .wait_for_snapshot(response.worktree_scan_id as usize)
                        })
                        .await?;
                }
                Ok(())
            }))
        }
    }

    pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
        if self.client_state.is_some() {
            return Err(anyhow!("project was already shared"));
        }
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
                .set_model(&cx.handle(), &mut cx.to_async()),
        );

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Operations(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: server_id.0 as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees(cx) {
            let worktree_id = worktree.read(cx).id().to_proto();
            for (path, content) in store.local_settings(worktree.id()) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
                        worktree_id,
                        path: path.to_string_lossy().into(),
                        content: Some(content),
                    })
                    .log_err();
            }
        }

        let (updates_tx, mut updates_rx) = mpsc::unbounded();
        let client = self.client.clone();
        self.client_state = Some(ProjectClientState::Local {
            remote_id: project_id,
            updates_tx,
            _send_updates: cx.spawn_weak(move |this, mut cx| async move {
                while let Some(update) = updates_rx.next().await {
                    let Some(this) = this.upgrade(&cx) else { break };

                    match update {
                        LocalProjectUpdate::WorktreesChanged => {
                            let worktrees = this
                                .read_with(&cx, |this, cx| this.worktrees(cx).collect::<Vec<_>>());
                            let update_project = this
                                .read_with(&cx, |this, cx| {
                                    this.client.request(proto::UpdateProject {
                                        project_id,
                                        worktrees: this.worktree_metadata_protos(cx),
                                    })
                                })
                                .await;
                            if update_project.is_ok() {
                                for worktree in worktrees {
                                    worktree.update(&mut cx, |worktree, cx| {
                                        let worktree = worktree.as_local_mut().unwrap();
                                        worktree.share(project_id, cx).detach_and_log_err(cx)
                                    });
                                }
                            }
                        }
                        LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
                            let buffer = this.update(&mut cx, |this, _| {
                                let buffer = this.opened_buffers.get(&buffer_id).unwrap();
                                let shared_buffers =
                                    this.shared_buffers.entry(peer_id).or_default();
                                if shared_buffers.insert(buffer_id) {
                                    if let OpenBuffer::Strong(buffer) = buffer {
                                        Some(buffer.clone())
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            });

                            let Some(buffer) = buffer else { continue };
                            let operations =
                                buffer.read_with(&cx, |b, cx| b.serialize_ops(None, cx));
                            let operations = operations.await;
                            let state = buffer.read_with(&cx, |buffer, _| buffer.to_proto());

                            let initial_state = proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
                            };
                            if client.send(initial_state).log_err().is_some() {
                                let client = client.clone();
                                cx.background()
                                    .spawn(async move {
                                        let mut chunks = split_operations(operations).peekable();
                                        while let Some(chunk) = chunks.next() {
                                            let is_last = chunks.peek().is_none();
                                            client.send(proto::CreateBufferForPeer {
                                                project_id,
                                                peer_id: Some(peer_id),
                                                variant: Some(
                                                    proto::create_buffer_for_peer::Variant::Chunk(
                                                        proto::BufferChunk {
                                                            buffer_id,
                                                            operations: chunk,
                                                            is_last,
                                                        },
                                                    ),
                                                ),
                                            })?;
                                        }
                                        anyhow::Ok(())
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                    }
                }
            }),
        });

        self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();
        Ok(())
    }

    pub fn reshared(
        &mut self,
        message: proto::ResharedProject,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.shared_buffers.clear();
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.metadata_changed(cx);
        Ok(())
    }

    pub fn rejoined(
        &mut self,
        message: proto::RejoinedProject,
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
                    .log_err();
            }
        });

        self.join_project_response_message_id = message_id;
        self.set_worktrees_from_proto(message.worktrees, cx)?;
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.language_server_statuses = message
            .language_servers
            .into_iter()
            .map(|server| {
                (
                    LanguageServerId(server.id as usize),
                    LanguageServerStatus {
                        name: server.name,
                        pending_work: Default::default(),
                        has_pending_diagnostic_updates: false,
                        progress_tokens: Default::default(),
                    },
                )
            })
            .collect();
        self.buffer_ordered_messages_tx
            .unbounded_send(BufferOrderedMessage::Resync)
            .unwrap();
        cx.notify();
        Ok(())
    }

    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
        self.unshare_internal(cx)?;
        self.metadata_changed(cx);
        cx.notify();
        Ok(())
    }

    fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
        if self.is_remote() {
            return Err(anyhow!("attempted to unshare a remote project"));
        }

        if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
            self.collaborators.clear();
            self.shared_buffers.clear();
            self.client_subscriptions.clear();

            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            self.client.send(proto::UnshareProject {
                project_id: remote_id,
            })?;

            Ok(())
        } else {
            Err(anyhow!("attempted to unshare an unshared project"))
        }
    }

    pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
        self.disconnected_from_host_internal(cx);
        cx.emit(Event::DisconnectedFromHost);
        cx.notify();
    }

    fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
        if let Some(ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        }) = &mut self.client_state
        {
            *sharing_has_stopped = true;

            self.collaborators.clear();

            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            *self.opened_buffer.0.borrow_mut() = ();
        }
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            Some(ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            }) => *sharing_has_stopped,
            _ => false,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            Some(ProjectClientState::Remote { .. }) => false,
            _ => true,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }
        let id = post_inc(&mut self.next_buffer_id);
        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), id, text)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;

            let buffer: &AnyModelHandle = &buffer;
            Ok((project_entry_id, buffer.clone()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers_by_path.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            wait_for_loading_buffer(loading_watch)
                .await
                .map_err(|error| anyhow!("{}", error))
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let buffer_id = post_inc(&mut self.next_buffer_id);
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(buffer_id, path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(response.buffer_id, cx)
            })
            .await
        })
    }

    /// LanguageServerName is owned, because it is inserted into a map
    pub fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        language_server_id: LanguageServerId,
        language_server_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_server_ids.insert(
                        (worktree.read(cx).id(), language_server_name),
                        language_server_id,
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer_id = request.await?.buffer_id;
                this.update(&mut cx, |this, cx| {
                    this.wait_for_remote_buffer(buffer_id, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffers(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        cx.spawn(|this, mut cx| async move {
            let save_tasks = buffers
                .into_iter()
                .map(|buffer| this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx)));
            try_join_all(save_tasks).await?;
            Ok(())
        })
    }

    pub fn save_buffer(
        &self,
        buffer: ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
        };
        let worktree = file.worktree.clone();
        let path = file.path.clone();
        worktree.update(cx, |worktree, cx| match worktree {
            Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
            Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
        })
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_file = File::from_dyn(buffer.read(cx).file())
            .filter(|f| f.is_local())
            .cloned();
        cx.spawn(|this, mut cx| async move {
            if let Some(old_file) = &old_file {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| match worktree {
                    Worktree::Local(worktree) => {
                        worktree.save_buffer(buffer.clone(), path.into(), true, cx)
                    }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1832 })
1833 .await?;
1834 this.update(&mut cx, |this, cx| {
1835 this.detect_language_for_buffer(&buffer, cx);
1836 this.register_buffer_with_language_servers(&buffer, cx);
1837 });
1838 Ok(())
1839 })
1840 }
1841
1842 pub fn get_open_buffer(
1843 &mut self,
1844 path: &ProjectPath,
1845 cx: &mut ModelContext<Self>,
1846 ) -> Option<ModelHandle<Buffer>> {
1847 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1848 self.opened_buffers.values().find_map(|buffer| {
1849 let buffer = buffer.upgrade(cx)?;
1850 let file = File::from_dyn(buffer.read(cx).file())?;
1851 if file.worktree == worktree && file.path() == &path.path {
1852 Some(buffer)
1853 } else {
1854 None
1855 }
1856 })
1857 }
1858
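    // Start tracking a newly opened buffer. Remote and shared projects keep a strong
    // handle so the buffer stays alive for collaborators; otherwise only a weak handle
    // is stored. Any operations that arrived before the buffer was opened are applied
    // here, and the buffer is hooked up to language servers, Copilot, and the
    // path/entry-id indices used for local buffers.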
1859 fn register_buffer(
1860 &mut self,
1861 buffer: &ModelHandle<Buffer>,
1862 cx: &mut ModelContext<Self>,
1863 ) -> Result<()> {
1864 self.request_buffer_diff_recalculation(buffer, cx);
1865 buffer.update(cx, |buffer, _| {
1866 buffer.set_language_registry(self.languages.clone())
1867 });
1868
1869 let remote_id = buffer.read(cx).remote_id();
1870 let is_remote = self.is_remote();
1871 let open_buffer = if is_remote || self.is_shared() {
1872 OpenBuffer::Strong(buffer.clone())
1873 } else {
1874 OpenBuffer::Weak(buffer.downgrade())
1875 };
1876
1877 match self.opened_buffers.entry(remote_id) {
1878 hash_map::Entry::Vacant(entry) => {
1879 entry.insert(open_buffer);
1880 }
1881 hash_map::Entry::Occupied(mut entry) => {
1882 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1883 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
1884 } else if entry.get().upgrade(cx).is_some() {
1885 if is_remote {
1886 return Ok(());
1887 } else {
1888 debug_panic!("buffer {} was already registered", remote_id);
1889 Err(anyhow!("buffer {} was already registered", remote_id))?;
1890 }
1891 }
1892 entry.insert(open_buffer);
1893 }
1894 }
1895 cx.subscribe(buffer, |this, buffer, event, cx| {
1896 this.on_buffer_event(buffer, event, cx);
1897 })
1898 .detach();
1899
1900 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1901 if file.is_local {
1902 self.local_buffer_ids_by_path.insert(
1903 ProjectPath {
1904 worktree_id: file.worktree_id(cx),
1905 path: file.path.clone(),
1906 },
1907 remote_id,
1908 );
1909
1910 self.local_buffer_ids_by_entry_id
1911 .insert(file.entry_id, remote_id);
1912 }
1913 }
1914
1915 self.detect_language_for_buffer(buffer, cx);
1916 self.register_buffer_with_language_servers(buffer, cx);
1917 self.register_buffer_with_copilot(buffer, cx);
1918 cx.observe_release(buffer, |this, buffer, cx| {
1919 if let Some(file) = File::from_dyn(buffer.file()) {
1920 if file.is_local() {
1921 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1922 for server in this.language_servers_for_buffer(buffer, cx) {
1923 server
1924 .1
1925 .notify::<lsp::notification::DidCloseTextDocument>(
1926 lsp::DidCloseTextDocumentParams {
1927 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1928 },
1929 )
1930 .log_err();
1931 }
1932 }
1933 }
1934 })
1935 .detach();
1936
1937 *self.opened_buffer.0.borrow_mut() = ();
1938 Ok(())
1939 }
1940
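    // Announce a local buffer to every running language server for its language:
    // restore any diagnostics previously stored on the worktree for its path, send
    // `textDocument/didOpen`, seed completion trigger characters from the server's
    // capabilities, and start tracking LSP document versions for the buffer.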
1941 fn register_buffer_with_language_servers(
1942 &mut self,
1943 buffer_handle: &ModelHandle<Buffer>,
1944 cx: &mut ModelContext<Self>,
1945 ) {
1946 let buffer = buffer_handle.read(cx);
1947 let buffer_id = buffer.remote_id();
1948
1949 if let Some(file) = File::from_dyn(buffer.file()) {
1950 if !file.is_local() {
1951 return;
1952 }
1953
1954 let abs_path = file.abs_path(cx);
1955 let uri = lsp::Url::from_file_path(&abs_path)
1956 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
1957 let initial_snapshot = buffer.text_snapshot();
1958 let language = buffer.language().cloned();
1959 let worktree_id = file.worktree_id(cx);
1960
1961 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1962 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
1963 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
1964 .log_err();
1965 }
1966 }
1967
1968 if let Some(language) = language {
1969 for adapter in language.lsp_adapters() {
1970 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
1971 let server = self
1972 .language_server_ids
1973 .get(&(worktree_id, adapter.name.clone()))
1974 .and_then(|id| self.language_servers.get(id))
1975 .and_then(|server_state| {
1976 if let LanguageServerState::Running { server, .. } = server_state {
1977 Some(server.clone())
1978 } else {
1979 None
1980 }
1981 });
1982 let server = match server {
1983 Some(server) => server,
1984 None => continue,
1985 };
1986
1987 server
1988 .notify::<lsp::notification::DidOpenTextDocument>(
1989 lsp::DidOpenTextDocumentParams {
1990 text_document: lsp::TextDocumentItem::new(
1991 uri.clone(),
1992 language_id.unwrap_or_default(),
1993 0,
1994 initial_snapshot.text(),
1995 ),
1996 },
1997 )
1998 .log_err();
1999
2000 buffer_handle.update(cx, |buffer, cx| {
2001 buffer.set_completion_triggers(
2002 server
2003 .capabilities()
2004 .completion_provider
2005 .as_ref()
2006 .and_then(|provider| provider.trigger_characters.clone())
2007 .unwrap_or_default(),
2008 cx,
2009 );
2010 });
2011
2012 let snapshot = LspBufferSnapshot {
2013 version: 0,
2014 snapshot: initial_snapshot.clone(),
2015 };
2016 self.buffer_snapshots
2017 .entry(buffer_id)
2018 .or_default()
2019 .insert(server.server_id(), vec![snapshot]);
2020 }
2021 }
2022 }
2023 }
2024
2025 fn unregister_buffer_from_language_servers(
2026 &mut self,
2027 buffer: &ModelHandle<Buffer>,
2028 old_file: &File,
2029 cx: &mut ModelContext<Self>,
2030 ) {
2031 let old_path = match old_file.as_local() {
2032 Some(local) => local.abs_path(cx),
2033 None => return,
2034 };
2035
2036 buffer.update(cx, |buffer, cx| {
2037 let worktree_id = old_file.worktree_id(cx);
2038 let ids = &self.language_server_ids;
2039
2040 let language = buffer.language().cloned();
2041 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2042 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2043 buffer.update_diagnostics(server_id, Default::default(), cx);
2044 }
2045
2046 self.buffer_snapshots.remove(&buffer.remote_id());
2047 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2048 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2049 language_server
2050 .notify::<lsp::notification::DidCloseTextDocument>(
2051 lsp::DidCloseTextDocumentParams {
2052 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2053 },
2054 )
2055 .log_err();
2056 }
2057 });
2058 }
2059
2060 fn register_buffer_with_copilot(
2061 &self,
2062 buffer_handle: &ModelHandle<Buffer>,
2063 cx: &mut ModelContext<Self>,
2064 ) {
2065 if let Some(copilot) = Copilot::global(cx) {
2066 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2067 }
2068 }
2069
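    // Forward buffer operations and language-server updates to collaborators in the
    // order they were produced. Operations are batched per buffer and flushed before
    // any language-server update is sent, so remote clients never see an update that
    // refers to buffer state they haven't received. If sending fails on a guest,
    // further operations are skipped until a resync with the host succeeds.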
2070 async fn send_buffer_ordered_messages(
2071 this: WeakModelHandle<Self>,
2072 rx: UnboundedReceiver<BufferOrderedMessage>,
2073 mut cx: AsyncAppContext,
2074 ) -> Option<()> {
2075 const MAX_BATCH_SIZE: usize = 128;
2076
2077 let mut operations_by_buffer_id = HashMap::default();
2078 async fn flush_operations(
2079 this: &ModelHandle<Project>,
2080 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2081 needs_resync_with_host: &mut bool,
2082 is_local: bool,
2083 cx: &AsyncAppContext,
2084 ) {
2085 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2086 let request = this.read_with(cx, |this, _| {
2087 let project_id = this.remote_id()?;
2088 Some(this.client.request(proto::UpdateBuffer {
2089 buffer_id,
2090 project_id,
2091 operations,
2092 }))
2093 });
2094 if let Some(request) = request {
2095 if request.await.is_err() && !is_local {
2096 *needs_resync_with_host = true;
2097 break;
2098 }
2099 }
2100 }
2101 }
2102
2103 let mut needs_resync_with_host = false;
2104 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2105
2106 while let Some(changes) = changes.next().await {
2107 let this = this.upgrade(&mut cx)?;
2108 let is_local = this.read_with(&cx, |this, _| this.is_local());
2109
2110 for change in changes {
2111 match change {
2112 BufferOrderedMessage::Operation {
2113 buffer_id,
2114 operation,
2115 } => {
2116 if needs_resync_with_host {
2117 continue;
2118 }
2119
2120 operations_by_buffer_id
2121 .entry(buffer_id)
2122                             .or_default()
2123 .push(operation);
2124 }
2125
2126 BufferOrderedMessage::Resync => {
2127 operations_by_buffer_id.clear();
2128 if this
2129 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
2130 .await
2131 .is_ok()
2132 {
2133 needs_resync_with_host = false;
2134 }
2135 }
2136
2137 BufferOrderedMessage::LanguageServerUpdate {
2138 language_server_id,
2139 message,
2140 } => {
2141 flush_operations(
2142 &this,
2143 &mut operations_by_buffer_id,
2144 &mut needs_resync_with_host,
2145 is_local,
2146 &cx,
2147 )
2148 .await;
2149
2150 this.read_with(&cx, |this, _| {
2151 if let Some(project_id) = this.remote_id() {
2152 this.client
2153 .send(proto::UpdateLanguageServer {
2154 project_id,
2155 language_server_id: language_server_id.0 as u64,
2156 variant: Some(message),
2157 })
2158 .log_err();
2159 }
2160 });
2161 }
2162 }
2163 }
2164
2165 flush_operations(
2166 &this,
2167 &mut operations_by_buffer_id,
2168 &mut needs_resync_with_host,
2169 is_local,
2170 &cx,
2171 )
2172 .await;
2173 }
2174
2175 None
2176 }
2177
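    // React to buffer events: broadcast operations to collaborators, turn edits into
    // incremental `textDocument/didChange` notifications (one per tracked language
    // server, with a bumped document version), and send `textDocument/didSave` on
    // save. For servers without a disk-based diagnostics progress token, saving also
    // schedules a simulated "diagnostics finished" event. Edits, reloads, and
    // diff-base changes additionally request a git diff recalculation.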
2178 fn on_buffer_event(
2179 &mut self,
2180 buffer: ModelHandle<Buffer>,
2181 event: &BufferEvent,
2182 cx: &mut ModelContext<Self>,
2183 ) -> Option<()> {
2184 if matches!(
2185 event,
2186 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2187 ) {
2188 self.request_buffer_diff_recalculation(&buffer, cx);
2189 }
2190
2191 match event {
2192 BufferEvent::Operation(operation) => {
2193 self.buffer_ordered_messages_tx
2194 .unbounded_send(BufferOrderedMessage::Operation {
2195 buffer_id: buffer.read(cx).remote_id(),
2196 operation: language::proto::serialize_operation(operation),
2197 })
2198 .ok();
2199 }
2200
2201 BufferEvent::Edited { .. } => {
2202 let buffer = buffer.read(cx);
2203 let file = File::from_dyn(buffer.file())?;
2204 let abs_path = file.as_local()?.abs_path(cx);
2205 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2206 let next_snapshot = buffer.text_snapshot();
2207
2208 let language_servers: Vec<_> = self
2209 .language_servers_for_buffer(buffer, cx)
2210 .map(|i| i.1.clone())
2211 .collect();
2212
2213 for language_server in language_servers {
2214 let language_server = language_server.clone();
2215
2216 let buffer_snapshots = self
2217 .buffer_snapshots
2218 .get_mut(&buffer.remote_id())
2219 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2220 let previous_snapshot = buffer_snapshots.last()?;
2221 let next_version = previous_snapshot.version + 1;
2222
2223 let content_changes = buffer
2224 .edits_since::<(PointUtf16, usize)>(previous_snapshot.snapshot.version())
2225 .map(|edit| {
2226 let edit_start = edit.new.start.0;
2227 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2228 let new_text = next_snapshot
2229 .text_for_range(edit.new.start.1..edit.new.end.1)
2230 .collect();
2231 lsp::TextDocumentContentChangeEvent {
2232 range: Some(lsp::Range::new(
2233 point_to_lsp(edit_start),
2234 point_to_lsp(edit_end),
2235 )),
2236 range_length: None,
2237 text: new_text,
2238 }
2239 })
2240 .collect();
2241
2242 buffer_snapshots.push(LspBufferSnapshot {
2243 version: next_version,
2244 snapshot: next_snapshot.clone(),
2245 });
2246
2247 language_server
2248 .notify::<lsp::notification::DidChangeTextDocument>(
2249 lsp::DidChangeTextDocumentParams {
2250 text_document: lsp::VersionedTextDocumentIdentifier::new(
2251 uri.clone(),
2252 next_version,
2253 ),
2254 content_changes,
2255 },
2256 )
2257 .log_err();
2258 }
2259 }
2260
2261 BufferEvent::Saved => {
2262 let file = File::from_dyn(buffer.read(cx).file())?;
2263 let worktree_id = file.worktree_id(cx);
2264 let abs_path = file.as_local()?.abs_path(cx);
2265 let text_document = lsp::TextDocumentIdentifier {
2266 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2267 };
2268
2269 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2270 server
2271 .notify::<lsp::notification::DidSaveTextDocument>(
2272 lsp::DidSaveTextDocumentParams {
2273 text_document: text_document.clone(),
2274 text: None,
2275 },
2276 )
2277 .log_err();
2278 }
2279
2280 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2281 for language_server_id in language_server_ids {
2282 if let Some(LanguageServerState::Running {
2283 adapter,
2284 simulate_disk_based_diagnostics_completion,
2285 ..
2286 }) = self.language_servers.get_mut(&language_server_id)
2287 {
2288 // After saving a buffer using a language server that doesn't provide
2289 // a disk-based progress token, kick off a timer that will reset every
2290 // time the buffer is saved. If the timer eventually fires, simulate
2291 // disk-based diagnostics being finished so that other pieces of UI
2292 // (e.g., project diagnostics view, diagnostic status bar) can update.
2293 // We don't emit an event right away because the language server might take
2294 // some time to publish diagnostics.
2295 if adapter.disk_based_diagnostics_progress_token.is_none() {
2296 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2297 Duration::from_secs(1);
2298
2299 let task = cx.spawn_weak(|this, mut cx| async move {
2300 cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2301 if let Some(this) = this.upgrade(&cx) {
2302 this.update(&mut cx, |this, cx| {
2303 this.disk_based_diagnostics_finished(
2304 language_server_id,
2305 cx,
2306 );
2307 this.buffer_ordered_messages_tx
2308 .unbounded_send(
2309 BufferOrderedMessage::LanguageServerUpdate {
2310 language_server_id,
2311                                                 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2312 },
2313 )
2314 .ok();
2315 });
2316 }
2317 });
2318 *simulate_disk_based_diagnostics_completion = Some(task);
2319 }
2320 }
2321 }
2322 }
2323
2324 _ => {}
2325 }
2326
2327 None
2328 }
2329
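    // Schedule a git diff recalculation for this buffer. Without a configured gutter
    // debounce, the recalculation is deferred once for the first queued buffer;
    // otherwise it is debounced by the configured delay, clamped to at least 50ms.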
2330 fn request_buffer_diff_recalculation(
2331 &mut self,
2332 buffer: &ModelHandle<Buffer>,
2333 cx: &mut ModelContext<Self>,
2334 ) {
2335 self.buffers_needing_diff.insert(buffer.downgrade());
2336 let first_insertion = self.buffers_needing_diff.len() == 1;
2337
2338 let settings = settings::get::<ProjectSettings>(cx);
2339 let delay = if let Some(delay) = settings.git.gutter_debounce {
2340 delay
2341 } else {
2342 if first_insertion {
2343 let this = cx.weak_handle();
2344 cx.defer(move |cx| {
2345 if let Some(this) = this.upgrade(cx) {
2346 this.update(cx, |this, cx| {
2347 this.recalculate_buffer_diffs(cx).detach();
2348 });
2349 }
2350 });
2351 }
2352 return;
2353 };
2354
2355 const MIN_DELAY: u64 = 50;
2356 let delay = delay.max(MIN_DELAY);
2357 let duration = Duration::from_millis(delay);
2358
2359 self.git_diff_debouncer
2360 .fire_new(duration, cx, move |this, cx| {
2361 this.recalculate_buffer_diffs(cx)
2362 });
2363 }
2364
2365 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2366 cx.spawn(|this, mut cx| async move {
2367 let buffers: Vec<_> = this.update(&mut cx, |this, _| {
2368 this.buffers_needing_diff.drain().collect()
2369 });
2370
2371 let tasks: Vec<_> = this.update(&mut cx, |_, cx| {
2372 buffers
2373 .iter()
2374 .filter_map(|buffer| {
2375 let buffer = buffer.upgrade(cx)?;
2376 buffer.update(cx, |buffer, cx| buffer.git_diff_recalc(cx))
2377 })
2378 .collect()
2379 });
2380
2381 futures::future::join_all(tasks).await;
2382
2383 this.update(&mut cx, |this, cx| {
2384 if !this.buffers_needing_diff.is_empty() {
2385 this.recalculate_buffer_diffs(cx).detach();
2386 } else {
2387 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2388 for buffer in buffers {
2389 if let Some(buffer) = buffer.upgrade(cx) {
2390 buffer.update(cx, |_, cx| cx.notify());
2391 }
2392 }
2393 }
2394 });
2395 })
2396 }
2397
2398 fn language_servers_for_worktree(
2399 &self,
2400 worktree_id: WorktreeId,
2401 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2402 self.language_server_ids
2403 .iter()
2404 .filter_map(move |((language_server_worktree_id, _), id)| {
2405 if *language_server_worktree_id == worktree_id {
2406 if let Some(LanguageServerState::Running {
2407 adapter,
2408 language,
2409 server,
2410 ..
2411 }) = self.language_servers.get(id)
2412 {
2413 return Some((adapter, language, server));
2414 }
2415 }
2416 None
2417 })
2418 }
2419
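    // Keep open buffers' languages in sync with the language registry. On every
    // registry change, plain-text buffers get another chance at language detection and
    // buffers with unknown injections are reparsed; a full registry reload first
    // clears the language (and language servers) of all open buffers so they can be
    // re-assigned.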
2420 fn maintain_buffer_languages(
2421 languages: Arc<LanguageRegistry>,
2422 cx: &mut ModelContext<Project>,
2423 ) -> Task<()> {
2424 let mut subscription = languages.subscribe();
2425 let mut prev_reload_count = languages.reload_count();
2426 cx.spawn_weak(|project, mut cx| async move {
2427 while let Some(()) = subscription.next().await {
2428 if let Some(project) = project.upgrade(&cx) {
2429 // If the language registry has been reloaded, then remove and
2430 // re-assign the languages on all open buffers.
2431 let reload_count = languages.reload_count();
2432 if reload_count > prev_reload_count {
2433 prev_reload_count = reload_count;
2434 project.update(&mut cx, |this, cx| {
2435 let buffers = this
2436 .opened_buffers
2437 .values()
2438 .filter_map(|b| b.upgrade(cx))
2439 .collect::<Vec<_>>();
2440 for buffer in buffers {
2441 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() {
2442 this.unregister_buffer_from_language_servers(&buffer, &f, cx);
2443 buffer.update(cx, |buffer, cx| buffer.set_language(None, cx));
2444 }
2445 }
2446 });
2447 }
2448
2449 project.update(&mut cx, |project, cx| {
2450 let mut plain_text_buffers = Vec::new();
2451 let mut buffers_with_unknown_injections = Vec::new();
2452 for buffer in project.opened_buffers.values() {
2453 if let Some(handle) = buffer.upgrade(cx) {
2454 let buffer = &handle.read(cx);
2455 if buffer.language().is_none()
2456 || buffer.language() == Some(&*language::PLAIN_TEXT)
2457 {
2458 plain_text_buffers.push(handle);
2459 } else if buffer.contains_unknown_injections() {
2460 buffers_with_unknown_injections.push(handle);
2461 }
2462 }
2463 }
2464
2465 for buffer in plain_text_buffers {
2466 project.detect_language_for_buffer(&buffer, cx);
2467 project.register_buffer_with_language_servers(&buffer, cx);
2468 }
2469
2470 for buffer in buffers_with_unknown_injections {
2471 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2472 }
2473 });
2474 }
2475 }
2476 })
2477 }
2478
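    // Watch the global settings store and push the resulting workspace configuration
    // to every running language server via `workspace/didChangeConfiguration`.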
2479 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<()> {
2480 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2481 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2482
2483 let settings_observation = cx.observe_global::<SettingsStore, _>(move |_, _| {
2484 *settings_changed_tx.borrow_mut() = ();
2485 });
2486
2487 cx.spawn_weak(|this, mut cx| async move {
2488 while let Some(_) = settings_changed_rx.next().await {
2489 let Some(this) = this.upgrade(&cx) else {
2490 break;
2491 };
2492
2493 let servers: Vec<_> = this.read_with(&cx, |this, _| {
2494 this.language_servers
2495 .values()
2496 .filter_map(|state| match state {
2497 LanguageServerState::Starting(_) => None,
2498 LanguageServerState::Running {
2499 adapter, server, ..
2500 } => Some((adapter.clone(), server.clone())),
2501 })
2502 .collect()
2503 });
2504
2505 for (adapter, server) in servers {
2506 let workspace_config =
2507 cx.update(|cx| adapter.workspace_configuration(cx)).await;
2508 server
2509 .notify::<lsp::notification::DidChangeConfiguration>(
2510 lsp::DidChangeConfigurationParams {
2511 settings: workspace_config.clone(),
2512 },
2513 )
2514 .ok();
2515 }
2516 }
2517
2518 drop(settings_observation);
2519 })
2520 }
2521
2522 fn detect_language_for_buffer(
2523 &mut self,
2524 buffer_handle: &ModelHandle<Buffer>,
2525 cx: &mut ModelContext<Self>,
2526 ) -> Option<()> {
2527         // If a language can be detected for this buffer's path and contents, assign it and start its language servers if they aren't already running.
2528 let buffer = buffer_handle.read(cx);
2529 let full_path = buffer.file()?.full_path(cx);
2530 let content = buffer.as_rope();
2531 let new_language = self
2532 .languages
2533 .language_for_file(&full_path, Some(content))
2534 .now_or_never()?
2535 .ok()?;
2536 self.set_language_for_buffer(buffer_handle, new_language, cx);
2537 None
2538 }
2539
2540 pub fn set_language_for_buffer(
2541 &mut self,
2542 buffer: &ModelHandle<Buffer>,
2543 new_language: Arc<Language>,
2544 cx: &mut ModelContext<Self>,
2545 ) {
2546 buffer.update(cx, |buffer, cx| {
2547 if buffer.language().map_or(true, |old_language| {
2548 !Arc::ptr_eq(old_language, &new_language)
2549 }) {
2550 buffer.set_language(Some(new_language.clone()), cx);
2551 }
2552 });
2553
2554 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2555 let worktree = file.worktree.clone();
2556 if let Some(tree) = worktree.read(cx).as_local() {
2557 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2558 }
2559 }
2560 }
2561
2562 fn start_language_servers(
2563 &mut self,
2564 worktree: &ModelHandle<Worktree>,
2565 worktree_path: Arc<Path>,
2566 language: Arc<Language>,
2567 cx: &mut ModelContext<Self>,
2568 ) {
2569 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2570 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2571 if !settings.enable_language_server {
2572 return;
2573 }
2574
2575 let worktree_id = worktree.read(cx).id();
2576 for adapter in language.lsp_adapters() {
2577 self.start_language_server(
2578 worktree_id,
2579 worktree_path.clone(),
2580 adapter.clone(),
2581 language.clone(),
2582 cx,
2583 );
2584 }
2585 }
2586
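    // Start a single language server for a worktree, unless one is already registered
    // for this (worktree, adapter) pair. Initialization options from the project's LSP
    // settings are merged over the adapter's defaults, and the server is stored in the
    // `Starting` state; if startup fails, the installation is validated (and possibly
    // reinstalled) via `check_errored_server`.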
2587 fn start_language_server(
2588 &mut self,
2589 worktree_id: WorktreeId,
2590 worktree_path: Arc<Path>,
2591 adapter: Arc<CachedLspAdapter>,
2592 language: Arc<Language>,
2593 cx: &mut ModelContext<Self>,
2594 ) {
2595 let key = (worktree_id, adapter.name.clone());
2596 if self.language_server_ids.contains_key(&key) {
2597 return;
2598 }
2599
2600 let pending_server = match self.languages.create_pending_language_server(
2601 language.clone(),
2602 adapter.clone(),
2603 worktree_path,
2604 ProjectLspAdapterDelegate::new(self, cx),
2605 cx,
2606 ) {
2607 Some(pending_server) => pending_server,
2608 None => return,
2609 };
2610
2611 let project_settings = settings::get::<ProjectSettings>(cx);
2612 let lsp = project_settings.lsp.get(&adapter.name.0);
2613         let override_options = lsp.and_then(|s| s.initialization_options.clone());
2614
2615 let mut initialization_options = adapter.initialization_options.clone();
2616 match (&mut initialization_options, override_options) {
2617 (Some(initialization_options), Some(override_options)) => {
2618 merge_json_value_into(override_options, initialization_options);
2619 }
2620 (None, override_options) => initialization_options = override_options,
2621 _ => {}
2622 }
2623
2624 let server_id = pending_server.server_id;
2625 let container_dir = pending_server.container_dir.clone();
2626 let state = LanguageServerState::Starting({
2627 let adapter = adapter.clone();
2628 let server_name = adapter.name.0.clone();
2629 let language = language.clone();
2630 let key = key.clone();
2631
2632 cx.spawn_weak(|this, mut cx| async move {
2633 let result = Self::setup_and_insert_language_server(
2634 this,
2635 initialization_options,
2636 pending_server,
2637 adapter.clone(),
2638 language.clone(),
2639 server_id,
2640 key,
2641 &mut cx,
2642 )
2643 .await;
2644
2645 match result {
2646 Ok(server) => server,
2647
2648 Err(err) => {
2649 log::error!("failed to start language server {:?}: {}", server_name, err);
2650
2651 if let Some(this) = this.upgrade(&cx) {
2652 if let Some(container_dir) = container_dir {
2653 let installation_test_binary = adapter
2654 .installation_test_binary(container_dir.to_path_buf())
2655 .await;
2656
2657 this.update(&mut cx, |_, cx| {
2658 Self::check_errored_server(
2659 language,
2660 adapter,
2661 server_id,
2662 installation_test_binary,
2663 cx,
2664 )
2665 });
2666 }
2667 }
2668
2669 None
2670 }
2671 }
2672 })
2673 });
2674
2675 self.language_servers.insert(server_id, state);
2676 self.language_server_ids.insert(key, server_id);
2677 }
2678
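    // Shut down and reinstall a misbehaving language server: remove its ids for every
    // worktree, delete its container directory, and start fresh servers for each
    // worktree.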
2679 fn reinstall_language_server(
2680 &mut self,
2681 language: Arc<Language>,
2682 adapter: Arc<CachedLspAdapter>,
2683 server_id: LanguageServerId,
2684 cx: &mut ModelContext<Self>,
2685 ) -> Option<Task<()>> {
2686 log::info!("beginning to reinstall server");
2687
2688 let existing_server = match self.language_servers.remove(&server_id) {
2689 Some(LanguageServerState::Running { server, .. }) => Some(server),
2690 _ => None,
2691 };
2692
2693 for worktree in &self.worktrees {
2694 if let Some(worktree) = worktree.upgrade(cx) {
2695 let key = (worktree.read(cx).id(), adapter.name.clone());
2696 self.language_server_ids.remove(&key);
2697 }
2698 }
2699
2700 Some(cx.spawn(move |this, mut cx| async move {
2701 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2702 log::info!("shutting down existing server");
2703 task.await;
2704 }
2705
2706             // TODO: This is race-safe with regard to preventing new instances from
2707             // starting while the container is being deleted, but existing instances in
2708             // other projects will be left in a broken state.
2709 this.update(&mut cx, |this, cx| {
2710 this.languages.delete_server_container(adapter.clone(), cx)
2711 })
2712 .await;
2713
2714 this.update(&mut cx, |this, mut cx| {
2715 let worktrees = this.worktrees.clone();
2716 for worktree in worktrees {
2717 let worktree = match worktree.upgrade(cx) {
2718 Some(worktree) => worktree.read(cx),
2719 None => continue,
2720 };
2721 let worktree_id = worktree.id();
2722 let root_path = worktree.abs_path();
2723
2724 this.start_language_server(
2725 worktree_id,
2726 root_path,
2727 adapter.clone(),
2728 language.clone(),
2729 &mut cx,
2730 );
2731 }
2732 })
2733 }))
2734 }
2735
2736 async fn setup_and_insert_language_server(
2737 this: WeakModelHandle<Self>,
2738 initialization_options: Option<serde_json::Value>,
2739 pending_server: PendingLanguageServer,
2740 adapter: Arc<CachedLspAdapter>,
2741 language: Arc<Language>,
2742 server_id: LanguageServerId,
2743 key: (WorktreeId, LanguageServerName),
2744 cx: &mut AsyncAppContext,
2745 ) -> Result<Option<Arc<LanguageServer>>> {
2746 let setup = Self::setup_pending_language_server(
2747 this,
2748 initialization_options,
2749 pending_server,
2750 adapter.clone(),
2751 server_id,
2752 cx,
2753 );
2754
2755 let language_server = match setup.await? {
2756 Some(language_server) => language_server,
2757 None => return Ok(None),
2758 };
2759 let this = match this.upgrade(cx) {
2760 Some(this) => this,
2761 None => return Err(anyhow!("failed to upgrade project handle")),
2762 };
2763
2764 this.update(cx, |this, cx| {
2765 this.insert_newly_running_language_server(
2766 language,
2767 adapter,
2768 language_server.clone(),
2769 server_id,
2770 key,
2771 cx,
2772 )
2773 })?;
2774
2775 Ok(Some(language_server))
2776 }
2777
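    // Finish launching a language server: register handlers for log messages,
    // published diagnostics, workspace configuration, progress, dynamic capability
    // registration, and workspace edits, then initialize the server and send it the
    // initial workspace configuration.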
2778 async fn setup_pending_language_server(
2779 this: WeakModelHandle<Self>,
2780 initialization_options: Option<serde_json::Value>,
2781 pending_server: PendingLanguageServer,
2782 adapter: Arc<CachedLspAdapter>,
2783 server_id: LanguageServerId,
2784 cx: &mut AsyncAppContext,
2785 ) -> Result<Option<Arc<LanguageServer>>> {
2786 let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await;
2787 let language_server = match pending_server.task.await? {
2788 Some(server) => server,
2789 None => return Ok(None),
2790 };
2791
2792 language_server
2793 .on_notification::<lsp::notification::LogMessage, _>({
2794 move |params, mut cx| {
2795 if let Some(this) = this.upgrade(&cx) {
2796 this.update(&mut cx, |_, cx| {
2797 cx.emit(Event::LanguageServerLog(server_id, params.message))
2798 });
2799 }
2800 }
2801 })
2802 .detach();
2803
2804 language_server
2805 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2806 let adapter = adapter.clone();
2807 move |mut params, mut cx| {
2808 let this = this;
2809 let adapter = adapter.clone();
2810 adapter.process_diagnostics(&mut params);
2811 if let Some(this) = this.upgrade(&cx) {
2812 this.update(&mut cx, |this, cx| {
2813 this.update_diagnostics(
2814 server_id,
2815 params,
2816 &adapter.disk_based_diagnostic_sources,
2817 cx,
2818 )
2819 .log_err();
2820 });
2821 }
2822 }
2823 })
2824 .detach();
2825
2826 language_server
2827 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2828 let adapter = adapter.clone();
2829 move |params, mut cx| {
2830 let adapter = adapter.clone();
2831 async move {
2832 let workspace_config =
2833 cx.update(|cx| adapter.workspace_configuration(cx)).await;
2834 Ok(params
2835 .items
2836 .into_iter()
2837 .map(|item| {
2838 if let Some(section) = &item.section {
2839 workspace_config
2840 .get(section)
2841 .cloned()
2842 .unwrap_or(serde_json::Value::Null)
2843 } else {
2844 workspace_config.clone()
2845 }
2846 })
2847 .collect())
2848 }
2849 }
2850 })
2851 .detach();
2852
2853 // Even though we don't have handling for these requests, respond to them to
2854 // avoid stalling any language server like `gopls` which waits for a response
2855 // to these requests when initializing.
2856 language_server
2857 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(
2858 move |params, mut cx| async move {
2859 if let Some(this) = this.upgrade(&cx) {
2860 this.update(&mut cx, |this, _| {
2861 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
2862 {
2863 if let lsp::NumberOrString::String(token) = params.token {
2864 status.progress_tokens.insert(token);
2865 }
2866 }
2867 });
2868 }
2869 Ok(())
2870 },
2871 )
2872 .detach();
2873 language_server
2874 .on_request::<lsp::request::RegisterCapability, _, _>({
2875 move |params, mut cx| async move {
2876 let this = this
2877 .upgrade(&cx)
2878 .ok_or_else(|| anyhow!("project dropped"))?;
2879 for reg in params.registrations {
2880 if reg.method == "workspace/didChangeWatchedFiles" {
2881 if let Some(options) = reg.register_options {
2882 let options = serde_json::from_value(options)?;
2883 this.update(&mut cx, |this, cx| {
2884 this.on_lsp_did_change_watched_files(server_id, options, cx);
2885 });
2886 }
2887 }
2888 }
2889 Ok(())
2890 }
2891 })
2892 .detach();
2893
2894 language_server
2895 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2896 let adapter = adapter.clone();
2897 move |params, cx| {
2898 Self::on_lsp_workspace_edit(this, params, server_id, adapter.clone(), cx)
2899 }
2900 })
2901 .detach();
2902
2903 language_server
2904 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
2905 move |(), mut cx| async move {
2906 let this = this
2907 .upgrade(&cx)
2908 .ok_or_else(|| anyhow!("project dropped"))?;
2909 this.update(&mut cx, |project, cx| {
2910 cx.emit(Event::RefreshInlayHints);
2911 project.remote_id().map(|project_id| {
2912 project.client.send(proto::RefreshInlayHints { project_id })
2913 })
2914 })
2915 .transpose()?;
2916 Ok(())
2917 }
2918 })
2919 .detach();
2920
2921 let disk_based_diagnostics_progress_token =
2922 adapter.disk_based_diagnostics_progress_token.clone();
2923
2924 language_server
2925 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
2926 if let Some(this) = this.upgrade(&cx) {
2927 this.update(&mut cx, |this, cx| {
2928 this.on_lsp_progress(
2929 params,
2930 server_id,
2931 disk_based_diagnostics_progress_token.clone(),
2932 cx,
2933 );
2934 });
2935 }
2936 })
2937 .detach();
2938
2939 let language_server = language_server.initialize(initialization_options).await?;
2940
2941 language_server
2942 .notify::<lsp::notification::DidChangeConfiguration>(
2943 lsp::DidChangeConfigurationParams {
2944 settings: workspace_config,
2945 },
2946 )
2947 .ok();
2948
2949 Ok(Some(language_server))
2950 }
2951
2952 fn insert_newly_running_language_server(
2953 &mut self,
2954 language: Arc<Language>,
2955 adapter: Arc<CachedLspAdapter>,
2956 language_server: Arc<LanguageServer>,
2957 server_id: LanguageServerId,
2958 key: (WorktreeId, LanguageServerName),
2959 cx: &mut ModelContext<Self>,
2960 ) -> Result<()> {
2961         // If the language server registered for this key doesn't match this server's id,
2962         // don't store the server; dropping it here shuts down its process.
2963 if self
2964 .language_server_ids
2965 .get(&key)
2966 .map(|id| id != &server_id)
2967 .unwrap_or(false)
2968 {
2969 return Ok(());
2970 }
2971
2972 // Update language_servers collection with Running variant of LanguageServerState
2973 // indicating that the server is up and running and ready
2974 self.language_servers.insert(
2975 server_id,
2976 LanguageServerState::Running {
2977 adapter: adapter.clone(),
2978 language: language.clone(),
2979 watched_paths: Default::default(),
2980 server: language_server.clone(),
2981 simulate_disk_based_diagnostics_completion: None,
2982 },
2983 );
2984
2985 self.language_server_statuses.insert(
2986 server_id,
2987 LanguageServerStatus {
2988 name: language_server.name().to_string(),
2989 pending_work: Default::default(),
2990 has_pending_diagnostic_updates: false,
2991 progress_tokens: Default::default(),
2992 },
2993 );
2994
2995 cx.emit(Event::LanguageServerAdded(server_id));
2996
2997 if let Some(project_id) = self.remote_id() {
2998 self.client.send(proto::StartLanguageServer {
2999 project_id,
3000 server: Some(proto::LanguageServer {
3001 id: server_id.0 as u64,
3002 name: language_server.name().to_string(),
3003 }),
3004 })?;
3005 }
3006
3007 // Tell the language server about every open buffer in the worktree that matches the language.
3008 for buffer in self.opened_buffers.values() {
3009 if let Some(buffer_handle) = buffer.upgrade(cx) {
3010 let buffer = buffer_handle.read(cx);
3011 let file = match File::from_dyn(buffer.file()) {
3012 Some(file) => file,
3013 None => continue,
3014 };
3015 let language = match buffer.language() {
3016 Some(language) => language,
3017 None => continue,
3018 };
3019
3020 if file.worktree.read(cx).id() != key.0
3021 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3022 {
3023 continue;
3024 }
3025
3026 let file = match file.as_local() {
3027 Some(file) => file,
3028 None => continue,
3029 };
3030
3031 let versions = self
3032 .buffer_snapshots
3033 .entry(buffer.remote_id())
3034 .or_default()
3035 .entry(server_id)
3036 .or_insert_with(|| {
3037 vec![LspBufferSnapshot {
3038 version: 0,
3039 snapshot: buffer.text_snapshot(),
3040 }]
3041 });
3042
3043 let snapshot = versions.last().unwrap();
3044 let version = snapshot.version;
3045 let initial_snapshot = &snapshot.snapshot;
3046 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3047 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3048 lsp::DidOpenTextDocumentParams {
3049 text_document: lsp::TextDocumentItem::new(
3050 uri,
3051 adapter
3052 .language_ids
3053 .get(language.name().as_ref())
3054 .cloned()
3055 .unwrap_or_default(),
3056 version,
3057 initial_snapshot.text(),
3058 ),
3059 },
3060 )?;
3061
3062 buffer_handle.update(cx, |buffer, cx| {
3063 buffer.set_completion_triggers(
3064 language_server
3065 .capabilities()
3066 .completion_provider
3067 .as_ref()
3068 .and_then(|provider| provider.trigger_characters.clone())
3069 .unwrap_or_default(),
3070 cx,
3071 )
3072 });
3073 }
3074 }
3075
3076 cx.notify();
3077 Ok(())
3078 }
3079
3080     // Returns the root path of the stopped server along with a list of all of the worktrees
3081     // that no longer have a language server for the given adapter.
3082 fn stop_language_server(
3083 &mut self,
3084 worktree_id: WorktreeId,
3085 adapter_name: LanguageServerName,
3086 cx: &mut ModelContext<Self>,
3087 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
3088 let key = (worktree_id, adapter_name);
3089 if let Some(server_id) = self.language_server_ids.remove(&key) {
3090 log::info!("stopping language server {}", key.1 .0);
3091
3092 // Remove other entries for this language server as well
3093 let mut orphaned_worktrees = vec![worktree_id];
3094 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3095 for other_key in other_keys {
3096 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3097 self.language_server_ids.remove(&other_key);
3098 orphaned_worktrees.push(other_key.0);
3099 }
3100 }
3101
3102 for buffer in self.opened_buffers.values() {
3103 if let Some(buffer) = buffer.upgrade(cx) {
3104 buffer.update(cx, |buffer, cx| {
3105 buffer.update_diagnostics(server_id, Default::default(), cx);
3106 });
3107 }
3108 }
3109 for worktree in &self.worktrees {
3110 if let Some(worktree) = worktree.upgrade(cx) {
3111 worktree.update(cx, |worktree, cx| {
3112 if let Some(worktree) = worktree.as_local_mut() {
3113 worktree.clear_diagnostics_for_language_server(server_id, cx);
3114 }
3115 });
3116 }
3117 }
3118
3119 self.language_server_statuses.remove(&server_id);
3120 cx.notify();
3121
3122 let server_state = self.language_servers.remove(&server_id);
3123 cx.emit(Event::LanguageServerRemoved(server_id));
3124 cx.spawn_weak(|this, mut cx| async move {
3125 let mut root_path = None;
3126
3127 let server = match server_state {
3128 Some(LanguageServerState::Starting(task)) => task.await,
3129 Some(LanguageServerState::Running { server, .. }) => Some(server),
3130 None => None,
3131 };
3132
3133 if let Some(server) = server {
3134 root_path = Some(server.root_path().clone());
3135 if let Some(shutdown) = server.shutdown() {
3136 shutdown.await;
3137 }
3138 }
3139
3140 if let Some(this) = this.upgrade(&cx) {
3141 this.update(&mut cx, |this, cx| {
3142 this.language_server_statuses.remove(&server_id);
3143 cx.notify();
3144 });
3145 }
3146
3147 (root_path, orphaned_worktrees)
3148 })
3149 } else {
3150 Task::ready((None, Vec::new()))
3151 }
3152 }
3153
3154 pub fn restart_language_servers_for_buffers(
3155 &mut self,
3156 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
3157 cx: &mut ModelContext<Self>,
3158 ) -> Option<()> {
3159 let language_server_lookup_info: HashSet<(ModelHandle<Worktree>, Arc<Language>)> = buffers
3160 .into_iter()
3161 .filter_map(|buffer| {
3162 let buffer = buffer.read(cx);
3163 let file = File::from_dyn(buffer.file())?;
3164 let full_path = file.full_path(cx);
3165 let language = self
3166 .languages
3167 .language_for_file(&full_path, Some(buffer.as_rope()))
3168 .now_or_never()?
3169 .ok()?;
3170 Some((file.worktree.clone(), language))
3171 })
3172 .collect();
3173 for (worktree, language) in language_server_lookup_info {
3174 self.restart_language_servers(worktree, language, cx);
3175 }
3176
3177 None
3178 }
3179
3180 // TODO This will break in the case where the adapter's root paths and worktrees are not equal
3181 fn restart_language_servers(
3182 &mut self,
3183 worktree: ModelHandle<Worktree>,
3184 language: Arc<Language>,
3185 cx: &mut ModelContext<Self>,
3186 ) {
3187 let worktree_id = worktree.read(cx).id();
3188 let fallback_path = worktree.read(cx).abs_path();
3189
3190 let mut stops = Vec::new();
3191 for adapter in language.lsp_adapters() {
3192 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
3193 }
3194
3195 if stops.is_empty() {
3196 return;
3197 }
3198 let mut stops = stops.into_iter();
3199
3200 cx.spawn_weak(|this, mut cx| async move {
3201 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
3202 for stop in stops {
3203 let (_, worktrees) = stop.await;
3204 orphaned_worktrees.extend_from_slice(&worktrees);
3205 }
3206
3207 let this = match this.upgrade(&cx) {
3208 Some(this) => this,
3209 None => return,
3210 };
3211
3212 this.update(&mut cx, |this, cx| {
3213                 // Attempt to restart using the original server's root path, falling back
3214                 // to the worktree's path if the root path could not be retrieved.
3215 let root_path = original_root_path
3216 .map(|path_buf| Arc::from(path_buf.as_path()))
3217 .unwrap_or(fallback_path);
3218
3219 this.start_language_servers(&worktree, root_path, language.clone(), cx);
3220
3221 // Lookup new server ids and set them for each of the orphaned worktrees
3222 for adapter in language.lsp_adapters() {
3223 if let Some(new_server_id) = this
3224 .language_server_ids
3225 .get(&(worktree_id, adapter.name.clone()))
3226 .cloned()
3227 {
3228 for &orphaned_worktree in &orphaned_worktrees {
3229 this.language_server_ids
3230 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
3231 }
3232 }
3233 }
3234 });
3235 })
3236 .detach();
3237 }
3238
3239 fn check_errored_server(
3240 language: Arc<Language>,
3241 adapter: Arc<CachedLspAdapter>,
3242 server_id: LanguageServerId,
3243 installation_test_binary: Option<LanguageServerBinary>,
3244 cx: &mut ModelContext<Self>,
3245 ) {
3246 if !adapter.can_be_reinstalled() {
3247 log::info!(
3248 "Validation check requested for {:?} but it cannot be reinstalled",
3249 adapter.name.0
3250 );
3251 return;
3252 }
3253
3254 cx.spawn(|this, mut cx| async move {
3255 log::info!("About to spawn test binary");
3256
3257 // A lack of test binary counts as a failure
3258 let process = installation_test_binary.and_then(|binary| {
3259 smol::process::Command::new(&binary.path)
3260 .current_dir(&binary.path)
3261 .args(binary.arguments)
3262 .stdin(Stdio::piped())
3263 .stdout(Stdio::piped())
3264 .stderr(Stdio::inherit())
3265 .kill_on_drop(true)
3266 .spawn()
3267 .ok()
3268 });
3269
3270 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3271 let mut timeout = cx.background().timer(PROCESS_TIMEOUT).fuse();
3272
3273 let mut errored = false;
3274 if let Some(mut process) = process {
3275 futures::select! {
3276 status = process.status().fuse() => match status {
3277 Ok(status) => errored = !status.success(),
3278 Err(_) => errored = true,
3279 },
3280
3281 _ = timeout => {
3282                         log::info!("test binary timed out; this counts as a success");
3283 _ = process.kill();
3284 }
3285 }
3286 } else {
3287 log::warn!("test binary failed to launch");
3288 errored = true;
3289 }
3290
3291 if errored {
3292 log::warn!("test binary check failed");
3293 let task = this.update(&mut cx, move |this, mut cx| {
3294 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3295 });
3296
3297 if let Some(task) = task {
3298 task.await;
3299 }
3300 }
3301 })
3302 .detach();
3303 }
3304
3305 fn on_lsp_progress(
3306 &mut self,
3307 progress: lsp::ProgressParams,
3308 language_server_id: LanguageServerId,
3309 disk_based_diagnostics_progress_token: Option<String>,
3310 cx: &mut ModelContext<Self>,
3311 ) {
3312 let token = match progress.token {
3313 lsp::NumberOrString::String(token) => token,
3314 lsp::NumberOrString::Number(token) => {
3315 log::info!("skipping numeric progress token {}", token);
3316 return;
3317 }
3318 };
3319 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3320 let language_server_status =
3321 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3322 status
3323 } else {
3324 return;
3325 };
3326
3327 if !language_server_status.progress_tokens.contains(&token) {
3328 return;
3329 }
3330
3331 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3332 .as_ref()
3333 .map_or(false, |disk_based_token| {
3334 token.starts_with(disk_based_token)
3335 });
3336
3337 match progress {
3338 lsp::WorkDoneProgress::Begin(report) => {
3339 if is_disk_based_diagnostics_progress {
3340 language_server_status.has_pending_diagnostic_updates = true;
3341 self.disk_based_diagnostics_started(language_server_id, cx);
3342 self.buffer_ordered_messages_tx
3343 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3344 language_server_id,
3345 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3346 })
3347 .ok();
3348 } else {
3349 self.on_lsp_work_start(
3350 language_server_id,
3351 token.clone(),
3352 LanguageServerProgress {
3353 message: report.message.clone(),
3354 percentage: report.percentage.map(|p| p as usize),
3355 last_update_at: Instant::now(),
3356 },
3357 cx,
3358 );
3359 self.buffer_ordered_messages_tx
3360 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3361 language_server_id,
3362 message: proto::update_language_server::Variant::WorkStart(
3363 proto::LspWorkStart {
3364 token,
3365 message: report.message,
3366 percentage: report.percentage.map(|p| p as u32),
3367 },
3368 ),
3369 })
3370 .ok();
3371 }
3372 }
3373 lsp::WorkDoneProgress::Report(report) => {
3374 if !is_disk_based_diagnostics_progress {
3375 self.on_lsp_work_progress(
3376 language_server_id,
3377 token.clone(),
3378 LanguageServerProgress {
3379 message: report.message.clone(),
3380 percentage: report.percentage.map(|p| p as usize),
3381 last_update_at: Instant::now(),
3382 },
3383 cx,
3384 );
3385 self.buffer_ordered_messages_tx
3386 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3387 language_server_id,
3388 message: proto::update_language_server::Variant::WorkProgress(
3389 proto::LspWorkProgress {
3390 token,
3391 message: report.message,
3392 percentage: report.percentage.map(|p| p as u32),
3393 },
3394 ),
3395 })
3396 .ok();
3397 }
3398 }
3399 lsp::WorkDoneProgress::End(_) => {
3400 language_server_status.progress_tokens.remove(&token);
3401
3402 if is_disk_based_diagnostics_progress {
3403 language_server_status.has_pending_diagnostic_updates = false;
3404 self.disk_based_diagnostics_finished(language_server_id, cx);
3405 self.buffer_ordered_messages_tx
3406 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3407 language_server_id,
3408 message:
3409 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3410 Default::default(),
3411 ),
3412 })
3413 .ok();
3414 } else {
3415 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3416 self.buffer_ordered_messages_tx
3417 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3418 language_server_id,
3419 message: proto::update_language_server::Variant::WorkEnd(
3420 proto::LspWorkEnd { token },
3421 ),
3422 })
3423 .ok();
3424 }
3425 }
3426 }
3427 }
3428
3429 fn on_lsp_work_start(
3430 &mut self,
3431 language_server_id: LanguageServerId,
3432 token: String,
3433 progress: LanguageServerProgress,
3434 cx: &mut ModelContext<Self>,
3435 ) {
3436 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3437 status.pending_work.insert(token, progress);
3438 cx.notify();
3439 }
3440 }
3441
3442 fn on_lsp_work_progress(
3443 &mut self,
3444 language_server_id: LanguageServerId,
3445 token: String,
3446 progress: LanguageServerProgress,
3447 cx: &mut ModelContext<Self>,
3448 ) {
3449 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3450 let entry = status
3451 .pending_work
3452 .entry(token)
3453 .or_insert(LanguageServerProgress {
3454 message: Default::default(),
3455 percentage: Default::default(),
3456 last_update_at: progress.last_update_at,
3457 });
3458 if progress.message.is_some() {
3459 entry.message = progress.message;
3460 }
3461 if progress.percentage.is_some() {
3462 entry.percentage = progress.percentage;
3463 }
3464 entry.last_update_at = progress.last_update_at;
3465 cx.notify();
3466 }
3467 }
3468
3469 fn on_lsp_work_end(
3470 &mut self,
3471 language_server_id: LanguageServerId,
3472 token: String,
3473 cx: &mut ModelContext<Self>,
3474 ) {
3475 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3476 cx.emit(Event::RefreshInlayHints);
3477 status.pending_work.remove(&token);
3478 cx.notify();
3479 }
3480 }
3481
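    // Translate a `workspace/didChangeWatchedFiles` registration into per-worktree
    // glob sets: each watcher's pattern is made relative to the worktree that contains
    // it, the glob's literal prefix is added to that worktree's scanned paths, and the
    // resulting `GlobSet`s replace the server's previously watched paths.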
3482 fn on_lsp_did_change_watched_files(
3483 &mut self,
3484 language_server_id: LanguageServerId,
3485 params: DidChangeWatchedFilesRegistrationOptions,
3486 cx: &mut ModelContext<Self>,
3487 ) {
3488 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3489 self.language_servers.get_mut(&language_server_id)
3490 {
3491 let mut builders = HashMap::default();
3492 for watcher in params.watchers {
3493 for worktree in &self.worktrees {
3494 if let Some(worktree) = worktree.upgrade(cx) {
3495 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3496 if let Some(abs_path) = tree.abs_path().to_str() {
3497 let relative_glob_pattern = match &watcher.glob_pattern {
3498 lsp::GlobPattern::String(s) => s
3499 .strip_prefix(abs_path)
3500 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3501 lsp::GlobPattern::Relative(rp) => {
3502 let base_uri = match &rp.base_uri {
3503 lsp::OneOf::Left(workspace_folder) => {
3504 &workspace_folder.uri
3505 }
3506 lsp::OneOf::Right(base_uri) => base_uri,
3507 };
3508 base_uri.to_file_path().ok().and_then(|file_path| {
3509 (file_path.to_str() == Some(abs_path))
3510 .then_some(rp.pattern.as_str())
3511 })
3512 }
3513 };
3514 if let Some(relative_glob_pattern) = relative_glob_pattern {
3515 let literal_prefix =
3516 glob_literal_prefix(&relative_glob_pattern);
3517 tree.as_local_mut()
3518 .unwrap()
3519 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3520 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3521 builders
3522 .entry(tree.id())
3523                                     .or_insert_with(GlobSetBuilder::new)
3524 .add(glob);
3525 }
3526 return true;
3527 }
3528 }
3529 false
3530 });
3531 if glob_is_inside_worktree {
3532 break;
3533 }
3534 }
3535 }
3536 }
3537
3538 watched_paths.clear();
3539 for (worktree_id, builder) in builders {
3540 if let Ok(globset) = builder.build() {
3541 watched_paths.insert(worktree_id, globset);
3542 }
3543 }
3544
3545 cx.notify();
3546 }
3547 }
3548
3549 async fn on_lsp_workspace_edit(
3550 this: WeakModelHandle<Self>,
3551 params: lsp::ApplyWorkspaceEditParams,
3552 server_id: LanguageServerId,
3553 adapter: Arc<CachedLspAdapter>,
3554 mut cx: AsyncAppContext,
3555 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3556 let this = this
3557 .upgrade(&cx)
3558             .ok_or_else(|| anyhow!("project closed"))?;
3559 let language_server = this
3560 .read_with(&cx, |this, _| this.language_server_for_id(server_id))
3561 .ok_or_else(|| anyhow!("language server not found"))?;
3562 let transaction = Self::deserialize_workspace_edit(
3563 this.clone(),
3564 params.edit,
3565 true,
3566 adapter.clone(),
3567 language_server.clone(),
3568 &mut cx,
3569 )
3570 .await
3571 .log_err();
3572 this.update(&mut cx, |this, _| {
3573 if let Some(transaction) = transaction {
3574 this.last_workspace_edits_by_language_server
3575 .insert(server_id, transaction);
3576 }
3577 });
3578 Ok(lsp::ApplyWorkspaceEditResponse {
3579 applied: true,
3580 failed_change: None,
3581 failure_reason: None,
3582 })
3583 }
3584
3585 pub fn language_server_statuses(
3586 &self,
3587 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3588 self.language_server_statuses.values()
3589 }
3590
3591 pub fn update_diagnostics(
3592 &mut self,
3593 language_server_id: LanguageServerId,
3594 mut params: lsp::PublishDiagnosticsParams,
3595 disk_based_sources: &[String],
3596 cx: &mut ModelContext<Self>,
3597 ) -> Result<()> {
3598 let abs_path = params
3599 .uri
3600 .to_file_path()
3601 .map_err(|_| anyhow!("URI is not a file"))?;
3602 let mut diagnostics = Vec::default();
3603 let mut primary_diagnostic_group_ids = HashMap::default();
3604 let mut sources_by_group_id = HashMap::default();
3605 let mut supporting_diagnostics = HashMap::default();
3606
3607 // Ensure that primary diagnostics are always the most severe
3608 params.diagnostics.sort_by_key(|item| item.severity);
3609
3610         for diagnostic in &params.diagnostics {
3611 let source = diagnostic.source.as_ref();
3612 let code = diagnostic.code.as_ref().map(|code| match code {
3613 lsp::NumberOrString::Number(code) => code.to_string(),
3614 lsp::NumberOrString::String(code) => code.clone(),
3615 });
3616 let range = range_from_lsp(diagnostic.range);
3617 let is_supporting = diagnostic
3618 .related_information
3619 .as_ref()
3620 .map_or(false, |infos| {
3621 infos.iter().any(|info| {
3622 primary_diagnostic_group_ids.contains_key(&(
3623 source,
3624 code.clone(),
3625 range_from_lsp(info.location.range),
3626 ))
3627 })
3628 });
3629
3630 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3631 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3632 });
3633
3634 if is_supporting {
3635 supporting_diagnostics.insert(
3636 (source, code.clone(), range),
3637 (diagnostic.severity, is_unnecessary),
3638 );
3639 } else {
3640 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3641 let is_disk_based =
3642 source.map_or(false, |source| disk_based_sources.contains(source));
3643
3644 sources_by_group_id.insert(group_id, source);
3645 primary_diagnostic_group_ids
3646 .insert((source, code.clone(), range.clone()), group_id);
3647
3648 diagnostics.push(DiagnosticEntry {
3649 range,
3650 diagnostic: Diagnostic {
3651 source: diagnostic.source.clone(),
3652 code: code.clone(),
3653 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3654 message: diagnostic.message.clone(),
3655 group_id,
3656 is_primary: true,
3657 is_valid: true,
3658 is_disk_based,
3659 is_unnecessary,
3660 },
3661 });
3662 if let Some(infos) = &diagnostic.related_information {
3663 for info in infos {
3664 if info.location.uri == params.uri && !info.message.is_empty() {
3665 let range = range_from_lsp(info.location.range);
3666 diagnostics.push(DiagnosticEntry {
3667 range,
3668 diagnostic: Diagnostic {
3669 source: diagnostic.source.clone(),
3670 code: code.clone(),
3671 severity: DiagnosticSeverity::INFORMATION,
3672 message: info.message.clone(),
3673 group_id,
3674 is_primary: false,
3675 is_valid: true,
3676 is_disk_based,
3677 is_unnecessary: false,
3678 },
3679 });
3680 }
3681 }
3682 }
3683 }
3684 }
3685
3686 for entry in &mut diagnostics {
3687 let diagnostic = &mut entry.diagnostic;
3688 if !diagnostic.is_primary {
3689 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3690 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3691 source,
3692 diagnostic.code.clone(),
3693 entry.range.clone(),
3694 )) {
3695 if let Some(severity) = severity {
3696 diagnostic.severity = severity;
3697 }
3698 diagnostic.is_unnecessary = is_unnecessary;
3699 }
3700 }
3701 }
3702
3703 self.update_diagnostic_entries(
3704 language_server_id,
3705 abs_path,
3706 params.version,
3707 diagnostics,
3708 cx,
3709 )?;
3710 Ok(())
3711 }
3712
3713 pub fn update_diagnostic_entries(
3714 &mut self,
3715 server_id: LanguageServerId,
3716 abs_path: PathBuf,
3717 version: Option<i32>,
3718 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3719 cx: &mut ModelContext<Project>,
3720 ) -> Result<(), anyhow::Error> {
3721 let (worktree, relative_path) = self
3722 .find_local_worktree(&abs_path, cx)
3723 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3724
3725 let project_path = ProjectPath {
3726 worktree_id: worktree.read(cx).id(),
3727 path: relative_path.into(),
3728 };
3729
3730 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3731 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3732 }
3733
3734 let updated = worktree.update(cx, |worktree, cx| {
3735 worktree
3736 .as_local_mut()
3737 .ok_or_else(|| anyhow!("not a local worktree"))?
3738 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3739 })?;
3740 if updated {
3741 cx.emit(Event::DiagnosticsUpdated {
3742 language_server_id: server_id,
3743 path: project_path,
3744 });
3745 }
3746 Ok(())
3747 }
3748
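    // Apply freshly published diagnostics to a buffer: sort them, shift disk-based
    // entries by any unsaved edits so they line up with the current buffer contents,
    // clip ranges to valid positions (widening empty ranges by one position), and
    // replace the buffer's existing diagnostics for this server.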
3749 fn update_buffer_diagnostics(
3750 &mut self,
3751 buffer: &ModelHandle<Buffer>,
3752 server_id: LanguageServerId,
3753 version: Option<i32>,
3754 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3755 cx: &mut ModelContext<Self>,
3756 ) -> Result<()> {
3757 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3758 Ordering::Equal
3759 .then_with(|| b.is_primary.cmp(&a.is_primary))
3760 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3761 .then_with(|| a.severity.cmp(&b.severity))
3762 .then_with(|| a.message.cmp(&b.message))
3763 }
3764
3765 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3766
3767 diagnostics.sort_unstable_by(|a, b| {
3768 Ordering::Equal
3769 .then_with(|| a.range.start.cmp(&b.range.start))
3770 .then_with(|| b.range.end.cmp(&a.range.end))
3771 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3772 });
3773
3774 let mut sanitized_diagnostics = Vec::new();
3775 let edits_since_save = Patch::new(
3776 snapshot
3777 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
3778 .collect(),
3779 );
3780 for entry in diagnostics {
3781 let start;
3782 let end;
3783 if entry.diagnostic.is_disk_based {
3784 // Some diagnostics are based on files on disk instead of buffers'
3785 // current contents. Adjust these diagnostics' ranges to reflect
3786 // any unsaved edits.
3787 start = edits_since_save.old_to_new(entry.range.start);
3788 end = edits_since_save.old_to_new(entry.range.end);
3789 } else {
3790 start = entry.range.start;
3791 end = entry.range.end;
3792 }
3793
3794 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
3795 ..snapshot.clip_point_utf16(end, Bias::Right);
3796
3797 // Expand empty ranges by one codepoint
            if range.start == range.end {
                // Bumping the end column will move it to the next character boundary once clipped.
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
                if range.start == range.end && range.end.column > 0 {
                    // The range sits at the end of a line, so expand backwards instead.
                    range.start.column -= 1;
                    range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left);
                }
            }
3807
3808 sanitized_diagnostics.push(DiagnosticEntry {
3809 range,
3810 diagnostic: entry.diagnostic,
3811 });
3812 }
3813 drop(edits_since_save);
3814
3815 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
3816 buffer.update(cx, |buffer, cx| {
3817 buffer.update_diagnostics(server_id, set, cx)
3818 });
3819 Ok(())
3820 }
3821
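    /// Reloads the given buffers from disk, skipping buffers that aren't dirty. Remote buffers are
    /// reloaded by the host via `proto::ReloadBuffers`; local buffers are reloaded in place. The
    /// resulting per-buffer transactions are collected into a `ProjectTransaction`, and are dropped
    /// from the undo history unless `push_to_history` is true.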
3822 pub fn reload_buffers(
3823 &self,
3824 buffers: HashSet<ModelHandle<Buffer>>,
3825 push_to_history: bool,
3826 cx: &mut ModelContext<Self>,
3827 ) -> Task<Result<ProjectTransaction>> {
3828 let mut local_buffers = Vec::new();
3829 let mut remote_buffers = None;
3830 for buffer_handle in buffers {
3831 let buffer = buffer_handle.read(cx);
3832 if buffer.is_dirty() {
3833 if let Some(file) = File::from_dyn(buffer.file()) {
3834 if file.is_local() {
3835 local_buffers.push(buffer_handle);
3836 } else {
3837 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
3838 }
3839 }
3840 }
3841 }
3842
3843 let remote_buffers = self.remote_id().zip(remote_buffers);
3844 let client = self.client.clone();
3845
3846 cx.spawn(|this, mut cx| async move {
3847 let mut project_transaction = ProjectTransaction::default();
3848
3849 if let Some((project_id, remote_buffers)) = remote_buffers {
3850 let response = client
3851 .request(proto::ReloadBuffers {
3852 project_id,
3853 buffer_ids: remote_buffers
3854 .iter()
3855 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
3856 .collect(),
3857 })
3858 .await?
3859 .transaction
3860 .ok_or_else(|| anyhow!("missing transaction"))?;
3861 project_transaction = this
3862 .update(&mut cx, |this, cx| {
3863 this.deserialize_project_transaction(response, push_to_history, cx)
3864 })
3865 .await?;
3866 }
3867
3868 for buffer in local_buffers {
3869 let transaction = buffer
3870 .update(&mut cx, |buffer, cx| buffer.reload(cx))
3871 .await?;
3872 buffer.update(&mut cx, |buffer, cx| {
3873 if let Some(transaction) = transaction {
3874 if !push_to_history {
3875 buffer.forget_transaction(transaction.id);
3876 }
3877 project_transaction.0.insert(cx.handle(), transaction);
3878 }
3879 });
3880 }
3881
3882 Ok(project_transaction)
3883 })
3884 }
3885
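    /// Formats the given buffers according to their language settings. Locally this runs in two steps
    /// that are grouped into a single undo transaction: whitespace fixups (trailing-whitespace removal
    /// and final-newline insertion) followed by language-server or external-command formatting, with
    /// `format_on_save` able to override the configured formatter. On remote projects the request is
    /// forwarded to the host via `proto::FormatBuffers`.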
3886 pub fn format(
3887 &self,
3888 buffers: HashSet<ModelHandle<Buffer>>,
3889 push_to_history: bool,
3890 trigger: FormatTrigger,
3891 cx: &mut ModelContext<Project>,
3892 ) -> Task<Result<ProjectTransaction>> {
3893 if self.is_local() {
3894 let mut buffers_with_paths_and_servers = buffers
3895 .into_iter()
3896 .filter_map(|buffer_handle| {
3897 let buffer = buffer_handle.read(cx);
3898 let file = File::from_dyn(buffer.file())?;
3899 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3900 let server = self
3901 .primary_language_server_for_buffer(buffer, cx)
3902 .map(|s| s.1.clone());
3903 Some((buffer_handle, buffer_abs_path, server))
3904 })
3905 .collect::<Vec<_>>();
3906
3907 cx.spawn(|this, mut cx| async move {
3908 // Do not allow multiple concurrent formatting requests for the
3909 // same buffer.
3910 this.update(&mut cx, |this, cx| {
3911 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
3912 this.buffers_being_formatted
3913 .insert(buffer.read(cx).remote_id())
3914 });
3915 });
3916
3917 let _cleanup = defer({
3918 let this = this.clone();
3919 let mut cx = cx.clone();
3920 let buffers = &buffers_with_paths_and_servers;
3921 move || {
3922 this.update(&mut cx, |this, cx| {
3923 for (buffer, _, _) in buffers {
3924 this.buffers_being_formatted
3925 .remove(&buffer.read(cx).remote_id());
3926 }
3927 });
3928 }
3929 });
3930
3931 let mut project_transaction = ProjectTransaction::default();
3932 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
3933 let settings = buffer.read_with(&cx, |buffer, cx| {
3934 language_settings(buffer.language(), buffer.file(), cx).clone()
3935 });
3936
3937 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
3938 let ensure_final_newline = settings.ensure_final_newline_on_save;
3939 let format_on_save = settings.format_on_save.clone();
3940 let formatter = settings.formatter.clone();
3941 let tab_size = settings.tab_size;
3942
3943 // First, format buffer's whitespace according to the settings.
3944 let trailing_whitespace_diff = if remove_trailing_whitespace {
3945 Some(
3946 buffer
3947 .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
3948 .await,
3949 )
3950 } else {
3951 None
3952 };
3953 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
3954 buffer.finalize_last_transaction();
3955 buffer.start_transaction();
3956 if let Some(diff) = trailing_whitespace_diff {
3957 buffer.apply_diff(diff, cx);
3958 }
3959 if ensure_final_newline {
3960 buffer.ensure_final_newline(cx);
3961 }
3962 buffer.end_transaction(cx)
3963 });
3964
3965 // Currently, formatting operations are represented differently depending on
3966 // whether they come from a language server or an external command.
3967 enum FormatOperation {
3968 Lsp(Vec<(Range<Anchor>, String)>),
3969 External(Diff),
3970 }
3971
3972 // Apply language-specific formatting using either a language server
3973 // or external command.
3974 let mut format_operation = None;
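                    // `format_on_save` takes precedence over the configured formatter: `Off` suppresses
                    // formatting only for save-triggered requests, while `LanguageServer` / `External`
                    // force that method regardless of the `formatter` setting.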
3975 match (formatter, format_on_save) {
3976 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
3977
3978 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
3979 | (_, FormatOnSave::LanguageServer) => {
3980 if let Some((language_server, buffer_abs_path)) =
3981 language_server.as_ref().zip(buffer_abs_path.as_ref())
3982 {
3983 format_operation = Some(FormatOperation::Lsp(
3984 Self::format_via_lsp(
3985 &this,
3986 &buffer,
3987 buffer_abs_path,
3988 &language_server,
3989 tab_size,
3990 &mut cx,
3991 )
3992 .await
3993 .context("failed to format via language server")?,
3994 ));
3995 }
3996 }
3997
3998 (
3999 Formatter::External { command, arguments },
4000 FormatOnSave::On | FormatOnSave::Off,
4001 )
4002 | (_, FormatOnSave::External { command, arguments }) => {
4003 if let Some(buffer_abs_path) = buffer_abs_path {
4004 format_operation = Self::format_via_external_command(
4005 &buffer,
4006 &buffer_abs_path,
4007 &command,
4008 &arguments,
4009 &mut cx,
4010 )
4011 .await
4012 .context(format!(
4013 "failed to format via external command {:?}",
4014 command
4015 ))?
4016 .map(FormatOperation::External);
4017 }
4018 }
4019 };
4020
4021 buffer.update(&mut cx, |b, cx| {
4022 // If the buffer had its whitespace formatted and was edited while the language-specific
4023 // formatting was being computed, avoid applying the language-specific formatting, because
4024 // it can't be grouped with the whitespace formatting in the undo history.
4025 if let Some(transaction_id) = whitespace_transaction_id {
4026 if b.peek_undo_stack()
4027 .map_or(true, |e| e.transaction_id() != transaction_id)
4028 {
4029 format_operation.take();
4030 }
4031 }
4032
4033 // Apply any language-specific formatting, and group the two formatting operations
4034 // in the buffer's undo history.
4035 if let Some(operation) = format_operation {
4036 match operation {
4037 FormatOperation::Lsp(edits) => {
4038 b.edit(edits, None, cx);
4039 }
4040 FormatOperation::External(diff) => {
4041 b.apply_diff(diff, cx);
4042 }
4043 }
4044
4045 if let Some(transaction_id) = whitespace_transaction_id {
4046 b.group_until_transaction(transaction_id);
4047 }
4048 }
4049
4050 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4051 if !push_to_history {
4052 b.forget_transaction(transaction.id);
4053 }
4054 project_transaction.0.insert(buffer.clone(), transaction);
4055 }
4056 });
4057 }
4058
4059 Ok(project_transaction)
4060 })
4061 } else {
4062 let remote_id = self.remote_id();
4063 let client = self.client.clone();
4064 cx.spawn(|this, mut cx| async move {
4065 let mut project_transaction = ProjectTransaction::default();
4066 if let Some(project_id) = remote_id {
4067 let response = client
4068 .request(proto::FormatBuffers {
4069 project_id,
4070 trigger: trigger as i32,
4071 buffer_ids: buffers
4072 .iter()
4073 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
4074 .collect(),
4075 })
4076 .await?
4077 .transaction
4078 .ok_or_else(|| anyhow!("missing transaction"))?;
4079 project_transaction = this
4080 .update(&mut cx, |this, cx| {
4081 this.deserialize_project_transaction(response, push_to_history, cx)
4082 })
4083 .await?;
4084 }
4085 Ok(project_transaction)
4086 })
4087 }
4088 }
4089
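    /// Requests formatting edits from a language server, preferring whole-document formatting and
    /// falling back to range formatting over the entire buffer when only that capability is
    /// advertised. The returned LSP edits are converted into anchored buffer edits via
    /// `edits_from_lsp`.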
4090 async fn format_via_lsp(
4091 this: &ModelHandle<Self>,
4092 buffer: &ModelHandle<Buffer>,
4093 abs_path: &Path,
4094 language_server: &Arc<LanguageServer>,
4095 tab_size: NonZeroU32,
4096 cx: &mut AsyncAppContext,
4097 ) -> Result<Vec<(Range<Anchor>, String)>> {
4098 let uri = lsp::Url::from_file_path(abs_path)
4099 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4100 let text_document = lsp::TextDocumentIdentifier::new(uri);
4101 let capabilities = &language_server.capabilities();
4102
4103 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4104 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4105
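        // A capability of `OneOf::Left(false)` means the server explicitly disabled the provider, so
        // it is treated the same as an absent one.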
4106 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4107 language_server
4108 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4109 text_document,
4110 options: lsp_command::lsp_formatting_options(tab_size.get()),
4111 work_done_progress_params: Default::default(),
4112 })
4113 .await?
4114 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4115 let buffer_start = lsp::Position::new(0, 0);
4116 let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));
4117
4118 language_server
4119 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4120 text_document,
4121 range: lsp::Range::new(buffer_start, buffer_end),
4122 options: lsp_command::lsp_formatting_options(tab_size.get()),
4123 work_done_progress_params: Default::default(),
4124 })
4125 .await?
4126 } else {
4127 None
4128 };
4129
4130 if let Some(lsp_edits) = lsp_edits {
4131 this.update(cx, |this, cx| {
4132 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4133 })
4134 .await
4135 } else {
4136 Ok(Vec::new())
4137 }
4138 }
4139
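    /// Formats a buffer by piping its contents to an external command. The command runs in the
    /// buffer's worktree root (or the file's parent directory for single-file worktrees), with
    /// `{buffer_path}` in the arguments replaced by the buffer's absolute path, and its stdout is
    /// diffed against the current buffer contents. Returns `None` when no working directory can be
    /// determined.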
4140 async fn format_via_external_command(
4141 buffer: &ModelHandle<Buffer>,
4142 buffer_abs_path: &Path,
4143 command: &str,
4144 arguments: &[String],
4145 cx: &mut AsyncAppContext,
4146 ) -> Result<Option<Diff>> {
4147 let working_dir_path = buffer.read_with(cx, |buffer, cx| {
4148 let file = File::from_dyn(buffer.file())?;
4149 let worktree = file.worktree.read(cx).as_local()?;
4150 let mut worktree_path = worktree.abs_path().to_path_buf();
4151 if worktree.root_entry()?.is_file() {
4152 worktree_path.pop();
4153 }
4154 Some(worktree_path)
4155 });
4156
4157 if let Some(working_dir_path) = working_dir_path {
4158 let mut child =
4159 smol::process::Command::new(command)
4160 .args(arguments.iter().map(|arg| {
4161 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4162 }))
4163 .current_dir(&working_dir_path)
4164 .stdin(smol::process::Stdio::piped())
4165 .stdout(smol::process::Stdio::piped())
4166 .stderr(smol::process::Stdio::piped())
4167 .spawn()?;
4168 let stdin = child
4169 .stdin
4170 .as_mut()
4171 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4172 let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
4173 for chunk in text.chunks() {
4174 stdin.write_all(chunk.as_bytes()).await?;
4175 }
4176 stdin.flush().await?;
4177
4178 let output = child.output().await?;
4179 if !output.status.success() {
4180 return Err(anyhow!(
4181 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4182 output.status.code(),
4183 String::from_utf8_lossy(&output.stdout),
4184 String::from_utf8_lossy(&output.stderr),
4185 ));
4186 }
4187
4188 let stdout = String::from_utf8(output.stdout)?;
4189 Ok(Some(
4190 buffer
4191 .read_with(cx, |buffer, cx| buffer.diff(stdout, cx))
4192 .await,
4193 ))
4194 } else {
4195 Ok(None)
4196 }
4197 }
4198
4199 pub fn definition<T: ToPointUtf16>(
4200 &self,
4201 buffer: &ModelHandle<Buffer>,
4202 position: T,
4203 cx: &mut ModelContext<Self>,
4204 ) -> Task<Result<Vec<LocationLink>>> {
4205 let position = position.to_point_utf16(buffer.read(cx));
4206 self.request_lsp(
4207 buffer.clone(),
4208 LanguageServerToQuery::Primary,
4209 GetDefinition { position },
4210 cx,
4211 )
4212 }
4213
4214 pub fn type_definition<T: ToPointUtf16>(
4215 &self,
4216 buffer: &ModelHandle<Buffer>,
4217 position: T,
4218 cx: &mut ModelContext<Self>,
4219 ) -> Task<Result<Vec<LocationLink>>> {
4220 let position = position.to_point_utf16(buffer.read(cx));
4221 self.request_lsp(
4222 buffer.clone(),
4223 LanguageServerToQuery::Primary,
4224 GetTypeDefinition { position },
4225 cx,
4226 )
4227 }
4228
4229 pub fn references<T: ToPointUtf16>(
4230 &self,
4231 buffer: &ModelHandle<Buffer>,
4232 position: T,
4233 cx: &mut ModelContext<Self>,
4234 ) -> Task<Result<Vec<Location>>> {
4235 let position = position.to_point_utf16(buffer.read(cx));
4236 self.request_lsp(
4237 buffer.clone(),
4238 LanguageServerToQuery::Primary,
4239 GetReferences { position },
4240 cx,
4241 )
4242 }
4243
4244 pub fn document_highlights<T: ToPointUtf16>(
4245 &self,
4246 buffer: &ModelHandle<Buffer>,
4247 position: T,
4248 cx: &mut ModelContext<Self>,
4249 ) -> Task<Result<Vec<DocumentHighlight>>> {
4250 let position = position.to_point_utf16(buffer.read(cx));
4251 self.request_lsp(
4252 buffer.clone(),
4253 LanguageServerToQuery::Primary,
4254 GetDocumentHighlights { position },
4255 cx,
4256 )
4257 }
4258
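    /// Searches all running language servers for workspace symbols matching `query`. Each LSP result
    /// is resolved to a worktree-relative path (falling back to a path relative to the server's
    /// worktree for files outside the project) and a `Symbol` with a language-appropriate label.
    /// Remote projects forward the query via `proto::GetProjectSymbols`.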
4259 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4260 if self.is_local() {
4261 let mut requests = Vec::new();
4262 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4263 let worktree_id = *worktree_id;
4264 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4265 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4266 Some(worktree) => worktree,
4267 None => continue,
4268 };
4269 let worktree_abs_path = worktree.abs_path().clone();
4270
4271 let (adapter, language, server) = match self.language_servers.get(server_id) {
4272 Some(LanguageServerState::Running {
4273 adapter,
4274 language,
4275 server,
4276 ..
4277 }) => (adapter.clone(), language.clone(), server),
4278
4279 _ => continue,
4280 };
4281
4282 requests.push(
4283 server
4284 .request::<lsp::request::WorkspaceSymbolRequest>(
4285 lsp::WorkspaceSymbolParams {
4286 query: query.to_string(),
4287 ..Default::default()
4288 },
4289 )
4290 .log_err()
4291 .map(move |response| {
4292 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4293 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4294 flat_responses.into_iter().map(|lsp_symbol| {
4295 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4296 }).collect::<Vec<_>>()
4297 }
4298 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4299 nested_responses.into_iter().filter_map(|lsp_symbol| {
4300 let location = match lsp_symbol.location {
4301 OneOf::Left(location) => location,
4302 OneOf::Right(_) => {
4303 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4304 return None
4305 }
4306 };
4307 Some((lsp_symbol.name, lsp_symbol.kind, location))
4308 }).collect::<Vec<_>>()
4309 }
4310 }).unwrap_or_default();
4311
4312 (
4313 adapter,
4314 language,
4315 worktree_id,
4316 worktree_abs_path,
4317 lsp_symbols,
4318 )
4319 }),
4320 );
4321 }
4322
4323 cx.spawn_weak(|this, cx| async move {
4324 let responses = futures::future::join_all(requests).await;
4325 let this = match this.upgrade(&cx) {
4326 Some(this) => this,
4327 None => return Ok(Vec::new()),
4328 };
4329
4330 let symbols = this.read_with(&cx, |this, cx| {
4331 let mut symbols = Vec::new();
4332 for (
4333 adapter,
4334 adapter_language,
4335 source_worktree_id,
4336 worktree_abs_path,
4337 lsp_symbols,
4338 ) in responses
4339 {
4340 symbols.extend(lsp_symbols.into_iter().filter_map(
4341 |(symbol_name, symbol_kind, symbol_location)| {
4342 let abs_path = symbol_location.uri.to_file_path().ok()?;
4343 let mut worktree_id = source_worktree_id;
4344 let path;
4345 if let Some((worktree, rel_path)) =
4346 this.find_local_worktree(&abs_path, cx)
4347 {
4348 worktree_id = worktree.read(cx).id();
4349 path = rel_path;
4350 } else {
4351 path = relativize_path(&worktree_abs_path, &abs_path);
4352 }
4353
4354 let project_path = ProjectPath {
4355 worktree_id,
4356 path: path.into(),
4357 };
4358 let signature = this.symbol_signature(&project_path);
4359 let adapter_language = adapter_language.clone();
4360 let language = this
4361 .languages
4362 .language_for_file(&project_path.path, None)
4363 .unwrap_or_else(move |_| adapter_language);
4364 let language_server_name = adapter.name.clone();
4365 Some(async move {
4366 let language = language.await;
4367 let label =
4368 language.label_for_symbol(&symbol_name, symbol_kind).await;
4369
4370 Symbol {
4371 language_server_name,
4372 source_worktree_id,
4373 path: project_path,
4374 label: label.unwrap_or_else(|| {
4375 CodeLabel::plain(symbol_name.clone(), None)
4376 }),
4377 kind: symbol_kind,
4378 name: symbol_name,
4379 range: range_from_lsp(symbol_location.range),
4380 signature,
4381 }
4382 })
4383 },
4384 ));
4385 }
4386
4387 symbols
4388 });
4389
4390 Ok(futures::future::join_all(symbols).await)
4391 })
4392 } else if let Some(project_id) = self.remote_id() {
4393 let request = self.client.request(proto::GetProjectSymbols {
4394 project_id,
4395 query: query.to_string(),
4396 });
4397 cx.spawn_weak(|this, cx| async move {
4398 let response = request.await?;
4399 let mut symbols = Vec::new();
4400 if let Some(this) = this.upgrade(&cx) {
4401 let new_symbols = this.read_with(&cx, |this, _| {
4402 response
4403 .symbols
4404 .into_iter()
4405 .map(|symbol| this.deserialize_symbol(symbol))
4406 .collect::<Vec<_>>()
4407 });
4408 symbols = futures::future::join_all(new_symbols)
4409 .await
4410 .into_iter()
4411 .filter_map(|symbol| symbol.log_err())
4412 .collect::<Vec<_>>();
4413 }
4414 Ok(symbols)
4415 })
4416 } else {
4417 Task::ready(Ok(Default::default()))
4418 }
4419 }
4420
4421 pub fn open_buffer_for_symbol(
4422 &mut self,
4423 symbol: &Symbol,
4424 cx: &mut ModelContext<Self>,
4425 ) -> Task<Result<ModelHandle<Buffer>>> {
4426 if self.is_local() {
4427 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4428 symbol.source_worktree_id,
4429 symbol.language_server_name.clone(),
4430 )) {
4431 *id
4432 } else {
4433 return Task::ready(Err(anyhow!(
4434 "language server for worktree and language not found"
4435 )));
4436 };
4437
4438 let worktree_abs_path = if let Some(worktree_abs_path) = self
4439 .worktree_for_id(symbol.path.worktree_id, cx)
4440 .and_then(|worktree| worktree.read(cx).as_local())
4441 .map(|local_worktree| local_worktree.abs_path())
4442 {
4443 worktree_abs_path
4444 } else {
4445 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4446 };
4447 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
4448 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4449 uri
4450 } else {
4451 return Task::ready(Err(anyhow!("invalid symbol path")));
4452 };
4453
4454 self.open_local_buffer_via_lsp(
4455 symbol_uri,
4456 language_server_id,
4457 symbol.language_server_name.clone(),
4458 cx,
4459 )
4460 } else if let Some(project_id) = self.remote_id() {
4461 let request = self.client.request(proto::OpenBufferForSymbol {
4462 project_id,
4463 symbol: Some(serialize_symbol(symbol)),
4464 });
4465 cx.spawn(|this, mut cx| async move {
4466 let response = request.await?;
4467 this.update(&mut cx, |this, cx| {
4468 this.wait_for_remote_buffer(response.buffer_id, cx)
4469 })
4470 .await
4471 })
4472 } else {
4473 Task::ready(Err(anyhow!("project does not have a remote id")))
4474 }
4475 }
4476
4477 pub fn hover<T: ToPointUtf16>(
4478 &self,
4479 buffer: &ModelHandle<Buffer>,
4480 position: T,
4481 cx: &mut ModelContext<Self>,
4482 ) -> Task<Result<Option<Hover>>> {
4483 let position = position.to_point_utf16(buffer.read(cx));
4484 self.request_lsp(
4485 buffer.clone(),
4486 LanguageServerToQuery::Primary,
4487 GetHover { position },
4488 cx,
4489 )
4490 }
4491
4492 pub fn completions<T: ToOffset + ToPointUtf16>(
4493 &self,
4494 buffer: &ModelHandle<Buffer>,
4495 position: T,
4496 cx: &mut ModelContext<Self>,
4497 ) -> Task<Result<Vec<Completion>>> {
4498 let position = position.to_point_utf16(buffer.read(cx));
4499 if self.is_local() {
4500 let snapshot = buffer.read(cx).snapshot();
4501 let offset = position.to_offset(&snapshot);
4502 let scope = snapshot.language_scope_at(offset);
4503
4504 let server_ids: Vec<_> = self
4505 .language_servers_for_buffer(buffer.read(cx), cx)
4506 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4507 .filter(|(adapter, _)| {
4508 scope
4509 .as_ref()
4510 .map(|scope| scope.language_allowed(&adapter.name))
4511 .unwrap_or(true)
4512 })
4513 .map(|(_, server)| server.server_id())
4514 .collect();
4515
4516 let buffer = buffer.clone();
4517 cx.spawn(|this, mut cx| async move {
4518 let mut tasks = Vec::with_capacity(server_ids.len());
4519 this.update(&mut cx, |this, cx| {
4520 for server_id in server_ids {
4521 tasks.push(this.request_lsp(
4522 buffer.clone(),
4523 LanguageServerToQuery::Other(server_id),
4524 GetCompletions { position },
4525 cx,
4526 ));
4527 }
4528 });
4529
4530 let mut completions = Vec::new();
4531 for task in tasks {
4532 if let Ok(new_completions) = task.await {
4533 completions.extend_from_slice(&new_completions);
4534 }
4535 }
4536
4537 Ok(completions)
4538 })
4539 } else if let Some(project_id) = self.remote_id() {
4540 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4541 } else {
4542 Task::ready(Ok(Default::default()))
4543 }
4544 }
4545
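    /// Applies a completion's additional text edits (such as auto-inserted imports). The completion
    /// item is first resolved with the server when it advertises `resolveProvider`; edits overlapping
    /// the primary completion range are skipped. Returns the resulting transaction, which is removed
    /// from the undo history unless `push_to_history` is true.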
4546 pub fn apply_additional_edits_for_completion(
4547 &self,
4548 buffer_handle: ModelHandle<Buffer>,
4549 completion: Completion,
4550 push_to_history: bool,
4551 cx: &mut ModelContext<Self>,
4552 ) -> Task<Result<Option<Transaction>>> {
4553 let buffer = buffer_handle.read(cx);
4554 let buffer_id = buffer.remote_id();
4555
4556 if self.is_local() {
4557 let server_id = completion.server_id;
4558 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4559 Some((_, server)) => server.clone(),
4560 _ => return Task::ready(Ok(Default::default())),
4561 };
4562
4563 cx.spawn(|this, mut cx| async move {
4564 let can_resolve = lang_server
4565 .capabilities()
4566 .completion_provider
4567 .as_ref()
4568 .and_then(|options| options.resolve_provider)
4569 .unwrap_or(false);
4570 let additional_text_edits = if can_resolve {
4571 lang_server
4572 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4573 .await?
4574 .additional_text_edits
4575 } else {
4576 completion.lsp_completion.additional_text_edits
4577 };
4578 if let Some(edits) = additional_text_edits {
4579 let edits = this
4580 .update(&mut cx, |this, cx| {
4581 this.edits_from_lsp(
4582 &buffer_handle,
4583 edits,
4584 lang_server.server_id(),
4585 None,
4586 cx,
4587 )
4588 })
4589 .await?;
4590
4591 buffer_handle.update(&mut cx, |buffer, cx| {
4592 buffer.finalize_last_transaction();
4593 buffer.start_transaction();
4594
4595 for (range, text) in edits {
4596 let primary = &completion.old_range;
4597 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4598 && primary.end.cmp(&range.start, buffer).is_ge();
4599 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4600 && range.end.cmp(&primary.end, buffer).is_ge();
4601
                        // Skip additional edits which overlap with the primary completion edit
                        // https://github.com/zed-industries/zed/pull/1871
4604 if !start_within && !end_within {
4605 buffer.edit([(range, text)], None, cx);
4606 }
4607 }
4608
4609 let transaction = if buffer.end_transaction(cx).is_some() {
4610 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4611 if !push_to_history {
4612 buffer.forget_transaction(transaction.id);
4613 }
4614 Some(transaction)
4615 } else {
4616 None
4617 };
4618 Ok(transaction)
4619 })
4620 } else {
4621 Ok(None)
4622 }
4623 })
4624 } else if let Some(project_id) = self.remote_id() {
4625 let client = self.client.clone();
4626 cx.spawn(|_, mut cx| async move {
4627 let response = client
4628 .request(proto::ApplyCompletionAdditionalEdits {
4629 project_id,
4630 buffer_id,
4631 completion: Some(language::proto::serialize_completion(&completion)),
4632 })
4633 .await?;
4634
4635 if let Some(transaction) = response.transaction {
4636 let transaction = language::proto::deserialize_transaction(transaction)?;
4637 buffer_handle
4638 .update(&mut cx, |buffer, _| {
4639 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4640 })
4641 .await?;
4642 if push_to_history {
4643 buffer_handle.update(&mut cx, |buffer, _| {
4644 buffer.push_transaction(transaction.clone(), Instant::now());
4645 });
4646 }
4647 Ok(Some(transaction))
4648 } else {
4649 Ok(None)
4650 }
4651 })
4652 } else {
4653 Task::ready(Err(anyhow!("project does not have a remote id")))
4654 }
4655 }
4656
4657 pub fn code_actions<T: Clone + ToOffset>(
4658 &self,
4659 buffer_handle: &ModelHandle<Buffer>,
4660 range: Range<T>,
4661 cx: &mut ModelContext<Self>,
4662 ) -> Task<Result<Vec<CodeAction>>> {
4663 let buffer = buffer_handle.read(cx);
4664 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4665 self.request_lsp(
4666 buffer_handle.clone(),
4667 LanguageServerToQuery::Primary,
4668 GetCodeActions { range },
4669 cx,
4670 )
4671 }
4672
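    /// Applies a code action from a language server. The action is resolved first (via
    /// `codeAction/resolve` when it carries `data`, otherwise by re-querying code actions for its
    /// range), then its workspace edit is applied; if the action instead carries a command, the
    /// command is executed and any workspace edits the server sends while it runs are returned as
    /// the project transaction.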
4673 pub fn apply_code_action(
4674 &self,
4675 buffer_handle: ModelHandle<Buffer>,
4676 mut action: CodeAction,
4677 push_to_history: bool,
4678 cx: &mut ModelContext<Self>,
4679 ) -> Task<Result<ProjectTransaction>> {
4680 if self.is_local() {
4681 let buffer = buffer_handle.read(cx);
4682 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4683 self.language_server_for_buffer(buffer, action.server_id, cx)
4684 {
4685 (adapter.clone(), server.clone())
4686 } else {
4687 return Task::ready(Ok(Default::default()));
4688 };
4689 let range = action.range.to_point_utf16(buffer);
4690
4691 cx.spawn(|this, mut cx| async move {
4692 if let Some(lsp_range) = action
4693 .lsp_action
4694 .data
4695 .as_mut()
4696 .and_then(|d| d.get_mut("codeActionParams"))
4697 .and_then(|d| d.get_mut("range"))
4698 {
4699 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
4700 action.lsp_action = lang_server
4701 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
4702 .await?;
4703 } else {
4704 let actions = this
4705 .update(&mut cx, |this, cx| {
4706 this.code_actions(&buffer_handle, action.range, cx)
4707 })
4708 .await?;
4709 action.lsp_action = actions
4710 .into_iter()
4711 .find(|a| a.lsp_action.title == action.lsp_action.title)
4712 .ok_or_else(|| anyhow!("code action is outdated"))?
4713 .lsp_action;
4714 }
4715
4716 if let Some(edit) = action.lsp_action.edit {
4717 if edit.changes.is_some() || edit.document_changes.is_some() {
4718 return Self::deserialize_workspace_edit(
4719 this,
4720 edit,
4721 push_to_history,
4722 lsp_adapter.clone(),
4723 lang_server.clone(),
4724 &mut cx,
4725 )
4726 .await;
4727 }
4728 }
4729
4730 if let Some(command) = action.lsp_action.command {
4731 this.update(&mut cx, |this, _| {
4732 this.last_workspace_edits_by_language_server
4733 .remove(&lang_server.server_id());
4734 });
4735
4736 let result = lang_server
4737 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
4738 command: command.command,
4739 arguments: command.arguments.unwrap_or_default(),
4740 ..Default::default()
4741 })
4742 .await;
4743
4744 if let Err(err) = result {
4745 // TODO: LSP ERROR
4746 return Err(err);
4747 }
4748
4749 return Ok(this.update(&mut cx, |this, _| {
4750 this.last_workspace_edits_by_language_server
4751 .remove(&lang_server.server_id())
4752 .unwrap_or_default()
4753 }));
4754 }
4755
4756 Ok(ProjectTransaction::default())
4757 })
4758 } else if let Some(project_id) = self.remote_id() {
4759 let client = self.client.clone();
4760 let request = proto::ApplyCodeAction {
4761 project_id,
4762 buffer_id: buffer_handle.read(cx).remote_id(),
4763 action: Some(language::proto::serialize_code_action(&action)),
4764 };
4765 cx.spawn(|this, mut cx| async move {
4766 let response = client
4767 .request(request)
4768 .await?
4769 .transaction
4770 .ok_or_else(|| anyhow!("missing transaction"))?;
4771 this.update(&mut cx, |this, cx| {
4772 this.deserialize_project_transaction(response, push_to_history, cx)
4773 })
4774 .await
4775 })
4776 } else {
4777 Task::ready(Err(anyhow!("project does not have a remote id")))
4778 }
4779 }
4780
4781 fn apply_on_type_formatting(
4782 &self,
4783 buffer: ModelHandle<Buffer>,
4784 position: Anchor,
4785 trigger: String,
4786 cx: &mut ModelContext<Self>,
4787 ) -> Task<Result<Option<Transaction>>> {
4788 if self.is_local() {
4789 cx.spawn(|this, mut cx| async move {
4790 // Do not allow multiple concurrent formatting requests for the
4791 // same buffer.
4792 this.update(&mut cx, |this, cx| {
4793 this.buffers_being_formatted
4794 .insert(buffer.read(cx).remote_id())
4795 });
4796
4797 let _cleanup = defer({
4798 let this = this.clone();
4799 let mut cx = cx.clone();
4800 let closure_buffer = buffer.clone();
4801 move || {
4802 this.update(&mut cx, |this, cx| {
4803 this.buffers_being_formatted
4804 .remove(&closure_buffer.read(cx).remote_id());
4805 });
4806 }
4807 });
4808
4809 buffer
4810 .update(&mut cx, |buffer, _| {
4811 buffer.wait_for_edits(Some(position.timestamp))
4812 })
4813 .await?;
4814 this.update(&mut cx, |this, cx| {
4815 let position = position.to_point_utf16(buffer.read(cx));
4816 this.on_type_format(buffer, position, trigger, false, cx)
4817 })
4818 .await
4819 })
4820 } else if let Some(project_id) = self.remote_id() {
4821 let client = self.client.clone();
4822 let request = proto::OnTypeFormatting {
4823 project_id,
4824 buffer_id: buffer.read(cx).remote_id(),
4825 position: Some(serialize_anchor(&position)),
4826 trigger,
4827 version: serialize_version(&buffer.read(cx).version()),
4828 };
4829 cx.spawn(|_, _| async move {
4830 client
4831 .request(request)
4832 .await?
4833 .transaction
4834 .map(language::proto::deserialize_transaction)
4835 .transpose()
4836 })
4837 } else {
4838 Task::ready(Err(anyhow!("project does not have a remote id")))
4839 }
4840 }
4841
4842 async fn deserialize_edits(
4843 this: ModelHandle<Self>,
4844 buffer_to_edit: ModelHandle<Buffer>,
4845 edits: Vec<lsp::TextEdit>,
4846 push_to_history: bool,
4847 _: Arc<CachedLspAdapter>,
4848 language_server: Arc<LanguageServer>,
4849 cx: &mut AsyncAppContext,
4850 ) -> Result<Option<Transaction>> {
4851 let edits = this
4852 .update(cx, |this, cx| {
4853 this.edits_from_lsp(
4854 &buffer_to_edit,
4855 edits,
4856 language_server.server_id(),
4857 None,
4858 cx,
4859 )
4860 })
4861 .await?;
4862
4863 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
4864 buffer.finalize_last_transaction();
4865 buffer.start_transaction();
4866 for (range, text) in edits {
4867 buffer.edit([(range, text)], None, cx);
4868 }
4869
4870 if buffer.end_transaction(cx).is_some() {
4871 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4872 if !push_to_history {
4873 buffer.forget_transaction(transaction.id);
4874 }
4875 Some(transaction)
4876 } else {
4877 None
4878 }
4879 });
4880
4881 Ok(transaction)
4882 }
4883
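    /// Applies an `lsp::WorkspaceEdit` to the project. Resource operations (create/rename/delete) are
    /// performed through the project's `Fs`, document edits are applied to buffers opened via the
    /// server's URIs, and each edited buffer's transaction is collected into the returned
    /// `ProjectTransaction` (forgotten from undo history unless `push_to_history` is true).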
4884 async fn deserialize_workspace_edit(
4885 this: ModelHandle<Self>,
4886 edit: lsp::WorkspaceEdit,
4887 push_to_history: bool,
4888 lsp_adapter: Arc<CachedLspAdapter>,
4889 language_server: Arc<LanguageServer>,
4890 cx: &mut AsyncAppContext,
4891 ) -> Result<ProjectTransaction> {
4892 let fs = this.read_with(cx, |this, _| this.fs.clone());
4893 let mut operations = Vec::new();
4894 if let Some(document_changes) = edit.document_changes {
4895 match document_changes {
4896 lsp::DocumentChanges::Edits(edits) => {
4897 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
4898 }
4899 lsp::DocumentChanges::Operations(ops) => operations = ops,
4900 }
4901 } else if let Some(changes) = edit.changes {
4902 operations.extend(changes.into_iter().map(|(uri, edits)| {
4903 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
4904 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
4905 uri,
4906 version: None,
4907 },
4908 edits: edits.into_iter().map(OneOf::Left).collect(),
4909 })
4910 }));
4911 }
4912
4913 let mut project_transaction = ProjectTransaction::default();
4914 for operation in operations {
4915 match operation {
4916 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
4917 let abs_path = op
4918 .uri
4919 .to_file_path()
4920 .map_err(|_| anyhow!("can't convert URI to path"))?;
4921
4922 if let Some(parent_path) = abs_path.parent() {
4923 fs.create_dir(parent_path).await?;
4924 }
4925 if abs_path.ends_with("/") {
4926 fs.create_dir(&abs_path).await?;
4927 } else {
4928 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
4929 .await?;
4930 }
4931 }
4932
4933 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
4934 let source_abs_path = op
4935 .old_uri
4936 .to_file_path()
4937 .map_err(|_| anyhow!("can't convert URI to path"))?;
4938 let target_abs_path = op
4939 .new_uri
4940 .to_file_path()
4941 .map_err(|_| anyhow!("can't convert URI to path"))?;
4942 fs.rename(
4943 &source_abs_path,
4944 &target_abs_path,
4945 op.options.map(Into::into).unwrap_or_default(),
4946 )
4947 .await?;
4948 }
4949
4950 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
4951 let abs_path = op
4952 .uri
4953 .to_file_path()
4954 .map_err(|_| anyhow!("can't convert URI to path"))?;
4955 let options = op.options.map(Into::into).unwrap_or_default();
4956 if abs_path.ends_with("/") {
4957 fs.remove_dir(&abs_path, options).await?;
4958 } else {
4959 fs.remove_file(&abs_path, options).await?;
4960 }
4961 }
4962
4963 lsp::DocumentChangeOperation::Edit(op) => {
4964 let buffer_to_edit = this
4965 .update(cx, |this, cx| {
4966 this.open_local_buffer_via_lsp(
4967 op.text_document.uri,
4968 language_server.server_id(),
4969 lsp_adapter.name.clone(),
4970 cx,
4971 )
4972 })
4973 .await?;
4974
4975 let edits = this
4976 .update(cx, |this, cx| {
4977 let edits = op.edits.into_iter().map(|edit| match edit {
4978 OneOf::Left(edit) => edit,
4979 OneOf::Right(edit) => edit.text_edit,
4980 });
4981 this.edits_from_lsp(
4982 &buffer_to_edit,
4983 edits,
4984 language_server.server_id(),
4985 op.text_document.version,
4986 cx,
4987 )
4988 })
4989 .await?;
4990
4991 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
4992 buffer.finalize_last_transaction();
4993 buffer.start_transaction();
4994 for (range, text) in edits {
4995 buffer.edit([(range, text)], None, cx);
4996 }
4997 let transaction = if buffer.end_transaction(cx).is_some() {
4998 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4999 if !push_to_history {
5000 buffer.forget_transaction(transaction.id);
5001 }
5002 Some(transaction)
5003 } else {
5004 None
5005 };
5006
5007 transaction
5008 });
5009 if let Some(transaction) = transaction {
5010 project_transaction.0.insert(buffer_to_edit, transaction);
5011 }
5012 }
5013 }
5014 }
5015
5016 Ok(project_transaction)
5017 }
5018
5019 pub fn prepare_rename<T: ToPointUtf16>(
5020 &self,
5021 buffer: ModelHandle<Buffer>,
5022 position: T,
5023 cx: &mut ModelContext<Self>,
5024 ) -> Task<Result<Option<Range<Anchor>>>> {
5025 let position = position.to_point_utf16(buffer.read(cx));
5026 self.request_lsp(
5027 buffer,
5028 LanguageServerToQuery::Primary,
5029 PrepareRename { position },
5030 cx,
5031 )
5032 }
5033
5034 pub fn perform_rename<T: ToPointUtf16>(
5035 &self,
5036 buffer: ModelHandle<Buffer>,
5037 position: T,
5038 new_name: String,
5039 push_to_history: bool,
5040 cx: &mut ModelContext<Self>,
5041 ) -> Task<Result<ProjectTransaction>> {
5042 let position = position.to_point_utf16(buffer.read(cx));
5043 self.request_lsp(
5044 buffer,
5045 LanguageServerToQuery::Primary,
5046 PerformRename {
5047 position,
5048 new_name,
5049 push_to_history,
5050 },
5051 cx,
5052 )
5053 }
5054
5055 pub fn on_type_format<T: ToPointUtf16>(
5056 &self,
5057 buffer: ModelHandle<Buffer>,
5058 position: T,
5059 trigger: String,
5060 push_to_history: bool,
5061 cx: &mut ModelContext<Self>,
5062 ) -> Task<Result<Option<Transaction>>> {
5063 let (position, tab_size) = buffer.read_with(cx, |buffer, cx| {
5064 let position = position.to_point_utf16(buffer);
5065 (
5066 position,
5067 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5068 .tab_size,
5069 )
5070 });
5071 self.request_lsp(
5072 buffer.clone(),
5073 LanguageServerToQuery::Primary,
5074 OnTypeFormatting {
5075 position,
5076 trigger,
5077 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5078 push_to_history,
5079 },
5080 cx,
5081 )
5082 }
5083
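    /// Fetches inlay hints for the given buffer range. Locally this waits for the buffer to contain
    /// the range's anchors and then issues the LSP request to the primary server; on remote projects
    /// the request is proxied to the host via `proto::InlayHints`.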
5084 pub fn inlay_hints<T: ToOffset>(
5085 &self,
5086 buffer_handle: ModelHandle<Buffer>,
5087 range: Range<T>,
5088 cx: &mut ModelContext<Self>,
5089 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5090 let buffer = buffer_handle.read(cx);
5091 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5092 let range_start = range.start;
5093 let range_end = range.end;
5094 let buffer_id = buffer.remote_id();
5095 let buffer_version = buffer.version().clone();
5096 let lsp_request = InlayHints { range };
5097
5098 if self.is_local() {
5099 let lsp_request_task = self.request_lsp(
5100 buffer_handle.clone(),
5101 LanguageServerToQuery::Primary,
5102 lsp_request,
5103 cx,
5104 );
5105 cx.spawn(|_, mut cx| async move {
5106 buffer_handle
5107 .update(&mut cx, |buffer, _| {
5108 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5109 })
5110 .await
5111 .context("waiting for inlay hint request range edits")?;
5112 lsp_request_task.await.context("inlay hints LSP request")
5113 })
5114 } else if let Some(project_id) = self.remote_id() {
5115 let client = self.client.clone();
5116 let request = proto::InlayHints {
5117 project_id,
5118 buffer_id,
5119 start: Some(serialize_anchor(&range_start)),
5120 end: Some(serialize_anchor(&range_end)),
5121 version: serialize_version(&buffer_version),
5122 };
5123 cx.spawn(|project, cx| async move {
5124 let response = client
5125 .request(request)
5126 .await
5127 .context("inlay hints proto request")?;
5128 let hints_request_result = LspCommand::response_from_proto(
5129 lsp_request,
5130 response,
5131 project,
5132 buffer_handle.clone(),
5133 cx,
5134 )
5135 .await;
5136
5137 hints_request_result.context("inlay hints proto response conversion")
5138 })
5139 } else {
5140 Task::ready(Err(anyhow!("project does not have a remote id")))
5141 }
5142 }
5143
5144 pub fn resolve_inlay_hint(
5145 &self,
5146 hint: InlayHint,
5147 buffer_handle: ModelHandle<Buffer>,
5148 server_id: LanguageServerId,
5149 cx: &mut ModelContext<Self>,
5150 ) -> Task<anyhow::Result<InlayHint>> {
5151 if self.is_local() {
5152 let buffer = buffer_handle.read(cx);
5153 let (_, lang_server) = if let Some((adapter, server)) =
5154 self.language_server_for_buffer(buffer, server_id, cx)
5155 {
5156 (adapter.clone(), server.clone())
5157 } else {
5158 return Task::ready(Ok(hint));
5159 };
5160 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5161 return Task::ready(Ok(hint));
5162 }
5163
5164 let buffer_snapshot = buffer.snapshot();
5165 cx.spawn(|_, mut cx| async move {
5166 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5167 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5168 );
5169 let resolved_hint = resolve_task
5170 .await
5171 .context("inlay hint resolve LSP request")?;
5172 let resolved_hint = InlayHints::lsp_to_project_hint(
5173 resolved_hint,
5174 &buffer_handle,
5175 server_id,
5176 ResolveState::Resolved,
5177 false,
5178 &mut cx,
5179 )
5180 .await?;
5181 Ok(resolved_hint)
5182 })
5183 } else if let Some(project_id) = self.remote_id() {
5184 let client = self.client.clone();
5185 let request = proto::ResolveInlayHint {
5186 project_id,
5187 buffer_id: buffer_handle.read(cx).remote_id(),
5188 language_server_id: server_id.0 as u64,
5189 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5190 };
5191 cx.spawn(|_, _| async move {
5192 let response = client
5193 .request(request)
5194 .await
5195 .context("inlay hints proto request")?;
5196 match response.hint {
5197 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5198 .context("inlay hints proto resolve response conversion"),
5199 None => Ok(hint),
5200 }
5201 })
5202 } else {
5203 Task::ready(Err(anyhow!("project does not have a remote id")))
5204 }
5205 }
5206
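    /// Streams search results as `(buffer, ranges)` pairs. Local projects run the multi-phase
    /// `search_local`; remote projects send the query over RPC and resolve the returned locations
    /// into buffers and anchors.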
5207 #[allow(clippy::type_complexity)]
5208 pub fn search(
5209 &self,
5210 query: SearchQuery,
5211 cx: &mut ModelContext<Self>,
5212 ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
5213 if self.is_local() {
5214 self.search_local(query, cx)
5215 } else if let Some(project_id) = self.remote_id() {
5216 let (tx, rx) = smol::channel::unbounded();
5217 let request = self.client.request(query.to_proto(project_id));
5218 cx.spawn(|this, mut cx| async move {
5219 let response = request.await?;
5220 let mut result = HashMap::default();
5221 for location in response.locations {
5222 let target_buffer = this
5223 .update(&mut cx, |this, cx| {
5224 this.wait_for_remote_buffer(location.buffer_id, cx)
5225 })
5226 .await?;
5227 let start = location
5228 .start
5229 .and_then(deserialize_anchor)
5230 .ok_or_else(|| anyhow!("missing target start"))?;
5231 let end = location
5232 .end
5233 .and_then(deserialize_anchor)
5234 .ok_or_else(|| anyhow!("missing target end"))?;
5235 result
5236 .entry(target_buffer)
5237 .or_insert(Vec::new())
5238 .push(start..end)
5239 }
5240 for (buffer, ranges) in result {
5241 let _ = tx.send((buffer, ranges)).await;
5242 }
5243 Result::<(), anyhow::Error>::Ok(())
5244 })
5245 .detach_and_log_err(cx);
5246 rx
5247 } else {
5248 unimplemented!();
5249 }
5250 }
5251
5252 pub fn search_local(
5253 &self,
5254 query: SearchQuery,
5255 cx: &mut ModelContext<Self>,
5256 ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
5257 // Local search is split into several phases.
        // TL;DR is that we do 2 passes: an initial pass to pick files which contain at least one match,
        // and a second pass that finds the positions of all the matches in those candidate files.
5260 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5261 //
5262 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5263 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5264 //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
        // Then we go through the worktrees and check for files that match the query's predicate. If a file has an opened version, we skip the scan
        // of the FS version of that file altogether - after all, what we have in memory is more up-to-date than what's on FS.
5268 // 2. At this point, we have a list of all potentially matching buffers/files.
5269 // We sort that list by buffer path - this list is retained for later use.
5270 // We ensure that all buffers are now opened and available in project.
5271 // 3. We run a scan over all the candidate buffers on multiple background threads.
5272 // We cannot assume that there will even be a match - while at least one match
5273 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
        // There is also an auxiliary background thread responsible for result gathering.
        // This is where the sorted list of buffers comes into play to maintain sorted order: whenever this background thread receives a notification (buffer has/doesn't have matches),
        // it records it. It reports matches in sorted order, though it accepts them in unsorted order as well.
5277 // As soon as the match info on next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
5278 // entry - which might already be available thanks to out-of-order processing.
5279 //
5280 // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // That, however, would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
        // This isn't as straightforward as running an insertion sort, sadly, and would also mean having to maintain the match index
        // in the face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since matches are already reported in sorted order.
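        //
        // As a concrete sketch of the ordered reporting: with candidates sorted as [a.rs, b.rs, c.rs],
        // if c.rs finishes first, its status is parked in the `scratch` vector; once a.rs arrives it is
        // reported immediately, then b.rs, and only then the parked c.rs - so the receiver always
        // observes matches in path order.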
5285 let snapshots = self
5286 .visible_worktrees(cx)
5287 .filter_map(|tree| {
5288 let tree = tree.read(cx).as_local()?;
5289 Some(tree.snapshot())
5290 })
5291 .collect::<Vec<_>>();
5292
5293 let background = cx.background().clone();
5294 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
5295 if path_count == 0 {
5296 let (_, rx) = smol::channel::bounded(1024);
5297 return rx;
5298 }
5299 let workers = background.num_cpus().min(path_count);
5300 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5301 let mut unnamed_files = vec![];
5302 let opened_buffers = self
5303 .opened_buffers
5304 .iter()
5305 .filter_map(|(_, b)| {
5306 let buffer = b.upgrade(cx)?;
5307 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
5308 if let Some(path) = snapshot.file().map(|file| file.path()) {
5309 Some((path.clone(), (buffer, snapshot)))
5310 } else {
5311 unnamed_files.push(buffer);
5312 None
5313 }
5314 })
5315 .collect();
5316 cx.background()
5317 .spawn(Self::background_search(
5318 unnamed_files,
5319 opened_buffers,
5320 cx.background().clone(),
5321 self.fs.clone(),
5322 workers,
5323 query.clone(),
5324 path_count,
5325 snapshots,
5326 matching_paths_tx,
5327 ))
5328 .detach();
5329
5330 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5331 let background = cx.background().clone();
5332 let (result_tx, result_rx) = smol::channel::bounded(1024);
5333 cx.background()
5334 .spawn(async move {
5335 let Ok(buffers) = buffers.await else {
5336 return;
5337 };
5338
5339 let buffers_len = buffers.len();
5340 if buffers_len == 0 {
5341 return;
5342 }
5343 let query = &query;
5344 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5345 background
5346 .scoped(|scope| {
5347 #[derive(Clone)]
5348 struct FinishedStatus {
5349 entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
5350 buffer_index: SearchMatchCandidateIndex,
5351 }
5352
5353 for _ in 0..workers {
5354 let finished_tx = finished_tx.clone();
5355 let mut buffers_rx = buffers_rx.clone();
5356 scope.spawn(async move {
5357 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5358 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5359 {
5360 if query.file_matches(
5361 snapshot.file().map(|file| file.path().as_ref()),
5362 ) {
5363 query
5364 .search(&snapshot, None)
5365 .await
5366 .iter()
5367 .map(|range| {
5368 snapshot.anchor_before(range.start)
5369 ..snapshot.anchor_after(range.end)
5370 })
5371 .collect()
5372 } else {
5373 Vec::new()
5374 }
5375 } else {
5376 Vec::new()
5377 };
5378
5379 let status = if !buffer_matches.is_empty() {
5380 let entry = if let Some((buffer, _)) = entry.as_ref() {
5381 Some((buffer.clone(), buffer_matches))
5382 } else {
5383 None
5384 };
5385 FinishedStatus {
5386 entry,
5387 buffer_index,
5388 }
5389 } else {
5390 FinishedStatus {
5391 entry: None,
5392 buffer_index,
5393 }
5394 };
5395 if finished_tx.send(status).await.is_err() {
5396 break;
5397 }
5398 }
5399 });
5400 }
5401 // Report sorted matches
5402 scope.spawn(async move {
5403 let mut current_index = 0;
5404 let mut scratch = vec![None; buffers_len];
5405 while let Some(status) = finished_rx.next().await {
5406 debug_assert!(
5407 scratch[status.buffer_index].is_none(),
5408 "Got match status of position {} twice",
5409 status.buffer_index
5410 );
5411 let index = status.buffer_index;
5412 scratch[index] = Some(status);
5413 while current_index < buffers_len {
5414 let Some(current_entry) = scratch[current_index].take() else {
                                        // We intentionally **do not** increment `current_index` here. When the next element
                                        // arrives from `finished_rx`, we will inspect the same position again, hoping for it
                                        // to be `Some(_)` this time.
5418 break;
5419 };
5420 if let Some(entry) = current_entry.entry {
5421 result_tx.send(entry).await.log_err();
5422 }
5423 current_index += 1;
5424 }
5425 if current_index == buffers_len {
5426 break;
5427 }
5428 }
5429 });
5430 })
5431 .await;
5432 })
5433 .detach();
5434 result_rx
5435 }

    /// Pick paths that might contain a match for the given search query.
5437 async fn background_search(
5438 unnamed_buffers: Vec<ModelHandle<Buffer>>,
5439 opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
5440 background: Arc<Background>,
5441 fs: Arc<dyn Fs>,
5442 workers: usize,
5443 query: SearchQuery,
5444 path_count: usize,
5445 snapshots: Vec<LocalSnapshot>,
5446 matching_paths_tx: Sender<SearchMatchCandidate>,
5447 ) {
5448 let fs = &fs;
5449 let query = &query;
5450 let matching_paths_tx = &matching_paths_tx;
5451 let snapshots = &snapshots;
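        // Ceiling division, so that all `path_count` paths are covered even when the count doesn't
        // divide evenly (e.g. 10 paths across 4 workers gives 3 paths per worker).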
5452 let paths_per_worker = (path_count + workers - 1) / workers;
5453 for buffer in unnamed_buffers {
5454 matching_paths_tx
5455 .send(SearchMatchCandidate::OpenBuffer {
5456 buffer: buffer.clone(),
5457 path: None,
5458 })
5459 .await
5460 .log_err();
5461 }
5462 for (path, (buffer, _)) in opened_buffers.iter() {
5463 matching_paths_tx
5464 .send(SearchMatchCandidate::OpenBuffer {
5465 buffer: buffer.clone(),
5466 path: Some(path.clone()),
5467 })
5468 .await
5469 .log_err();
5470 }
5471 background
5472 .scoped(|scope| {
5473 for worker_ix in 0..workers {
5474 let worker_start_ix = worker_ix * paths_per_worker;
5475 let worker_end_ix = worker_start_ix + paths_per_worker;
5476 let unnamed_buffers = opened_buffers.clone();
5477 scope.spawn(async move {
5478 let mut snapshot_start_ix = 0;
5479 let mut abs_path = PathBuf::new();
5480 for snapshot in snapshots {
5481 let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
5482 if worker_end_ix <= snapshot_start_ix {
5483 break;
5484 } else if worker_start_ix > snapshot_end_ix {
5485 snapshot_start_ix = snapshot_end_ix;
5486 continue;
5487 } else {
5488 let start_in_snapshot =
5489 worker_start_ix.saturating_sub(snapshot_start_ix);
5490 let end_in_snapshot =
5491 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5492
5493 for entry in snapshot
5494 .files(false, start_in_snapshot)
5495 .take(end_in_snapshot - start_in_snapshot)
5496 {
5497 if matching_paths_tx.is_closed() {
5498 break;
5499 }
5500 if unnamed_buffers.contains_key(&entry.path) {
5501 continue;
5502 }
5503 let matches = if query.file_matches(Some(&entry.path)) {
5504 abs_path.clear();
5505 abs_path.push(&snapshot.abs_path());
5506 abs_path.push(&entry.path);
5507 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5508 {
5509 query.detect(file).unwrap_or(false)
5510 } else {
5511 false
5512 }
5513 } else {
5514 false
5515 };
5516
5517 if matches {
5518 let project_path = SearchMatchCandidate::Path {
5519 worktree_id: snapshot.id(),
5520 path: entry.path.clone(),
5521 };
5522 if matching_paths_tx.send(project_path).await.is_err() {
5523 break;
5524 }
5525 }
5526 }
5527
5528 snapshot_start_ix = snapshot_end_ix;
5529 }
5530 }
5531 });
5532 }
5533 })
5534 .await;
5535 }
5536
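    /// Dispatches an `LspCommand` to the appropriate language server for the buffer: the primary
    /// server or a specific one by id when the project is local (after checking the server's
    /// capabilities), or over RPC to the host when remote. Falls back to the command's default
    /// response when no server is available.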
5537 fn request_lsp<R: LspCommand>(
5538 &self,
5539 buffer_handle: ModelHandle<Buffer>,
5540 server: LanguageServerToQuery,
5541 request: R,
5542 cx: &mut ModelContext<Self>,
5543 ) -> Task<Result<R::Response>>
5544 where
5545 <R::LspRequest as lsp::request::Request>::Result: Send,
5546 {
5547 let buffer = buffer_handle.read(cx);
5548 if self.is_local() {
5549 let language_server = match server {
5550 LanguageServerToQuery::Primary => {
5551 match self.primary_language_server_for_buffer(buffer, cx) {
5552 Some((_, server)) => Some(Arc::clone(server)),
5553 None => return Task::ready(Ok(Default::default())),
5554 }
5555 }
5556 LanguageServerToQuery::Other(id) => self
5557 .language_server_for_buffer(buffer, id, cx)
5558 .map(|(_, server)| Arc::clone(server)),
5559 };
5560 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
5561 if let (Some(file), Some(language_server)) = (file, language_server) {
5562 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
5563 return cx.spawn(|this, cx| async move {
5564 if !request.check_capabilities(language_server.capabilities()) {
5565 return Ok(Default::default());
5566 }
5567
5568 let result = language_server.request::<R::LspRequest>(lsp_params).await;
5569 let response = match result {
5570 Ok(response) => response,
5571
5572 Err(err) => {
5573 log::warn!(
5574 "Generic lsp request to {} failed: {}",
5575 language_server.name(),
5576 err
5577 );
5578 return Err(err);
5579 }
5580 };
5581
5582 request
5583 .response_from_lsp(
5584 response,
5585 this,
5586 buffer_handle,
5587 language_server.server_id(),
5588 cx,
5589 )
5590 .await
5591 });
5592 }
5593 } else if let Some(project_id) = self.remote_id() {
5594 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
5595 }
5596
5597 Task::ready(Ok(Default::default()))
5598 }
5599
5600 fn send_lsp_proto_request<R: LspCommand>(
5601 &self,
5602 buffer: ModelHandle<Buffer>,
5603 project_id: u64,
5604 request: R,
5605 cx: &mut ModelContext<'_, Project>,
5606 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
5607 let rpc = self.client.clone();
5608 let message = request.to_proto(project_id, buffer.read(cx));
5609 cx.spawn_weak(|this, cx| async move {
5610 // Ensure the project is still alive by the time the task
5611 // is scheduled.
5612 this.upgrade(&cx)
5613 .ok_or_else(|| anyhow!("project dropped"))?;
5614 let response = rpc.request(message).await?;
5615 let this = this
5616 .upgrade(&cx)
5617 .ok_or_else(|| anyhow!("project dropped"))?;
5618 if this.read_with(&cx, |this, _| this.is_read_only()) {
5619 Err(anyhow!("disconnected before completing request"))
5620 } else {
5621 request
5622 .response_from_proto(response, this, buffer, cx)
5623 .await
5624 }
5625 })
5626 }
5627
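    /// Collects all match candidates from `matching_paths_rx`, sorts them by path, and returns both
    /// the sorted list (via a oneshot channel) and a channel of `(buffer, snapshot)` pairs tagged
    /// with their index in that sorted order, opening buffers for path-only candidates as needed.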
5628 fn sort_candidates_and_open_buffers(
5629 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
5630 cx: &mut ModelContext<Self>,
5631 ) -> (
5632 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
5633 Receiver<(
5634 Option<(ModelHandle<Buffer>, BufferSnapshot)>,
5635 SearchMatchCandidateIndex,
5636 )>,
5637 ) {
5638 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
5639 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
5640 cx.spawn(|this, cx| async move {
5641 let mut buffers = vec![];
5642 while let Some(entry) = matching_paths_rx.next().await {
5643 buffers.push(entry);
5644 }
5645 buffers.sort_by_key(|candidate| candidate.path());
5646 let matching_paths = buffers.clone();
5647 let _ = sorted_buffers_tx.send(buffers);
5648 for (index, candidate) in matching_paths.into_iter().enumerate() {
5649 if buffers_tx.is_closed() {
5650 break;
5651 }
5652 let this = this.clone();
5653 let buffers_tx = buffers_tx.clone();
5654 cx.spawn(|mut cx| async move {
5655 let buffer = match candidate {
5656 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
5657 SearchMatchCandidate::Path { worktree_id, path } => this
5658 .update(&mut cx, |this, cx| {
5659 this.open_buffer((worktree_id, path), cx)
5660 })
5661 .await
5662 .log_err(),
5663 };
5664 if let Some(buffer) = buffer {
5665 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
5666 buffers_tx
5667 .send((Some((buffer, snapshot)), index))
5668 .await
5669 .log_err();
5670 } else {
5671 buffers_tx.send((None, index)).await.log_err();
5672 }
5673
5674 Ok::<_, anyhow::Error>(())
5675 })
5676 .detach();
5677 }
5678 })
5679 .detach();
5680 (sorted_buffers_rx, buffers_rx)
5681 }
5682
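    // Returns the worktree containing `abs_path` along with the path relative to that
    // worktree, creating a new worktree rooted at `abs_path` if none contains it.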
5683 pub fn find_or_create_local_worktree(
5684 &mut self,
5685 abs_path: impl AsRef<Path>,
5686 visible: bool,
5687 cx: &mut ModelContext<Self>,
5688 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
5689 let abs_path = abs_path.as_ref();
5690 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
5691 Task::ready(Ok((tree, relative_path)))
5692 } else {
5693 let worktree = self.create_local_worktree(abs_path, visible, cx);
5694 cx.foreground()
5695 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
5696 }
5697 }
5698
5699 pub fn find_local_worktree(
5700 &self,
5701 abs_path: &Path,
5702 cx: &AppContext,
5703 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
5704 for tree in &self.worktrees {
5705 if let Some(tree) = tree.upgrade(cx) {
5706 if let Some(relative_path) = tree
5707 .read(cx)
5708 .as_local()
5709 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
5710 {
5711 return Some((tree.clone(), relative_path.into()));
5712 }
5713 }
5714 }
5715 None
5716 }
5717
5718 pub fn is_shared(&self) -> bool {
5719 match &self.client_state {
5720 Some(ProjectClientState::Local { .. }) => true,
5721 _ => false,
5722 }
5723 }
5724
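    // Starts loading a local worktree for `abs_path`, deduplicating concurrent requests for
    // the same path via `loading_local_worktrees` so that callers share a single load.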
5725 fn create_local_worktree(
5726 &mut self,
5727 abs_path: impl AsRef<Path>,
5728 visible: bool,
5729 cx: &mut ModelContext<Self>,
5730 ) -> Task<Result<ModelHandle<Worktree>>> {
5731 let fs = self.fs.clone();
5732 let client = self.client.clone();
5733 let next_entry_id = self.next_entry_id.clone();
5734 let path: Arc<Path> = abs_path.as_ref().into();
5735 let task = self
5736 .loading_local_worktrees
5737 .entry(path.clone())
5738 .or_insert_with(|| {
5739 cx.spawn(|project, mut cx| {
5740 async move {
5741 let worktree = Worktree::local(
5742 client.clone(),
5743 path.clone(),
5744 visible,
5745 fs,
5746 next_entry_id,
5747 &mut cx,
5748 )
5749 .await;
5750
5751 project.update(&mut cx, |project, _| {
5752 project.loading_local_worktrees.remove(&path);
5753 });
5754
5755 let worktree = worktree?;
5756 project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
5757 Ok(worktree)
5758 }
5759 .map_err(Arc::new)
5760 })
5761 .shared()
5762 })
5763 .clone();
5764 cx.foreground().spawn(async move {
5765 match task.await {
5766 Ok(worktree) => Ok(worktree),
5767 Err(err) => Err(anyhow!("{}", err)),
5768 }
5769 })
5770 }
5771
5772 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
5773 self.worktrees.retain(|worktree| {
5774 if let Some(worktree) = worktree.upgrade(cx) {
5775 let id = worktree.read(cx).id();
5776 if id == id_to_remove {
5777 cx.emit(Event::WorktreeRemoved(id));
5778 false
5779 } else {
5780 true
5781 }
5782 } else {
5783 false
5784 }
5785 });
5786 self.metadata_changed(cx);
5787 }
5788
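    // Wires up observation and event forwarding for a newly added worktree. The worktree is
    // retained strongly only if the project is shared or the worktree is visible or remote;
    // otherwise a weak handle is kept so it can be dropped once nothing else uses it.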
5789 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
5790 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
5791 if worktree.read(cx).is_local() {
5792 cx.subscribe(worktree, |this, worktree, event, cx| match event {
5793 worktree::Event::UpdatedEntries(changes) => {
5794 this.update_local_worktree_buffers(&worktree, changes, cx);
5795 this.update_local_worktree_language_servers(&worktree, changes, cx);
5796 this.update_local_worktree_settings(&worktree, changes, cx);
5797 cx.emit(Event::WorktreeUpdatedEntries(
5798 worktree.read(cx).id(),
5799 changes.clone(),
5800 ));
5801 }
5802 worktree::Event::UpdatedGitRepositories(updated_repos) => {
5803 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
5804 }
5805 })
5806 .detach();
5807 }
5808
5809 let push_strong_handle = {
5810 let worktree = worktree.read(cx);
5811 self.is_shared() || worktree.is_visible() || worktree.is_remote()
5812 };
5813 if push_strong_handle {
5814 self.worktrees
5815 .push(WorktreeHandle::Strong(worktree.clone()));
5816 } else {
5817 self.worktrees
5818 .push(WorktreeHandle::Weak(worktree.downgrade()));
5819 }
5820
5821 let handle_id = worktree.id();
5822 cx.observe_release(worktree, move |this, worktree, cx| {
5823 let _ = this.remove_worktree(worktree.id(), cx);
5824 cx.update_global::<SettingsStore, _, _>(|store, cx| {
5825 store.clear_local_settings(handle_id, cx).log_err()
5826 });
5827 })
5828 .detach();
5829
5830 cx.emit(Event::WorktreeAdded);
5831 self.metadata_changed(cx);
5832 }
5833
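    // Keeps open buffers' file metadata in sync with changes reported by a local worktree,
    // handling renames and deletions, notifying collaborators of updated files, and
    // re-registering renamed buffers with the appropriate language servers.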
5834 fn update_local_worktree_buffers(
5835 &mut self,
5836 worktree_handle: &ModelHandle<Worktree>,
5837 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
5838 cx: &mut ModelContext<Self>,
5839 ) {
5840 let snapshot = worktree_handle.read(cx).snapshot();
5841
5842 let mut renamed_buffers = Vec::new();
5843 for (path, entry_id, _) in changes {
5844 let worktree_id = worktree_handle.read(cx).id();
5845 let project_path = ProjectPath {
5846 worktree_id,
5847 path: path.clone(),
5848 };
5849
5850 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
5851 Some(&buffer_id) => buffer_id,
5852 None => match self.local_buffer_ids_by_path.get(&project_path) {
5853 Some(&buffer_id) => buffer_id,
5854 None => continue,
5855 },
5856 };
5857
5858 let open_buffer = self.opened_buffers.get(&buffer_id);
5859 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
5860 buffer
5861 } else {
5862 self.opened_buffers.remove(&buffer_id);
5863 self.local_buffer_ids_by_path.remove(&project_path);
5864 self.local_buffer_ids_by_entry_id.remove(entry_id);
5865 continue;
5866 };
5867
5868 buffer.update(cx, |buffer, cx| {
5869 if let Some(old_file) = File::from_dyn(buffer.file()) {
5870 if old_file.worktree != *worktree_handle {
5871 return;
5872 }
5873
5874 let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
5875 File {
5876 is_local: true,
5877 entry_id: entry.id,
5878 mtime: entry.mtime,
5879 path: entry.path.clone(),
5880 worktree: worktree_handle.clone(),
5881 is_deleted: false,
5882 }
5883 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
5884 File {
5885 is_local: true,
5886 entry_id: entry.id,
5887 mtime: entry.mtime,
5888 path: entry.path.clone(),
5889 worktree: worktree_handle.clone(),
5890 is_deleted: false,
5891 }
5892 } else {
5893 File {
5894 is_local: true,
5895 entry_id: old_file.entry_id,
5896 path: old_file.path().clone(),
5897 mtime: old_file.mtime(),
5898 worktree: worktree_handle.clone(),
5899 is_deleted: true,
5900 }
5901 };
5902
5903 let old_path = old_file.abs_path(cx);
5904 if new_file.abs_path(cx) != old_path {
5905 renamed_buffers.push((cx.handle(), old_file.clone()));
5906 self.local_buffer_ids_by_path.remove(&project_path);
5907 self.local_buffer_ids_by_path.insert(
5908 ProjectPath {
5909 worktree_id,
5910 path: path.clone(),
5911 },
5912 buffer_id,
5913 );
5914 }
5915
5916 if new_file.entry_id != *entry_id {
5917 self.local_buffer_ids_by_entry_id.remove(entry_id);
5918 self.local_buffer_ids_by_entry_id
5919 .insert(new_file.entry_id, buffer_id);
5920 }
5921
5922 if new_file != *old_file {
5923 if let Some(project_id) = self.remote_id() {
5924 self.client
5925 .send(proto::UpdateBufferFile {
5926 project_id,
5927 buffer_id: buffer_id as u64,
5928 file: Some(new_file.to_proto()),
5929 })
5930 .log_err();
5931 }
5932
5933 buffer.file_updated(Arc::new(new_file), cx).detach();
5934 }
5935 }
5936 });
5937 }
5938
5939 for (buffer, old_file) in renamed_buffers {
5940 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
5941 self.detect_language_for_buffer(&buffer, cx);
5942 self.register_buffer_with_language_servers(&buffer, cx);
5943 }
5944 }
5945
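    // Translates worktree file changes into `workspace/didChangeWatchedFiles` notifications
    // for any language servers that registered watchers matching the changed paths.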
5946 fn update_local_worktree_language_servers(
5947 &mut self,
5948 worktree_handle: &ModelHandle<Worktree>,
5949 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
5950 cx: &mut ModelContext<Self>,
5951 ) {
5952 if changes.is_empty() {
5953 return;
5954 }
5955
5956 let worktree_id = worktree_handle.read(cx).id();
5957 let mut language_server_ids = self
5958 .language_server_ids
5959 .iter()
5960 .filter_map(|((server_worktree_id, _), server_id)| {
5961 (*server_worktree_id == worktree_id).then_some(*server_id)
5962 })
5963 .collect::<Vec<_>>();
5964 language_server_ids.sort();
5965 language_server_ids.dedup();
5966
5967 let abs_path = worktree_handle.read(cx).abs_path();
5968 for server_id in &language_server_ids {
5969 if let Some(LanguageServerState::Running {
5970 server,
5971 watched_paths,
5972 ..
5973 }) = self.language_servers.get(server_id)
5974 {
5975 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
5976 let params = lsp::DidChangeWatchedFilesParams {
5977 changes: changes
5978 .iter()
5979 .filter_map(|(path, _, change)| {
5980 if !watched_paths.is_match(&path) {
5981 return None;
5982 }
5983 let typ = match change {
5984 PathChange::Loaded => return None,
5985 PathChange::Added => lsp::FileChangeType::CREATED,
5986 PathChange::Removed => lsp::FileChangeType::DELETED,
5987 PathChange::Updated => lsp::FileChangeType::CHANGED,
5988 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
5989 };
5990 Some(lsp::FileEvent {
5991 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
5992 typ,
5993 })
5994 })
5995 .collect(),
5996 };
5997
5998 if !params.changes.is_empty() {
5999 server
6000 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6001 .log_err();
6002 }
6003 }
6004 }
6005 }
6006 }
6007
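    // Reloads the git diff base for every open (or currently loading) buffer whose
    // containing repository changed, and forwards the new diff bases to collaborators.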
6008 fn update_local_worktree_buffers_git_repos(
6009 &mut self,
6010 worktree_handle: ModelHandle<Worktree>,
6011 changed_repos: &UpdatedGitRepositoriesSet,
6012 cx: &mut ModelContext<Self>,
6013 ) {
6014 debug_assert!(worktree_handle.read(cx).is_local());
6015
        // Identify the loading buffers whose containing repository has changed.
6017 let future_buffers = self
6018 .loading_buffers_by_path
6019 .iter()
6020 .filter_map(|(project_path, receiver)| {
6021 if project_path.worktree_id != worktree_handle.read(cx).id() {
6022 return None;
6023 }
6024 let path = &project_path.path;
6025 changed_repos
6026 .iter()
6027 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6028 let receiver = receiver.clone();
6029 let path = path.clone();
6030 Some(async move {
6031 wait_for_loading_buffer(receiver)
6032 .await
6033 .ok()
6034 .map(|buffer| (buffer, path))
6035 })
6036 })
6037 .collect::<FuturesUnordered<_>>();
6038
6039 // Identify the current buffers whose containing repository has changed.
6040 let current_buffers = self
6041 .opened_buffers
6042 .values()
6043 .filter_map(|buffer| {
6044 let buffer = buffer.upgrade(cx)?;
6045 let file = File::from_dyn(buffer.read(cx).file())?;
6046 if file.worktree != worktree_handle {
6047 return None;
6048 }
6049 let path = file.path();
6050 changed_repos
6051 .iter()
6052 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6053 Some((buffer, path.clone()))
6054 })
6055 .collect::<Vec<_>>();
6056
6057 if future_buffers.len() + current_buffers.len() == 0 {
6058 return;
6059 }
6060
6061 let remote_id = self.remote_id();
6062 let client = self.client.clone();
6063 cx.spawn_weak(move |_, mut cx| async move {
6064 // Wait for all of the buffers to load.
6065 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6066
6067 // Reload the diff base for every buffer whose containing git repository has changed.
6068 let snapshot =
6069 worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
6070 let diff_bases_by_buffer = cx
6071 .background()
6072 .spawn(async move {
6073 future_buffers
6074 .into_iter()
                        .flatten()
6076 .chain(current_buffers)
6077 .filter_map(|(buffer, path)| {
6078 let (work_directory, repo) =
6079 snapshot.repository_and_work_directory_for_path(&path)?;
6080 let repo = snapshot.get_local_repo(&repo)?;
6081 let relative_path = path.strip_prefix(&work_directory).ok()?;
6082 let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
6083 Some((buffer, base_text))
6084 })
6085 .collect::<Vec<_>>()
6086 })
6087 .await;
6088
            // Assign the new diff bases to all of the buffers.
6090 for (buffer, diff_base) in diff_bases_by_buffer {
6091 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6092 buffer.set_diff_base(diff_base.clone(), cx);
6093 buffer.remote_id()
6094 });
6095 if let Some(project_id) = remote_id {
6096 client
6097 .send(proto::UpdateDiffBase {
6098 project_id,
6099 buffer_id,
6100 diff_base,
6101 })
6102 .log_err();
6103 }
6104 }
6105 })
6106 .detach();
6107 }
6108
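    // Reloads any local settings files that changed within the worktree, updating the global
    // settings store and forwarding the new contents to collaborators.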
6109 fn update_local_worktree_settings(
6110 &mut self,
6111 worktree: &ModelHandle<Worktree>,
6112 changes: &UpdatedEntriesSet,
6113 cx: &mut ModelContext<Self>,
6114 ) {
6115 let project_id = self.remote_id();
6116 let worktree_id = worktree.id();
6117 let worktree = worktree.read(cx).as_local().unwrap();
6118 let remote_worktree_id = worktree.id();
6119
6120 let mut settings_contents = Vec::new();
6121 for (path, _, change) in changes.iter() {
6122 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6123 let settings_dir = Arc::from(
6124 path.ancestors()
6125 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6126 .unwrap(),
6127 );
6128 let fs = self.fs.clone();
6129 let removed = *change == PathChange::Removed;
6130 let abs_path = worktree.absolutize(path);
                settings_contents.push(async move {
                    // Avoid reading the settings file at all if it was removed.
                    let content = if removed { None } else { Some(fs.load(&abs_path).await) };
                    (settings_dir, content)
                });
6134 }
6135 }
6136
6137 if settings_contents.is_empty() {
6138 return;
6139 }
6140
6141 let client = self.client.clone();
6142 cx.spawn_weak(move |_, mut cx| async move {
6143 let settings_contents: Vec<(Arc<Path>, _)> =
6144 futures::future::join_all(settings_contents).await;
6145 cx.update(|cx| {
6146 cx.update_global::<SettingsStore, _, _>(|store, cx| {
6147 for (directory, file_content) in settings_contents {
6148 let file_content = file_content.and_then(|content| content.log_err());
6149 store
6150 .set_local_settings(
6151 worktree_id,
6152 directory.clone(),
6153 file_content.as_ref().map(String::as_str),
6154 cx,
6155 )
6156 .log_err();
6157 if let Some(remote_id) = project_id {
6158 client
6159 .send(proto::UpdateWorktreeSettings {
6160 project_id: remote_id,
6161 worktree_id: remote_worktree_id.to_proto(),
6162 path: directory.to_string_lossy().into_owned(),
6163 content: file_content,
6164 })
6165 .log_err();
6166 }
6167 }
6168 });
6169 });
6170 })
6171 .detach();
6172 }
6173
6174 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6175 let new_active_entry = entry.and_then(|project_path| {
6176 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6177 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6178 Some(entry.id)
6179 });
6180 if new_active_entry != self.active_entry {
6181 self.active_entry = new_active_entry;
6182 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6183 }
6184 }
6185
6186 pub fn language_servers_running_disk_based_diagnostics(
6187 &self,
6188 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6189 self.language_server_statuses
6190 .iter()
6191 .filter_map(|(id, status)| {
6192 if status.has_pending_diagnostic_updates {
6193 Some(*id)
6194 } else {
6195 None
6196 }
6197 })
6198 }
6199
6200 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
6201 let mut summary = DiagnosticSummary::default();
6202 for (_, _, path_summary) in self.diagnostic_summaries(cx) {
6203 summary.error_count += path_summary.error_count;
6204 summary.warning_count += path_summary.warning_count;
6205 }
6206 summary
6207 }
6208
6209 pub fn diagnostic_summaries<'a>(
6210 &'a self,
6211 cx: &'a AppContext,
6212 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6213 self.visible_worktrees(cx).flat_map(move |worktree| {
6214 let worktree = worktree.read(cx);
6215 let worktree_id = worktree.id();
6216 worktree
6217 .diagnostic_summaries()
6218 .map(move |(path, server_id, summary)| {
6219 (ProjectPath { worktree_id, path }, server_id, summary)
6220 })
6221 })
6222 }
6223
6224 pub fn disk_based_diagnostics_started(
6225 &mut self,
6226 language_server_id: LanguageServerId,
6227 cx: &mut ModelContext<Self>,
6228 ) {
6229 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6230 }
6231
6232 pub fn disk_based_diagnostics_finished(
6233 &mut self,
6234 language_server_id: LanguageServerId,
6235 cx: &mut ModelContext<Self>,
6236 ) {
6237 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6238 }
6239
6240 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6241 self.active_entry
6242 }
6243
6244 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6245 self.worktree_for_id(path.worktree_id, cx)?
6246 .read(cx)
6247 .entry_for_path(&path.path)
6248 .cloned()
6249 }
6250
6251 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6252 let worktree = self.worktree_for_entry(entry_id, cx)?;
6253 let worktree = worktree.read(cx);
6254 let worktree_id = worktree.id();
6255 let path = worktree.entry_for_id(entry_id)?.path.clone();
6256 Some(ProjectPath { worktree_id, path })
6257 }
6258
6259 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6260 let workspace_root = self
6261 .worktree_for_id(project_path.worktree_id, cx)?
6262 .read(cx)
6263 .abs_path();
6264 let project_path = project_path.path.as_ref();
6265
6266 Some(if project_path == Path::new("") {
6267 workspace_root.to_path_buf()
6268 } else {
6269 workspace_root.join(project_path)
6270 })
6271 }
6272
6273 // RPC message handlers
6274
6275 async fn handle_unshare_project(
6276 this: ModelHandle<Self>,
6277 _: TypedEnvelope<proto::UnshareProject>,
6278 _: Arc<Client>,
6279 mut cx: AsyncAppContext,
6280 ) -> Result<()> {
6281 this.update(&mut cx, |this, cx| {
6282 if this.is_local() {
6283 this.unshare(cx)?;
6284 } else {
6285 this.disconnected_from_host(cx);
6286 }
6287 Ok(())
6288 })
6289 }
6290
6291 async fn handle_add_collaborator(
6292 this: ModelHandle<Self>,
6293 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6294 _: Arc<Client>,
6295 mut cx: AsyncAppContext,
6296 ) -> Result<()> {
6297 let collaborator = envelope
6298 .payload
6299 .collaborator
6300 .take()
6301 .ok_or_else(|| anyhow!("empty collaborator"))?;
6302
6303 let collaborator = Collaborator::from_proto(collaborator)?;
6304 this.update(&mut cx, |this, cx| {
6305 this.shared_buffers.remove(&collaborator.peer_id);
6306 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6307 this.collaborators
6308 .insert(collaborator.peer_id, collaborator);
6309 cx.notify();
6310 });
6311
6312 Ok(())
6313 }
6314
6315 async fn handle_update_project_collaborator(
6316 this: ModelHandle<Self>,
6317 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6318 _: Arc<Client>,
6319 mut cx: AsyncAppContext,
6320 ) -> Result<()> {
6321 let old_peer_id = envelope
6322 .payload
6323 .old_peer_id
6324 .ok_or_else(|| anyhow!("missing old peer id"))?;
6325 let new_peer_id = envelope
6326 .payload
6327 .new_peer_id
6328 .ok_or_else(|| anyhow!("missing new peer id"))?;
6329 this.update(&mut cx, |this, cx| {
6330 let collaborator = this
6331 .collaborators
6332 .remove(&old_peer_id)
6333 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6334 let is_host = collaborator.replica_id == 0;
6335 this.collaborators.insert(new_peer_id, collaborator);
6336
6337 let buffers = this.shared_buffers.remove(&old_peer_id);
6338 log::info!(
6339 "peer {} became {}. moving buffers {:?}",
6340 old_peer_id,
6341 new_peer_id,
6342 &buffers
6343 );
6344 if let Some(buffers) = buffers {
6345 this.shared_buffers.insert(new_peer_id, buffers);
6346 }
6347
6348 if is_host {
6349 this.opened_buffers
6350 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
6351 this.buffer_ordered_messages_tx
6352 .unbounded_send(BufferOrderedMessage::Resync)
6353 .unwrap();
6354 }
6355
6356 cx.emit(Event::CollaboratorUpdated {
6357 old_peer_id,
6358 new_peer_id,
6359 });
6360 cx.notify();
6361 Ok(())
6362 })
6363 }
6364
6365 async fn handle_remove_collaborator(
6366 this: ModelHandle<Self>,
6367 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
6368 _: Arc<Client>,
6369 mut cx: AsyncAppContext,
6370 ) -> Result<()> {
6371 this.update(&mut cx, |this, cx| {
6372 let peer_id = envelope
6373 .payload
6374 .peer_id
6375 .ok_or_else(|| anyhow!("invalid peer id"))?;
6376 let replica_id = this
6377 .collaborators
6378 .remove(&peer_id)
6379 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
6380 .replica_id;
6381 for buffer in this.opened_buffers.values() {
6382 if let Some(buffer) = buffer.upgrade(cx) {
6383 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
6384 }
6385 }
6386 this.shared_buffers.remove(&peer_id);
6387
6388 cx.emit(Event::CollaboratorLeft(peer_id));
6389 cx.notify();
6390 Ok(())
6391 })
6392 }
6393
6394 async fn handle_update_project(
6395 this: ModelHandle<Self>,
6396 envelope: TypedEnvelope<proto::UpdateProject>,
6397 _: Arc<Client>,
6398 mut cx: AsyncAppContext,
6399 ) -> Result<()> {
6400 this.update(&mut cx, |this, cx| {
            // Ignore messages that were sent before the response to our request to join the
            // project.
6402 if envelope.message_id > this.join_project_response_message_id {
6403 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6404 }
6405 Ok(())
6406 })
6407 }
6408
6409 async fn handle_update_worktree(
6410 this: ModelHandle<Self>,
6411 envelope: TypedEnvelope<proto::UpdateWorktree>,
6412 _: Arc<Client>,
6413 mut cx: AsyncAppContext,
6414 ) -> Result<()> {
6415 this.update(&mut cx, |this, cx| {
6416 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6417 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6418 worktree.update(cx, |worktree, _| {
6419 let worktree = worktree.as_remote_mut().unwrap();
6420 worktree.update_from_remote(envelope.payload);
6421 });
6422 }
6423 Ok(())
6424 })
6425 }
6426
6427 async fn handle_update_worktree_settings(
6428 this: ModelHandle<Self>,
6429 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
6430 _: Arc<Client>,
6431 mut cx: AsyncAppContext,
6432 ) -> Result<()> {
6433 this.update(&mut cx, |this, cx| {
6434 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6435 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6436 cx.update_global::<SettingsStore, _, _>(|store, cx| {
6437 store
6438 .set_local_settings(
6439 worktree.id(),
6440 PathBuf::from(&envelope.payload.path).into(),
6441 envelope.payload.content.as_ref().map(String::as_str),
6442 cx,
6443 )
6444 .log_err();
6445 });
6446 }
6447 Ok(())
6448 })
6449 }
6450
6451 async fn handle_create_project_entry(
6452 this: ModelHandle<Self>,
6453 envelope: TypedEnvelope<proto::CreateProjectEntry>,
6454 _: Arc<Client>,
6455 mut cx: AsyncAppContext,
6456 ) -> Result<proto::ProjectEntryResponse> {
6457 let worktree = this.update(&mut cx, |this, cx| {
6458 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6459 this.worktree_for_id(worktree_id, cx)
6460 .ok_or_else(|| anyhow!("worktree not found"))
6461 })?;
6462 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6463 let entry = worktree
6464 .update(&mut cx, |worktree, cx| {
6465 let worktree = worktree.as_local_mut().unwrap();
6466 let path = PathBuf::from(envelope.payload.path);
6467 worktree.create_entry(path, envelope.payload.is_directory, cx)
6468 })
6469 .await?;
6470 Ok(proto::ProjectEntryResponse {
6471 entry: Some((&entry).into()),
6472 worktree_scan_id: worktree_scan_id as u64,
6473 })
6474 }
6475
6476 async fn handle_rename_project_entry(
6477 this: ModelHandle<Self>,
6478 envelope: TypedEnvelope<proto::RenameProjectEntry>,
6479 _: Arc<Client>,
6480 mut cx: AsyncAppContext,
6481 ) -> Result<proto::ProjectEntryResponse> {
6482 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6483 let worktree = this.read_with(&cx, |this, cx| {
6484 this.worktree_for_entry(entry_id, cx)
6485 .ok_or_else(|| anyhow!("worktree not found"))
6486 })?;
6487 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6488 let entry = worktree
6489 .update(&mut cx, |worktree, cx| {
6490 let new_path = PathBuf::from(envelope.payload.new_path);
6491 worktree
6492 .as_local_mut()
6493 .unwrap()
6494 .rename_entry(entry_id, new_path, cx)
6495 .ok_or_else(|| anyhow!("invalid entry"))
6496 })?
6497 .await?;
6498 Ok(proto::ProjectEntryResponse {
6499 entry: Some((&entry).into()),
6500 worktree_scan_id: worktree_scan_id as u64,
6501 })
6502 }
6503
6504 async fn handle_copy_project_entry(
6505 this: ModelHandle<Self>,
6506 envelope: TypedEnvelope<proto::CopyProjectEntry>,
6507 _: Arc<Client>,
6508 mut cx: AsyncAppContext,
6509 ) -> Result<proto::ProjectEntryResponse> {
6510 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6511 let worktree = this.read_with(&cx, |this, cx| {
6512 this.worktree_for_entry(entry_id, cx)
6513 .ok_or_else(|| anyhow!("worktree not found"))
6514 })?;
6515 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6516 let entry = worktree
6517 .update(&mut cx, |worktree, cx| {
6518 let new_path = PathBuf::from(envelope.payload.new_path);
6519 worktree
6520 .as_local_mut()
6521 .unwrap()
6522 .copy_entry(entry_id, new_path, cx)
6523 .ok_or_else(|| anyhow!("invalid entry"))
6524 })?
6525 .await?;
6526 Ok(proto::ProjectEntryResponse {
6527 entry: Some((&entry).into()),
6528 worktree_scan_id: worktree_scan_id as u64,
6529 })
6530 }
6531
6532 async fn handle_delete_project_entry(
6533 this: ModelHandle<Self>,
6534 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
6535 _: Arc<Client>,
6536 mut cx: AsyncAppContext,
6537 ) -> Result<proto::ProjectEntryResponse> {
6538 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6539
6540 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
6541
6542 let worktree = this.read_with(&cx, |this, cx| {
6543 this.worktree_for_entry(entry_id, cx)
6544 .ok_or_else(|| anyhow!("worktree not found"))
6545 })?;
6546 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6547 worktree
6548 .update(&mut cx, |worktree, cx| {
6549 worktree
6550 .as_local_mut()
6551 .unwrap()
6552 .delete_entry(entry_id, cx)
6553 .ok_or_else(|| anyhow!("invalid entry"))
6554 })?
6555 .await?;
6556 Ok(proto::ProjectEntryResponse {
6557 entry: None,
6558 worktree_scan_id: worktree_scan_id as u64,
6559 })
6560 }
6561
6562 async fn handle_expand_project_entry(
6563 this: ModelHandle<Self>,
6564 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
6565 _: Arc<Client>,
6566 mut cx: AsyncAppContext,
6567 ) -> Result<proto::ExpandProjectEntryResponse> {
6568 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6569 let worktree = this
6570 .read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
6571 .ok_or_else(|| anyhow!("invalid request"))?;
6572 worktree
6573 .update(&mut cx, |worktree, cx| {
6574 worktree
6575 .as_local_mut()
6576 .unwrap()
6577 .expand_entry(entry_id, cx)
6578 .ok_or_else(|| anyhow!("invalid entry"))
6579 })?
6580 .await?;
6581 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
6582 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
6583 }
6584
6585 async fn handle_update_diagnostic_summary(
6586 this: ModelHandle<Self>,
6587 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
6588 _: Arc<Client>,
6589 mut cx: AsyncAppContext,
6590 ) -> Result<()> {
6591 this.update(&mut cx, |this, cx| {
6592 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6593 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6594 if let Some(summary) = envelope.payload.summary {
6595 let project_path = ProjectPath {
6596 worktree_id,
6597 path: Path::new(&summary.path).into(),
6598 };
6599 worktree.update(cx, |worktree, _| {
6600 worktree
6601 .as_remote_mut()
6602 .unwrap()
6603 .update_diagnostic_summary(project_path.path.clone(), &summary);
6604 });
6605 cx.emit(Event::DiagnosticsUpdated {
6606 language_server_id: LanguageServerId(summary.language_server_id as usize),
6607 path: project_path,
6608 });
6609 }
6610 }
6611 Ok(())
6612 })
6613 }
6614
6615 async fn handle_start_language_server(
6616 this: ModelHandle<Self>,
6617 envelope: TypedEnvelope<proto::StartLanguageServer>,
6618 _: Arc<Client>,
6619 mut cx: AsyncAppContext,
6620 ) -> Result<()> {
6621 let server = envelope
6622 .payload
6623 .server
6624 .ok_or_else(|| anyhow!("invalid server"))?;
6625 this.update(&mut cx, |this, cx| {
6626 this.language_server_statuses.insert(
6627 LanguageServerId(server.id as usize),
6628 LanguageServerStatus {
6629 name: server.name,
6630 pending_work: Default::default(),
6631 has_pending_diagnostic_updates: false,
6632 progress_tokens: Default::default(),
6633 },
6634 );
6635 cx.notify();
6636 });
6637 Ok(())
6638 }
6639
6640 async fn handle_update_language_server(
6641 this: ModelHandle<Self>,
6642 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
6643 _: Arc<Client>,
6644 mut cx: AsyncAppContext,
6645 ) -> Result<()> {
6646 this.update(&mut cx, |this, cx| {
6647 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
6648
6649 match envelope
6650 .payload
6651 .variant
6652 .ok_or_else(|| anyhow!("invalid variant"))?
6653 {
6654 proto::update_language_server::Variant::WorkStart(payload) => {
6655 this.on_lsp_work_start(
6656 language_server_id,
6657 payload.token,
6658 LanguageServerProgress {
6659 message: payload.message,
6660 percentage: payload.percentage.map(|p| p as usize),
6661 last_update_at: Instant::now(),
6662 },
6663 cx,
6664 );
6665 }
6666
6667 proto::update_language_server::Variant::WorkProgress(payload) => {
6668 this.on_lsp_work_progress(
6669 language_server_id,
6670 payload.token,
6671 LanguageServerProgress {
6672 message: payload.message,
6673 percentage: payload.percentage.map(|p| p as usize),
6674 last_update_at: Instant::now(),
6675 },
6676 cx,
6677 );
6678 }
6679
6680 proto::update_language_server::Variant::WorkEnd(payload) => {
6681 this.on_lsp_work_end(language_server_id, payload.token, cx);
6682 }
6683
6684 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
6685 this.disk_based_diagnostics_started(language_server_id, cx);
6686 }
6687
6688 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
6689 this.disk_based_diagnostics_finished(language_server_id, cx)
6690 }
6691 }
6692
6693 Ok(())
6694 })
6695 }
6696
6697 async fn handle_update_buffer(
6698 this: ModelHandle<Self>,
6699 envelope: TypedEnvelope<proto::UpdateBuffer>,
6700 _: Arc<Client>,
6701 mut cx: AsyncAppContext,
6702 ) -> Result<proto::Ack> {
6703 this.update(&mut cx, |this, cx| {
6704 let payload = envelope.payload.clone();
6705 let buffer_id = payload.buffer_id;
6706 let ops = payload
6707 .operations
6708 .into_iter()
6709 .map(language::proto::deserialize_operation)
6710 .collect::<Result<Vec<_>, _>>()?;
6711 let is_remote = this.is_remote();
6712 match this.opened_buffers.entry(buffer_id) {
6713 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
6714 OpenBuffer::Strong(buffer) => {
6715 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
6716 }
6717 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
6718 OpenBuffer::Weak(_) => {}
6719 },
6720 hash_map::Entry::Vacant(e) => {
6721 assert!(
6722 is_remote,
6723 "received buffer update from {:?}",
6724 envelope.original_sender_id
6725 );
6726 e.insert(OpenBuffer::Operations(ops));
6727 }
6728 }
6729 Ok(proto::Ack {})
6730 })
6731 }
6732
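    // A remote buffer arrives either as an initial `State` message, which creates an
    // incomplete buffer, or as one or more `Chunk` messages carrying its operations; the
    // buffer is registered with the project once the final chunk has been applied.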
6733 async fn handle_create_buffer_for_peer(
6734 this: ModelHandle<Self>,
6735 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
6736 _: Arc<Client>,
6737 mut cx: AsyncAppContext,
6738 ) -> Result<()> {
6739 this.update(&mut cx, |this, cx| {
6740 match envelope
6741 .payload
6742 .variant
6743 .ok_or_else(|| anyhow!("missing variant"))?
6744 {
6745 proto::create_buffer_for_peer::Variant::State(mut state) => {
6746 let mut buffer_file = None;
6747 if let Some(file) = state.file.take() {
6748 let worktree_id = WorktreeId::from_proto(file.worktree_id);
6749 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
6750 anyhow!("no worktree found for id {}", file.worktree_id)
6751 })?;
6752 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
6753 as Arc<dyn language::File>);
6754 }
6755
6756 let buffer_id = state.id;
6757 let buffer = cx.add_model(|_| {
6758 Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
6759 });
6760 this.incomplete_remote_buffers
6761 .insert(buffer_id, Some(buffer));
6762 }
6763 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
6764 let buffer = this
6765 .incomplete_remote_buffers
6766 .get(&chunk.buffer_id)
6767 .cloned()
6768 .flatten()
6769 .ok_or_else(|| {
6770 anyhow!(
6771 "received chunk for buffer {} without initial state",
6772 chunk.buffer_id
6773 )
6774 })?;
6775 let operations = chunk
6776 .operations
6777 .into_iter()
6778 .map(language::proto::deserialize_operation)
6779 .collect::<Result<Vec<_>>>()?;
6780 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
6781
6782 if chunk.is_last {
6783 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
6784 this.register_buffer(&buffer, cx)?;
6785 }
6786 }
6787 }
6788
6789 Ok(())
6790 })
6791 }
6792
6793 async fn handle_update_diff_base(
6794 this: ModelHandle<Self>,
6795 envelope: TypedEnvelope<proto::UpdateDiffBase>,
6796 _: Arc<Client>,
6797 mut cx: AsyncAppContext,
6798 ) -> Result<()> {
6799 this.update(&mut cx, |this, cx| {
6800 let buffer_id = envelope.payload.buffer_id;
6801 let diff_base = envelope.payload.diff_base;
6802 if let Some(buffer) = this
6803 .opened_buffers
6804 .get_mut(&buffer_id)
6805 .and_then(|b| b.upgrade(cx))
6806 .or_else(|| {
6807 this.incomplete_remote_buffers
6808 .get(&buffer_id)
6809 .cloned()
6810 .flatten()
6811 })
6812 {
6813 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
6814 }
6815 Ok(())
6816 })
6817 }
6818
6819 async fn handle_update_buffer_file(
6820 this: ModelHandle<Self>,
6821 envelope: TypedEnvelope<proto::UpdateBufferFile>,
6822 _: Arc<Client>,
6823 mut cx: AsyncAppContext,
6824 ) -> Result<()> {
6825 let buffer_id = envelope.payload.buffer_id;
6826
6827 this.update(&mut cx, |this, cx| {
6828 let payload = envelope.payload.clone();
6829 if let Some(buffer) = this
6830 .opened_buffers
6831 .get(&buffer_id)
6832 .and_then(|b| b.upgrade(cx))
6833 .or_else(|| {
6834 this.incomplete_remote_buffers
6835 .get(&buffer_id)
6836 .cloned()
6837 .flatten()
6838 })
6839 {
6840 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
6841 let worktree = this
6842 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
6843 .ok_or_else(|| anyhow!("no such worktree"))?;
6844 let file = File::from_proto(file, worktree, cx)?;
6845 buffer.update(cx, |buffer, cx| {
6846 buffer.file_updated(Arc::new(file), cx).detach();
6847 });
6848 this.detect_language_for_buffer(&buffer, cx);
6849 }
6850 Ok(())
6851 })
6852 }
6853
6854 async fn handle_save_buffer(
6855 this: ModelHandle<Self>,
6856 envelope: TypedEnvelope<proto::SaveBuffer>,
6857 _: Arc<Client>,
6858 mut cx: AsyncAppContext,
6859 ) -> Result<proto::BufferSaved> {
6860 let buffer_id = envelope.payload.buffer_id;
6861 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
6862 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
6863 let buffer = this
6864 .opened_buffers
6865 .get(&buffer_id)
6866 .and_then(|buffer| buffer.upgrade(cx))
6867 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
6868 anyhow::Ok((project_id, buffer))
6869 })?;
6870 buffer
6871 .update(&mut cx, |buffer, _| {
6872 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
6873 })
6874 .await?;
6875 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
6876
6877 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
6878 .await?;
6879 Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
6880 project_id,
6881 buffer_id,
6882 version: serialize_version(buffer.saved_version()),
6883 mtime: Some(buffer.saved_mtime().into()),
6884 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
6885 }))
6886 }
6887
6888 async fn handle_reload_buffers(
6889 this: ModelHandle<Self>,
6890 envelope: TypedEnvelope<proto::ReloadBuffers>,
6891 _: Arc<Client>,
6892 mut cx: AsyncAppContext,
6893 ) -> Result<proto::ReloadBuffersResponse> {
6894 let sender_id = envelope.original_sender_id()?;
6895 let reload = this.update(&mut cx, |this, cx| {
6896 let mut buffers = HashSet::default();
6897 for buffer_id in &envelope.payload.buffer_ids {
6898 buffers.insert(
6899 this.opened_buffers
6900 .get(buffer_id)
6901 .and_then(|buffer| buffer.upgrade(cx))
6902 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
6903 );
6904 }
6905 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
6906 })?;
6907
6908 let project_transaction = reload.await?;
6909 let project_transaction = this.update(&mut cx, |this, cx| {
6910 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
6911 });
6912 Ok(proto::ReloadBuffersResponse {
6913 transaction: Some(project_transaction),
6914 })
6915 }
6916
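    // Handles a guest's request to resynchronize buffers: records which buffers the guest has
    // open, reports the host's version of each one, and re-sends file metadata, diff bases,
    // saved-state info, and any operations the guest is missing.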
6917 async fn handle_synchronize_buffers(
6918 this: ModelHandle<Self>,
6919 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
6920 _: Arc<Client>,
6921 mut cx: AsyncAppContext,
6922 ) -> Result<proto::SynchronizeBuffersResponse> {
6923 let project_id = envelope.payload.project_id;
6924 let mut response = proto::SynchronizeBuffersResponse {
6925 buffers: Default::default(),
6926 };
6927
6928 this.update(&mut cx, |this, cx| {
6929 let Some(guest_id) = envelope.original_sender_id else {
6930 error!("missing original_sender_id on SynchronizeBuffers request");
6931 return;
6932 };
6933
6934 this.shared_buffers.entry(guest_id).or_default().clear();
6935 for buffer in envelope.payload.buffers {
6936 let buffer_id = buffer.id;
6937 let remote_version = language::proto::deserialize_version(&buffer.version);
6938 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
6939 this.shared_buffers
6940 .entry(guest_id)
6941 .or_default()
6942 .insert(buffer_id);
6943
6944 let buffer = buffer.read(cx);
6945 response.buffers.push(proto::BufferVersion {
6946 id: buffer_id,
6947 version: language::proto::serialize_version(&buffer.version),
6948 });
6949
6950 let operations = buffer.serialize_ops(Some(remote_version), cx);
6951 let client = this.client.clone();
6952 if let Some(file) = buffer.file() {
6953 client
6954 .send(proto::UpdateBufferFile {
6955 project_id,
6956 buffer_id: buffer_id as u64,
6957 file: Some(file.to_proto()),
6958 })
6959 .log_err();
6960 }
6961
6962 client
6963 .send(proto::UpdateDiffBase {
6964 project_id,
6965 buffer_id: buffer_id as u64,
6966 diff_base: buffer.diff_base().map(Into::into),
6967 })
6968 .log_err();
6969
6970 client
6971 .send(proto::BufferReloaded {
6972 project_id,
6973 buffer_id,
6974 version: language::proto::serialize_version(buffer.saved_version()),
6975 mtime: Some(buffer.saved_mtime().into()),
6976 fingerprint: language::proto::serialize_fingerprint(
6977 buffer.saved_version_fingerprint(),
6978 ),
6979 line_ending: language::proto::serialize_line_ending(
6980 buffer.line_ending(),
6981 ) as i32,
6982 })
6983 .log_err();
6984
6985 cx.background()
6986 .spawn(
6987 async move {
6988 let operations = operations.await;
6989 for chunk in split_operations(operations) {
6990 client
6991 .request(proto::UpdateBuffer {
6992 project_id,
6993 buffer_id,
6994 operations: chunk,
6995 })
6996 .await?;
6997 }
6998 anyhow::Ok(())
6999 }
7000 .log_err(),
7001 )
7002 .detach();
7003 }
7004 }
7005 });
7006
7007 Ok(response)
7008 }
7009
7010 async fn handle_format_buffers(
7011 this: ModelHandle<Self>,
7012 envelope: TypedEnvelope<proto::FormatBuffers>,
7013 _: Arc<Client>,
7014 mut cx: AsyncAppContext,
7015 ) -> Result<proto::FormatBuffersResponse> {
7016 let sender_id = envelope.original_sender_id()?;
7017 let format = this.update(&mut cx, |this, cx| {
7018 let mut buffers = HashSet::default();
7019 for buffer_id in &envelope.payload.buffer_ids {
7020 buffers.insert(
7021 this.opened_buffers
7022 .get(buffer_id)
7023 .and_then(|buffer| buffer.upgrade(cx))
7024 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7025 );
7026 }
7027 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7028 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7029 })?;
7030
7031 let project_transaction = format.await?;
7032 let project_transaction = this.update(&mut cx, |this, cx| {
7033 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7034 });
7035 Ok(proto::FormatBuffersResponse {
7036 transaction: Some(project_transaction),
7037 })
7038 }
7039
7040 async fn handle_apply_additional_edits_for_completion(
7041 this: ModelHandle<Self>,
7042 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7043 _: Arc<Client>,
7044 mut cx: AsyncAppContext,
7045 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7046 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7047 let buffer = this
7048 .opened_buffers
7049 .get(&envelope.payload.buffer_id)
7050 .and_then(|buffer| buffer.upgrade(cx))
7051 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7052 let language = buffer.read(cx).language();
7053 let completion = language::proto::deserialize_completion(
7054 envelope
7055 .payload
7056 .completion
7057 .ok_or_else(|| anyhow!("invalid completion"))?,
7058 language.cloned(),
7059 );
7060 Ok::<_, anyhow::Error>((buffer, completion))
7061 })?;
7062
7063 let completion = completion.await?;
7064
7065 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7066 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7067 });
7068
7069 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7070 transaction: apply_additional_edits
7071 .await?
7072 .as_ref()
7073 .map(language::proto::serialize_transaction),
7074 })
7075 }
7076
7077 async fn handle_apply_code_action(
7078 this: ModelHandle<Self>,
7079 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7080 _: Arc<Client>,
7081 mut cx: AsyncAppContext,
7082 ) -> Result<proto::ApplyCodeActionResponse> {
7083 let sender_id = envelope.original_sender_id()?;
7084 let action = language::proto::deserialize_code_action(
7085 envelope
7086 .payload
7087 .action
7088 .ok_or_else(|| anyhow!("invalid action"))?,
7089 )?;
7090 let apply_code_action = this.update(&mut cx, |this, cx| {
7091 let buffer = this
7092 .opened_buffers
7093 .get(&envelope.payload.buffer_id)
7094 .and_then(|buffer| buffer.upgrade(cx))
7095 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7096 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7097 })?;
7098
7099 let project_transaction = apply_code_action.await?;
7100 let project_transaction = this.update(&mut cx, |this, cx| {
7101 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7102 });
7103 Ok(proto::ApplyCodeActionResponse {
7104 transaction: Some(project_transaction),
7105 })
7106 }
7107
7108 async fn handle_on_type_formatting(
7109 this: ModelHandle<Self>,
7110 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7111 _: Arc<Client>,
7112 mut cx: AsyncAppContext,
7113 ) -> Result<proto::OnTypeFormattingResponse> {
7114 let on_type_formatting = this.update(&mut cx, |this, cx| {
7115 let buffer = this
7116 .opened_buffers
7117 .get(&envelope.payload.buffer_id)
7118 .and_then(|buffer| buffer.upgrade(cx))
7119 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7120 let position = envelope
7121 .payload
7122 .position
7123 .and_then(deserialize_anchor)
7124 .ok_or_else(|| anyhow!("invalid position"))?;
7125 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7126 buffer,
7127 position,
7128 envelope.payload.trigger.clone(),
7129 cx,
7130 ))
7131 })?;
7132
7133 let transaction = on_type_formatting
7134 .await?
7135 .as_ref()
7136 .map(language::proto::serialize_transaction);
7137 Ok(proto::OnTypeFormattingResponse { transaction })
7138 }
7139
7140 async fn handle_inlay_hints(
7141 this: ModelHandle<Self>,
7142 envelope: TypedEnvelope<proto::InlayHints>,
7143 _: Arc<Client>,
7144 mut cx: AsyncAppContext,
7145 ) -> Result<proto::InlayHintsResponse> {
7146 let sender_id = envelope.original_sender_id()?;
7147 let buffer = this.update(&mut cx, |this, cx| {
7148 this.opened_buffers
7149 .get(&envelope.payload.buffer_id)
7150 .and_then(|buffer| buffer.upgrade(cx))
7151 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7152 })?;
7153 let buffer_version = deserialize_version(&envelope.payload.version);
7154
7155 buffer
7156 .update(&mut cx, |buffer, _| {
7157 buffer.wait_for_version(buffer_version.clone())
7158 })
7159 .await
7160 .with_context(|| {
7161 format!(
7162 "waiting for version {:?} for buffer {}",
7163 buffer_version,
7164 buffer.id()
7165 )
7166 })?;
7167
7168 let start = envelope
7169 .payload
7170 .start
7171 .and_then(deserialize_anchor)
7172 .context("missing range start")?;
7173 let end = envelope
7174 .payload
7175 .end
7176 .and_then(deserialize_anchor)
7177 .context("missing range end")?;
7178 let buffer_hints = this
7179 .update(&mut cx, |project, cx| {
7180 project.inlay_hints(buffer, start..end, cx)
7181 })
7182 .await
7183 .context("inlay hints fetch")?;
7184
7185 Ok(this.update(&mut cx, |project, cx| {
7186 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7187 }))
7188 }
7189
7190 async fn handle_resolve_inlay_hint(
7191 this: ModelHandle<Self>,
7192 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7193 _: Arc<Client>,
7194 mut cx: AsyncAppContext,
7195 ) -> Result<proto::ResolveInlayHintResponse> {
        let proto_hint = envelope
            .payload
            .hint
            .context("incorrect protobuf resolve inlay hint message: missing the inlay hint")?;
7200 let hint = InlayHints::proto_to_project_hint(proto_hint)
7201 .context("resolved proto inlay hint conversion")?;
7202 let buffer = this.update(&mut cx, |this, cx| {
7203 this.opened_buffers
7204 .get(&envelope.payload.buffer_id)
7205 .and_then(|buffer| buffer.upgrade(cx))
7206 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7207 })?;
7208 let response_hint = this
7209 .update(&mut cx, |project, cx| {
7210 project.resolve_inlay_hint(
7211 hint,
7212 buffer,
7213 LanguageServerId(envelope.payload.language_server_id as usize),
7214 cx,
7215 )
7216 })
7217 .await
7218 .context("inlay hints fetch")?;
7219 Ok(proto::ResolveInlayHintResponse {
7220 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7221 })
7222 }
7223
7224 async fn handle_refresh_inlay_hints(
7225 this: ModelHandle<Self>,
7226 _: TypedEnvelope<proto::RefreshInlayHints>,
7227 _: Arc<Client>,
7228 mut cx: AsyncAppContext,
7229 ) -> Result<proto::Ack> {
7230 this.update(&mut cx, |_, cx| {
7231 cx.emit(Event::RefreshInlayHints);
7232 });
7233 Ok(proto::Ack {})
7234 }
7235
7236 async fn handle_lsp_command<T: LspCommand>(
7237 this: ModelHandle<Self>,
7238 envelope: TypedEnvelope<T::ProtoRequest>,
7239 _: Arc<Client>,
7240 mut cx: AsyncAppContext,
7241 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7242 where
7243 <T::LspRequest as lsp::request::Request>::Result: Send,
7244 {
7245 let sender_id = envelope.original_sender_id()?;
7246 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7247 let buffer_handle = this.read_with(&cx, |this, _| {
7248 this.opened_buffers
7249 .get(&buffer_id)
7250 .and_then(|buffer| buffer.upgrade(&cx))
7251 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7252 })?;
7253 let request = T::from_proto(
7254 envelope.payload,
7255 this.clone(),
7256 buffer_handle.clone(),
7257 cx.clone(),
7258 )
7259 .await?;
7260 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
7261 let response = this
7262 .update(&mut cx, |this, cx| {
7263 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7264 })
7265 .await?;
7266 this.update(&mut cx, |this, cx| {
7267 Ok(T::response_to_proto(
7268 response,
7269 this,
7270 sender_id,
7271 &buffer_version,
7272 cx,
7273 ))
7274 })
7275 }
7276
7277 async fn handle_get_project_symbols(
7278 this: ModelHandle<Self>,
7279 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7280 _: Arc<Client>,
7281 mut cx: AsyncAppContext,
7282 ) -> Result<proto::GetProjectSymbolsResponse> {
7283 let symbols = this
7284 .update(&mut cx, |this, cx| {
7285 this.symbols(&envelope.payload.query, cx)
7286 })
7287 .await?;
7288
7289 Ok(proto::GetProjectSymbolsResponse {
7290 symbols: symbols.iter().map(serialize_symbol).collect(),
7291 })
7292 }
7293
7294 async fn handle_search_project(
7295 this: ModelHandle<Self>,
7296 envelope: TypedEnvelope<proto::SearchProject>,
7297 _: Arc<Client>,
7298 mut cx: AsyncAppContext,
7299 ) -> Result<proto::SearchProjectResponse> {
7300 let peer_id = envelope.original_sender_id()?;
7301 let query = SearchQuery::from_proto(envelope.payload)?;
7302 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
7303
7304 cx.spawn(|mut cx| async move {
7305 let mut locations = Vec::new();
7306 while let Some((buffer, ranges)) = result.next().await {
7307 for range in ranges {
7308 let start = serialize_anchor(&range.start);
7309 let end = serialize_anchor(&range.end);
7310 let buffer_id = this.update(&mut cx, |this, cx| {
7311 this.create_buffer_for_peer(&buffer, peer_id, cx)
7312 });
7313 locations.push(proto::Location {
7314 buffer_id,
7315 start: Some(start),
7316 end: Some(end),
7317 });
7318 }
7319 }
7320 Ok(proto::SearchProjectResponse { locations })
7321 })
7322 .await
7323 }
7324
7325 async fn handle_open_buffer_for_symbol(
7326 this: ModelHandle<Self>,
7327 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7328 _: Arc<Client>,
7329 mut cx: AsyncAppContext,
7330 ) -> Result<proto::OpenBufferForSymbolResponse> {
7331 let peer_id = envelope.original_sender_id()?;
7332 let symbol = envelope
7333 .payload
7334 .symbol
7335 .ok_or_else(|| anyhow!("invalid symbol"))?;
7336 let symbol = this
7337 .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
7338 .await?;
7339 let symbol = this.read_with(&cx, |this, _| {
7340 let signature = this.symbol_signature(&symbol.path);
7341 if signature == symbol.signature {
7342 Ok(symbol)
7343 } else {
7344 Err(anyhow!("invalid symbol signature"))
7345 }
7346 })?;
7347 let buffer = this
7348 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
7349 .await?;
7350
7351 Ok(proto::OpenBufferForSymbolResponse {
7352 buffer_id: this.update(&mut cx, |this, cx| {
7353 this.create_buffer_for_peer(&buffer, peer_id, cx)
7354 }),
7355 })
7356 }
7357
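    // Signs a project path with this project's nonce. The signature is verified in
    // `handle_open_buffer_for_symbol`, which rejects symbols whose signature doesn't match,
    // presumably to prevent guests from forging symbols for arbitrary paths.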
7358 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
7359 let mut hasher = Sha256::new();
7360 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
7361 hasher.update(project_path.path.to_string_lossy().as_bytes());
7362 hasher.update(self.nonce.to_be_bytes());
7363 hasher.finalize().as_slice().try_into().unwrap()
7364 }
7365
7366 async fn handle_open_buffer_by_id(
7367 this: ModelHandle<Self>,
7368 envelope: TypedEnvelope<proto::OpenBufferById>,
7369 _: Arc<Client>,
7370 mut cx: AsyncAppContext,
7371 ) -> Result<proto::OpenBufferResponse> {
7372 let peer_id = envelope.original_sender_id()?;
7373 let buffer = this
7374 .update(&mut cx, |this, cx| {
7375 this.open_buffer_by_id(envelope.payload.id, cx)
7376 })
7377 .await?;
7378 this.update(&mut cx, |this, cx| {
7379 Ok(proto::OpenBufferResponse {
7380 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7381 })
7382 })
7383 }
7384
7385 async fn handle_open_buffer_by_path(
7386 this: ModelHandle<Self>,
7387 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7388 _: Arc<Client>,
7389 mut cx: AsyncAppContext,
7390 ) -> Result<proto::OpenBufferResponse> {
7391 let peer_id = envelope.original_sender_id()?;
7392 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7393 let open_buffer = this.update(&mut cx, |this, cx| {
7394 this.open_buffer(
7395 ProjectPath {
7396 worktree_id,
7397 path: PathBuf::from(envelope.payload.path).into(),
7398 },
7399 cx,
7400 )
7401 });
7402
7403 let buffer = open_buffer.await?;
7404 this.update(&mut cx, |this, cx| {
7405 Ok(proto::OpenBufferResponse {
7406 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7407 })
7408 })
7409 }
7410
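    // Converts a `ProjectTransaction` into its proto representation, registering each
    // affected buffer with the peer so that the corresponding buffer state is streamed to
    // them.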
7411 fn serialize_project_transaction_for_peer(
7412 &mut self,
7413 project_transaction: ProjectTransaction,
7414 peer_id: proto::PeerId,
7415 cx: &mut AppContext,
7416 ) -> proto::ProjectTransaction {
7417 let mut serialized_transaction = proto::ProjectTransaction {
7418 buffer_ids: Default::default(),
7419 transactions: Default::default(),
7420 };
7421 for (buffer, transaction) in project_transaction.0 {
7422 serialized_transaction
7423 .buffer_ids
7424 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7425 serialized_transaction
7426 .transactions
7427 .push(language::proto::serialize_transaction(&transaction));
7428 }
7429 serialized_transaction
7430 }
7431
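    // Reconstructs a `ProjectTransaction` received from a peer, waiting for each remote
    // buffer and the edits it refers to before optionally pushing the transaction onto the
    // buffer's undo history.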
7432 fn deserialize_project_transaction(
7433 &mut self,
7434 message: proto::ProjectTransaction,
7435 push_to_history: bool,
7436 cx: &mut ModelContext<Self>,
7437 ) -> Task<Result<ProjectTransaction>> {
7438 cx.spawn(|this, mut cx| async move {
7439 let mut project_transaction = ProjectTransaction::default();
7440 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
7441 {
7442 let buffer = this
7443 .update(&mut cx, |this, cx| {
7444 this.wait_for_remote_buffer(buffer_id, cx)
7445 })
7446 .await?;
7447 let transaction = language::proto::deserialize_transaction(transaction)?;
7448 project_transaction.0.insert(buffer, transaction);
7449 }
7450
7451 for (buffer, transaction) in &project_transaction.0 {
7452 buffer
7453 .update(&mut cx, |buffer, _| {
7454 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
7455 })
7456 .await?;
7457
7458 if push_to_history {
7459 buffer.update(&mut cx, |buffer, _| {
7460 buffer.push_transaction(transaction.clone(), Instant::now());
7461 });
7462 }
7463 }
7464
7465 Ok(project_transaction)
7466 })
7467 }
7468
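    // Queues a `CreateBufferForPeer` message for the given peer if this project is being
    // shared, and returns the buffer's remote id.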
7469 fn create_buffer_for_peer(
7470 &mut self,
7471 buffer: &ModelHandle<Buffer>,
7472 peer_id: proto::PeerId,
7473 cx: &mut AppContext,
7474 ) -> u64 {
7475 let buffer_id = buffer.read(cx).remote_id();
7476 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
7477 updates_tx
7478 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7479 .ok();
7480 }
7481 buffer_id
7482 }
7483
7484 fn wait_for_remote_buffer(
7485 &mut self,
7486 id: u64,
7487 cx: &mut ModelContext<Self>,
7488 ) -> Task<Result<ModelHandle<Buffer>>> {
7489 let mut opened_buffer_rx = self.opened_buffer.1.clone();
7490
7491 cx.spawn_weak(|this, mut cx| async move {
7492 let buffer = loop {
7493 let Some(this) = this.upgrade(&cx) else {
7494 return Err(anyhow!("project dropped"));
7495 };
7496
7497 let buffer = this.read_with(&cx, |this, cx| {
7498 this.opened_buffers
7499 .get(&id)
7500 .and_then(|buffer| buffer.upgrade(cx))
7501 });
7502
7503 if let Some(buffer) = buffer {
7504 break buffer;
7505 } else if this.read_with(&cx, |this, _| this.is_read_only()) {
7506 return Err(anyhow!("disconnected before buffer {} could be opened", id));
7507 }
7508
7509 this.update(&mut cx, |this, _| {
7510 this.incomplete_remote_buffers.entry(id).or_default();
7511 });
7512 drop(this);
7513
7514 opened_buffer_rx
7515 .next()
7516 .await
7517 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
7518 };
7519
7520 Ok(buffer)
7521 })
7522 }
7523
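    // Resynchronizes this guest project's open buffers with the host, typically after a
    // reconnect: reports local buffer versions, pushes any operations the host is missing,
    // and re-requests buffers that never finished arriving.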
7524 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
7525 let project_id = match self.client_state.as_ref() {
7526 Some(ProjectClientState::Remote {
7527 sharing_has_stopped,
7528 remote_id,
7529 ..
7530 }) => {
7531 if *sharing_has_stopped {
7532 return Task::ready(Err(anyhow!(
7533 "can't synchronize remote buffers on a readonly project"
7534 )));
7535 } else {
7536 *remote_id
7537 }
7538 }
7539 Some(ProjectClientState::Local { .. }) | None => {
7540 return Task::ready(Err(anyhow!(
7541 "can't synchronize remote buffers on a local project"
7542 )))
7543 }
7544 };
7545
7546 let client = self.client.clone();
7547 cx.spawn(|this, cx| async move {
7548 let (buffers, incomplete_buffer_ids) = this.read_with(&cx, |this, cx| {
7549 let buffers = this
7550 .opened_buffers
7551 .iter()
7552 .filter_map(|(id, buffer)| {
7553 let buffer = buffer.upgrade(cx)?;
7554 Some(proto::BufferVersion {
7555 id: *id,
7556 version: language::proto::serialize_version(&buffer.read(cx).version),
7557 })
7558 })
7559 .collect();
7560 let incomplete_buffer_ids = this
7561 .incomplete_remote_buffers
7562 .keys()
7563 .copied()
7564 .collect::<Vec<_>>();
7565
7566 (buffers, incomplete_buffer_ids)
7567 });
7568 let response = client
7569 .request(proto::SynchronizeBuffers {
7570 project_id,
7571 buffers,
7572 })
7573 .await?;
7574
7575 let send_updates_for_buffers = response.buffers.into_iter().map(|buffer| {
7576 let client = client.clone();
7577 let buffer_id = buffer.id;
7578 let remote_version = language::proto::deserialize_version(&buffer.version);
7579 this.read_with(&cx, |this, cx| {
7580 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
7581 let operations = buffer.read(cx).serialize_ops(Some(remote_version), cx);
7582 cx.background().spawn(async move {
7583 let operations = operations.await;
7584 for chunk in split_operations(operations) {
7585 client
7586 .request(proto::UpdateBuffer {
7587 project_id,
7588 buffer_id,
7589 operations: chunk,
7590 })
7591 .await?;
7592 }
7593 anyhow::Ok(())
7594 })
7595 } else {
7596 Task::ready(Ok(()))
7597 }
7598 })
7599 });
7600
            // Any incomplete buffers have open requests waiting. Ask the host to recreate
            // these buffers for us again to unblock any waiting futures.
7603 for id in incomplete_buffer_ids {
7604 cx.background()
7605 .spawn(client.request(proto::OpenBufferById { project_id, id }))
7606 .detach();
7607 }
7608
7609 futures::future::join_all(send_updates_for_buffers)
7610 .await
7611 .into_iter()
7612 .collect()
7613 })
7614 }
7615
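    /// Returns protobuf metadata (id, root name, visibility, and absolute path) for each of
    /// this project's worktrees.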
7616 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
7617 self.worktrees(cx)
7618 .map(|worktree| {
7619 let worktree = worktree.read(cx);
7620 proto::WorktreeMetadata {
7621 id: worktree.id().to_proto(),
7622 root_name: worktree.root_name().into(),
7623 visible: worktree.is_visible(),
7624 abs_path: worktree.abs_path().to_string_lossy().into(),
7625 }
7626 })
7627 .collect()
7628 }
7629
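    /// Replaces this project's worktrees with the set described by the host, reusing existing
    /// worktree handles where possible and emitting `Event::WorktreeRemoved` for worktrees
    /// that are no longer present.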
7630 fn set_worktrees_from_proto(
7631 &mut self,
7632 worktrees: Vec<proto::WorktreeMetadata>,
7633 cx: &mut ModelContext<Project>,
7634 ) -> Result<()> {
7635 let replica_id = self.replica_id();
7636 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
7637
7638 let mut old_worktrees_by_id = self
7639 .worktrees
7640 .drain(..)
7641 .filter_map(|worktree| {
7642 let worktree = worktree.upgrade(cx)?;
7643 Some((worktree.read(cx).id(), worktree))
7644 })
7645 .collect::<HashMap<_, _>>();
7646
7647 for worktree in worktrees {
7648 if let Some(old_worktree) =
7649 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
7650 {
7651 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
7652 } else {
7653 let worktree =
7654 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
7655 let _ = self.add_worktree(&worktree, cx);
7656 }
7657 }
7658
7659 self.metadata_changed(cx);
7660 for id in old_worktrees_by_id.keys() {
7661 cx.emit(Event::WorktreeRemoved(*id));
7662 }
7663
7664 Ok(())
7665 }
7666
7667 fn set_collaborators_from_proto(
7668 &mut self,
7669 messages: Vec<proto::Collaborator>,
7670 cx: &mut ModelContext<Self>,
7671 ) -> Result<()> {
7672 let mut collaborators = HashMap::default();
7673 for message in messages {
7674 let collaborator = Collaborator::from_proto(message)?;
7675 collaborators.insert(collaborator.peer_id, collaborator);
7676 }
7677 for old_peer_id in self.collaborators.keys() {
7678 if !collaborators.contains_key(old_peer_id) {
7679 cx.emit(Event::CollaboratorLeft(*old_peer_id));
7680 }
7681 }
7682 self.collaborators = collaborators;
7683 Ok(())
7684 }
7685
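    /// Converts a protobuf `Symbol` back into a `Symbol`, resolving the language for the
    /// symbol's path in order to compute a display label.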
7686 fn deserialize_symbol(
7687 &self,
7688 serialized_symbol: proto::Symbol,
7689 ) -> impl Future<Output = Result<Symbol>> {
7690 let languages = self.languages.clone();
7691 async move {
7692 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
7693 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
7694 let start = serialized_symbol
7695 .start
7696 .ok_or_else(|| anyhow!("invalid start"))?;
7697 let end = serialized_symbol
7698 .end
7699 .ok_or_else(|| anyhow!("invalid end"))?;
7700 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
7701 let path = ProjectPath {
7702 worktree_id,
7703 path: PathBuf::from(serialized_symbol.path).into(),
7704 };
7705 let language = languages
7706 .language_for_file(&path.path, None)
7707 .await
7708 .log_err();
7709 Ok(Symbol {
7710 language_server_name: LanguageServerName(
7711 serialized_symbol.language_server_name.into(),
7712 ),
7713 source_worktree_id,
7714 path,
7715 label: {
7716 match language {
7717 Some(language) => {
7718 language
7719 .label_for_symbol(&serialized_symbol.name, kind)
7720 .await
7721 }
7722 None => None,
7723 }
7724 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
7725 },
7726
7727 name: serialized_symbol.name,
7728 range: Unclipped(PointUtf16::new(start.row, start.column))
7729 ..Unclipped(PointUtf16::new(end.row, end.column)),
7730 kind,
7731 signature: serialized_symbol
7732 .signature
7733 .try_into()
7734 .map_err(|_| anyhow!("invalid signature"))?,
7735 })
7736 }
7737 }
7738
7739 async fn handle_buffer_saved(
7740 this: ModelHandle<Self>,
7741 envelope: TypedEnvelope<proto::BufferSaved>,
7742 _: Arc<Client>,
7743 mut cx: AsyncAppContext,
7744 ) -> Result<()> {
7745 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
7746 let version = deserialize_version(&envelope.payload.version);
7747 let mtime = envelope
7748 .payload
7749 .mtime
7750 .ok_or_else(|| anyhow!("missing mtime"))?
7751 .into();
7752
7753 this.update(&mut cx, |this, cx| {
7754 let buffer = this
7755 .opened_buffers
7756 .get(&envelope.payload.buffer_id)
7757 .and_then(|buffer| buffer.upgrade(cx))
7758 .or_else(|| {
7759 this.incomplete_remote_buffers
7760 .get(&envelope.payload.buffer_id)
7761 .and_then(|b| b.clone())
7762 });
7763 if let Some(buffer) = buffer {
7764 buffer.update(cx, |buffer, cx| {
7765 buffer.did_save(version, fingerprint, mtime, cx);
7766 });
7767 }
7768 Ok(())
7769 })
7770 }
7771
7772 async fn handle_buffer_reloaded(
7773 this: ModelHandle<Self>,
7774 envelope: TypedEnvelope<proto::BufferReloaded>,
7775 _: Arc<Client>,
7776 mut cx: AsyncAppContext,
7777 ) -> Result<()> {
7778 let payload = envelope.payload;
7779 let version = deserialize_version(&payload.version);
7780 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
7781 let line_ending = deserialize_line_ending(
7782 proto::LineEnding::from_i32(payload.line_ending)
7783 .ok_or_else(|| anyhow!("missing line ending"))?,
7784 );
7785 let mtime = payload
7786 .mtime
7787 .ok_or_else(|| anyhow!("missing mtime"))?
7788 .into();
7789 this.update(&mut cx, |this, cx| {
7790 let buffer = this
7791 .opened_buffers
7792 .get(&payload.buffer_id)
7793 .and_then(|buffer| buffer.upgrade(cx))
7794 .or_else(|| {
7795 this.incomplete_remote_buffers
7796 .get(&payload.buffer_id)
7797 .cloned()
7798 .flatten()
7799 });
7800 if let Some(buffer) = buffer {
7801 buffer.update(cx, |buffer, cx| {
7802 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
7803 });
7804 }
7805 Ok(())
7806 })
7807 }
7808
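    /// Converts a set of LSP text edits into anchored buffer edits, resolving them against the
    /// buffer snapshot corresponding to the LSP document version the server referred to.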
7809 #[allow(clippy::type_complexity)]
7810 fn edits_from_lsp(
7811 &mut self,
7812 buffer: &ModelHandle<Buffer>,
7813 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
7814 server_id: LanguageServerId,
7815 version: Option<i32>,
7816 cx: &mut ModelContext<Self>,
7817 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
7818 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
7819 cx.background().spawn(async move {
7820 let snapshot = snapshot?;
7821 let mut lsp_edits = lsp_edits
7822 .into_iter()
7823 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
7824 .collect::<Vec<_>>();
7825 lsp_edits.sort_by_key(|(range, _)| range.start);
7826
7827 let mut lsp_edits = lsp_edits.into_iter().peekable();
7828 let mut edits = Vec::new();
7829 while let Some((range, mut new_text)) = lsp_edits.next() {
7830 // Clip invalid ranges provided by the language server.
7831 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
7832 ..snapshot.clip_point_utf16(range.end, Bias::Left);
7833
7834 // Combine any LSP edits that are adjacent.
7835 //
7836 // Also, combine LSP edits that are separated from each other by only
7837 // a newline. This is important because for some code actions,
                // rust-analyzer rewrites the entire buffer via a series of edits that
7839 // are separated by unchanged newline characters.
7840 //
7841 // In order for the diffing logic below to work properly, any edits that
7842 // cancel each other out must be combined into one.
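                // (Concretely: if one edit ends at the end of row N and the next begins at
                // column 0 of row N + 1, the two are merged into a single edit with a newline
                // inserted between their replacement texts.)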
7843 while let Some((next_range, next_text)) = lsp_edits.peek() {
7844 if next_range.start.0 > range.end {
7845 if next_range.start.0.row > range.end.row + 1
7846 || next_range.start.0.column > 0
7847 || snapshot.clip_point_utf16(
7848 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
7849 Bias::Left,
7850 ) > range.end
7851 {
7852 break;
7853 }
7854 new_text.push('\n');
7855 }
7856 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
7857 new_text.push_str(next_text);
7858 lsp_edits.next();
7859 }
7860
7861 // For multiline edits, perform a diff of the old and new text so that
7862 // we can identify the changes more precisely, preserving the locations
7863 // of any anchors positioned in the unchanged regions.
7864 if range.end.row > range.start.row {
7865 let mut offset = range.start.to_offset(&snapshot);
7866 let old_text = snapshot.text_for_range(range).collect::<String>();
7867
7868 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
7869 let mut moved_since_edit = true;
7870 for change in diff.iter_all_changes() {
7871 let tag = change.tag();
7872 let value = change.value();
7873 match tag {
7874 ChangeTag::Equal => {
7875 offset += value.len();
7876 moved_since_edit = true;
7877 }
7878 ChangeTag::Delete => {
7879 let start = snapshot.anchor_after(offset);
7880 let end = snapshot.anchor_before(offset + value.len());
7881 if moved_since_edit {
7882 edits.push((start..end, String::new()));
7883 } else {
7884 edits.last_mut().unwrap().0.end = end;
7885 }
7886 offset += value.len();
7887 moved_since_edit = false;
7888 }
7889 ChangeTag::Insert => {
7890 if moved_since_edit {
7891 let anchor = snapshot.anchor_after(offset);
7892 edits.push((anchor..anchor, value.to_string()));
7893 } else {
7894 edits.last_mut().unwrap().1.push_str(value);
7895 }
7896 moved_since_edit = false;
7897 }
7898 }
7899 }
7900 } else if range.end == range.start {
7901 let anchor = snapshot.anchor_after(range.start);
7902 edits.push((anchor..anchor, new_text));
7903 } else {
7904 let edit_start = snapshot.anchor_after(range.start);
7905 let edit_end = snapshot.anchor_before(range.end);
7906 edits.push((edit_start..edit_end, new_text));
7907 }
7908 }
7909
7910 Ok(edits)
7911 })
7912 }
7913
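    /// Returns the buffer snapshot that was sent to the given language server at the given
    /// document version, or the current snapshot when no version is provided. Snapshots more
    /// than `OLD_VERSIONS_TO_RETAIN` versions older than the requested one are discarded.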
7914 fn buffer_snapshot_for_lsp_version(
7915 &mut self,
7916 buffer: &ModelHandle<Buffer>,
7917 server_id: LanguageServerId,
7918 version: Option<i32>,
7919 cx: &AppContext,
7920 ) -> Result<TextBufferSnapshot> {
7921 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
7922
7923 if let Some(version) = version {
7924 let buffer_id = buffer.read(cx).remote_id();
7925 let snapshots = self
7926 .buffer_snapshots
7927 .get_mut(&buffer_id)
7928 .and_then(|m| m.get_mut(&server_id))
7929 .ok_or_else(|| {
7930 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
7931 })?;
7932
7933 let found_snapshot = snapshots
7934 .binary_search_by_key(&version, |e| e.version)
7935 .map(|ix| snapshots[ix].snapshot.clone())
7936 .map_err(|_| {
7937 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
7938 })?;
7939
7940 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
7941 Ok(found_snapshot)
7942 } else {
            Ok(buffer.read(cx).text_snapshot())
7944 }
7945 }
7946
7947 pub fn language_servers(
7948 &self,
7949 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
7950 self.language_server_ids
7951 .iter()
7952 .map(|((worktree_id, server_name), server_id)| {
7953 (*server_id, server_name.clone(), *worktree_id)
7954 })
7955 }
7956
7957 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
7958 if let LanguageServerState::Running { server, .. } = self.language_servers.get(&id)? {
7959 Some(server.clone())
7960 } else {
7961 None
7962 }
7963 }
7964
7965 pub fn language_servers_for_buffer(
7966 &self,
7967 buffer: &Buffer,
7968 cx: &AppContext,
7969 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7970 self.language_server_ids_for_buffer(buffer, cx)
7971 .into_iter()
7972 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
7973 LanguageServerState::Running {
7974 adapter, server, ..
7975 } => Some((adapter, server)),
7976 _ => None,
7977 })
7978 }
7979
7980 fn primary_language_server_for_buffer(
7981 &self,
7982 buffer: &Buffer,
7983 cx: &AppContext,
7984 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7985 self.language_servers_for_buffer(buffer, cx).next()
7986 }
7987
7988 pub fn language_server_for_buffer(
7989 &self,
7990 buffer: &Buffer,
7991 server_id: LanguageServerId,
7992 cx: &AppContext,
7993 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7994 self.language_servers_for_buffer(buffer, cx)
7995 .find(|(_, s)| s.server_id() == server_id)
7996 }
7997
7998 fn language_server_ids_for_buffer(
7999 &self,
8000 buffer: &Buffer,
8001 cx: &AppContext,
8002 ) -> Vec<LanguageServerId> {
8003 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8004 let worktree_id = file.worktree_id(cx);
8005 language
8006 .lsp_adapters()
8007 .iter()
8008 .flat_map(|adapter| {
8009 let key = (worktree_id, adapter.name.clone());
8010 self.language_server_ids.get(&key).copied()
8011 })
8012 .collect()
8013 } else {
8014 Vec::new()
8015 }
8016 }
8017}
8018
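/// Returns the longest prefix of `glob` that contains no glob metacharacters
/// (`*`, `?`, `{`, or `}`), measured in whole path components.
///
/// For example, assuming `/` is the platform's path separator,
/// `glob_literal_prefix("node_modules/**/*.js")` returns `"node_modules"`.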
fn glob_literal_prefix(glob: &str) -> &str {
8020 let mut literal_end = 0;
8021 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8022 if part.contains(&['*', '?', '{', '}']) {
8023 break;
8024 } else {
8025 if i > 0 {
                // Account for the separator prior to this part
8027 literal_end += path::MAIN_SEPARATOR.len_utf8();
8028 }
8029 literal_end += part.len();
8030 }
8031 }
8032 &glob[..literal_end]
8033}
8034
8035impl WorktreeHandle {
8036 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
8037 match self {
8038 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8039 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
8040 }
8041 }
8042
8043 pub fn handle_id(&self) -> usize {
8044 match self {
8045 WorktreeHandle::Strong(handle) => handle.id(),
8046 WorktreeHandle::Weak(handle) => handle.id(),
8047 }
8048 }
8049}
8050
8051impl OpenBuffer {
8052 pub fn upgrade(&self, cx: &impl BorrowAppContext) -> Option<ModelHandle<Buffer>> {
8053 match self {
8054 OpenBuffer::Strong(handle) => Some(handle.clone()),
8055 OpenBuffer::Weak(handle) => handle.upgrade(cx),
8056 OpenBuffer::Operations(_) => None,
8057 }
8058 }
8059}
8060
8061pub struct PathMatchCandidateSet {
8062 pub snapshot: Snapshot,
8063 pub include_ignored: bool,
8064 pub include_root_name: bool,
8065}
8066
8067impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8068 type Candidates = PathMatchCandidateSetIter<'a>;
8069
8070 fn id(&self) -> usize {
8071 self.snapshot.id().to_usize()
8072 }
8073
8074 fn len(&self) -> usize {
8075 if self.include_ignored {
8076 self.snapshot.file_count()
8077 } else {
8078 self.snapshot.visible_file_count()
8079 }
8080 }
8081
8082 fn prefix(&self) -> Arc<str> {
8083 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8084 self.snapshot.root_name().into()
8085 } else if self.include_root_name {
8086 format!("{}/", self.snapshot.root_name()).into()
8087 } else {
8088 "".into()
8089 }
8090 }
8091
8092 fn candidates(&'a self, start: usize) -> Self::Candidates {
8093 PathMatchCandidateSetIter {
8094 traversal: self.snapshot.files(self.include_ignored, start),
8095 }
8096 }
8097}
8098
8099pub struct PathMatchCandidateSetIter<'a> {
8100 traversal: Traversal<'a>,
8101}
8102
8103impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8104 type Item = fuzzy::PathMatchCandidate<'a>;
8105
8106 fn next(&mut self) -> Option<Self::Item> {
8107 self.traversal.next().map(|entry| {
8108 if let EntryKind::File(char_bag) = entry.kind {
8109 fuzzy::PathMatchCandidate {
8110 path: &entry.path,
8111 char_bag,
8112 }
8113 } else {
8114 unreachable!()
8115 }
8116 })
8117 }
8118}
8119
8120impl Entity for Project {
8121 type Event = Event;
8122
8123 fn release(&mut self, cx: &mut gpui::AppContext) {
8124 match &self.client_state {
8125 Some(ProjectClientState::Local { .. }) => {
8126 let _ = self.unshare_internal(cx);
8127 }
8128 Some(ProjectClientState::Remote { remote_id, .. }) => {
8129 let _ = self.client.send(proto::LeaveProject {
8130 project_id: *remote_id,
8131 });
8132 self.disconnected_from_host_internal(cx);
8133 }
8134 _ => {}
8135 }
8136 }
8137
8138 fn app_will_quit(
8139 &mut self,
8140 _: &mut AppContext,
8141 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
8142 let shutdown_futures = self
8143 .language_servers
8144 .drain()
8145 .map(|(_, server_state)| async {
8146 use LanguageServerState::*;
8147 match server_state {
8148 Running { server, .. } => server.shutdown()?.await,
8149 Starting(task) => task.await?.shutdown()?.await,
8150 }
8151 })
8152 .collect::<Vec<_>>();
8153
8154 Some(
8155 async move {
8156 futures::future::join_all(shutdown_futures).await;
8157 }
8158 .boxed(),
8159 )
8160 }
8161}
8162
8163impl Collaborator {
8164 fn from_proto(message: proto::Collaborator) -> Result<Self> {
8165 Ok(Self {
8166 peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
8167 replica_id: message.replica_id as ReplicaId,
8168 user_id: message.user_id as UserId,
8169 })
8170 }
8171}
8172
8173impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8174 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8175 Self {
8176 worktree_id,
8177 path: path.as_ref().into(),
8178 }
8179 }
8180}
8181
8182impl ProjectLspAdapterDelegate {
8183 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8184 Arc::new(Self {
8185 project: cx.handle(),
8186 http_client: project.client.http_client(),
8187 })
8188 }
8189}
8190
8191impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8192 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8193 self.project
8194 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8195 }
8196
8197 fn http_client(&self) -> Arc<dyn HttpClient> {
8198 self.http_client.clone()
8199 }
8200}
8201
8202fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8203 proto::Symbol {
8204 language_server_name: symbol.language_server_name.0.to_string(),
8205 source_worktree_id: symbol.source_worktree_id.to_proto(),
8206 worktree_id: symbol.path.worktree_id.to_proto(),
8207 path: symbol.path.path.to_string_lossy().to_string(),
8208 name: symbol.name.clone(),
8209 kind: unsafe { mem::transmute(symbol.kind) },
8210 start: Some(proto::PointUtf16 {
8211 row: symbol.range.start.0.row,
8212 column: symbol.range.start.0.column,
8213 }),
8214 end: Some(proto::PointUtf16 {
8215 row: symbol.range.end.0.row,
8216 column: symbol.range.end.0.column,
8217 }),
8218 signature: symbol.signature.to_vec(),
8219 }
8220}
8221
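/// Computes `path` relative to `base`, inserting `..` components where the two diverge.
///
/// For example, assuming Unix-style paths, relativizing `/a/b/c/d.rs` against `/a/b`
/// yields `c/d.rs`, and relativizing `/a/d.rs` against `/a/b/c` yields `../../d.rs`.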
8222fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8223 let mut path_components = path.components();
8224 let mut base_components = base.components();
8225 let mut components: Vec<Component> = Vec::new();
8226 loop {
8227 match (path_components.next(), base_components.next()) {
8228 (None, None) => break,
8229 (Some(a), None) => {
8230 components.push(a);
8231 components.extend(path_components.by_ref());
8232 break;
8233 }
8234 (None, _) => components.push(Component::ParentDir),
8235 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8236 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8237 (Some(a), Some(_)) => {
8238 components.push(Component::ParentDir);
8239 for _ in base_components {
8240 components.push(Component::ParentDir);
8241 }
8242 components.push(a);
8243 components.extend(path_components.by_ref());
8244 break;
8245 }
8246 }
8247 }
8248 components.iter().map(|c| c.as_os_str()).collect()
8249}
8250
8251impl Item for Buffer {
8252 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8253 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8254 }
8255
8256 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8257 File::from_dyn(self.file()).map(|file| ProjectPath {
8258 worktree_id: file.worktree_id(cx),
8259 path: file.path().clone(),
8260 })
8261 }
8262}
8263
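/// Waits on a loading buffer's watch channel until the load either completes with a buffer
/// handle or fails with an error.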
8264async fn wait_for_loading_buffer(
8265 mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
8266) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
8267 loop {
8268 if let Some(result) = receiver.borrow().as_ref() {
8269 match result {
8270 Ok(buffer) => return Ok(buffer.to_owned()),
8271 Err(e) => return Err(e.to_owned()),
8272 }
8273 }
8274 receiver.next().await;
8275 }
8276}