1mod ignore;
2mod lsp_command;
3pub mod project_settings;
4pub mod search;
5pub mod terminals;
6pub mod worktree;
7
8#[cfg(test)]
9mod project_tests;
10#[cfg(test)]
11mod worktree_tests;
12
13use anyhow::{anyhow, Context, Result};
14use client::{proto, Client, TypedEnvelope, UserStore};
15use clock::ReplicaId;
16use collections::{hash_map, BTreeMap, HashMap, HashSet};
17use copilot::Copilot;
18use futures::{
19 channel::{
20 mpsc::{self, UnboundedReceiver},
21 oneshot,
22 },
23 future::{try_join_all, Shared},
24 stream::FuturesUnordered,
25 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
26};
27use globset::{Glob, GlobSet, GlobSetBuilder};
28use gpui::{
29 AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity, ModelContext,
30 ModelHandle, Task, WeakModelHandle,
31};
32use language::{
33 language_settings::{language_settings, FormatOnSave, Formatter},
34 point_to_lsp,
35 proto::{
36 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
37 serialize_anchor, serialize_version,
38 },
39 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
40 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
41 Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
42 Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
43 ToPointUtf16, Transaction, Unclipped,
44};
45use log::error;
46use lsp::{
47 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
48 DocumentHighlightKind, LanguageServer, LanguageServerId,
49};
50use lsp_command::*;
51use postage::watch;
52use project_settings::ProjectSettings;
53use rand::prelude::*;
54use search::SearchQuery;
55use serde::Serialize;
56use settings::SettingsStore;
57use sha2::{Digest, Sha256};
58use similar::{ChangeTag, TextDiff};
59use std::{
60 cell::RefCell,
61 cmp::{self, Ordering},
62 convert::TryInto,
63 hash::Hash,
64 mem,
65 num::NonZeroU32,
66 ops::Range,
67 path::{Component, Path, PathBuf},
68 rc::Rc,
69 str,
70 sync::{
71 atomic::{AtomicUsize, Ordering::SeqCst},
72 Arc,
73 },
74 time::{Duration, Instant},
75};
76use terminals::Terminals;
77use util::{
78 debug_panic, defer, http::HttpClient, merge_json_value_into,
79 paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
80};
81
82pub use fs::*;
83pub use worktree::*;
84
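/// An item that can report which project entry and project path it corresponds to, if any.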
85pub trait Item {
86 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
87 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
88}
89
// Language server state is stored across three collections:
//     language_servers =>
//         a mapping from a unique server id to a LanguageServerState, which is either a task
//         for a server that is still starting, or a running server along with its adapter and
//         language server Arcs
//     language_server_ids => a mapping from a worktree id and server name to the unique server id
//     language_server_statuses => a mapping from a unique server id to the current server status
//
// Multiple worktrees can map to the same language server (for example, when you jump to the
// definition of a file in the standard library), so language_server_ids is used to look up
// which server is active for a given worktree and language server name.
//
// When starting a language server, the id map is checked first to make sure a server isn't
// already available for that worktree. If there is one, we finish early. Otherwise, a new id
// is allocated and the Starting variant of LanguageServerState is stored in the
// language_servers map.
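//
// As a rough, illustrative sketch (not code from this crate), the lookup performed when a
// server is requested for a worktree looks like this:
//
//     match language_server_ids.get(&(worktree_id, adapter_name)) {
//         // A server is already starting or running for this worktree; reuse its id.
//         Some(server_id) => { /* ... */ }
//         // Otherwise allocate a new id and store LanguageServerState::Starting(task).
//         None => { /* ... */ }
//     }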
104pub struct Project {
105 worktrees: Vec<WorktreeHandle>,
106 active_entry: Option<ProjectEntryId>,
107 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
108 languages: Arc<LanguageRegistry>,
109 language_servers: HashMap<LanguageServerId, LanguageServerState>,
110 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
111 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
112 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
113 client: Arc<client::Client>,
114 next_entry_id: Arc<AtomicUsize>,
115 join_project_response_message_id: u32,
116 next_diagnostic_group_id: usize,
117 user_store: ModelHandle<UserStore>,
118 fs: Arc<dyn Fs>,
119 client_state: Option<ProjectClientState>,
120 collaborators: HashMap<proto::PeerId, Collaborator>,
121 client_subscriptions: Vec<client::Subscription>,
122 _subscriptions: Vec<gpui::Subscription>,
123 next_buffer_id: u64,
124 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
125 shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
126 #[allow(clippy::type_complexity)]
127 loading_buffers_by_path: HashMap<
128 ProjectPath,
129 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
130 >,
131 #[allow(clippy::type_complexity)]
132 loading_local_worktrees:
133 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
134 opened_buffers: HashMap<u64, OpenBuffer>,
135 local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
136 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A value of `None` for a buffer ID means that we've started waiting for that buffer but
    /// haven't finished loading it yet. Used for re-issuing buffer requests when peers
    /// temporarily disconnect.
139 incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
140 buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
141 buffers_being_formatted: HashSet<u64>,
142 buffers_needing_diff: HashSet<WeakModelHandle<Buffer>>,
143 git_diff_debouncer: DelayedDebounced,
144 nonce: u128,
145 _maintain_buffer_languages: Task<()>,
146 _maintain_workspace_config: Task<()>,
147 terminals: Terminals,
148 copilot_enabled: bool,
149}
150
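/// Debounces a unit of work: each call to `fire_new` cancels the previously scheduled call
/// (via the oneshot cancel channel) and schedules `func` to run after `delay`, once any
/// previously spawned task has finished. The project uses this to coalesce git diff
/// recalculations for recently edited buffers.
///
/// A minimal usage sketch (illustrative only):
///
/// ```ignore
/// self.git_diff_debouncer
///     .fire_new(Duration::from_millis(250), cx, |project, cx| {
///         // ... return a Task<()> that performs the debounced work ...
///     });
/// ```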
151struct DelayedDebounced {
152 task: Option<Task<()>>,
153 cancel_channel: Option<oneshot::Sender<()>>,
154}
155
156impl DelayedDebounced {
157 fn new() -> DelayedDebounced {
158 DelayedDebounced {
159 task: None,
160 cancel_channel: None,
161 }
162 }
163
164 fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
165 where
166 F: 'static + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
167 {
168 if let Some(channel) = self.cancel_channel.take() {
169 _ = channel.send(());
170 }
171
172 let (sender, mut receiver) = oneshot::channel::<()>();
173 self.cancel_channel = Some(sender);
174
175 let previous_task = self.task.take();
        self.task = Some(cx.spawn(|project, mut cx| async move {
            let mut timer = cx.background().timer(delay).fuse();
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }

            futures::select_biased! {
                _ = receiver => return,
                _ = timer => {}
            }

            project
                .update(&mut cx, |project, cx| func(project, cx))
                .await;
        }));
191 }
192}
193
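/// A text snapshot paired with the LSP document version number under which it was sent to a
/// language server, so that later edits can be reported relative to the correct base text.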
194struct LspBufferSnapshot {
195 version: i32,
196 snapshot: TextBufferSnapshot,
197}
198
199/// Message ordered with respect to buffer operations
200enum BufferOrderedMessage {
201 Operation {
202 buffer_id: u64,
203 operation: proto::Operation,
204 },
205 LanguageServerUpdate {
206 language_server_id: LanguageServerId,
207 message: proto::update_language_server::Variant,
208 },
209 Resync,
210}
211
212enum LocalProjectUpdate {
213 WorktreesChanged,
214 CreateBufferForPeer {
215 peer_id: proto::PeerId,
216 buffer_id: u64,
217 },
218}
219
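/// How the project retains an open buffer. A strong handle is held while the project is shared
/// or was joined remotely; otherwise only a weak handle is kept. `Operations` accumulates
/// remote operations that arrive before the corresponding buffer has been created, so they can
/// be applied once it exists.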
220enum OpenBuffer {
221 Strong(ModelHandle<Buffer>),
222 Weak(WeakModelHandle<Buffer>),
223 Operations(Vec<Operation>),
224}
225
226enum WorktreeHandle {
227 Strong(ModelHandle<Worktree>),
228 Weak(WeakModelHandle<Worktree>),
229}
230
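/// The project's relationship to the collaboration server: `Local` means this instance is the
/// host and is currently sharing the project, `Remote` means it joined another user's project
/// as a guest.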
231enum ProjectClientState {
232 Local {
233 remote_id: u64,
234 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
235 _send_updates: Task<()>,
236 },
237 Remote {
238 sharing_has_stopped: bool,
239 remote_id: u64,
240 replica_id: ReplicaId,
241 },
242}
243
244#[derive(Clone, Debug)]
245pub struct Collaborator {
246 pub peer_id: proto::PeerId,
247 pub replica_id: ReplicaId,
248}
249
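/// Events emitted by a [`Project`] via `cx.emit`, for entities that subscribe to it.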
250#[derive(Clone, Debug, PartialEq)]
251pub enum Event {
252 LanguageServerAdded(LanguageServerId),
253 LanguageServerRemoved(LanguageServerId),
254 LanguageServerLog(LanguageServerId, String),
255 Notification(String),
256 ActiveEntryChanged(Option<ProjectEntryId>),
257 WorktreeAdded,
258 WorktreeRemoved(WorktreeId),
259 DiskBasedDiagnosticsStarted {
260 language_server_id: LanguageServerId,
261 },
262 DiskBasedDiagnosticsFinished {
263 language_server_id: LanguageServerId,
264 },
265 DiagnosticsUpdated {
266 path: ProjectPath,
267 language_server_id: LanguageServerId,
268 },
269 RemoteIdChanged(Option<u64>),
270 DisconnectedFromHost,
271 Closed,
272 DeletedEntry(ProjectEntryId),
273 CollaboratorUpdated {
274 old_peer_id: proto::PeerId,
275 new_peer_id: proto::PeerId,
276 },
277 CollaboratorLeft(proto::PeerId),
278}
279
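/// The lifecycle of a single language server: either a task that is still starting the server,
/// or a running server together with its language, adapter, the glob patterns it asked to watch
/// per worktree, and an optional task simulating the completion of disk-based diagnostics.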
280pub enum LanguageServerState {
281 Starting(Task<Option<Arc<LanguageServer>>>),
282 Running {
283 language: Arc<Language>,
284 adapter: Arc<CachedLspAdapter>,
285 server: Arc<LanguageServer>,
286 watched_paths: HashMap<WorktreeId, GlobSet>,
287 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
288 },
289}
290
291#[derive(Serialize)]
292pub struct LanguageServerStatus {
293 pub name: String,
294 pub pending_work: BTreeMap<String, LanguageServerProgress>,
295 pub has_pending_diagnostic_updates: bool,
296 progress_tokens: HashSet<String>,
297}
298
299#[derive(Clone, Debug, Serialize)]
300pub struct LanguageServerProgress {
301 pub message: Option<String>,
302 pub percentage: Option<usize>,
303 #[serde(skip_serializing)]
304 pub last_update_at: Instant,
305}
306
307#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
308pub struct ProjectPath {
309 pub worktree_id: WorktreeId,
310 pub path: Arc<Path>,
311}
312
313#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
314pub struct DiagnosticSummary {
315 pub error_count: usize,
316 pub warning_count: usize,
317}
318
319#[derive(Debug, Clone)]
320pub struct Location {
321 pub buffer: ModelHandle<Buffer>,
322 pub range: Range<language::Anchor>,
323}
324
325#[derive(Debug, Clone)]
326pub struct LocationLink {
327 pub origin: Option<Location>,
328 pub target: Location,
329}
330
331#[derive(Debug)]
332pub struct DocumentHighlight {
333 pub range: Range<language::Anchor>,
334 pub kind: DocumentHighlightKind,
335}
336
337#[derive(Clone, Debug)]
338pub struct Symbol {
339 pub language_server_name: LanguageServerName,
340 pub source_worktree_id: WorktreeId,
341 pub path: ProjectPath,
342 pub label: CodeLabel,
343 pub name: String,
344 pub kind: lsp::SymbolKind,
345 pub range: Range<Unclipped<PointUtf16>>,
346 pub signature: [u8; 32],
347}
348
349#[derive(Clone, Debug, PartialEq)]
350pub struct HoverBlock {
351 pub text: String,
352 pub kind: HoverBlockKind,
353}
354
355#[derive(Clone, Debug, PartialEq)]
356pub enum HoverBlockKind {
357 PlainText,
358 Markdown,
359 Code { language: String },
360}
361
362#[derive(Debug)]
363pub struct Hover {
364 pub contents: Vec<HoverBlock>,
365 pub range: Option<Range<language::Anchor>>,
366 pub language: Option<Arc<Language>>,
367}
368
369#[derive(Default)]
370pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
371
372impl DiagnosticSummary {
373 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
374 let mut this = Self {
375 error_count: 0,
376 warning_count: 0,
377 };
378
379 for entry in diagnostics {
380 if entry.diagnostic.is_primary {
381 match entry.diagnostic.severity {
382 DiagnosticSeverity::ERROR => this.error_count += 1,
383 DiagnosticSeverity::WARNING => this.warning_count += 1,
384 _ => {}
385 }
386 }
387 }
388
389 this
390 }
391
392 pub fn is_empty(&self) -> bool {
393 self.error_count == 0 && self.warning_count == 0
394 }
395
396 pub fn to_proto(
397 &self,
398 language_server_id: LanguageServerId,
399 path: &Path,
400 ) -> proto::DiagnosticSummary {
401 proto::DiagnosticSummary {
402 path: path.to_string_lossy().to_string(),
403 language_server_id: language_server_id.0 as u64,
404 error_count: self.error_count as u32,
405 warning_count: self.warning_count as u32,
406 }
407 }
408}
409
410#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
411pub struct ProjectEntryId(usize);
412
413impl ProjectEntryId {
414 pub const MAX: Self = Self(usize::MAX);
415
416 pub fn new(counter: &AtomicUsize) -> Self {
417 Self(counter.fetch_add(1, SeqCst))
418 }
419
420 pub fn from_proto(id: u64) -> Self {
421 Self(id as usize)
422 }
423
424 pub fn to_proto(&self) -> u64 {
425 self.0 as u64
426 }
427
428 pub fn to_usize(&self) -> usize {
429 self.0
430 }
431}
432
433#[derive(Debug, Clone, Copy, PartialEq, Eq)]
434pub enum FormatTrigger {
435 Save,
436 Manual,
437}
438
439struct ProjectLspAdapterDelegate {
440 project: ModelHandle<Project>,
441 http_client: Arc<dyn HttpClient>,
442}
443
444impl FormatTrigger {
445 fn from_proto(value: i32) -> FormatTrigger {
446 match value {
447 0 => FormatTrigger::Save,
448 1 => FormatTrigger::Manual,
449 _ => FormatTrigger::Save,
450 }
451 }
452}
453
454impl Project {
455 pub fn init_settings(cx: &mut AppContext) {
456 settings::register::<ProjectSettings>(cx);
457 }
458
459 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
460 Self::init_settings(cx);
461
462 client.add_model_message_handler(Self::handle_add_collaborator);
463 client.add_model_message_handler(Self::handle_update_project_collaborator);
464 client.add_model_message_handler(Self::handle_remove_collaborator);
465 client.add_model_message_handler(Self::handle_buffer_reloaded);
466 client.add_model_message_handler(Self::handle_buffer_saved);
467 client.add_model_message_handler(Self::handle_start_language_server);
468 client.add_model_message_handler(Self::handle_update_language_server);
469 client.add_model_message_handler(Self::handle_update_project);
470 client.add_model_message_handler(Self::handle_unshare_project);
471 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
472 client.add_model_message_handler(Self::handle_update_buffer_file);
473 client.add_model_request_handler(Self::handle_update_buffer);
474 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
475 client.add_model_message_handler(Self::handle_update_worktree);
476 client.add_model_message_handler(Self::handle_update_worktree_settings);
477 client.add_model_request_handler(Self::handle_create_project_entry);
478 client.add_model_request_handler(Self::handle_rename_project_entry);
479 client.add_model_request_handler(Self::handle_copy_project_entry);
480 client.add_model_request_handler(Self::handle_delete_project_entry);
481 client.add_model_request_handler(Self::handle_expand_project_entry);
482 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
483 client.add_model_request_handler(Self::handle_apply_code_action);
484 client.add_model_request_handler(Self::handle_on_type_formatting);
485 client.add_model_request_handler(Self::handle_reload_buffers);
486 client.add_model_request_handler(Self::handle_synchronize_buffers);
487 client.add_model_request_handler(Self::handle_format_buffers);
488 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
489 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
490 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
491 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
492 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
493 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
494 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
495 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
496 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
497 client.add_model_request_handler(Self::handle_search_project);
498 client.add_model_request_handler(Self::handle_get_project_symbols);
499 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
500 client.add_model_request_handler(Self::handle_open_buffer_by_id);
501 client.add_model_request_handler(Self::handle_open_buffer_by_path);
502 client.add_model_request_handler(Self::handle_save_buffer);
503 client.add_model_message_handler(Self::handle_update_diff_base);
504 }
505
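    /// Creates a project that operates directly on the local filesystem. Registers the global
    /// settings observer and spawns the background task that relays buffer-ordered messages.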
506 pub fn local(
507 client: Arc<Client>,
508 user_store: ModelHandle<UserStore>,
509 languages: Arc<LanguageRegistry>,
510 fs: Arc<dyn Fs>,
511 cx: &mut AppContext,
512 ) -> ModelHandle<Self> {
513 cx.add_model(|cx: &mut ModelContext<Self>| {
514 let (tx, rx) = mpsc::unbounded();
515 cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
516 .detach();
517 Self {
518 worktrees: Default::default(),
519 buffer_ordered_messages_tx: tx,
520 collaborators: Default::default(),
521 next_buffer_id: 0,
522 opened_buffers: Default::default(),
523 shared_buffers: Default::default(),
524 incomplete_remote_buffers: Default::default(),
525 loading_buffers_by_path: Default::default(),
526 loading_local_worktrees: Default::default(),
527 local_buffer_ids_by_path: Default::default(),
528 local_buffer_ids_by_entry_id: Default::default(),
529 buffer_snapshots: Default::default(),
530 join_project_response_message_id: 0,
531 client_state: None,
532 opened_buffer: watch::channel(),
533 client_subscriptions: Vec::new(),
534 _subscriptions: vec![
535 cx.observe_global::<SettingsStore, _>(Self::on_settings_changed)
536 ],
537 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
538 _maintain_workspace_config: Self::maintain_workspace_config(languages.clone(), cx),
539 active_entry: None,
540 languages,
541 client,
542 user_store,
543 fs,
544 next_entry_id: Default::default(),
545 next_diagnostic_group_id: Default::default(),
546 language_servers: Default::default(),
547 language_server_ids: Default::default(),
548 language_server_statuses: Default::default(),
549 last_workspace_edits_by_language_server: Default::default(),
550 buffers_being_formatted: Default::default(),
551 buffers_needing_diff: Default::default(),
552 git_diff_debouncer: DelayedDebounced::new(),
553 nonce: StdRng::from_entropy().gen(),
554 terminals: Terminals {
555 local_handles: Vec::new(),
556 },
557 copilot_enabled: Copilot::global(cx).is_some(),
558 }
559 })
560 }
561
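    /// Joins the project identified by `remote_id` as a guest: connects and authenticates the
    /// client, sends a `JoinProject` request, and builds the project from the worktrees,
    /// language server statuses, and collaborators contained in the response.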
562 pub async fn remote(
563 remote_id: u64,
564 client: Arc<Client>,
565 user_store: ModelHandle<UserStore>,
566 languages: Arc<LanguageRegistry>,
567 fs: Arc<dyn Fs>,
568 mut cx: AsyncAppContext,
569 ) -> Result<ModelHandle<Self>> {
570 client.authenticate_and_connect(true, &cx).await?;
571
572 let subscription = client.subscribe_to_entity(remote_id)?;
573 let response = client
574 .request_envelope(proto::JoinProject {
575 project_id: remote_id,
576 })
577 .await?;
578 let this = cx.add_model(|cx| {
579 let replica_id = response.payload.replica_id as ReplicaId;
580
581 let mut worktrees = Vec::new();
582 for worktree in response.payload.worktrees {
583 let worktree = cx.update(|cx| {
584 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
585 });
586 worktrees.push(worktree);
587 }
588
589 let (tx, rx) = mpsc::unbounded();
590 cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
591 .detach();
592 let mut this = Self {
593 worktrees: Vec::new(),
594 buffer_ordered_messages_tx: tx,
595 loading_buffers_by_path: Default::default(),
596 next_buffer_id: 0,
597 opened_buffer: watch::channel(),
598 shared_buffers: Default::default(),
599 incomplete_remote_buffers: Default::default(),
600 loading_local_worktrees: Default::default(),
601 local_buffer_ids_by_path: Default::default(),
602 local_buffer_ids_by_entry_id: Default::default(),
603 active_entry: None,
604 collaborators: Default::default(),
605 join_project_response_message_id: response.message_id,
606 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
607 _maintain_workspace_config: Self::maintain_workspace_config(languages.clone(), cx),
608 languages,
609 user_store: user_store.clone(),
610 fs,
611 next_entry_id: Default::default(),
612 next_diagnostic_group_id: Default::default(),
613 client_subscriptions: Default::default(),
614 _subscriptions: Default::default(),
615 client: client.clone(),
616 client_state: Some(ProjectClientState::Remote {
617 sharing_has_stopped: false,
618 remote_id,
619 replica_id,
620 }),
621 language_servers: Default::default(),
622 language_server_ids: Default::default(),
623 language_server_statuses: response
624 .payload
625 .language_servers
626 .into_iter()
627 .map(|server| {
628 (
629 LanguageServerId(server.id as usize),
630 LanguageServerStatus {
631 name: server.name,
632 pending_work: Default::default(),
633 has_pending_diagnostic_updates: false,
634 progress_tokens: Default::default(),
635 },
636 )
637 })
638 .collect(),
639 last_workspace_edits_by_language_server: Default::default(),
640 opened_buffers: Default::default(),
641 buffers_being_formatted: Default::default(),
642 buffers_needing_diff: Default::default(),
643 git_diff_debouncer: DelayedDebounced::new(),
644 buffer_snapshots: Default::default(),
645 nonce: StdRng::from_entropy().gen(),
646 terminals: Terminals {
647 local_handles: Vec::new(),
648 },
649 copilot_enabled: Copilot::global(cx).is_some(),
650 };
651 for worktree in worktrees {
652 let _ = this.add_worktree(&worktree, cx);
653 }
654 this
655 });
656 let subscription = subscription.set_model(&this, &mut cx);
657
658 let user_ids = response
659 .payload
660 .collaborators
661 .iter()
662 .map(|peer| peer.user_id)
663 .collect();
664 user_store
665 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
666 .await?;
667
668 this.update(&mut cx, |this, cx| {
669 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
670 this.client_subscriptions.push(subscription);
671 anyhow::Ok(())
672 })?;
673
674 Ok(this)
675 }
676
677 #[cfg(any(test, feature = "test-support"))]
678 pub async fn test(
679 fs: Arc<dyn Fs>,
680 root_paths: impl IntoIterator<Item = &Path>,
681 cx: &mut gpui::TestAppContext,
682 ) -> ModelHandle<Project> {
683 let mut languages = LanguageRegistry::test();
684 languages.set_executor(cx.background());
685 let http_client = util::http::FakeHttpClient::with_404_response();
686 let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
687 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
688 let project =
689 cx.update(|cx| Project::local(client, user_store, Arc::new(languages), fs, cx));
690 for path in root_paths {
691 let (tree, _) = project
692 .update(cx, |project, cx| {
693 project.find_or_create_local_worktree(path, true, cx)
694 })
695 .await
696 .unwrap();
697 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
698 .await;
699 }
700 project
701 }
702
703 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
704 let mut language_servers_to_start = Vec::new();
705 for buffer in self.opened_buffers.values() {
706 if let Some(buffer) = buffer.upgrade(cx) {
707 let buffer = buffer.read(cx);
708 if let Some((file, language)) = buffer.file().zip(buffer.language()) {
709 let settings = language_settings(Some(language), Some(file), cx);
710 if settings.enable_language_server {
711 if let Some(file) = File::from_dyn(Some(file)) {
712 language_servers_to_start
713 .push((file.worktree.clone(), language.clone()));
714 }
715 }
716 }
717 }
718 }
719
720 let mut language_servers_to_stop = Vec::new();
721 let languages = self.languages.to_vec();
722 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
723 let language = languages.iter().find(|l| {
724 l.lsp_adapters()
725 .iter()
726 .any(|adapter| &adapter.name == started_lsp_name)
727 });
728 if let Some(language) = language {
729 let worktree = self.worktree_for_id(*worktree_id, cx);
730 let file = worktree.and_then(|tree| {
731 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
732 });
733 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
734 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
735 }
736 }
737 }
738
739 // Stop all newly-disabled language servers.
740 for (worktree_id, adapter_name) in language_servers_to_stop {
741 self.stop_language_server(worktree_id, adapter_name, cx)
742 .detach();
743 }
744
745 // Start all the newly-enabled language servers.
746 for (worktree, language) in language_servers_to_start {
747 let worktree_path = worktree.read(cx).abs_path();
748 self.start_language_servers(&worktree, worktree_path, language, cx);
749 }
750
751 if !self.copilot_enabled && Copilot::global(cx).is_some() {
752 self.copilot_enabled = true;
753 for buffer in self.opened_buffers.values() {
754 if let Some(buffer) = buffer.upgrade(cx) {
755 self.register_buffer_with_copilot(&buffer, cx);
756 }
757 }
758 }
759
760 cx.notify();
761 }
762
763 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
764 self.opened_buffers
765 .get(&remote_id)
766 .and_then(|buffer| buffer.upgrade(cx))
767 }
768
769 pub fn languages(&self) -> &Arc<LanguageRegistry> {
770 &self.languages
771 }
772
773 pub fn client(&self) -> Arc<Client> {
774 self.client.clone()
775 }
776
777 pub fn user_store(&self) -> ModelHandle<UserStore> {
778 self.user_store.clone()
779 }
780
781 #[cfg(any(test, feature = "test-support"))]
782 pub fn opened_buffers(&self, cx: &AppContext) -> Vec<ModelHandle<Buffer>> {
783 self.opened_buffers
784 .values()
785 .filter_map(|b| b.upgrade(cx))
786 .collect()
787 }
788
789 #[cfg(any(test, feature = "test-support"))]
790 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
791 let path = path.into();
792 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
793 self.opened_buffers.iter().any(|(_, buffer)| {
794 if let Some(buffer) = buffer.upgrade(cx) {
795 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
796 if file.worktree == worktree && file.path() == &path.path {
797 return true;
798 }
799 }
800 }
801 false
802 })
803 } else {
804 false
805 }
806 }
807
808 pub fn fs(&self) -> &Arc<dyn Fs> {
809 &self.fs
810 }
811
812 pub fn remote_id(&self) -> Option<u64> {
813 match self.client_state.as_ref()? {
814 ProjectClientState::Local { remote_id, .. }
815 | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
816 }
817 }
818
819 pub fn replica_id(&self) -> ReplicaId {
820 match &self.client_state {
821 Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
822 _ => 0,
823 }
824 }
825
826 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
827 if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
828 updates_tx
829 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
830 .ok();
831 }
832 cx.notify();
833 }
834
835 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
836 &self.collaborators
837 }
838
839 /// Collect all worktrees, including ones that don't appear in the project panel
840 pub fn worktrees<'a>(
841 &'a self,
842 cx: &'a AppContext,
843 ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
844 self.worktrees
845 .iter()
846 .filter_map(move |worktree| worktree.upgrade(cx))
847 }
848
849 /// Collect all user-visible worktrees, the ones that appear in the project panel
850 pub fn visible_worktrees<'a>(
851 &'a self,
852 cx: &'a AppContext,
853 ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
854 self.worktrees.iter().filter_map(|worktree| {
855 worktree.upgrade(cx).and_then(|worktree| {
856 if worktree.read(cx).is_visible() {
857 Some(worktree)
858 } else {
859 None
860 }
861 })
862 })
863 }
864
865 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
866 self.visible_worktrees(cx)
867 .map(|tree| tree.read(cx).root_name())
868 }
869
870 pub fn worktree_for_id(
871 &self,
872 id: WorktreeId,
873 cx: &AppContext,
874 ) -> Option<ModelHandle<Worktree>> {
875 self.worktrees(cx)
876 .find(|worktree| worktree.read(cx).id() == id)
877 }
878
879 pub fn worktree_for_entry(
880 &self,
881 entry_id: ProjectEntryId,
882 cx: &AppContext,
883 ) -> Option<ModelHandle<Worktree>> {
884 self.worktrees(cx)
885 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
886 }
887
888 pub fn worktree_id_for_entry(
889 &self,
890 entry_id: ProjectEntryId,
891 cx: &AppContext,
892 ) -> Option<WorktreeId> {
893 self.worktree_for_entry(entry_id, cx)
894 .map(|worktree| worktree.read(cx).id())
895 }
896
897 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
898 paths.iter().all(|path| self.contains_path(path, cx))
899 }
900
901 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
902 for worktree in self.worktrees(cx) {
903 let worktree = worktree.read(cx).as_local();
904 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
905 return true;
906 }
907 }
908 false
909 }
910
911 pub fn create_entry(
912 &mut self,
913 project_path: impl Into<ProjectPath>,
914 is_directory: bool,
915 cx: &mut ModelContext<Self>,
916 ) -> Option<Task<Result<Entry>>> {
917 let project_path = project_path.into();
918 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
919 if self.is_local() {
920 Some(worktree.update(cx, |worktree, cx| {
921 worktree
922 .as_local_mut()
923 .unwrap()
924 .create_entry(project_path.path, is_directory, cx)
925 }))
926 } else {
927 let client = self.client.clone();
928 let project_id = self.remote_id().unwrap();
929 Some(cx.spawn_weak(|_, mut cx| async move {
930 let response = client
931 .request(proto::CreateProjectEntry {
932 worktree_id: project_path.worktree_id.to_proto(),
933 project_id,
934 path: project_path.path.to_string_lossy().into(),
935 is_directory,
936 })
937 .await?;
938 let entry = response
939 .entry
940 .ok_or_else(|| anyhow!("missing entry in response"))?;
941 worktree
942 .update(&mut cx, |worktree, cx| {
943 worktree.as_remote_mut().unwrap().insert_entry(
944 entry,
945 response.worktree_scan_id as usize,
946 cx,
947 )
948 })
949 .await
950 }))
951 }
952 }
953
954 pub fn copy_entry(
955 &mut self,
956 entry_id: ProjectEntryId,
957 new_path: impl Into<Arc<Path>>,
958 cx: &mut ModelContext<Self>,
959 ) -> Option<Task<Result<Entry>>> {
960 let worktree = self.worktree_for_entry(entry_id, cx)?;
961 let new_path = new_path.into();
962 if self.is_local() {
963 worktree.update(cx, |worktree, cx| {
964 worktree
965 .as_local_mut()
966 .unwrap()
967 .copy_entry(entry_id, new_path, cx)
968 })
969 } else {
970 let client = self.client.clone();
971 let project_id = self.remote_id().unwrap();
972
973 Some(cx.spawn_weak(|_, mut cx| async move {
974 let response = client
975 .request(proto::CopyProjectEntry {
976 project_id,
977 entry_id: entry_id.to_proto(),
978 new_path: new_path.to_string_lossy().into(),
979 })
980 .await?;
981 let entry = response
982 .entry
983 .ok_or_else(|| anyhow!("missing entry in response"))?;
984 worktree
985 .update(&mut cx, |worktree, cx| {
986 worktree.as_remote_mut().unwrap().insert_entry(
987 entry,
988 response.worktree_scan_id as usize,
989 cx,
990 )
991 })
992 .await
993 }))
994 }
995 }
996
997 pub fn rename_entry(
998 &mut self,
999 entry_id: ProjectEntryId,
1000 new_path: impl Into<Arc<Path>>,
1001 cx: &mut ModelContext<Self>,
1002 ) -> Option<Task<Result<Entry>>> {
1003 let worktree = self.worktree_for_entry(entry_id, cx)?;
1004 let new_path = new_path.into();
1005 if self.is_local() {
1006 worktree.update(cx, |worktree, cx| {
1007 worktree
1008 .as_local_mut()
1009 .unwrap()
1010 .rename_entry(entry_id, new_path, cx)
1011 })
1012 } else {
1013 let client = self.client.clone();
1014 let project_id = self.remote_id().unwrap();
1015
1016 Some(cx.spawn_weak(|_, mut cx| async move {
1017 let response = client
1018 .request(proto::RenameProjectEntry {
1019 project_id,
1020 entry_id: entry_id.to_proto(),
1021 new_path: new_path.to_string_lossy().into(),
1022 })
1023 .await?;
1024 let entry = response
1025 .entry
1026 .ok_or_else(|| anyhow!("missing entry in response"))?;
1027 worktree
1028 .update(&mut cx, |worktree, cx| {
1029 worktree.as_remote_mut().unwrap().insert_entry(
1030 entry,
1031 response.worktree_scan_id as usize,
1032 cx,
1033 )
1034 })
1035 .await
1036 }))
1037 }
1038 }
1039
1040 pub fn delete_entry(
1041 &mut self,
1042 entry_id: ProjectEntryId,
1043 cx: &mut ModelContext<Self>,
1044 ) -> Option<Task<Result<()>>> {
1045 let worktree = self.worktree_for_entry(entry_id, cx)?;
1046
1047 cx.emit(Event::DeletedEntry(entry_id));
1048
1049 if self.is_local() {
1050 worktree.update(cx, |worktree, cx| {
1051 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1052 })
1053 } else {
1054 let client = self.client.clone();
1055 let project_id = self.remote_id().unwrap();
1056 Some(cx.spawn_weak(|_, mut cx| async move {
1057 let response = client
1058 .request(proto::DeleteProjectEntry {
1059 project_id,
1060 entry_id: entry_id.to_proto(),
1061 })
1062 .await?;
1063 worktree
1064 .update(&mut cx, move |worktree, cx| {
1065 worktree.as_remote_mut().unwrap().delete_entry(
1066 entry_id,
1067 response.worktree_scan_id as usize,
1068 cx,
1069 )
1070 })
1071 .await
1072 }))
1073 }
1074 }
1075
1076 pub fn expand_entry(
1077 &mut self,
1078 worktree_id: WorktreeId,
1079 entry_id: ProjectEntryId,
1080 cx: &mut ModelContext<Self>,
1081 ) -> Option<Task<Result<()>>> {
1082 let worktree = self.worktree_for_id(worktree_id, cx)?;
1083 if self.is_local() {
1084 worktree.update(cx, |worktree, cx| {
1085 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1086 })
1087 } else {
1088 let worktree = worktree.downgrade();
1089 let request = self.client.request(proto::ExpandProjectEntry {
1090 project_id: self.remote_id().unwrap(),
1091 entry_id: entry_id.to_proto(),
1092 });
1093 Some(cx.spawn_weak(|_, mut cx| async move {
1094 let response = request.await?;
1095 if let Some(worktree) = worktree.upgrade(&cx) {
1096 worktree
1097 .update(&mut cx, |worktree, _| {
1098 worktree
1099 .as_remote_mut()
1100 .unwrap()
1101 .wait_for_snapshot(response.worktree_scan_id as usize)
1102 })
1103 .await?;
1104 }
1105 Ok(())
1106 }))
1107 }
1108 }
1109
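    /// Starts sharing this local project under `project_id`: upgrades buffer and worktree
    /// handles to strong ones, replays language server statuses and local settings to the
    /// server, and spawns the task that streams worktree changes and buffers to guests.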
1110 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1111 if self.client_state.is_some() {
1112 return Err(anyhow!("project was already shared"));
1113 }
1114 self.client_subscriptions.push(
1115 self.client
1116 .subscribe_to_entity(project_id)?
1117 .set_model(&cx.handle(), &mut cx.to_async()),
1118 );
1119
1120 for open_buffer in self.opened_buffers.values_mut() {
1121 match open_buffer {
1122 OpenBuffer::Strong(_) => {}
1123 OpenBuffer::Weak(buffer) => {
1124 if let Some(buffer) = buffer.upgrade(cx) {
1125 *open_buffer = OpenBuffer::Strong(buffer);
1126 }
1127 }
1128 OpenBuffer::Operations(_) => unreachable!(),
1129 }
1130 }
1131
1132 for worktree_handle in self.worktrees.iter_mut() {
1133 match worktree_handle {
1134 WorktreeHandle::Strong(_) => {}
1135 WorktreeHandle::Weak(worktree) => {
1136 if let Some(worktree) = worktree.upgrade(cx) {
1137 *worktree_handle = WorktreeHandle::Strong(worktree);
1138 }
1139 }
1140 }
1141 }
1142
1143 for (server_id, status) in &self.language_server_statuses {
1144 self.client
1145 .send(proto::StartLanguageServer {
1146 project_id,
1147 server: Some(proto::LanguageServer {
1148 id: server_id.0 as u64,
1149 name: status.name.clone(),
1150 }),
1151 })
1152 .log_err();
1153 }
1154
1155 let store = cx.global::<SettingsStore>();
1156 for worktree in self.worktrees(cx) {
1157 let worktree_id = worktree.read(cx).id().to_proto();
1158 for (path, content) in store.local_settings(worktree.id()) {
1159 self.client
1160 .send(proto::UpdateWorktreeSettings {
1161 project_id,
1162 worktree_id,
1163 path: path.to_string_lossy().into(),
1164 content: Some(content),
1165 })
1166 .log_err();
1167 }
1168 }
1169
1170 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1171 let client = self.client.clone();
1172 self.client_state = Some(ProjectClientState::Local {
1173 remote_id: project_id,
1174 updates_tx,
1175 _send_updates: cx.spawn_weak(move |this, mut cx| async move {
1176 while let Some(update) = updates_rx.next().await {
1177 let Some(this) = this.upgrade(&cx) else { break };
1178
1179 match update {
1180 LocalProjectUpdate::WorktreesChanged => {
1181 let worktrees = this
1182 .read_with(&cx, |this, cx| this.worktrees(cx).collect::<Vec<_>>());
1183 let update_project = this
1184 .read_with(&cx, |this, cx| {
1185 this.client.request(proto::UpdateProject {
1186 project_id,
1187 worktrees: this.worktree_metadata_protos(cx),
1188 })
1189 })
1190 .await;
1191 if update_project.is_ok() {
1192 for worktree in worktrees {
1193 worktree.update(&mut cx, |worktree, cx| {
1194 let worktree = worktree.as_local_mut().unwrap();
1195 worktree.share(project_id, cx).detach_and_log_err(cx)
1196 });
1197 }
1198 }
1199 }
1200 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1201 let buffer = this.update(&mut cx, |this, _| {
1202 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1203 let shared_buffers =
1204 this.shared_buffers.entry(peer_id).or_default();
1205 if shared_buffers.insert(buffer_id) {
1206 if let OpenBuffer::Strong(buffer) = buffer {
1207 Some(buffer.clone())
1208 } else {
1209 None
1210 }
1211 } else {
1212 None
1213 }
1214 });
1215
1216 let Some(buffer) = buffer else { continue };
1217 let operations =
1218 buffer.read_with(&cx, |b, cx| b.serialize_ops(None, cx));
1219 let operations = operations.await;
1220 let state = buffer.read_with(&cx, |buffer, _| buffer.to_proto());
1221
1222 let initial_state = proto::CreateBufferForPeer {
1223 project_id,
1224 peer_id: Some(peer_id),
1225 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1226 };
1227 if client.send(initial_state).log_err().is_some() {
1228 let client = client.clone();
1229 cx.background()
1230 .spawn(async move {
1231 let mut chunks = split_operations(operations).peekable();
1232 while let Some(chunk) = chunks.next() {
1233 let is_last = chunks.peek().is_none();
1234 client.send(proto::CreateBufferForPeer {
1235 project_id,
1236 peer_id: Some(peer_id),
1237 variant: Some(
1238 proto::create_buffer_for_peer::Variant::Chunk(
1239 proto::BufferChunk {
1240 buffer_id,
1241 operations: chunk,
1242 is_last,
1243 },
1244 ),
1245 ),
1246 })?;
1247 }
1248 anyhow::Ok(())
1249 })
1250 .await
1251 .log_err();
1252 }
1253 }
1254 }
1255 }
1256 }),
1257 });
1258
1259 self.metadata_changed(cx);
1260 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1261 cx.notify();
1262 Ok(())
1263 }
1264
1265 pub fn reshared(
1266 &mut self,
1267 message: proto::ResharedProject,
1268 cx: &mut ModelContext<Self>,
1269 ) -> Result<()> {
1270 self.shared_buffers.clear();
1271 self.set_collaborators_from_proto(message.collaborators, cx)?;
1272 self.metadata_changed(cx);
1273 Ok(())
1274 }
1275
1276 pub fn rejoined(
1277 &mut self,
1278 message: proto::RejoinedProject,
1279 message_id: u32,
1280 cx: &mut ModelContext<Self>,
1281 ) -> Result<()> {
1282 cx.update_global::<SettingsStore, _, _>(|store, cx| {
1283 for worktree in &self.worktrees {
1284 store
1285 .clear_local_settings(worktree.handle_id(), cx)
1286 .log_err();
1287 }
1288 });
1289
1290 self.join_project_response_message_id = message_id;
1291 self.set_worktrees_from_proto(message.worktrees, cx)?;
1292 self.set_collaborators_from_proto(message.collaborators, cx)?;
1293 self.language_server_statuses = message
1294 .language_servers
1295 .into_iter()
1296 .map(|server| {
1297 (
1298 LanguageServerId(server.id as usize),
1299 LanguageServerStatus {
1300 name: server.name,
1301 pending_work: Default::default(),
1302 has_pending_diagnostic_updates: false,
1303 progress_tokens: Default::default(),
1304 },
1305 )
1306 })
1307 .collect();
1308 self.buffer_ordered_messages_tx
1309 .unbounded_send(BufferOrderedMessage::Resync)
1310 .unwrap();
1311 cx.notify();
1312 Ok(())
1313 }
1314
1315 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1316 self.unshare_internal(cx)?;
1317 self.metadata_changed(cx);
1318 cx.notify();
1319 Ok(())
1320 }
1321
1322 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1323 if self.is_remote() {
1324 return Err(anyhow!("attempted to unshare a remote project"));
1325 }
1326
1327 if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
1328 self.collaborators.clear();
1329 self.shared_buffers.clear();
1330 self.client_subscriptions.clear();
1331
1332 for worktree_handle in self.worktrees.iter_mut() {
1333 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1334 let is_visible = worktree.update(cx, |worktree, _| {
1335 worktree.as_local_mut().unwrap().unshare();
1336 worktree.is_visible()
1337 });
1338 if !is_visible {
1339 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1340 }
1341 }
1342 }
1343
1344 for open_buffer in self.opened_buffers.values_mut() {
1345 // Wake up any tasks waiting for peers' edits to this buffer.
1346 if let Some(buffer) = open_buffer.upgrade(cx) {
1347 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1348 }
1349
1350 if let OpenBuffer::Strong(buffer) = open_buffer {
1351 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1352 }
1353 }
1354
1355 self.client.send(proto::UnshareProject {
1356 project_id: remote_id,
1357 })?;
1358
1359 Ok(())
1360 } else {
1361 Err(anyhow!("attempted to unshare an unshared project"))
1362 }
1363 }
1364
1365 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1366 self.disconnected_from_host_internal(cx);
1367 cx.emit(Event::DisconnectedFromHost);
1368 cx.notify();
1369 }
1370
1371 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1372 if let Some(ProjectClientState::Remote {
1373 sharing_has_stopped,
1374 ..
1375 }) = &mut self.client_state
1376 {
1377 *sharing_has_stopped = true;
1378
1379 self.collaborators.clear();
1380
1381 for worktree in &self.worktrees {
1382 if let Some(worktree) = worktree.upgrade(cx) {
1383 worktree.update(cx, |worktree, _| {
1384 if let Some(worktree) = worktree.as_remote_mut() {
1385 worktree.disconnected_from_host();
1386 }
1387 });
1388 }
1389 }
1390
1391 for open_buffer in self.opened_buffers.values_mut() {
1392 // Wake up any tasks waiting for peers' edits to this buffer.
1393 if let Some(buffer) = open_buffer.upgrade(cx) {
1394 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1395 }
1396
1397 if let OpenBuffer::Strong(buffer) = open_buffer {
1398 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1399 }
1400 }
1401
1402 // Wake up all futures currently waiting on a buffer to get opened,
1403 // to give them a chance to fail now that we've disconnected.
1404 *self.opened_buffer.0.borrow_mut() = ();
1405 }
1406 }
1407
1408 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1409 cx.emit(Event::Closed);
1410 }
1411
1412 pub fn is_read_only(&self) -> bool {
1413 match &self.client_state {
1414 Some(ProjectClientState::Remote {
1415 sharing_has_stopped,
1416 ..
1417 }) => *sharing_has_stopped,
1418 _ => false,
1419 }
1420 }
1421
1422 pub fn is_local(&self) -> bool {
1423 match &self.client_state {
1424 Some(ProjectClientState::Remote { .. }) => false,
1425 _ => true,
1426 }
1427 }
1428
1429 pub fn is_remote(&self) -> bool {
1430 !self.is_local()
1431 }
1432
1433 pub fn create_buffer(
1434 &mut self,
1435 text: &str,
1436 language: Option<Arc<Language>>,
1437 cx: &mut ModelContext<Self>,
1438 ) -> Result<ModelHandle<Buffer>> {
1439 if self.is_remote() {
1440 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1441 }
1442
1443 let buffer = cx.add_model(|cx| {
1444 Buffer::new(self.replica_id(), text, cx)
1445 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1446 });
1447 self.register_buffer(&buffer, cx)?;
1448 Ok(buffer)
1449 }
1450
1451 pub fn open_path(
1452 &mut self,
1453 path: impl Into<ProjectPath>,
1454 cx: &mut ModelContext<Self>,
1455 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1456 let task = self.open_buffer(path, cx);
1457 cx.spawn_weak(|_, cx| async move {
1458 let buffer = task.await?;
1459 let project_entry_id = buffer
1460 .read_with(&cx, |buffer, cx| {
1461 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1462 })
1463 .ok_or_else(|| anyhow!("no project entry"))?;
1464
1465 let buffer: &AnyModelHandle = &buffer;
1466 Ok((project_entry_id, buffer.clone()))
1467 })
1468 }
1469
1470 pub fn open_local_buffer(
1471 &mut self,
1472 abs_path: impl AsRef<Path>,
1473 cx: &mut ModelContext<Self>,
1474 ) -> Task<Result<ModelHandle<Buffer>>> {
1475 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1476 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1477 } else {
1478 Task::ready(Err(anyhow!("no such path")))
1479 }
1480 }
1481
1482 pub fn open_buffer(
1483 &mut self,
1484 path: impl Into<ProjectPath>,
1485 cx: &mut ModelContext<Self>,
1486 ) -> Task<Result<ModelHandle<Buffer>>> {
1487 let project_path = path.into();
1488 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1489 worktree
1490 } else {
1491 return Task::ready(Err(anyhow!("no such worktree")));
1492 };
1493
1494 // If there is already a buffer for the given path, then return it.
1495 let existing_buffer = self.get_open_buffer(&project_path, cx);
1496 if let Some(existing_buffer) = existing_buffer {
1497 return Task::ready(Ok(existing_buffer));
1498 }
1499
1500 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1501 // If the given path is already being loaded, then wait for that existing
1502 // task to complete and return the same buffer.
1503 hash_map::Entry::Occupied(e) => e.get().clone(),
1504
1505 // Otherwise, record the fact that this path is now being loaded.
1506 hash_map::Entry::Vacant(entry) => {
1507 let (mut tx, rx) = postage::watch::channel();
1508 entry.insert(rx.clone());
1509
1510 let load_buffer = if worktree.read(cx).is_local() {
1511 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1512 } else {
1513 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1514 };
1515
1516 cx.spawn(move |this, mut cx| async move {
1517 let load_result = load_buffer.await;
1518 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1519 // Record the fact that the buffer is no longer loading.
1520 this.loading_buffers_by_path.remove(&project_path);
1521 let buffer = load_result.map_err(Arc::new)?;
1522 Ok(buffer)
1523 }));
1524 })
1525 .detach();
1526 rx
1527 }
1528 };
1529
1530 cx.foreground().spawn(async move {
1531 wait_for_loading_buffer(loading_watch)
1532 .await
1533 .map_err(|error| anyhow!("{}", error))
1534 })
1535 }
1536
1537 fn open_local_buffer_internal(
1538 &mut self,
1539 path: &Arc<Path>,
1540 worktree: &ModelHandle<Worktree>,
1541 cx: &mut ModelContext<Self>,
1542 ) -> Task<Result<ModelHandle<Buffer>>> {
1543 let buffer_id = post_inc(&mut self.next_buffer_id);
1544 let load_buffer = worktree.update(cx, |worktree, cx| {
1545 let worktree = worktree.as_local_mut().unwrap();
1546 worktree.load_buffer(buffer_id, path, cx)
1547 });
1548 cx.spawn(|this, mut cx| async move {
1549 let buffer = load_buffer.await?;
1550 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1551 Ok(buffer)
1552 })
1553 }
1554
1555 fn open_remote_buffer_internal(
1556 &mut self,
1557 path: &Arc<Path>,
1558 worktree: &ModelHandle<Worktree>,
1559 cx: &mut ModelContext<Self>,
1560 ) -> Task<Result<ModelHandle<Buffer>>> {
1561 let rpc = self.client.clone();
1562 let project_id = self.remote_id().unwrap();
1563 let remote_worktree_id = worktree.read(cx).id();
1564 let path = path.clone();
1565 let path_string = path.to_string_lossy().to_string();
1566 cx.spawn(|this, mut cx| async move {
1567 let response = rpc
1568 .request(proto::OpenBufferByPath {
1569 project_id,
1570 worktree_id: remote_worktree_id.to_proto(),
1571 path: path_string,
1572 })
1573 .await?;
1574 this.update(&mut cx, |this, cx| {
1575 this.wait_for_remote_buffer(response.buffer_id, cx)
1576 })
1577 .await
1578 })
1579 }
1580
1581 /// LanguageServerName is owned, because it is inserted into a map
1582 fn open_local_buffer_via_lsp(
1583 &mut self,
1584 abs_path: lsp::Url,
1585 language_server_id: LanguageServerId,
1586 language_server_name: LanguageServerName,
1587 cx: &mut ModelContext<Self>,
1588 ) -> Task<Result<ModelHandle<Buffer>>> {
1589 cx.spawn(|this, mut cx| async move {
1590 let abs_path = abs_path
1591 .to_file_path()
1592 .map_err(|_| anyhow!("can't convert URI to path"))?;
1593 let (worktree, relative_path) = if let Some(result) =
1594 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1595 {
1596 result
1597 } else {
1598 let worktree = this
1599 .update(&mut cx, |this, cx| {
1600 this.create_local_worktree(&abs_path, false, cx)
1601 })
1602 .await?;
1603 this.update(&mut cx, |this, cx| {
1604 this.language_server_ids.insert(
1605 (worktree.read(cx).id(), language_server_name),
1606 language_server_id,
1607 );
1608 });
1609 (worktree, PathBuf::new())
1610 };
1611
1612 let project_path = ProjectPath {
1613 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1614 path: relative_path.into(),
1615 };
1616 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1617 .await
1618 })
1619 }
1620
1621 pub fn open_buffer_by_id(
1622 &mut self,
1623 id: u64,
1624 cx: &mut ModelContext<Self>,
1625 ) -> Task<Result<ModelHandle<Buffer>>> {
1626 if let Some(buffer) = self.buffer_for_id(id, cx) {
1627 Task::ready(Ok(buffer))
1628 } else if self.is_local() {
1629 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1630 } else if let Some(project_id) = self.remote_id() {
1631 let request = self
1632 .client
1633 .request(proto::OpenBufferById { project_id, id });
1634 cx.spawn(|this, mut cx| async move {
1635 let buffer_id = request.await?.buffer_id;
1636 this.update(&mut cx, |this, cx| {
1637 this.wait_for_remote_buffer(buffer_id, cx)
1638 })
1639 .await
1640 })
1641 } else {
1642 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1643 }
1644 }
1645
1646 pub fn save_buffers(
1647 &self,
1648 buffers: HashSet<ModelHandle<Buffer>>,
1649 cx: &mut ModelContext<Self>,
1650 ) -> Task<Result<()>> {
1651 cx.spawn(|this, mut cx| async move {
1652 let save_tasks = buffers
1653 .into_iter()
1654 .map(|buffer| this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx)));
1655 try_join_all(save_tasks).await?;
1656 Ok(())
1657 })
1658 }
1659
1660 pub fn save_buffer(
1661 &self,
1662 buffer: ModelHandle<Buffer>,
1663 cx: &mut ModelContext<Self>,
1664 ) -> Task<Result<()>> {
1665 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1666 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1667 };
1668 let worktree = file.worktree.clone();
1669 let path = file.path.clone();
1670 worktree.update(cx, |worktree, cx| match worktree {
1671 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1672 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1673 })
1674 }
1675
1676 pub fn save_buffer_as(
1677 &mut self,
1678 buffer: ModelHandle<Buffer>,
1679 abs_path: PathBuf,
1680 cx: &mut ModelContext<Self>,
1681 ) -> Task<Result<()>> {
1682 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1683 let old_file = File::from_dyn(buffer.read(cx).file())
1684 .filter(|f| f.is_local())
1685 .cloned();
1686 cx.spawn(|this, mut cx| async move {
1687 if let Some(old_file) = &old_file {
1688 this.update(&mut cx, |this, cx| {
1689 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1690 });
1691 }
1692 let (worktree, path) = worktree_task.await?;
1693 worktree
1694 .update(&mut cx, |worktree, cx| match worktree {
1695 Worktree::Local(worktree) => {
1696 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1697 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1699 })
1700 .await?;
1701 this.update(&mut cx, |this, cx| {
1702 this.detect_language_for_buffer(&buffer, cx);
1703 this.register_buffer_with_language_servers(&buffer, cx);
1704 });
1705 Ok(())
1706 })
1707 }
1708
1709 pub fn get_open_buffer(
1710 &mut self,
1711 path: &ProjectPath,
1712 cx: &mut ModelContext<Self>,
1713 ) -> Option<ModelHandle<Buffer>> {
1714 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1715 self.opened_buffers.values().find_map(|buffer| {
1716 let buffer = buffer.upgrade(cx)?;
1717 let file = File::from_dyn(buffer.read(cx).file())?;
1718 if file.worktree == worktree && file.path() == &path.path {
1719 Some(buffer)
1720 } else {
1721 None
1722 }
1723 })
1724 }
1725
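    /// Records a newly created or loaded buffer in `opened_buffers`, applying any operations
    /// that arrived before the buffer existed, then wires up event and release subscriptions
    /// and registers the buffer with language servers and Copilot.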
1726 fn register_buffer(
1727 &mut self,
1728 buffer: &ModelHandle<Buffer>,
1729 cx: &mut ModelContext<Self>,
1730 ) -> Result<()> {
1731 self.request_buffer_diff_recalculation(buffer, cx);
1732 buffer.update(cx, |buffer, _| {
1733 buffer.set_language_registry(self.languages.clone())
1734 });
1735
1736 let remote_id = buffer.read(cx).remote_id();
1737 let is_remote = self.is_remote();
1738 let open_buffer = if is_remote || self.is_shared() {
1739 OpenBuffer::Strong(buffer.clone())
1740 } else {
1741 OpenBuffer::Weak(buffer.downgrade())
1742 };
1743
1744 match self.opened_buffers.entry(remote_id) {
1745 hash_map::Entry::Vacant(entry) => {
1746 entry.insert(open_buffer);
1747 }
1748 hash_map::Entry::Occupied(mut entry) => {
1749 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1750 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
1751 } else if entry.get().upgrade(cx).is_some() {
1752 if is_remote {
1753 return Ok(());
1754 } else {
1755 debug_panic!("buffer {} was already registered", remote_id);
1756 Err(anyhow!("buffer {} was already registered", remote_id))?;
1757 }
1758 }
1759 entry.insert(open_buffer);
1760 }
1761 }
1762 cx.subscribe(buffer, |this, buffer, event, cx| {
1763 this.on_buffer_event(buffer, event, cx);
1764 })
1765 .detach();
1766
1767 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1768 if file.is_local {
1769 self.local_buffer_ids_by_path.insert(
1770 ProjectPath {
1771 worktree_id: file.worktree_id(cx),
1772 path: file.path.clone(),
1773 },
1774 remote_id,
1775 );
1776
1777 self.local_buffer_ids_by_entry_id
1778 .insert(file.entry_id, remote_id);
1779 }
1780 }
1781
1782 self.detect_language_for_buffer(buffer, cx);
1783 self.register_buffer_with_language_servers(buffer, cx);
1784 self.register_buffer_with_copilot(buffer, cx);
1785 cx.observe_release(buffer, |this, buffer, cx| {
1786 if let Some(file) = File::from_dyn(buffer.file()) {
1787 if file.is_local() {
1788 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1789 for server in this.language_servers_for_buffer(buffer, cx) {
1790 server
1791 .1
1792 .notify::<lsp::notification::DidCloseTextDocument>(
1793 lsp::DidCloseTextDocumentParams {
1794 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1795 },
1796 )
1797 .log_err();
1798 }
1799 }
1800 }
1801 })
1802 .detach();
1803
1804 *self.opened_buffer.0.borrow_mut() = ();
1805 Ok(())
1806 }
1807
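    /// Notifies every running language server that is relevant to this local buffer's worktree
    /// and language that the document was opened, seeds the buffer's completion triggers from
    /// the server's capabilities, and records an initial snapshot at LSP document version 0.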
1808 fn register_buffer_with_language_servers(
1809 &mut self,
1810 buffer_handle: &ModelHandle<Buffer>,
1811 cx: &mut ModelContext<Self>,
1812 ) {
1813 let buffer = buffer_handle.read(cx);
1814 let buffer_id = buffer.remote_id();
1815
1816 if let Some(file) = File::from_dyn(buffer.file()) {
1817 if !file.is_local() {
1818 return;
1819 }
1820
1821 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1822 let initial_snapshot = buffer.text_snapshot();
1823 let language = buffer.language().cloned();
1824 let worktree_id = file.worktree_id(cx);
1825
1826 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1827 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
1828 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
1829 .log_err();
1830 }
1831 }
1832
1833 if let Some(language) = language {
1834 for adapter in language.lsp_adapters() {
1835 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
1836 let server = self
1837 .language_server_ids
1838 .get(&(worktree_id, adapter.name.clone()))
1839 .and_then(|id| self.language_servers.get(id))
1840 .and_then(|server_state| {
1841 if let LanguageServerState::Running { server, .. } = server_state {
1842 Some(server.clone())
1843 } else {
1844 None
1845 }
1846 });
1847 let server = match server {
1848 Some(server) => server,
1849 None => continue,
1850 };
1851
1852 server
1853 .notify::<lsp::notification::DidOpenTextDocument>(
1854 lsp::DidOpenTextDocumentParams {
1855 text_document: lsp::TextDocumentItem::new(
1856 uri.clone(),
1857 language_id.unwrap_or_default(),
1858 0,
1859 initial_snapshot.text(),
1860 ),
1861 },
1862 )
1863 .log_err();
1864
1865 buffer_handle.update(cx, |buffer, cx| {
1866 buffer.set_completion_triggers(
1867 server
1868 .capabilities()
1869 .completion_provider
1870 .as_ref()
1871 .and_then(|provider| provider.trigger_characters.clone())
1872 .unwrap_or_default(),
1873 cx,
1874 );
1875 });
1876
1877 let snapshot = LspBufferSnapshot {
1878 version: 0,
1879 snapshot: initial_snapshot.clone(),
1880 };
1881 self.buffer_snapshots
1882 .entry(buffer_id)
1883 .or_default()
1884 .insert(server.server_id(), vec![snapshot]);
1885 }
1886 }
1887 }
1888 }
1889
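// Clears the buffer's diagnostics, drops its stored LSP snapshots, and sends textDocument/didClose
// to the language servers that were tracking it under its old path.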
1890 fn unregister_buffer_from_language_servers(
1891 &mut self,
1892 buffer: &ModelHandle<Buffer>,
1893 old_file: &File,
1894 cx: &mut ModelContext<Self>,
1895 ) {
1896 let old_path = match old_file.as_local() {
1897 Some(local) => local.abs_path(cx),
1898 None => return,
1899 };
1900
1901 buffer.update(cx, |buffer, cx| {
1902 let worktree_id = old_file.worktree_id(cx);
1903 let ids = &self.language_server_ids;
1904
1905 let language = buffer.language().cloned();
1906 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
1907 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
1908 buffer.update_diagnostics(server_id, Default::default(), cx);
1909 }
1910
1911 self.buffer_snapshots.remove(&buffer.remote_id());
1912 let file_url = lsp::Url::from_file_path(old_path).unwrap();
1913 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
1914 language_server
1915 .notify::<lsp::notification::DidCloseTextDocument>(
1916 lsp::DidCloseTextDocumentParams {
1917 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
1918 },
1919 )
1920 .log_err();
1921 }
1922 });
1923 }
1924
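// Registers the buffer with Copilot, if a global Copilot instance exists.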
1925 fn register_buffer_with_copilot(
1926 &self,
1927 buffer_handle: &ModelHandle<Buffer>,
1928 cx: &mut ModelContext<Self>,
1929 ) {
1930 if let Some(copilot) = Copilot::global(cx) {
1931 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
1932 }
1933 }
1934
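// Drains the project's ordered message channel in batches, coalescing buffer operations into
// proto::UpdateBuffer requests and flushing them before any language server update so that remote
// peers observe the messages in the order they were produced.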
1935 async fn send_buffer_ordered_messages(
1936 this: WeakModelHandle<Self>,
1937 rx: UnboundedReceiver<BufferOrderedMessage>,
1938 mut cx: AsyncAppContext,
1939 ) -> Option<()> {
1940 const MAX_BATCH_SIZE: usize = 128;
1941
1942 let mut operations_by_buffer_id = HashMap::default();
1943 async fn flush_operations(
1944 this: &ModelHandle<Project>,
1945 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
1946 needs_resync_with_host: &mut bool,
1947 is_local: bool,
1948 cx: &AsyncAppContext,
1949 ) {
1950 for (buffer_id, operations) in operations_by_buffer_id.drain() {
1951 let request = this.read_with(cx, |this, _| {
1952 let project_id = this.remote_id()?;
1953 Some(this.client.request(proto::UpdateBuffer {
1954 buffer_id,
1955 project_id,
1956 operations,
1957 }))
1958 });
1959 if let Some(request) = request {
1960 if request.await.is_err() && !is_local {
1961 *needs_resync_with_host = true;
1962 break;
1963 }
1964 }
1965 }
1966 }
1967
1968 let mut needs_resync_with_host = false;
1969 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
1970
1971 while let Some(changes) = changes.next().await {
1972 let this = this.upgrade(&mut cx)?;
1973 let is_local = this.read_with(&cx, |this, _| this.is_local());
1974
1975 for change in changes {
1976 match change {
1977 BufferOrderedMessage::Operation {
1978 buffer_id,
1979 operation,
1980 } => {
1981 if needs_resync_with_host {
1982 continue;
1983 }
1984
1985 operations_by_buffer_id
1986 .entry(buffer_id)
1987 .or_insert(Vec::new())
1988 .push(operation);
1989 }
1990
1991 BufferOrderedMessage::Resync => {
1992 operations_by_buffer_id.clear();
1993 if this
1994 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
1995 .await
1996 .is_ok()
1997 {
1998 needs_resync_with_host = false;
1999 }
2000 }
2001
2002 BufferOrderedMessage::LanguageServerUpdate {
2003 language_server_id,
2004 message,
2005 } => {
2006 flush_operations(
2007 &this,
2008 &mut operations_by_buffer_id,
2009 &mut needs_resync_with_host,
2010 is_local,
2011 &cx,
2012 )
2013 .await;
2014
2015 this.read_with(&cx, |this, _| {
2016 if let Some(project_id) = this.remote_id() {
2017 this.client
2018 .send(proto::UpdateLanguageServer {
2019 project_id,
2020 language_server_id: language_server_id.0 as u64,
2021 variant: Some(message),
2022 })
2023 .log_err();
2024 }
2025 });
2026 }
2027 }
2028 }
2029
2030 flush_operations(
2031 &this,
2032 &mut operations_by_buffer_id,
2033 &mut needs_resync_with_host,
2034 is_local,
2035 &cx,
2036 )
2037 .await;
2038 }
2039
2040 None
2041 }
2042
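// Reacts to buffer events: forwards edit operations to collaborators, sends incremental
// textDocument/didChange and didSave notifications to language servers, and schedules git diff
// recalculation when the buffer's contents or diff base change.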
2043 fn on_buffer_event(
2044 &mut self,
2045 buffer: ModelHandle<Buffer>,
2046 event: &BufferEvent,
2047 cx: &mut ModelContext<Self>,
2048 ) -> Option<()> {
2049 if matches!(
2050 event,
2051 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2052 ) {
2053 self.request_buffer_diff_recalculation(&buffer, cx);
2054 }
2055
2056 match event {
2057 BufferEvent::Operation(operation) => {
2058 self.buffer_ordered_messages_tx
2059 .unbounded_send(BufferOrderedMessage::Operation {
2060 buffer_id: buffer.read(cx).remote_id(),
2061 operation: language::proto::serialize_operation(operation),
2062 })
2063 .ok();
2064 }
2065
2066 BufferEvent::Edited { .. } => {
2067 let buffer = buffer.read(cx);
2068 let file = File::from_dyn(buffer.file())?;
2069 let abs_path = file.as_local()?.abs_path(cx);
2070 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2071 let next_snapshot = buffer.text_snapshot();
2072
2073 let language_servers: Vec<_> = self
2074 .language_servers_for_buffer(buffer, cx)
2075 .map(|i| i.1.clone())
2076 .collect();
2077
2078 for language_server in language_servers {
2079 let language_server = language_server.clone();
2080
2081 let buffer_snapshots = self
2082 .buffer_snapshots
2083 .get_mut(&buffer.remote_id())
2084 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2085 let previous_snapshot = buffer_snapshots.last()?;
2086 let next_version = previous_snapshot.version + 1;
2087
2088 let content_changes = buffer
2089 .edits_since::<(PointUtf16, usize)>(previous_snapshot.snapshot.version())
2090 .map(|edit| {
2091 let edit_start = edit.new.start.0;
2092 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2093 let new_text = next_snapshot
2094 .text_for_range(edit.new.start.1..edit.new.end.1)
2095 .collect();
2096 lsp::TextDocumentContentChangeEvent {
2097 range: Some(lsp::Range::new(
2098 point_to_lsp(edit_start),
2099 point_to_lsp(edit_end),
2100 )),
2101 range_length: None,
2102 text: new_text,
2103 }
2104 })
2105 .collect();
2106
2107 buffer_snapshots.push(LspBufferSnapshot {
2108 version: next_version,
2109 snapshot: next_snapshot.clone(),
2110 });
2111
2112 language_server
2113 .notify::<lsp::notification::DidChangeTextDocument>(
2114 lsp::DidChangeTextDocumentParams {
2115 text_document: lsp::VersionedTextDocumentIdentifier::new(
2116 uri.clone(),
2117 next_version,
2118 ),
2119 content_changes,
2120 },
2121 )
2122 .log_err();
2123 }
2124 }
2125
2126 BufferEvent::Saved => {
2127 let file = File::from_dyn(buffer.read(cx).file())?;
2128 let worktree_id = file.worktree_id(cx);
2129 let abs_path = file.as_local()?.abs_path(cx);
2130 let text_document = lsp::TextDocumentIdentifier {
2131 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2132 };
2133
2134 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2135 server
2136 .notify::<lsp::notification::DidSaveTextDocument>(
2137 lsp::DidSaveTextDocumentParams {
2138 text_document: text_document.clone(),
2139 text: None,
2140 },
2141 )
2142 .log_err();
2143 }
2144
2145 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2146 for language_server_id in language_server_ids {
2147 if let Some(LanguageServerState::Running {
2148 adapter,
2149 simulate_disk_based_diagnostics_completion,
2150 ..
2151 }) = self.language_servers.get_mut(&language_server_id)
2152 {
2153 // After saving a buffer using a language server that doesn't provide
2154 // a disk-based progress token, kick off a timer that will reset every
2155 // time the buffer is saved. If the timer eventually fires, simulate
2156 // disk-based diagnostics being finished so that other pieces of UI
2157 // (e.g., project diagnostics view, diagnostic status bar) can update.
2158 // We don't emit an event right away because the language server might take
2159 // some time to publish diagnostics.
2160 if adapter.disk_based_diagnostics_progress_token.is_none() {
2161 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2162 Duration::from_secs(1);
2163
2164 let task = cx.spawn_weak(|this, mut cx| async move {
2165 cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2166 if let Some(this) = this.upgrade(&cx) {
2167 this.update(&mut cx, |this, cx| {
2168 this.disk_based_diagnostics_finished(
2169 language_server_id,
2170 cx,
2171 );
2172 this.buffer_ordered_messages_tx
2173 .unbounded_send(
2174 BufferOrderedMessage::LanguageServerUpdate {
2175 language_server_id,
2176 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2177 },
2178 )
2179 .ok();
2180 });
2181 }
2182 });
2183 *simulate_disk_based_diagnostics_completion = Some(task);
2184 }
2185 }
2186 }
2187 }
2188
2189 _ => {}
2190 }
2191
2192 None
2193 }
2194
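// Queues the buffer for a git diff recalculation, either on the next turn of the event loop or
// debounced according to the `git.gutter_debounce` setting (with a 50ms minimum).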
2195 fn request_buffer_diff_recalculation(
2196 &mut self,
2197 buffer: &ModelHandle<Buffer>,
2198 cx: &mut ModelContext<Self>,
2199 ) {
2200 self.buffers_needing_diff.insert(buffer.downgrade());
2201 let first_insertion = self.buffers_needing_diff.len() == 1;
2202
2203 let settings = settings::get::<ProjectSettings>(cx);
2204 let delay = if let Some(delay) = settings.git.gutter_debounce {
2205 delay
2206 } else {
2207 if first_insertion {
2208 let this = cx.weak_handle();
2209 cx.defer(move |cx| {
2210 if let Some(this) = this.upgrade(cx) {
2211 this.update(cx, |this, cx| {
2212 this.recalculate_buffer_diffs(cx).detach();
2213 });
2214 }
2215 });
2216 }
2217 return;
2218 };
2219
2220 const MIN_DELAY: u64 = 50;
2221 let delay = delay.max(MIN_DELAY);
2222 let duration = Duration::from_millis(delay);
2223
2224 self.git_diff_debouncer
2225 .fire_new(duration, cx, move |this, cx| {
2226 this.recalculate_buffer_diffs(cx)
2227 });
2228 }
2229
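// Recomputes git diffs for all queued buffers, re-scheduling itself if more buffers were queued
// while the recalculation was in flight.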
2230 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2231 cx.spawn(|this, mut cx| async move {
2232 let buffers: Vec<_> = this.update(&mut cx, |this, _| {
2233 this.buffers_needing_diff.drain().collect()
2234 });
2235
2236 let tasks: Vec<_> = this.update(&mut cx, |_, cx| {
2237 buffers
2238 .iter()
2239 .filter_map(|buffer| {
2240 let buffer = buffer.upgrade(cx)?;
2241 buffer.update(cx, |buffer, cx| buffer.git_diff_recalc(cx))
2242 })
2243 .collect()
2244 });
2245
2246 futures::future::join_all(tasks).await;
2247
2248 this.update(&mut cx, |this, cx| {
2249 if !this.buffers_needing_diff.is_empty() {
2250 this.recalculate_buffer_diffs(cx).detach();
2251 } else {
2252 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2253 for buffer in buffers {
2254 if let Some(buffer) = buffer.upgrade(cx) {
2255 buffer.update(cx, |_, cx| cx.notify());
2256 }
2257 }
2258 }
2259 });
2260 })
2261 }
2262
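// Iterates over the running language servers that were started for the given worktree.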
2263 fn language_servers_for_worktree(
2264 &self,
2265 worktree_id: WorktreeId,
2266 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2267 self.language_server_ids
2268 .iter()
2269 .filter_map(move |((language_server_worktree_id, _), id)| {
2270 if *language_server_worktree_id == worktree_id {
2271 if let Some(LanguageServerState::Running {
2272 adapter,
2273 language,
2274 server,
2275 ..
2276 }) = self.language_servers.get(id)
2277 {
2278 return Some((adapter, language, server));
2279 }
2280 }
2281 None
2282 })
2283 }
2284
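// Keeps open buffers' languages in sync with the language registry: after a registry reload,
// detaches buffers from their language servers and re-detects their languages, and re-parses any
// buffers containing unknown injections.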
2285 fn maintain_buffer_languages(
2286 languages: Arc<LanguageRegistry>,
2287 cx: &mut ModelContext<Project>,
2288 ) -> Task<()> {
2289 let mut subscription = languages.subscribe();
2290 let mut prev_reload_count = languages.reload_count();
2291 cx.spawn_weak(|project, mut cx| async move {
2292 while let Some(()) = subscription.next().await {
2293 if let Some(project) = project.upgrade(&cx) {
2294 // If the language registry has been reloaded, then remove and
2295 // re-assign the languages on all open buffers.
2296 let reload_count = languages.reload_count();
2297 if reload_count > prev_reload_count {
2298 prev_reload_count = reload_count;
2299 project.update(&mut cx, |this, cx| {
2300 let buffers = this
2301 .opened_buffers
2302 .values()
2303 .filter_map(|b| b.upgrade(cx))
2304 .collect::<Vec<_>>();
2305 for buffer in buffers {
2306 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() {
2307 this.unregister_buffer_from_language_servers(&buffer, &f, cx);
2308 buffer.update(cx, |buffer, cx| buffer.set_language(None, cx));
2309 }
2310 }
2311 });
2312 }
2313
2314 project.update(&mut cx, |project, cx| {
2315 let mut plain_text_buffers = Vec::new();
2316 let mut buffers_with_unknown_injections = Vec::new();
2317 for buffer in project.opened_buffers.values() {
2318 if let Some(handle) = buffer.upgrade(cx) {
2319 let buffer = &handle.read(cx);
2320 if buffer.language().is_none()
2321 || buffer.language() == Some(&*language::PLAIN_TEXT)
2322 {
2323 plain_text_buffers.push(handle);
2324 } else if buffer.contains_unknown_injections() {
2325 buffers_with_unknown_injections.push(handle);
2326 }
2327 }
2328 }
2329
2330 for buffer in plain_text_buffers {
2331 project.detect_language_for_buffer(&buffer, cx);
2332 project.register_buffer_with_language_servers(&buffer, cx);
2333 }
2334
2335 for buffer in buffers_with_unknown_injections {
2336 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2337 }
2338 });
2339 }
2340 }
2341 })
2342 }
2343
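// Watches the settings store and pushes the updated workspace configuration to every running
// language server via workspace/didChangeConfiguration.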
2344 fn maintain_workspace_config(
2345 languages: Arc<LanguageRegistry>,
2346 cx: &mut ModelContext<Project>,
2347 ) -> Task<()> {
2348 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2349 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2350
2351 let settings_observation = cx.observe_global::<SettingsStore, _>(move |_, _| {
2352 *settings_changed_tx.borrow_mut() = ();
2353 });
2354 cx.spawn_weak(|this, mut cx| async move {
2355 while let Some(_) = settings_changed_rx.next().await {
2356 let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await;
2357 if let Some(this) = this.upgrade(&cx) {
2358 this.read_with(&cx, |this, _| {
2359 for server_state in this.language_servers.values() {
2360 if let LanguageServerState::Running { server, .. } = server_state {
2361 server
2362 .notify::<lsp::notification::DidChangeConfiguration>(
2363 lsp::DidChangeConfigurationParams {
2364 settings: workspace_config.clone(),
2365 },
2366 )
2367 .ok();
2368 }
2369 }
2370 })
2371 } else {
2372 break;
2373 }
2374 }
2375
2376 drop(settings_observation);
2377 })
2378 }
2379
2380 fn detect_language_for_buffer(
2381 &mut self,
2382 buffer_handle: &ModelHandle<Buffer>,
2383 cx: &mut ModelContext<Self>,
2384 ) -> Option<()> {
2385 // If the buffer has a language, set it and start the language server if we haven't already.
2386 let buffer = buffer_handle.read(cx);
2387 let full_path = buffer.file()?.full_path(cx);
2388 let content = buffer.as_rope();
2389 let new_language = self
2390 .languages
2391 .language_for_file(&full_path, Some(content))
2392 .now_or_never()?
2393 .ok()?;
2394 self.set_language_for_buffer(buffer_handle, new_language, cx);
2395 None
2396 }
2397
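// Assigns the given language to the buffer (if it differs from the current one) and, for local
// worktrees, starts the language servers configured for that language.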
2398 pub fn set_language_for_buffer(
2399 &mut self,
2400 buffer: &ModelHandle<Buffer>,
2401 new_language: Arc<Language>,
2402 cx: &mut ModelContext<Self>,
2403 ) {
2404 buffer.update(cx, |buffer, cx| {
2405 if buffer.language().map_or(true, |old_language| {
2406 !Arc::ptr_eq(old_language, &new_language)
2407 }) {
2408 buffer.set_language(Some(new_language.clone()), cx);
2409 }
2410 });
2411
2412 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2413 let worktree = file.worktree.clone();
2414 if let Some(tree) = worktree.read(cx).as_local() {
2415 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2416 }
2417 }
2418 }
2419
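// Starts one language server per LSP adapter of the given language for this worktree, unless
// language servers are disabled in settings or a server is already registered for that
// (worktree, adapter) key. Initialization options from settings are merged over the adapter's.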
2420 fn start_language_servers(
2421 &mut self,
2422 worktree: &ModelHandle<Worktree>,
2423 worktree_path: Arc<Path>,
2424 language: Arc<Language>,
2425 cx: &mut ModelContext<Self>,
2426 ) {
2427 if !language_settings(
2428 Some(&language),
2429 worktree
2430 .update(cx, |tree, cx| tree.root_file(cx))
2431 .map(|f| f as _)
2432 .as_ref(),
2433 cx,
2434 )
2435 .enable_language_server
2436 {
2437 return;
2438 }
2439
2440 let worktree_id = worktree.read(cx).id();
2441 for adapter in language.lsp_adapters() {
2442 let key = (worktree_id, adapter.name.clone());
2443 if self.language_server_ids.contains_key(&key) {
2444 continue;
2445 }
2446
2447 let pending_server = match self.languages.start_language_server(
2448 language.clone(),
2449 adapter.clone(),
2450 worktree_path.clone(),
2451 ProjectLspAdapterDelegate::new(self, cx),
2452 cx,
2453 ) {
2454 Some(pending_server) => pending_server,
2455 None => continue,
2456 };
2457
2458 let lsp = settings::get::<ProjectSettings>(cx)
2459 .lsp
2460 .get(&adapter.name.0);
2461 let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
2462
2463 let mut initialization_options = adapter.initialization_options.clone();
2464 match (&mut initialization_options, override_options) {
2465 (Some(initialization_options), Some(override_options)) => {
2466 merge_json_value_into(override_options, initialization_options);
2467 }
2468 (None, override_options) => initialization_options = override_options,
2469 _ => {}
2470 }
2471
2472 let server_id = pending_server.server_id;
2473 let state = self.setup_pending_language_server(
2474 initialization_options,
2475 pending_server,
2476 adapter.clone(),
2477 language.clone(),
2478 key.clone(),
2479 cx,
2480 );
2481 self.language_servers.insert(server_id, state);
2482 self.language_server_ids.insert(key.clone(), server_id);
2483 }
2484 }
2485
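// Produces the Starting state for a pending language server: a task that awaits the server,
// installs its notification and request handlers, initializes it with the merged options, records
// it as Running, and opens every matching buffer with it.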
2486 fn setup_pending_language_server(
2487 &mut self,
2488 initialization_options: Option<serde_json::Value>,
2489 pending_server: PendingLanguageServer,
2490 adapter: Arc<CachedLspAdapter>,
2491 language: Arc<Language>,
2492 key: (WorktreeId, LanguageServerName),
2493 cx: &mut ModelContext<Project>,
2494 ) -> LanguageServerState {
2495 let server_id = pending_server.server_id;
2496 let languages = self.languages.clone();
2497
2498 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2499 let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await;
2500 let language_server = pending_server.task.await.log_err()?;
2501
2502 language_server
2503 .on_notification::<lsp::notification::LogMessage, _>({
2504 move |params, mut cx| {
2505 if let Some(this) = this.upgrade(&cx) {
2506 this.update(&mut cx, |_, cx| {
2507 cx.emit(Event::LanguageServerLog(server_id, params.message))
2508 });
2509 }
2510 }
2511 })
2512 .detach();
2513
2514 language_server
2515 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2516 let adapter = adapter.clone();
2517 move |mut params, cx| {
2518 let adapter = adapter.clone();
2519 cx.spawn(|mut cx| async move {
2520 adapter.process_diagnostics(&mut params).await;
2521 if let Some(this) = this.upgrade(&cx) {
2522 this.update(&mut cx, |this, cx| {
2523 this.update_diagnostics(
2524 server_id,
2525 params,
2526 &adapter.disk_based_diagnostic_sources,
2527 cx,
2528 )
2529 .log_err();
2530 });
2531 }
2532 })
2533 .detach();
2534 }
2535 })
2536 .detach();
2537
2538 language_server
2539 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2540 let languages = languages.clone();
2541 move |params, mut cx| {
2542 let languages = languages.clone();
2543 async move {
2544 let workspace_config =
2545 cx.update(|cx| languages.workspace_configuration(cx)).await;
2546 Ok(params
2547 .items
2548 .into_iter()
2549 .map(|item| {
2550 if let Some(section) = &item.section {
2551 workspace_config
2552 .get(section)
2553 .cloned()
2554 .unwrap_or(serde_json::Value::Null)
2555 } else {
2556 workspace_config.clone()
2557 }
2558 })
2559 .collect())
2560 }
2561 }
2562 })
2563 .detach();
2564
2565 // Even though we don't have handling for these requests, respond to them to
2566 // avoid stalling any language server like `gopls` which waits for a response
2567 // to these requests when initializing.
2568 language_server
2569 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(
2570 move |params, mut cx| async move {
2571 if let Some(this) = this.upgrade(&cx) {
2572 this.update(&mut cx, |this, _| {
2573 if let Some(status) =
2574 this.language_server_statuses.get_mut(&server_id)
2575 {
2576 if let lsp::NumberOrString::String(token) = params.token {
2577 status.progress_tokens.insert(token);
2578 }
2579 }
2580 });
2581 }
2582 Ok(())
2583 },
2584 )
2585 .detach();
2586 language_server
2587 .on_request::<lsp::request::RegisterCapability, _, _>(
2588 move |params, mut cx| async move {
2589 let this = this
2590 .upgrade(&cx)
2591 .ok_or_else(|| anyhow!("project dropped"))?;
2592 for reg in params.registrations {
2593 if reg.method == "workspace/didChangeWatchedFiles" {
2594 if let Some(options) = reg.register_options {
2595 let options = serde_json::from_value(options)?;
2596 this.update(&mut cx, |this, cx| {
2597 this.on_lsp_did_change_watched_files(
2598 server_id, options, cx,
2599 );
2600 });
2601 }
2602 }
2603 }
2604 Ok(())
2605 },
2606 )
2607 .detach();
2608
2609 language_server
2610 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2611 let adapter = adapter.clone();
2612 move |params, cx| {
2613 Self::on_lsp_workspace_edit(this, params, server_id, adapter.clone(), cx)
2614 }
2615 })
2616 .detach();
2617
2618 let disk_based_diagnostics_progress_token =
2619 adapter.disk_based_diagnostics_progress_token.clone();
2620
2621 language_server
2622 .on_notification::<lsp::notification::Progress, _>({
2623 move |params, mut cx| {
2624 if let Some(this) = this.upgrade(&cx) {
2625 this.update(&mut cx, |this, cx| {
2626 this.on_lsp_progress(
2627 params,
2628 server_id,
2629 disk_based_diagnostics_progress_token.clone(),
2630 cx,
2631 );
2632 });
2633 }
2634 }
2635 })
2636 .detach();
2637
2638 let language_server = language_server
2639 .initialize(initialization_options)
2640 .await
2641 .log_err()?;
2642 language_server
2643 .notify::<lsp::notification::DidChangeConfiguration>(
2644 lsp::DidChangeConfigurationParams {
2645 settings: workspace_config,
2646 },
2647 )
2648 .ok();
2649
2650 let this = this.upgrade(&cx)?;
2651 this.update(&mut cx, |this, cx| {
2652 // If the language server for this key doesn't match the server id, don't store the
2653 // server. This causes it to be dropped, killing the process.
2654 if this
2655 .language_server_ids
2656 .get(&key)
2657 .map(|id| id != &server_id)
2658 .unwrap_or(false)
2659 {
2660 return None;
2661 }
2662
2663 // Update the language_servers collection with the Running variant of LanguageServerState,
2664 // indicating that the server is up and ready to handle requests.
2665 this.language_servers.insert(
2666 server_id,
2667 LanguageServerState::Running {
2668 adapter: adapter.clone(),
2669 language: language.clone(),
2670 watched_paths: Default::default(),
2671 server: language_server.clone(),
2672 simulate_disk_based_diagnostics_completion: None,
2673 },
2674 );
2675 this.language_server_statuses.insert(
2676 server_id,
2677 LanguageServerStatus {
2678 name: language_server.name().to_string(),
2679 pending_work: Default::default(),
2680 has_pending_diagnostic_updates: false,
2681 progress_tokens: Default::default(),
2682 },
2683 );
2684
2685 cx.emit(Event::LanguageServerAdded(server_id));
2686
2687 if let Some(project_id) = this.remote_id() {
2688 this.client
2689 .send(proto::StartLanguageServer {
2690 project_id,
2691 server: Some(proto::LanguageServer {
2692 id: server_id.0 as u64,
2693 name: language_server.name().to_string(),
2694 }),
2695 })
2696 .log_err();
2697 }
2698
2699 // Tell the language server about every open buffer in the worktree that matches the language.
2700 for buffer in this.opened_buffers.values() {
2701 if let Some(buffer_handle) = buffer.upgrade(cx) {
2702 let buffer = buffer_handle.read(cx);
2703 let file = match File::from_dyn(buffer.file()) {
2704 Some(file) => file,
2705 None => continue,
2706 };
2707 let language = match buffer.language() {
2708 Some(language) => language,
2709 None => continue,
2710 };
2711
2712 if file.worktree.read(cx).id() != key.0
2713 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
2714 {
2715 continue;
2716 }
2717
2718 let file = file.as_local()?;
2719 let versions = this
2720 .buffer_snapshots
2721 .entry(buffer.remote_id())
2722 .or_default()
2723 .entry(server_id)
2724 .or_insert_with(|| {
2725 vec![LspBufferSnapshot {
2726 version: 0,
2727 snapshot: buffer.text_snapshot(),
2728 }]
2729 });
2730
2731 let snapshot = versions.last().unwrap();
2732 let version = snapshot.version;
2733 let initial_snapshot = &snapshot.snapshot;
2734 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2735 language_server
2736 .notify::<lsp::notification::DidOpenTextDocument>(
2737 lsp::DidOpenTextDocumentParams {
2738 text_document: lsp::TextDocumentItem::new(
2739 uri,
2740 adapter
2741 .language_ids
2742 .get(language.name().as_ref())
2743 .cloned()
2744 .unwrap_or_default(),
2745 version,
2746 initial_snapshot.text(),
2747 ),
2748 },
2749 )
2750 .log_err()?;
2751 buffer_handle.update(cx, |buffer, cx| {
2752 buffer.set_completion_triggers(
2753 language_server
2754 .capabilities()
2755 .completion_provider
2756 .as_ref()
2757 .and_then(|provider| provider.trigger_characters.clone())
2758 .unwrap_or_default(),
2759 cx,
2760 )
2761 });
2762 }
2763 }
2764
2765 cx.notify();
2766 Some(language_server)
2767 })
2768 }))
2769 }
2770
2771 // Returns the root path of the stopped server (if any), along with all of the worktrees
2772 // that no longer have a language server as a result of stopping it.
2773 fn stop_language_server(
2774 &mut self,
2775 worktree_id: WorktreeId,
2776 adapter_name: LanguageServerName,
2777 cx: &mut ModelContext<Self>,
2778 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
2779 let key = (worktree_id, adapter_name);
2780 if let Some(server_id) = self.language_server_ids.remove(&key) {
2781 // Remove other entries for this language server as well
2782 let mut orphaned_worktrees = vec![worktree_id];
2783 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
2784 for other_key in other_keys {
2785 if self.language_server_ids.get(&other_key) == Some(&server_id) {
2786 self.language_server_ids.remove(&other_key);
2787 orphaned_worktrees.push(other_key.0);
2788 }
2789 }
2790
2791 for buffer in self.opened_buffers.values() {
2792 if let Some(buffer) = buffer.upgrade(cx) {
2793 buffer.update(cx, |buffer, cx| {
2794 buffer.update_diagnostics(server_id, Default::default(), cx);
2795 });
2796 }
2797 }
2798 for worktree in &self.worktrees {
2799 if let Some(worktree) = worktree.upgrade(cx) {
2800 worktree.update(cx, |worktree, cx| {
2801 if let Some(worktree) = worktree.as_local_mut() {
2802 worktree.clear_diagnostics_for_language_server(server_id, cx);
2803 }
2804 });
2805 }
2806 }
2807
2808 self.language_server_statuses.remove(&server_id);
2809 cx.notify();
2810
2811 let server_state = self.language_servers.remove(&server_id);
2812 cx.emit(Event::LanguageServerRemoved(server_id));
2813 cx.spawn_weak(|this, mut cx| async move {
2814 let mut root_path = None;
2815
2816 let server = match server_state {
2817 Some(LanguageServerState::Starting(started_language_server)) => {
2818 started_language_server.await
2819 }
2820 Some(LanguageServerState::Running { server, .. }) => Some(server),
2821 None => None,
2822 };
2823
2824 if let Some(server) = server {
2825 root_path = Some(server.root_path().clone());
2826 if let Some(shutdown) = server.shutdown() {
2827 shutdown.await;
2828 }
2829 }
2830
2831 if let Some(this) = this.upgrade(&cx) {
2832 this.update(&mut cx, |this, cx| {
2833 this.language_server_statuses.remove(&server_id);
2834 cx.notify();
2835 });
2836 }
2837
2838 (root_path, orphaned_worktrees)
2839 })
2840 } else {
2841 Task::ready((None, Vec::new()))
2842 }
2843 }
2844
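// Restarts the language servers used by the given buffers, grouping the buffers by worktree and
// detected language.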
2845 pub fn restart_language_servers_for_buffers(
2846 &mut self,
2847 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2848 cx: &mut ModelContext<Self>,
2849 ) -> Option<()> {
2850 let language_server_lookup_info: HashSet<(ModelHandle<Worktree>, Arc<Language>)> = buffers
2851 .into_iter()
2852 .filter_map(|buffer| {
2853 let buffer = buffer.read(cx);
2854 let file = File::from_dyn(buffer.file())?;
2855 let full_path = file.full_path(cx);
2856 let language = self
2857 .languages
2858 .language_for_file(&full_path, Some(buffer.as_rope()))
2859 .now_or_never()?
2860 .ok()?;
2861 Some((file.worktree.clone(), language))
2862 })
2863 .collect();
2864 for (worktree, language) in language_server_lookup_info {
2865 self.restart_language_servers(worktree, language, cx);
2866 }
2867
2868 None
2869 }
2870
2871 // TODO: This will break if the adapter's root paths and worktrees are not equal
2872 fn restart_language_servers(
2873 &mut self,
2874 worktree: ModelHandle<Worktree>,
2875 language: Arc<Language>,
2876 cx: &mut ModelContext<Self>,
2877 ) {
2878 let worktree_id = worktree.read(cx).id();
2879 let fallback_path = worktree.read(cx).abs_path();
2880
2881 let mut stops = Vec::new();
2882 for adapter in language.lsp_adapters() {
2883 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
2884 }
2885
2886 if stops.is_empty() {
2887 return;
2888 }
2889 let mut stops = stops.into_iter();
2890
2891 cx.spawn_weak(|this, mut cx| async move {
2892 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
2893 for stop in stops {
2894 let (_, worktrees) = stop.await;
2895 orphaned_worktrees.extend_from_slice(&worktrees);
2896 }
2897
2898 let this = match this.upgrade(&cx) {
2899 Some(this) => this,
2900 None => return,
2901 };
2902
2903 this.update(&mut cx, |this, cx| {
2904 // Attempt to restart using the original server path. Fall back to the passed-in
2905 // path if we could not retrieve the root path.
2906 let root_path = original_root_path
2907 .map(|path_buf| Arc::from(path_buf.as_path()))
2908 .unwrap_or(fallback_path);
2909
2910 this.start_language_servers(&worktree, root_path, language.clone(), cx);
2911
2912 // Look up the new server ids and assign them to each of the orphaned worktrees
2913 for adapter in language.lsp_adapters() {
2914 if let Some(new_server_id) = this
2915 .language_server_ids
2916 .get(&(worktree_id, adapter.name.clone()))
2917 .cloned()
2918 {
2919 for &orphaned_worktree in &orphaned_worktrees {
2920 this.language_server_ids
2921 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
2922 }
2923 }
2924 }
2925 });
2926 })
2927 .detach();
2928 }
2929
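// Translates $/progress notifications into either disk-based diagnostics state changes or generic
// work start/progress/end updates, and forwards them to remote collaborators.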
2930 fn on_lsp_progress(
2931 &mut self,
2932 progress: lsp::ProgressParams,
2933 language_server_id: LanguageServerId,
2934 disk_based_diagnostics_progress_token: Option<String>,
2935 cx: &mut ModelContext<Self>,
2936 ) {
2937 let token = match progress.token {
2938 lsp::NumberOrString::String(token) => token,
2939 lsp::NumberOrString::Number(token) => {
2940 log::info!("skipping numeric progress token {}", token);
2941 return;
2942 }
2943 };
2944 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
2945 let language_server_status =
2946 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2947 status
2948 } else {
2949 return;
2950 };
2951
2952 if !language_server_status.progress_tokens.contains(&token) {
2953 return;
2954 }
2955
2956 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
2957 .as_ref()
2958 .map_or(false, |disk_based_token| {
2959 token.starts_with(disk_based_token)
2960 });
2961
2962 match progress {
2963 lsp::WorkDoneProgress::Begin(report) => {
2964 if is_disk_based_diagnostics_progress {
2965 language_server_status.has_pending_diagnostic_updates = true;
2966 self.disk_based_diagnostics_started(language_server_id, cx);
2967 self.buffer_ordered_messages_tx
2968 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2969 language_server_id,
2970 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
2971 })
2972 .ok();
2973 } else {
2974 self.on_lsp_work_start(
2975 language_server_id,
2976 token.clone(),
2977 LanguageServerProgress {
2978 message: report.message.clone(),
2979 percentage: report.percentage.map(|p| p as usize),
2980 last_update_at: Instant::now(),
2981 },
2982 cx,
2983 );
2984 self.buffer_ordered_messages_tx
2985 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2986 language_server_id,
2987 message: proto::update_language_server::Variant::WorkStart(
2988 proto::LspWorkStart {
2989 token,
2990 message: report.message,
2991 percentage: report.percentage.map(|p| p as u32),
2992 },
2993 ),
2994 })
2995 .ok();
2996 }
2997 }
2998 lsp::WorkDoneProgress::Report(report) => {
2999 if !is_disk_based_diagnostics_progress {
3000 self.on_lsp_work_progress(
3001 language_server_id,
3002 token.clone(),
3003 LanguageServerProgress {
3004 message: report.message.clone(),
3005 percentage: report.percentage.map(|p| p as usize),
3006 last_update_at: Instant::now(),
3007 },
3008 cx,
3009 );
3010 self.buffer_ordered_messages_tx
3011 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3012 language_server_id,
3013 message: proto::update_language_server::Variant::WorkProgress(
3014 proto::LspWorkProgress {
3015 token,
3016 message: report.message,
3017 percentage: report.percentage.map(|p| p as u32),
3018 },
3019 ),
3020 })
3021 .ok();
3022 }
3023 }
3024 lsp::WorkDoneProgress::End(_) => {
3025 language_server_status.progress_tokens.remove(&token);
3026
3027 if is_disk_based_diagnostics_progress {
3028 language_server_status.has_pending_diagnostic_updates = false;
3029 self.disk_based_diagnostics_finished(language_server_id, cx);
3030 self.buffer_ordered_messages_tx
3031 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3032 language_server_id,
3033 message:
3034 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3035 Default::default(),
3036 ),
3037 })
3038 .ok();
3039 } else {
3040 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3041 self.buffer_ordered_messages_tx
3042 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3043 language_server_id,
3044 message: proto::update_language_server::Variant::WorkEnd(
3045 proto::LspWorkEnd { token },
3046 ),
3047 })
3048 .ok();
3049 }
3050 }
3051 }
3052 }
3053
3054 fn on_lsp_work_start(
3055 &mut self,
3056 language_server_id: LanguageServerId,
3057 token: String,
3058 progress: LanguageServerProgress,
3059 cx: &mut ModelContext<Self>,
3060 ) {
3061 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3062 status.pending_work.insert(token, progress);
3063 cx.notify();
3064 }
3065 }
3066
3067 fn on_lsp_work_progress(
3068 &mut self,
3069 language_server_id: LanguageServerId,
3070 token: String,
3071 progress: LanguageServerProgress,
3072 cx: &mut ModelContext<Self>,
3073 ) {
3074 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3075 let entry = status
3076 .pending_work
3077 .entry(token)
3078 .or_insert(LanguageServerProgress {
3079 message: Default::default(),
3080 percentage: Default::default(),
3081 last_update_at: progress.last_update_at,
3082 });
3083 if progress.message.is_some() {
3084 entry.message = progress.message;
3085 }
3086 if progress.percentage.is_some() {
3087 entry.percentage = progress.percentage;
3088 }
3089 entry.last_update_at = progress.last_update_at;
3090 cx.notify();
3091 }
3092 }
3093
3094 fn on_lsp_work_end(
3095 &mut self,
3096 language_server_id: LanguageServerId,
3097 token: String,
3098 cx: &mut ModelContext<Self>,
3099 ) {
3100 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3101 status.pending_work.remove(&token);
3102 cx.notify();
3103 }
3104 }
3105
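// Rebuilds the per-worktree glob sets describing the paths this language server asked to watch.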
3106 fn on_lsp_did_change_watched_files(
3107 &mut self,
3108 language_server_id: LanguageServerId,
3109 params: DidChangeWatchedFilesRegistrationOptions,
3110 cx: &mut ModelContext<Self>,
3111 ) {
3112 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3113 self.language_servers.get_mut(&language_server_id)
3114 {
3115 let mut builders = HashMap::default();
3116 for watcher in params.watchers {
3117 for worktree in &self.worktrees {
3118 if let Some(worktree) = worktree.upgrade(cx) {
3119 let worktree = worktree.read(cx);
3120 if let Some(abs_path) = worktree.abs_path().to_str() {
3121 if let Some(suffix) = match &watcher.glob_pattern {
3122 lsp::GlobPattern::String(s) => s,
3123 lsp::GlobPattern::Relative(rp) => &rp.pattern,
3124 }
3125 .strip_prefix(abs_path)
3126 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
3127 {
3128 if let Some(glob) = Glob::new(suffix).log_err() {
3129 builders
3130 .entry(worktree.id())
3131 .or_insert_with(GlobSetBuilder::new)
3132 .add(glob);
3133 }
3134 break;
3135 }
3136 }
3137 }
3138 }
3139 }
3140
3141 watched_paths.clear();
3142 for (worktree_id, builder) in builders {
3143 if let Ok(globset) = builder.build() {
3144 watched_paths.insert(worktree_id, globset);
3145 }
3146 }
3147
3148 cx.notify();
3149 }
3150 }
3151
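// Applies a workspace edit requested by a language server and remembers the resulting transaction
// so that it can be associated with the server's originating request.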
3152 async fn on_lsp_workspace_edit(
3153 this: WeakModelHandle<Self>,
3154 params: lsp::ApplyWorkspaceEditParams,
3155 server_id: LanguageServerId,
3156 adapter: Arc<CachedLspAdapter>,
3157 mut cx: AsyncAppContext,
3158 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3159 let this = this
3160 .upgrade(&cx)
3161 .ok_or_else(|| anyhow!("project closed"))?;
3162 let language_server = this
3163 .read_with(&cx, |this, _| this.language_server_for_id(server_id))
3164 .ok_or_else(|| anyhow!("language server not found"))?;
3165 let transaction = Self::deserialize_workspace_edit(
3166 this.clone(),
3167 params.edit,
3168 true,
3169 adapter.clone(),
3170 language_server.clone(),
3171 &mut cx,
3172 )
3173 .await
3174 .log_err();
3175 this.update(&mut cx, |this, _| {
3176 if let Some(transaction) = transaction {
3177 this.last_workspace_edits_by_language_server
3178 .insert(server_id, transaction);
3179 }
3180 });
3181 Ok(lsp::ApplyWorkspaceEditResponse {
3182 applied: true,
3183 failed_change: None,
3184 failure_reason: None,
3185 })
3186 }
3187
3188 pub fn language_server_statuses(
3189 &self,
3190 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3191 self.language_server_statuses.values()
3192 }
3193
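// Converts an LSP publishDiagnostics notification into diagnostic entries, attaching related
// information to its primary diagnostic's group, and stores the result for the affected path.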
3194 pub fn update_diagnostics(
3195 &mut self,
3196 language_server_id: LanguageServerId,
3197 mut params: lsp::PublishDiagnosticsParams,
3198 disk_based_sources: &[String],
3199 cx: &mut ModelContext<Self>,
3200 ) -> Result<()> {
3201 let abs_path = params
3202 .uri
3203 .to_file_path()
3204 .map_err(|_| anyhow!("URI is not a file"))?;
3205 let mut diagnostics = Vec::default();
3206 let mut primary_diagnostic_group_ids = HashMap::default();
3207 let mut sources_by_group_id = HashMap::default();
3208 let mut supporting_diagnostics = HashMap::default();
3209
3210 // Ensure that primary diagnostics are always the most severe
3211 params.diagnostics.sort_by_key(|item| item.severity);
3212
3213 for diagnostic in &params.diagnostics {
3214 let source = diagnostic.source.as_ref();
3215 let code = diagnostic.code.as_ref().map(|code| match code {
3216 lsp::NumberOrString::Number(code) => code.to_string(),
3217 lsp::NumberOrString::String(code) => code.clone(),
3218 });
3219 let range = range_from_lsp(diagnostic.range);
3220 let is_supporting = diagnostic
3221 .related_information
3222 .as_ref()
3223 .map_or(false, |infos| {
3224 infos.iter().any(|info| {
3225 primary_diagnostic_group_ids.contains_key(&(
3226 source,
3227 code.clone(),
3228 range_from_lsp(info.location.range),
3229 ))
3230 })
3231 });
3232
3233 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3234 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3235 });
3236
3237 if is_supporting {
3238 supporting_diagnostics.insert(
3239 (source, code.clone(), range),
3240 (diagnostic.severity, is_unnecessary),
3241 );
3242 } else {
3243 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3244 let is_disk_based =
3245 source.map_or(false, |source| disk_based_sources.contains(source));
3246
3247 sources_by_group_id.insert(group_id, source);
3248 primary_diagnostic_group_ids
3249 .insert((source, code.clone(), range.clone()), group_id);
3250
3251 diagnostics.push(DiagnosticEntry {
3252 range,
3253 diagnostic: Diagnostic {
3254 source: diagnostic.source.clone(),
3255 code: code.clone(),
3256 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3257 message: diagnostic.message.clone(),
3258 group_id,
3259 is_primary: true,
3260 is_valid: true,
3261 is_disk_based,
3262 is_unnecessary,
3263 },
3264 });
3265 if let Some(infos) = &diagnostic.related_information {
3266 for info in infos {
3267 if info.location.uri == params.uri && !info.message.is_empty() {
3268 let range = range_from_lsp(info.location.range);
3269 diagnostics.push(DiagnosticEntry {
3270 range,
3271 diagnostic: Diagnostic {
3272 source: diagnostic.source.clone(),
3273 code: code.clone(),
3274 severity: DiagnosticSeverity::INFORMATION,
3275 message: info.message.clone(),
3276 group_id,
3277 is_primary: false,
3278 is_valid: true,
3279 is_disk_based,
3280 is_unnecessary: false,
3281 },
3282 });
3283 }
3284 }
3285 }
3286 }
3287 }
3288
3289 for entry in &mut diagnostics {
3290 let diagnostic = &mut entry.diagnostic;
3291 if !diagnostic.is_primary {
3292 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3293 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3294 source,
3295 diagnostic.code.clone(),
3296 entry.range.clone(),
3297 )) {
3298 if let Some(severity) = severity {
3299 diagnostic.severity = severity;
3300 }
3301 diagnostic.is_unnecessary = is_unnecessary;
3302 }
3303 }
3304 }
3305
3306 self.update_diagnostic_entries(
3307 language_server_id,
3308 abs_path,
3309 params.version,
3310 diagnostics,
3311 cx,
3312 )?;
3313 Ok(())
3314 }
3315
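// Routes a set of diagnostics to the local worktree (and any open buffer) owning the given
// absolute path, emitting DiagnosticsUpdated if anything changed.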
3316 pub fn update_diagnostic_entries(
3317 &mut self,
3318 server_id: LanguageServerId,
3319 abs_path: PathBuf,
3320 version: Option<i32>,
3321 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3322 cx: &mut ModelContext<Project>,
3323 ) -> Result<(), anyhow::Error> {
3324 let (worktree, relative_path) = self
3325 .find_local_worktree(&abs_path, cx)
3326 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3327
3328 let project_path = ProjectPath {
3329 worktree_id: worktree.read(cx).id(),
3330 path: relative_path.into(),
3331 };
3332
3333 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3334 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3335 }
3336
3337 let updated = worktree.update(cx, |worktree, cx| {
3338 worktree
3339 .as_local_mut()
3340 .ok_or_else(|| anyhow!("not a local worktree"))?
3341 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3342 })?;
3343 if updated {
3344 cx.emit(Event::DiagnosticsUpdated {
3345 language_server_id: server_id,
3346 path: project_path,
3347 });
3348 }
3349 Ok(())
3350 }
3351
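// Sorts incoming diagnostics, remaps disk-based ones through the buffer's unsaved edits, clips
// their ranges against the snapshot that matches the reported LSP version, and stores them on the buffer.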
3352 fn update_buffer_diagnostics(
3353 &mut self,
3354 buffer: &ModelHandle<Buffer>,
3355 server_id: LanguageServerId,
3356 version: Option<i32>,
3357 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3358 cx: &mut ModelContext<Self>,
3359 ) -> Result<()> {
3360 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3361 Ordering::Equal
3362 .then_with(|| b.is_primary.cmp(&a.is_primary))
3363 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3364 .then_with(|| a.severity.cmp(&b.severity))
3365 .then_with(|| a.message.cmp(&b.message))
3366 }
3367
3368 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3369
3370 diagnostics.sort_unstable_by(|a, b| {
3371 Ordering::Equal
3372 .then_with(|| a.range.start.cmp(&b.range.start))
3373 .then_with(|| b.range.end.cmp(&a.range.end))
3374 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3375 });
3376
3377 let mut sanitized_diagnostics = Vec::new();
3378 let edits_since_save = Patch::new(
3379 snapshot
3380 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
3381 .collect(),
3382 );
3383 for entry in diagnostics {
3384 let start;
3385 let end;
3386 if entry.diagnostic.is_disk_based {
3387 // Some diagnostics are based on files on disk instead of buffers'
3388 // current contents. Adjust these diagnostics' ranges to reflect
3389 // any unsaved edits.
3390 start = edits_since_save.old_to_new(entry.range.start);
3391 end = edits_since_save.old_to_new(entry.range.end);
3392 } else {
3393 start = entry.range.start;
3394 end = entry.range.end;
3395 }
3396
3397 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
3398 ..snapshot.clip_point_utf16(end, Bias::Right);
3399
3400 // Expand empty ranges by one codepoint
3401 if range.start == range.end {
3402 // This will go to the next boundary when it is clipped
3403 range.end.column += 1;
3404 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
3405 if range.start == range.end && range.end.column > 0 {
3406 range.start.column -= 1;
3407 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
3408 }
3409 }
3410
3411 sanitized_diagnostics.push(DiagnosticEntry {
3412 range,
3413 diagnostic: entry.diagnostic,
3414 });
3415 }
3416 drop(edits_since_save);
3417
3418 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
3419 buffer.update(cx, |buffer, cx| {
3420 buffer.update_diagnostics(server_id, set, cx)
3421 });
3422 Ok(())
3423 }
3424
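// Reloads the given dirty buffers from disk, locally or via the remote host, and returns the
// resulting project transaction.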
3425 pub fn reload_buffers(
3426 &self,
3427 buffers: HashSet<ModelHandle<Buffer>>,
3428 push_to_history: bool,
3429 cx: &mut ModelContext<Self>,
3430 ) -> Task<Result<ProjectTransaction>> {
3431 let mut local_buffers = Vec::new();
3432 let mut remote_buffers = None;
3433 for buffer_handle in buffers {
3434 let buffer = buffer_handle.read(cx);
3435 if buffer.is_dirty() {
3436 if let Some(file) = File::from_dyn(buffer.file()) {
3437 if file.is_local() {
3438 local_buffers.push(buffer_handle);
3439 } else {
3440 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
3441 }
3442 }
3443 }
3444 }
3445
3446 let remote_buffers = self.remote_id().zip(remote_buffers);
3447 let client = self.client.clone();
3448
3449 cx.spawn(|this, mut cx| async move {
3450 let mut project_transaction = ProjectTransaction::default();
3451
3452 if let Some((project_id, remote_buffers)) = remote_buffers {
3453 let response = client
3454 .request(proto::ReloadBuffers {
3455 project_id,
3456 buffer_ids: remote_buffers
3457 .iter()
3458 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
3459 .collect(),
3460 })
3461 .await?
3462 .transaction
3463 .ok_or_else(|| anyhow!("missing transaction"))?;
3464 project_transaction = this
3465 .update(&mut cx, |this, cx| {
3466 this.deserialize_project_transaction(response, push_to_history, cx)
3467 })
3468 .await?;
3469 }
3470
3471 for buffer in local_buffers {
3472 let transaction = buffer
3473 .update(&mut cx, |buffer, cx| buffer.reload(cx))
3474 .await?;
3475 buffer.update(&mut cx, |buffer, cx| {
3476 if let Some(transaction) = transaction {
3477 if !push_to_history {
3478 buffer.forget_transaction(transaction.id);
3479 }
3480 project_transaction.0.insert(cx.handle(), transaction);
3481 }
3482 });
3483 }
3484
3485 Ok(project_transaction)
3486 })
3487 }
3488
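// Formats the given buffers according to their settings: whitespace cleanup first, then formatting
// via the primary language server or an external command, grouped into a single undoable
// transaction per buffer. Remote projects delegate the request to the host.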
3489 pub fn format(
3490 &self,
3491 buffers: HashSet<ModelHandle<Buffer>>,
3492 push_to_history: bool,
3493 trigger: FormatTrigger,
3494 cx: &mut ModelContext<Project>,
3495 ) -> Task<Result<ProjectTransaction>> {
3496 if self.is_local() {
3497 let mut buffers_with_paths_and_servers = buffers
3498 .into_iter()
3499 .filter_map(|buffer_handle| {
3500 let buffer = buffer_handle.read(cx);
3501 let file = File::from_dyn(buffer.file())?;
3502 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3503 let server = self
3504 .primary_language_servers_for_buffer(buffer, cx)
3505 .map(|s| s.1.clone());
3506 Some((buffer_handle, buffer_abs_path, server))
3507 })
3508 .collect::<Vec<_>>();
3509
3510 cx.spawn(|this, mut cx| async move {
3511 // Do not allow multiple concurrent formatting requests for the
3512 // same buffer.
3513 this.update(&mut cx, |this, cx| {
3514 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
3515 this.buffers_being_formatted
3516 .insert(buffer.read(cx).remote_id())
3517 });
3518 });
3519
3520 let _cleanup = defer({
3521 let this = this.clone();
3522 let mut cx = cx.clone();
3523 let buffers = &buffers_with_paths_and_servers;
3524 move || {
3525 this.update(&mut cx, |this, cx| {
3526 for (buffer, _, _) in buffers {
3527 this.buffers_being_formatted
3528 .remove(&buffer.read(cx).remote_id());
3529 }
3530 });
3531 }
3532 });
3533
3534 let mut project_transaction = ProjectTransaction::default();
3535 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
3536 let settings = buffer.read_with(&cx, |buffer, cx| {
3537 language_settings(buffer.language(), buffer.file(), cx).clone()
3538 });
3539
3540 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
3541 let ensure_final_newline = settings.ensure_final_newline_on_save;
3542 let format_on_save = settings.format_on_save.clone();
3543 let formatter = settings.formatter.clone();
3544 let tab_size = settings.tab_size;
3545
3546 // First, format the buffer's whitespace according to the settings.
3547 let trailing_whitespace_diff = if remove_trailing_whitespace {
3548 Some(
3549 buffer
3550 .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
3551 .await,
3552 )
3553 } else {
3554 None
3555 };
3556 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
3557 buffer.finalize_last_transaction();
3558 buffer.start_transaction();
3559 if let Some(diff) = trailing_whitespace_diff {
3560 buffer.apply_diff(diff, cx);
3561 }
3562 if ensure_final_newline {
3563 buffer.ensure_final_newline(cx);
3564 }
3565 buffer.end_transaction(cx)
3566 });
3567
3568 // Currently, formatting operations are represented differently depending on
3569 // whether they come from a language server or an external command.
3570 enum FormatOperation {
3571 Lsp(Vec<(Range<Anchor>, String)>),
3572 External(Diff),
3573 }
3574
3575 // Apply language-specific formatting using either a language server
3576 // or external command.
3577 let mut format_operation = None;
3578 match (formatter, format_on_save) {
3579 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
3580
3581 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
3582 | (_, FormatOnSave::LanguageServer) => {
3583 if let Some((language_server, buffer_abs_path)) =
3584 language_server.as_ref().zip(buffer_abs_path.as_ref())
3585 {
3586 format_operation = Some(FormatOperation::Lsp(
3587 Self::format_via_lsp(
3588 &this,
3589 &buffer,
3590 buffer_abs_path,
3591 &language_server,
3592 tab_size,
3593 &mut cx,
3594 )
3595 .await
3596 .context("failed to format via language server")?,
3597 ));
3598 }
3599 }
3600
3601 (
3602 Formatter::External { command, arguments },
3603 FormatOnSave::On | FormatOnSave::Off,
3604 )
3605 | (_, FormatOnSave::External { command, arguments }) => {
3606 if let Some(buffer_abs_path) = buffer_abs_path {
3607 format_operation = Self::format_via_external_command(
3608 &buffer,
3609 &buffer_abs_path,
3610 &command,
3611 &arguments,
3612 &mut cx,
3613 )
3614 .await
3615 .context(format!(
3616 "failed to format via external command {:?}",
3617 command
3618 ))?
3619 .map(FormatOperation::External);
3620 }
3621 }
3622 };
3623
3624 buffer.update(&mut cx, |b, cx| {
3625 // If the buffer had its whitespace formatted and was edited while the language-specific
3626 // formatting was being computed, avoid applying the language-specific formatting, because
3627 // it can't be grouped with the whitespace formatting in the undo history.
3628 if let Some(transaction_id) = whitespace_transaction_id {
3629 if b.peek_undo_stack()
3630 .map_or(true, |e| e.transaction_id() != transaction_id)
3631 {
3632 format_operation.take();
3633 }
3634 }
3635
3636 // Apply any language-specific formatting, and group the two formatting operations
3637 // in the buffer's undo history.
3638 if let Some(operation) = format_operation {
3639 match operation {
3640 FormatOperation::Lsp(edits) => {
3641 b.edit(edits, None, cx);
3642 }
3643 FormatOperation::External(diff) => {
3644 b.apply_diff(diff, cx);
3645 }
3646 }
3647
3648 if let Some(transaction_id) = whitespace_transaction_id {
3649 b.group_until_transaction(transaction_id);
3650 }
3651 }
3652
3653 if let Some(transaction) = b.finalize_last_transaction().cloned() {
3654 if !push_to_history {
3655 b.forget_transaction(transaction.id);
3656 }
3657 project_transaction.0.insert(buffer.clone(), transaction);
3658 }
3659 });
3660 }
3661
3662 Ok(project_transaction)
3663 })
3664 } else {
3665 let remote_id = self.remote_id();
3666 let client = self.client.clone();
3667 cx.spawn(|this, mut cx| async move {
3668 let mut project_transaction = ProjectTransaction::default();
3669 if let Some(project_id) = remote_id {
3670 let response = client
3671 .request(proto::FormatBuffers {
3672 project_id,
3673 trigger: trigger as i32,
3674 buffer_ids: buffers
3675 .iter()
3676 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
3677 .collect(),
3678 })
3679 .await?
3680 .transaction
3681 .ok_or_else(|| anyhow!("missing transaction"))?;
3682 project_transaction = this
3683 .update(&mut cx, |this, cx| {
3684 this.deserialize_project_transaction(response, push_to_history, cx)
3685 })
3686 .await?;
3687 }
3688 Ok(project_transaction)
3689 })
3690 }
3691 }
3692
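// Requests whole-document formatting (falling back to range formatting over the entire buffer)
// from the language server and converts the returned edits into anchored buffer edits.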
3693 async fn format_via_lsp(
3694 this: &ModelHandle<Self>,
3695 buffer: &ModelHandle<Buffer>,
3696 abs_path: &Path,
3697 language_server: &Arc<LanguageServer>,
3698 tab_size: NonZeroU32,
3699 cx: &mut AsyncAppContext,
3700 ) -> Result<Vec<(Range<Anchor>, String)>> {
3701 let text_document =
3702 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(abs_path).unwrap());
3703 let capabilities = &language_server.capabilities();
3704 let lsp_edits = if capabilities
3705 .document_formatting_provider
3706 .as_ref()
3707 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
3708 {
3709 language_server
3710 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
3711 text_document,
3712 options: lsp_command::lsp_formatting_options(tab_size.get()),
3713 work_done_progress_params: Default::default(),
3714 })
3715 .await?
3716 } else if capabilities
3717 .document_range_formatting_provider
3718 .as_ref()
3719 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
3720 {
3721 let buffer_start = lsp::Position::new(0, 0);
3722 let buffer_end =
3723 buffer.read_with(cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
3724 language_server
3725 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
3726 text_document,
3727 range: lsp::Range::new(buffer_start, buffer_end),
3728 options: lsp_command::lsp_formatting_options(tab_size.get()),
3729 work_done_progress_params: Default::default(),
3730 })
3731 .await?
3732 } else {
3733 None
3734 };
3735
3736 if let Some(lsp_edits) = lsp_edits {
3737 this.update(cx, |this, cx| {
3738 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
3739 })
3740 .await
3741 } else {
3742 Ok(Default::default())
3743 }
3744 }
3745
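// Pipes the buffer's contents through an external formatting command run from the worktree root,
// substituting `{buffer_path}` in the arguments, and returns the diff against the current text.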
3746 async fn format_via_external_command(
3747 buffer: &ModelHandle<Buffer>,
3748 buffer_abs_path: &Path,
3749 command: &str,
3750 arguments: &[String],
3751 cx: &mut AsyncAppContext,
3752 ) -> Result<Option<Diff>> {
3753 let working_dir_path = buffer.read_with(cx, |buffer, cx| {
3754 let file = File::from_dyn(buffer.file())?;
3755 let worktree = file.worktree.read(cx).as_local()?;
3756 let mut worktree_path = worktree.abs_path().to_path_buf();
3757 if worktree.root_entry()?.is_file() {
3758 worktree_path.pop();
3759 }
3760 Some(worktree_path)
3761 });
3762
3763 if let Some(working_dir_path) = working_dir_path {
3764 let mut child =
3765 smol::process::Command::new(command)
3766 .args(arguments.iter().map(|arg| {
3767 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
3768 }))
3769 .current_dir(&working_dir_path)
3770 .stdin(smol::process::Stdio::piped())
3771 .stdout(smol::process::Stdio::piped())
3772 .stderr(smol::process::Stdio::piped())
3773 .spawn()?;
3774 let stdin = child
3775 .stdin
3776 .as_mut()
3777 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
3778 let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
3779 for chunk in text.chunks() {
3780 stdin.write_all(chunk.as_bytes()).await?;
3781 }
3782 stdin.flush().await?;
3783
3784 let output = child.output().await?;
3785 if !output.status.success() {
3786 return Err(anyhow!(
3787 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
3788 output.status.code(),
3789 String::from_utf8_lossy(&output.stdout),
3790 String::from_utf8_lossy(&output.stderr),
3791 ));
3792 }
3793
3794 let stdout = String::from_utf8(output.stdout)?;
3795 Ok(Some(
3796 buffer
3797 .read_with(cx, |buffer, cx| buffer.diff(stdout, cx))
3798 .await,
3799 ))
3800 } else {
3801 Ok(None)
3802 }
3803 }
3804
3805 pub fn definition<T: ToPointUtf16>(
3806 &self,
3807 buffer: &ModelHandle<Buffer>,
3808 position: T,
3809 cx: &mut ModelContext<Self>,
3810 ) -> Task<Result<Vec<LocationLink>>> {
3811 let position = position.to_point_utf16(buffer.read(cx));
3812 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3813 }
3814
3815 pub fn type_definition<T: ToPointUtf16>(
3816 &self,
3817 buffer: &ModelHandle<Buffer>,
3818 position: T,
3819 cx: &mut ModelContext<Self>,
3820 ) -> Task<Result<Vec<LocationLink>>> {
3821 let position = position.to_point_utf16(buffer.read(cx));
3822 self.request_lsp(buffer.clone(), GetTypeDefinition { position }, cx)
3823 }
3824
3825 pub fn references<T: ToPointUtf16>(
3826 &self,
3827 buffer: &ModelHandle<Buffer>,
3828 position: T,
3829 cx: &mut ModelContext<Self>,
3830 ) -> Task<Result<Vec<Location>>> {
3831 let position = position.to_point_utf16(buffer.read(cx));
3832 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3833 }
3834
3835 pub fn document_highlights<T: ToPointUtf16>(
3836 &self,
3837 buffer: &ModelHandle<Buffer>,
3838 position: T,
3839 cx: &mut ModelContext<Self>,
3840 ) -> Task<Result<Vec<DocumentHighlight>>> {
3841 let position = position.to_point_utf16(buffer.read(cx));
3842 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3843 }
3844
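    /// Searches for workspace symbols matching `query`.
    ///
    /// On a local project this sends a `workspace/symbol` request to every
    /// running language server and converts the responses into `Symbol`s,
    /// resolving each result to a worktree-relative path and a language for
    /// labeling. On a remote project the query is forwarded to the host via
    /// `proto::GetProjectSymbols`.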
3845 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3846 if self.is_local() {
3847 let mut requests = Vec::new();
3848 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
3849 let worktree_id = *worktree_id;
3850 if let Some(worktree) = self
3851 .worktree_for_id(worktree_id, cx)
3852 .and_then(|worktree| worktree.read(cx).as_local())
3853 {
3854 if let Some(LanguageServerState::Running {
3855 adapter,
3856 language,
3857 server,
3858 ..
3859 }) = self.language_servers.get(server_id)
3860 {
3861 let adapter = adapter.clone();
3862 let language = language.clone();
3863 let worktree_abs_path = worktree.abs_path().clone();
3864 requests.push(
3865 server
3866 .request::<lsp::request::WorkspaceSymbolRequest>(
3867 lsp::WorkspaceSymbolParams {
3868 query: query.to_string(),
3869 ..Default::default()
3870 },
3871 )
3872 .log_err()
3873 .map(move |response| {
3874 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
3875 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
3876 flat_responses.into_iter().map(|lsp_symbol| {
3877 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
3878 }).collect::<Vec<_>>()
3879 }
3880 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
3881 nested_responses.into_iter().filter_map(|lsp_symbol| {
3882 let location = match lsp_symbol.location {
3883 lsp::OneOf::Left(location) => location,
3884 lsp::OneOf::Right(_) => {
3885 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
3886 return None
3887 }
3888 };
3889 Some((lsp_symbol.name, lsp_symbol.kind, location))
3890 }).collect::<Vec<_>>()
3891 }
3892 }).unwrap_or_default();
3893
3894 (
3895 adapter,
3896 language,
3897 worktree_id,
3898 worktree_abs_path,
3899 lsp_symbols,
3900 )
3901 }),
3902 );
3903 }
3904 }
3905 }
3906
3907 cx.spawn_weak(|this, cx| async move {
3908 let responses = futures::future::join_all(requests).await;
3909 let this = if let Some(this) = this.upgrade(&cx) {
3910 this
3911 } else {
3912 return Ok(Default::default());
3913 };
3914 let symbols = this.read_with(&cx, |this, cx| {
3915 let mut symbols = Vec::new();
3916 for (
3917 adapter,
3918 adapter_language,
3919 source_worktree_id,
3920 worktree_abs_path,
3921 lsp_symbols,
3922 ) in responses
3923 {
3924 symbols.extend(lsp_symbols.into_iter().filter_map(
3925 |(symbol_name, symbol_kind, symbol_location)| {
3926 let abs_path = symbol_location.uri.to_file_path().ok()?;
3927 let mut worktree_id = source_worktree_id;
3928 let path;
3929 if let Some((worktree, rel_path)) =
3930 this.find_local_worktree(&abs_path, cx)
3931 {
3932 worktree_id = worktree.read(cx).id();
3933 path = rel_path;
3934 } else {
3935 path = relativize_path(&worktree_abs_path, &abs_path);
3936 }
3937
3938 let project_path = ProjectPath {
3939 worktree_id,
3940 path: path.into(),
3941 };
3942 let signature = this.symbol_signature(&project_path);
3943 let adapter_language = adapter_language.clone();
3944 let language = this
3945 .languages
3946 .language_for_file(&project_path.path, None)
3947 .unwrap_or_else(move |_| adapter_language);
3948 let language_server_name = adapter.name.clone();
3949 Some(async move {
3950 let language = language.await;
3951 let label =
3952 language.label_for_symbol(&symbol_name, symbol_kind).await;
3953
3954 Symbol {
3955 language_server_name,
3956 source_worktree_id,
3957 path: project_path,
3958 label: label.unwrap_or_else(|| {
3959 CodeLabel::plain(symbol_name.clone(), None)
3960 }),
3961 kind: symbol_kind,
3962 name: symbol_name,
3963 range: range_from_lsp(symbol_location.range),
3964 signature,
3965 }
3966 })
3967 },
3968 ));
3969 }
3970 symbols
3971 });
3972 Ok(futures::future::join_all(symbols).await)
3973 })
3974 } else if let Some(project_id) = self.remote_id() {
3975 let request = self.client.request(proto::GetProjectSymbols {
3976 project_id,
3977 query: query.to_string(),
3978 });
3979 cx.spawn_weak(|this, cx| async move {
3980 let response = request.await?;
3981 let mut symbols = Vec::new();
3982 if let Some(this) = this.upgrade(&cx) {
3983 let new_symbols = this.read_with(&cx, |this, _| {
3984 response
3985 .symbols
3986 .into_iter()
3987 .map(|symbol| this.deserialize_symbol(symbol))
3988 .collect::<Vec<_>>()
3989 });
3990 symbols = futures::future::join_all(new_symbols)
3991 .await
3992 .into_iter()
3993 .filter_map(|symbol| symbol.log_err())
3994 .collect::<Vec<_>>();
3995 }
3996 Ok(symbols)
3997 })
3998 } else {
3999 Task::ready(Ok(Default::default()))
4000 }
4001 }
4002
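    /// Opens the buffer that contains the given symbol.
    ///
    /// On a local project this resolves the symbol's absolute path within its
    /// worktree and opens it through the language server that produced the
    /// symbol. On a remote project the host is asked to open the buffer, which
    /// is then awaited locally.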
4003 pub fn open_buffer_for_symbol(
4004 &mut self,
4005 symbol: &Symbol,
4006 cx: &mut ModelContext<Self>,
4007 ) -> Task<Result<ModelHandle<Buffer>>> {
4008 if self.is_local() {
4009 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4010 symbol.source_worktree_id,
4011 symbol.language_server_name.clone(),
4012 )) {
4013 *id
4014 } else {
4015 return Task::ready(Err(anyhow!(
4016 "language server for worktree and language not found"
4017 )));
4018 };
4019
4020 let worktree_abs_path = if let Some(worktree_abs_path) = self
4021 .worktree_for_id(symbol.path.worktree_id, cx)
4022 .and_then(|worktree| worktree.read(cx).as_local())
4023 .map(|local_worktree| local_worktree.abs_path())
4024 {
4025 worktree_abs_path
4026 } else {
4027 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4028 };
4029 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
4030 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4031 uri
4032 } else {
4033 return Task::ready(Err(anyhow!("invalid symbol path")));
4034 };
4035
4036 self.open_local_buffer_via_lsp(
4037 symbol_uri,
4038 language_server_id,
4039 symbol.language_server_name.clone(),
4040 cx,
4041 )
4042 } else if let Some(project_id) = self.remote_id() {
4043 let request = self.client.request(proto::OpenBufferForSymbol {
4044 project_id,
4045 symbol: Some(serialize_symbol(symbol)),
4046 });
4047 cx.spawn(|this, mut cx| async move {
4048 let response = request.await?;
4049 this.update(&mut cx, |this, cx| {
4050 this.wait_for_remote_buffer(response.buffer_id, cx)
4051 })
4052 .await
4053 })
4054 } else {
4055 Task::ready(Err(anyhow!("project does not have a remote id")))
4056 }
4057 }
4058
4059 pub fn hover<T: ToPointUtf16>(
4060 &self,
4061 buffer: &ModelHandle<Buffer>,
4062 position: T,
4063 cx: &mut ModelContext<Self>,
4064 ) -> Task<Result<Option<Hover>>> {
4065 let position = position.to_point_utf16(buffer.read(cx));
4066 self.request_lsp(buffer.clone(), GetHover { position }, cx)
4067 }
4068
4069 pub fn completions<T: ToPointUtf16>(
4070 &self,
4071 buffer: &ModelHandle<Buffer>,
4072 position: T,
4073 cx: &mut ModelContext<Self>,
4074 ) -> Task<Result<Vec<Completion>>> {
4075 let position = position.to_point_utf16(buffer.read(cx));
4076 self.request_lsp(buffer.clone(), GetCompletions { position }, cx)
4077 }
4078
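    /// Applies a completion's additional text edits (such as automatically
    /// inserted imports) after the completion itself has been accepted.
    ///
    /// On a local project the completion item is resolved via
    /// `completionItem/resolve` and any `additional_text_edits` are applied,
    /// skipping edits that overlap the primary completion range. On a remote
    /// project the request is forwarded to the host and the resulting
    /// transaction is awaited.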
4079 pub fn apply_additional_edits_for_completion(
4080 &self,
4081 buffer_handle: ModelHandle<Buffer>,
4082 completion: Completion,
4083 push_to_history: bool,
4084 cx: &mut ModelContext<Self>,
4085 ) -> Task<Result<Option<Transaction>>> {
4086 let buffer = buffer_handle.read(cx);
4087 let buffer_id = buffer.remote_id();
4088
4089 if self.is_local() {
4090 let lang_server = match self.primary_language_servers_for_buffer(buffer, cx) {
4091 Some((_, server)) => server.clone(),
4092 _ => return Task::ready(Ok(Default::default())),
4093 };
4094
4095 cx.spawn(|this, mut cx| async move {
4096 let resolved_completion = lang_server
4097 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4098 .await?;
4099
4100 if let Some(edits) = resolved_completion.additional_text_edits {
4101 let edits = this
4102 .update(&mut cx, |this, cx| {
4103 this.edits_from_lsp(
4104 &buffer_handle,
4105 edits,
4106 lang_server.server_id(),
4107 None,
4108 cx,
4109 )
4110 })
4111 .await?;
4112
4113 buffer_handle.update(&mut cx, |buffer, cx| {
4114 buffer.finalize_last_transaction();
4115 buffer.start_transaction();
4116
4117 for (range, text) in edits {
4118 let primary = &completion.old_range;
4119 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4120 && primary.end.cmp(&range.start, buffer).is_ge();
4121 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4122 && range.end.cmp(&primary.end, buffer).is_ge();
4123
                        // Skip additional edits which overlap with the primary completion edit
                        // https://github.com/zed-industries/zed/pull/1871
4126 if !start_within && !end_within {
4127 buffer.edit([(range, text)], None, cx);
4128 }
4129 }
4130
4131 let transaction = if buffer.end_transaction(cx).is_some() {
4132 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4133 if !push_to_history {
4134 buffer.forget_transaction(transaction.id);
4135 }
4136 Some(transaction)
4137 } else {
4138 None
4139 };
4140 Ok(transaction)
4141 })
4142 } else {
4143 Ok(None)
4144 }
4145 })
4146 } else if let Some(project_id) = self.remote_id() {
4147 let client = self.client.clone();
4148 cx.spawn(|_, mut cx| async move {
4149 let response = client
4150 .request(proto::ApplyCompletionAdditionalEdits {
4151 project_id,
4152 buffer_id,
4153 completion: Some(language::proto::serialize_completion(&completion)),
4154 })
4155 .await?;
4156
4157 if let Some(transaction) = response.transaction {
4158 let transaction = language::proto::deserialize_transaction(transaction)?;
4159 buffer_handle
4160 .update(&mut cx, |buffer, _| {
4161 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4162 })
4163 .await?;
4164 if push_to_history {
4165 buffer_handle.update(&mut cx, |buffer, _| {
4166 buffer.push_transaction(transaction.clone(), Instant::now());
4167 });
4168 }
4169 Ok(Some(transaction))
4170 } else {
4171 Ok(None)
4172 }
4173 })
4174 } else {
4175 Task::ready(Err(anyhow!("project does not have a remote id")))
4176 }
4177 }
4178
4179 pub fn code_actions<T: Clone + ToOffset>(
4180 &self,
4181 buffer_handle: &ModelHandle<Buffer>,
4182 range: Range<T>,
4183 cx: &mut ModelContext<Self>,
4184 ) -> Task<Result<Vec<CodeAction>>> {
4185 let buffer = buffer_handle.read(cx);
4186 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4187 self.request_lsp(buffer_handle.clone(), GetCodeActions { range }, cx)
4188 }
4189
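    /// Applies a code action to the project.
    ///
    /// On a local project, actions carrying resolve `data` are completed via
    /// `codeAction/resolve` after patching in the action's current range;
    /// otherwise the code actions are re-requested and matched by title. The
    /// resolved action's workspace edit is then applied, or, if the action only
    /// carries a command, that command is executed and any workspace edits the
    /// server sends back are returned. On a remote project the action is
    /// forwarded to the host.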
4190 pub fn apply_code_action(
4191 &self,
4192 buffer_handle: ModelHandle<Buffer>,
4193 mut action: CodeAction,
4194 push_to_history: bool,
4195 cx: &mut ModelContext<Self>,
4196 ) -> Task<Result<ProjectTransaction>> {
4197 if self.is_local() {
4198 let buffer = buffer_handle.read(cx);
4199 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4200 self.language_server_for_buffer(buffer, action.server_id, cx)
4201 {
4202 (adapter.clone(), server.clone())
4203 } else {
4204 return Task::ready(Ok(Default::default()));
4205 };
4206 let range = action.range.to_point_utf16(buffer);
4207
4208 cx.spawn(|this, mut cx| async move {
4209 if let Some(lsp_range) = action
4210 .lsp_action
4211 .data
4212 .as_mut()
4213 .and_then(|d| d.get_mut("codeActionParams"))
4214 .and_then(|d| d.get_mut("range"))
4215 {
4216 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
4217 action.lsp_action = lang_server
4218 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
4219 .await?;
4220 } else {
4221 let actions = this
4222 .update(&mut cx, |this, cx| {
4223 this.code_actions(&buffer_handle, action.range, cx)
4224 })
4225 .await?;
4226 action.lsp_action = actions
4227 .into_iter()
4228 .find(|a| a.lsp_action.title == action.lsp_action.title)
4229 .ok_or_else(|| anyhow!("code action is outdated"))?
4230 .lsp_action;
4231 }
4232
4233 if let Some(edit) = action.lsp_action.edit {
4234 if edit.changes.is_some() || edit.document_changes.is_some() {
4235 return Self::deserialize_workspace_edit(
4236 this,
4237 edit,
4238 push_to_history,
4239 lsp_adapter.clone(),
4240 lang_server.clone(),
4241 &mut cx,
4242 )
4243 .await;
4244 }
4245 }
4246
4247 if let Some(command) = action.lsp_action.command {
4248 this.update(&mut cx, |this, _| {
4249 this.last_workspace_edits_by_language_server
4250 .remove(&lang_server.server_id());
4251 });
4252 lang_server
4253 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
4254 command: command.command,
4255 arguments: command.arguments.unwrap_or_default(),
4256 ..Default::default()
4257 })
4258 .await?;
4259 return Ok(this.update(&mut cx, |this, _| {
4260 this.last_workspace_edits_by_language_server
4261 .remove(&lang_server.server_id())
4262 .unwrap_or_default()
4263 }));
4264 }
4265
4266 Ok(ProjectTransaction::default())
4267 })
4268 } else if let Some(project_id) = self.remote_id() {
4269 let client = self.client.clone();
4270 let request = proto::ApplyCodeAction {
4271 project_id,
4272 buffer_id: buffer_handle.read(cx).remote_id(),
4273 action: Some(language::proto::serialize_code_action(&action)),
4274 };
4275 cx.spawn(|this, mut cx| async move {
4276 let response = client
4277 .request(request)
4278 .await?
4279 .transaction
4280 .ok_or_else(|| anyhow!("missing transaction"))?;
4281 this.update(&mut cx, |this, cx| {
4282 this.deserialize_project_transaction(response, push_to_history, cx)
4283 })
4284 .await
4285 })
4286 } else {
4287 Task::ready(Err(anyhow!("project does not have a remote id")))
4288 }
4289 }
4290
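    /// Handles an on-type-formatting trigger at the given buffer position.
    ///
    /// On a local project the buffer is recorded in `buffers_being_formatted`
    /// to prevent concurrent formatting of the same buffer, the edit that
    /// produced `position` is awaited, and the request is delegated to
    /// `on_type_format`. On a remote project it is forwarded to the host as
    /// `proto::OnTypeFormatting`.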
4291 fn apply_on_type_formatting(
4292 &self,
4293 buffer: ModelHandle<Buffer>,
4294 position: Anchor,
4295 trigger: String,
4296 cx: &mut ModelContext<Self>,
4297 ) -> Task<Result<Option<Transaction>>> {
4298 if self.is_local() {
4299 cx.spawn(|this, mut cx| async move {
4300 // Do not allow multiple concurrent formatting requests for the
4301 // same buffer.
4302 this.update(&mut cx, |this, cx| {
4303 this.buffers_being_formatted
4304 .insert(buffer.read(cx).remote_id())
4305 });
4306
4307 let _cleanup = defer({
4308 let this = this.clone();
4309 let mut cx = cx.clone();
4310 let closure_buffer = buffer.clone();
4311 move || {
4312 this.update(&mut cx, |this, cx| {
4313 this.buffers_being_formatted
4314 .remove(&closure_buffer.read(cx).remote_id());
4315 });
4316 }
4317 });
4318
4319 buffer
4320 .update(&mut cx, |buffer, _| {
4321 buffer.wait_for_edits(Some(position.timestamp))
4322 })
4323 .await?;
4324 this.update(&mut cx, |this, cx| {
4325 let position = position.to_point_utf16(buffer.read(cx));
4326 this.on_type_format(buffer, position, trigger, false, cx)
4327 })
4328 .await
4329 })
4330 } else if let Some(project_id) = self.remote_id() {
4331 let client = self.client.clone();
4332 let request = proto::OnTypeFormatting {
4333 project_id,
4334 buffer_id: buffer.read(cx).remote_id(),
4335 position: Some(serialize_anchor(&position)),
4336 trigger,
4337 version: serialize_version(&buffer.read(cx).version()),
4338 };
4339 cx.spawn(|_, _| async move {
4340 client
4341 .request(request)
4342 .await?
4343 .transaction
4344 .map(language::proto::deserialize_transaction)
4345 .transpose()
4346 })
4347 } else {
4348 Task::ready(Err(anyhow!("project does not have a remote id")))
4349 }
4350 }
4351
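    /// Applies a list of LSP text edits to a buffer as a single transaction.
    ///
    /// The edits are converted into anchor ranges via `edits_from_lsp` and then
    /// applied inside one transaction. When `push_to_history` is false, the
    /// transaction is forgotten so it does not appear in the undo history.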
4352 async fn deserialize_edits(
4353 this: ModelHandle<Self>,
4354 buffer_to_edit: ModelHandle<Buffer>,
4355 edits: Vec<lsp::TextEdit>,
4356 push_to_history: bool,
4357 _: Arc<CachedLspAdapter>,
4358 language_server: Arc<LanguageServer>,
4359 cx: &mut AsyncAppContext,
4360 ) -> Result<Option<Transaction>> {
4361 let edits = this
4362 .update(cx, |this, cx| {
4363 this.edits_from_lsp(
4364 &buffer_to_edit,
4365 edits,
4366 language_server.server_id(),
4367 None,
4368 cx,
4369 )
4370 })
4371 .await?;
4372
4373 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
4374 buffer.finalize_last_transaction();
4375 buffer.start_transaction();
4376 for (range, text) in edits {
4377 buffer.edit([(range, text)], None, cx);
4378 }
4379
4380 if buffer.end_transaction(cx).is_some() {
4381 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4382 if !push_to_history {
4383 buffer.forget_transaction(transaction.id);
4384 }
4385 Some(transaction)
4386 } else {
4387 None
4388 }
4389 });
4390
4391 Ok(transaction)
4392 }
4393
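    /// Applies an LSP `WorkspaceEdit` to the project.
    ///
    /// Resource operations (create, rename, delete) are performed directly on
    /// the filesystem, while document edits are applied to buffers opened via
    /// the originating language server. Each edited buffer's transaction is
    /// collected into the returned `ProjectTransaction`.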
4394 async fn deserialize_workspace_edit(
4395 this: ModelHandle<Self>,
4396 edit: lsp::WorkspaceEdit,
4397 push_to_history: bool,
4398 lsp_adapter: Arc<CachedLspAdapter>,
4399 language_server: Arc<LanguageServer>,
4400 cx: &mut AsyncAppContext,
4401 ) -> Result<ProjectTransaction> {
4402 let fs = this.read_with(cx, |this, _| this.fs.clone());
4403 let mut operations = Vec::new();
4404 if let Some(document_changes) = edit.document_changes {
4405 match document_changes {
4406 lsp::DocumentChanges::Edits(edits) => {
4407 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
4408 }
4409 lsp::DocumentChanges::Operations(ops) => operations = ops,
4410 }
4411 } else if let Some(changes) = edit.changes {
4412 operations.extend(changes.into_iter().map(|(uri, edits)| {
4413 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
4414 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
4415 uri,
4416 version: None,
4417 },
4418 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
4419 })
4420 }));
4421 }
4422
4423 let mut project_transaction = ProjectTransaction::default();
4424 for operation in operations {
4425 match operation {
4426 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
4427 let abs_path = op
4428 .uri
4429 .to_file_path()
4430 .map_err(|_| anyhow!("can't convert URI to path"))?;
4431
4432 if let Some(parent_path) = abs_path.parent() {
4433 fs.create_dir(parent_path).await?;
4434 }
4435 if abs_path.ends_with("/") {
4436 fs.create_dir(&abs_path).await?;
4437 } else {
4438 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
4439 .await?;
4440 }
4441 }
4442
4443 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
4444 let source_abs_path = op
4445 .old_uri
4446 .to_file_path()
4447 .map_err(|_| anyhow!("can't convert URI to path"))?;
4448 let target_abs_path = op
4449 .new_uri
4450 .to_file_path()
4451 .map_err(|_| anyhow!("can't convert URI to path"))?;
4452 fs.rename(
4453 &source_abs_path,
4454 &target_abs_path,
4455 op.options.map(Into::into).unwrap_or_default(),
4456 )
4457 .await?;
4458 }
4459
4460 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
4461 let abs_path = op
4462 .uri
4463 .to_file_path()
4464 .map_err(|_| anyhow!("can't convert URI to path"))?;
4465 let options = op.options.map(Into::into).unwrap_or_default();
4466 if abs_path.ends_with("/") {
4467 fs.remove_dir(&abs_path, options).await?;
4468 } else {
4469 fs.remove_file(&abs_path, options).await?;
4470 }
4471 }
4472
4473 lsp::DocumentChangeOperation::Edit(op) => {
4474 let buffer_to_edit = this
4475 .update(cx, |this, cx| {
4476 this.open_local_buffer_via_lsp(
4477 op.text_document.uri,
4478 language_server.server_id(),
4479 lsp_adapter.name.clone(),
4480 cx,
4481 )
4482 })
4483 .await?;
4484
4485 let edits = this
4486 .update(cx, |this, cx| {
4487 let edits = op.edits.into_iter().map(|edit| match edit {
4488 lsp::OneOf::Left(edit) => edit,
4489 lsp::OneOf::Right(edit) => edit.text_edit,
4490 });
4491 this.edits_from_lsp(
4492 &buffer_to_edit,
4493 edits,
4494 language_server.server_id(),
4495 op.text_document.version,
4496 cx,
4497 )
4498 })
4499 .await?;
4500
4501 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
4502 buffer.finalize_last_transaction();
4503 buffer.start_transaction();
4504 for (range, text) in edits {
4505 buffer.edit([(range, text)], None, cx);
4506 }
4507 let transaction = if buffer.end_transaction(cx).is_some() {
4508 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4509 if !push_to_history {
4510 buffer.forget_transaction(transaction.id);
4511 }
4512 Some(transaction)
4513 } else {
4514 None
4515 };
4516
4517 transaction
4518 });
4519 if let Some(transaction) = transaction {
4520 project_transaction.0.insert(buffer_to_edit, transaction);
4521 }
4522 }
4523 }
4524 }
4525
4526 Ok(project_transaction)
4527 }
4528
4529 pub fn prepare_rename<T: ToPointUtf16>(
4530 &self,
4531 buffer: ModelHandle<Buffer>,
4532 position: T,
4533 cx: &mut ModelContext<Self>,
4534 ) -> Task<Result<Option<Range<Anchor>>>> {
4535 let position = position.to_point_utf16(buffer.read(cx));
4536 self.request_lsp(buffer, PrepareRename { position }, cx)
4537 }
4538
4539 pub fn perform_rename<T: ToPointUtf16>(
4540 &self,
4541 buffer: ModelHandle<Buffer>,
4542 position: T,
4543 new_name: String,
4544 push_to_history: bool,
4545 cx: &mut ModelContext<Self>,
4546 ) -> Task<Result<ProjectTransaction>> {
4547 let position = position.to_point_utf16(buffer.read(cx));
4548 self.request_lsp(
4549 buffer,
4550 PerformRename {
4551 position,
4552 new_name,
4553 push_to_history,
4554 },
4555 cx,
4556 )
4557 }
4558
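    /// Requests on-type formatting edits at the given position.
    ///
    /// The tab size comes from the language settings for the language at
    /// `position`, and the request is dispatched as an `OnTypeFormatting`
    /// command via `request_lsp`.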
4559 pub fn on_type_format<T: ToPointUtf16>(
4560 &self,
4561 buffer: ModelHandle<Buffer>,
4562 position: T,
4563 trigger: String,
4564 push_to_history: bool,
4565 cx: &mut ModelContext<Self>,
4566 ) -> Task<Result<Option<Transaction>>> {
4567 let (position, tab_size) = buffer.read_with(cx, |buffer, cx| {
4568 let position = position.to_point_utf16(buffer);
4569 (
4570 position,
4571 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
4572 .tab_size,
4573 )
4574 });
4575 self.request_lsp(
4576 buffer.clone(),
4577 OnTypeFormatting {
4578 position,
4579 trigger,
4580 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
4581 push_to_history,
4582 },
4583 cx,
4584 )
4585 }
4586
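    /// Searches the project for matches of the given query.
    ///
    /// On a local project this runs as a pipeline: background workers scan the
    /// visible worktree files and emit the paths whose contents match the
    /// query, those paths (along with every already-open buffer) are opened as
    /// buffers, and a second pool of workers searches the buffer snapshots,
    /// producing anchor ranges for each match. On a remote project the query is
    /// forwarded to the host.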
4587 #[allow(clippy::type_complexity)]
4588 pub fn search(
4589 &self,
4590 query: SearchQuery,
4591 cx: &mut ModelContext<Self>,
4592 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
4593 if self.is_local() {
4594 let snapshots = self
4595 .visible_worktrees(cx)
4596 .filter_map(|tree| {
4597 let tree = tree.read(cx).as_local()?;
4598 Some(tree.snapshot())
4599 })
4600 .collect::<Vec<_>>();
4601
4602 let background = cx.background().clone();
4603 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
4604 if path_count == 0 {
4605 return Task::ready(Ok(Default::default()));
4606 }
4607 let workers = background.num_cpus().min(path_count);
4608 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
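            // Scan the visible worktree files on background threads, sending each
            // path that passes the query's include/exclude filters and whose
            // contents contain a match.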
4609 cx.background()
4610 .spawn({
4611 let fs = self.fs.clone();
4612 let background = cx.background().clone();
4613 let query = query.clone();
4614 async move {
4615 let fs = &fs;
4616 let query = &query;
4617 let matching_paths_tx = &matching_paths_tx;
4618 let paths_per_worker = (path_count + workers - 1) / workers;
4619 let snapshots = &snapshots;
4620 background
4621 .scoped(|scope| {
4622 for worker_ix in 0..workers {
4623 let worker_start_ix = worker_ix * paths_per_worker;
4624 let worker_end_ix = worker_start_ix + paths_per_worker;
4625 scope.spawn(async move {
4626 let mut snapshot_start_ix = 0;
4627 let mut abs_path = PathBuf::new();
4628 for snapshot in snapshots {
4629 let snapshot_end_ix =
4630 snapshot_start_ix + snapshot.visible_file_count();
4631 if worker_end_ix <= snapshot_start_ix {
4632 break;
4633 } else if worker_start_ix > snapshot_end_ix {
4634 snapshot_start_ix = snapshot_end_ix;
4635 continue;
4636 } else {
4637 let start_in_snapshot = worker_start_ix
4638 .saturating_sub(snapshot_start_ix);
4639 let end_in_snapshot =
4640 cmp::min(worker_end_ix, snapshot_end_ix)
4641 - snapshot_start_ix;
4642
4643 for entry in snapshot
4644 .files(false, start_in_snapshot)
4645 .take(end_in_snapshot - start_in_snapshot)
4646 {
4647 if matching_paths_tx.is_closed() {
4648 break;
4649 }
4650 let matches = if query
4651 .file_matches(Some(&entry.path))
4652 {
4653 abs_path.clear();
4654 abs_path.push(&snapshot.abs_path());
4655 abs_path.push(&entry.path);
4656 if let Some(file) =
4657 fs.open_sync(&abs_path).await.log_err()
4658 {
4659 query.detect(file).unwrap_or(false)
4660 } else {
4661 false
4662 }
4663 } else {
4664 false
4665 };
4666
4667 if matches {
4668 let project_path =
4669 (snapshot.id(), entry.path.clone());
4670 if matching_paths_tx
4671 .send(project_path)
4672 .await
4673 .is_err()
4674 {
4675 break;
4676 }
4677 }
4678 }
4679
4680 snapshot_start_ix = snapshot_end_ix;
4681 }
4682 }
4683 });
4684 }
4685 })
4686 .await;
4687 }
4688 })
4689 .detach();
4690
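            // Feed every already-open buffer into the content search, then open a
            // buffer for each newly matched path as it arrives.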
4691 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
4692 let open_buffers = self
4693 .opened_buffers
4694 .values()
4695 .filter_map(|b| b.upgrade(cx))
4696 .collect::<HashSet<_>>();
4697 cx.spawn(|this, cx| async move {
4698 for buffer in &open_buffers {
4699 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4700 buffers_tx.send((buffer.clone(), snapshot)).await?;
4701 }
4702
4703 let open_buffers = Rc::new(RefCell::new(open_buffers));
4704 while let Some(project_path) = matching_paths_rx.next().await {
4705 if buffers_tx.is_closed() {
4706 break;
4707 }
4708
4709 let this = this.clone();
4710 let open_buffers = open_buffers.clone();
4711 let buffers_tx = buffers_tx.clone();
4712 cx.spawn(|mut cx| async move {
4713 if let Some(buffer) = this
4714 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
4715 .await
4716 .log_err()
4717 {
4718 if open_buffers.borrow_mut().insert(buffer.clone()) {
4719 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4720 buffers_tx.send((buffer, snapshot)).await?;
4721 }
4722 }
4723
4724 Ok::<_, anyhow::Error>(())
4725 })
4726 .detach();
4727 }
4728
4729 Ok::<_, anyhow::Error>(())
4730 })
4731 .detach_and_log_err(cx);
4732
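            // Search the buffer snapshots in parallel, collecting anchor ranges
            // for every match.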
4733 let background = cx.background().clone();
4734 cx.background().spawn(async move {
4735 let query = &query;
4736 let mut matched_buffers = Vec::new();
4737 for _ in 0..workers {
4738 matched_buffers.push(HashMap::default());
4739 }
4740 background
4741 .scoped(|scope| {
4742 for worker_matched_buffers in matched_buffers.iter_mut() {
4743 let mut buffers_rx = buffers_rx.clone();
4744 scope.spawn(async move {
4745 while let Some((buffer, snapshot)) = buffers_rx.next().await {
4746 let buffer_matches = if query.file_matches(
4747 snapshot.file().map(|file| file.path().as_ref()),
4748 ) {
4749 query
4750 .search(snapshot.as_rope())
4751 .await
4752 .iter()
4753 .map(|range| {
4754 snapshot.anchor_before(range.start)
4755 ..snapshot.anchor_after(range.end)
4756 })
4757 .collect()
4758 } else {
4759 Vec::new()
4760 };
4761 if !buffer_matches.is_empty() {
4762 worker_matched_buffers
4763 .insert(buffer.clone(), buffer_matches);
4764 }
4765 }
4766 });
4767 }
4768 })
4769 .await;
4770 Ok(matched_buffers.into_iter().flatten().collect())
4771 })
4772 } else if let Some(project_id) = self.remote_id() {
4773 let request = self.client.request(query.to_proto(project_id));
4774 cx.spawn(|this, mut cx| async move {
4775 let response = request.await?;
4776 let mut result = HashMap::default();
4777 for location in response.locations {
4778 let target_buffer = this
4779 .update(&mut cx, |this, cx| {
4780 this.wait_for_remote_buffer(location.buffer_id, cx)
4781 })
4782 .await?;
4783 let start = location
4784 .start
4785 .and_then(deserialize_anchor)
4786 .ok_or_else(|| anyhow!("missing target start"))?;
4787 let end = location
4788 .end
4789 .and_then(deserialize_anchor)
4790 .ok_or_else(|| anyhow!("missing target end"))?;
4791 result
4792 .entry(target_buffer)
4793 .or_insert(Vec::new())
4794 .push(start..end)
4795 }
4796 Ok(result)
4797 })
4798 } else {
4799 Task::ready(Ok(Default::default()))
4800 }
4801 }
4802
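    /// Dispatches an `LspCommand` for the given buffer.
    ///
    /// On a local project the command is converted into an LSP request and sent
    /// to the buffer's primary language server, provided the server's
    /// capabilities allow it (otherwise a default response is returned); the
    /// LSP response is then converted back into the command's response type. On
    /// a remote project the command is serialized and sent to the host over
    /// RPC.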
4803 // TODO: Wire this up to allow selecting a server?
4804 fn request_lsp<R: LspCommand>(
4805 &self,
4806 buffer_handle: ModelHandle<Buffer>,
4807 request: R,
4808 cx: &mut ModelContext<Self>,
4809 ) -> Task<Result<R::Response>>
4810 where
4811 <R::LspRequest as lsp::request::Request>::Result: Send,
4812 {
4813 let buffer = buffer_handle.read(cx);
4814 if self.is_local() {
4815 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4816 if let Some((file, language_server)) = file.zip(
4817 self.primary_language_servers_for_buffer(buffer, cx)
4818 .map(|(_, server)| server.clone()),
4819 ) {
4820 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
4821 return cx.spawn(|this, cx| async move {
4822 if !request.check_capabilities(language_server.capabilities()) {
4823 return Ok(Default::default());
4824 }
4825
4826 let response = language_server
4827 .request::<R::LspRequest>(lsp_params)
4828 .await
4829 .context("lsp request failed")?;
4830 request
4831 .response_from_lsp(
4832 response,
4833 this,
4834 buffer_handle,
4835 language_server.server_id(),
4836 cx,
4837 )
4838 .await
4839 });
4840 }
4841 } else if let Some(project_id) = self.remote_id() {
4842 let rpc = self.client.clone();
4843 let message = request.to_proto(project_id, buffer);
4844 return cx.spawn_weak(|this, cx| async move {
4845 // Ensure the project is still alive by the time the task
4846 // is scheduled.
4847 this.upgrade(&cx)
4848 .ok_or_else(|| anyhow!("project dropped"))?;
4849
4850 let response = rpc.request(message).await?;
4851
4852 let this = this
4853 .upgrade(&cx)
4854 .ok_or_else(|| anyhow!("project dropped"))?;
4855 if this.read_with(&cx, |this, _| this.is_read_only()) {
4856 Err(anyhow!("disconnected before completing request"))
4857 } else {
4858 request
4859 .response_from_proto(response, this, buffer_handle, cx)
4860 .await
4861 }
4862 });
4863 }
4864 Task::ready(Ok(Default::default()))
4865 }
4866
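    /// Returns the local worktree containing `abs_path`, creating one if needed.
    ///
    /// If an existing worktree already contains the path, it is returned along
    /// with the path relative to that worktree's root; otherwise a new local
    /// worktree rooted at `abs_path` is created and returned with an empty
    /// relative path.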
4867 pub fn find_or_create_local_worktree(
4868 &mut self,
4869 abs_path: impl AsRef<Path>,
4870 visible: bool,
4871 cx: &mut ModelContext<Self>,
4872 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4873 let abs_path = abs_path.as_ref();
4874 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4875 Task::ready(Ok((tree, relative_path)))
4876 } else {
4877 let worktree = self.create_local_worktree(abs_path, visible, cx);
4878 cx.foreground()
4879 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4880 }
4881 }
4882
4883 pub fn find_local_worktree(
4884 &self,
4885 abs_path: &Path,
4886 cx: &AppContext,
4887 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4888 for tree in &self.worktrees {
4889 if let Some(tree) = tree.upgrade(cx) {
4890 if let Some(relative_path) = tree
4891 .read(cx)
4892 .as_local()
4893 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4894 {
4895 return Some((tree.clone(), relative_path.into()));
4896 }
4897 }
4898 }
4899 None
4900 }
4901
4902 pub fn is_shared(&self) -> bool {
4903 match &self.client_state {
4904 Some(ProjectClientState::Local { .. }) => true,
4905 _ => false,
4906 }
4907 }
4908
4909 fn create_local_worktree(
4910 &mut self,
4911 abs_path: impl AsRef<Path>,
4912 visible: bool,
4913 cx: &mut ModelContext<Self>,
4914 ) -> Task<Result<ModelHandle<Worktree>>> {
4915 let fs = self.fs.clone();
4916 let client = self.client.clone();
4917 let next_entry_id = self.next_entry_id.clone();
4918 let path: Arc<Path> = abs_path.as_ref().into();
4919 let task = self
4920 .loading_local_worktrees
4921 .entry(path.clone())
4922 .or_insert_with(|| {
4923 cx.spawn(|project, mut cx| {
4924 async move {
4925 let worktree = Worktree::local(
4926 client.clone(),
4927 path.clone(),
4928 visible,
4929 fs,
4930 next_entry_id,
4931 &mut cx,
4932 )
4933 .await;
4934
4935 project.update(&mut cx, |project, _| {
4936 project.loading_local_worktrees.remove(&path);
4937 });
4938
4939 let worktree = worktree?;
4940 project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
4941 Ok(worktree)
4942 }
4943 .map_err(Arc::new)
4944 })
4945 .shared()
4946 })
4947 .clone();
4948 cx.foreground().spawn(async move {
4949 match task.await {
4950 Ok(worktree) => Ok(worktree),
4951 Err(err) => Err(anyhow!("{}", err)),
4952 }
4953 })
4954 }
4955
4956 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4957 self.worktrees.retain(|worktree| {
4958 if let Some(worktree) = worktree.upgrade(cx) {
4959 let id = worktree.read(cx).id();
4960 if id == id_to_remove {
4961 cx.emit(Event::WorktreeRemoved(id));
4962 false
4963 } else {
4964 true
4965 }
4966 } else {
4967 false
4968 }
4969 });
4970 self.metadata_changed(cx);
4971 }
4972
4973 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4974 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
4975 if worktree.read(cx).is_local() {
4976 cx.subscribe(worktree, |this, worktree, event, cx| match event {
4977 worktree::Event::UpdatedEntries(changes) => {
4978 this.update_local_worktree_buffers(&worktree, changes, cx);
4979 this.update_local_worktree_language_servers(&worktree, changes, cx);
4980 this.update_local_worktree_settings(&worktree, changes, cx);
4981 }
4982 worktree::Event::UpdatedGitRepositories(updated_repos) => {
4983 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
4984 }
4985 })
4986 .detach();
4987 }
4988
4989 let push_strong_handle = {
4990 let worktree = worktree.read(cx);
4991 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4992 };
4993 if push_strong_handle {
4994 self.worktrees
4995 .push(WorktreeHandle::Strong(worktree.clone()));
4996 } else {
4997 self.worktrees
4998 .push(WorktreeHandle::Weak(worktree.downgrade()));
4999 }
5000
5001 let handle_id = worktree.id();
5002 cx.observe_release(worktree, move |this, worktree, cx| {
5003 let _ = this.remove_worktree(worktree.id(), cx);
5004 cx.update_global::<SettingsStore, _, _>(|store, cx| {
5005 store.clear_local_settings(handle_id, cx).log_err()
5006 });
5007 })
5008 .detach();
5009
5010 cx.emit(Event::WorktreeAdded);
5011 self.metadata_changed(cx);
5012 }
5013
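    /// Reconciles open buffers with a batch of local worktree entry changes.
    ///
    /// For every changed entry that corresponds to an open buffer, this updates
    /// the buffer's `File` (marking it deleted when the entry is gone), keeps
    /// the path and entry-id lookup tables in sync, notifies collaborators of
    /// the new file metadata, and re-registers renamed buffers with the
    /// appropriate language servers.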
5014 fn update_local_worktree_buffers(
5015 &mut self,
5016 worktree_handle: &ModelHandle<Worktree>,
5017 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
5018 cx: &mut ModelContext<Self>,
5019 ) {
5020 let snapshot = worktree_handle.read(cx).snapshot();
5021
5022 let mut renamed_buffers = Vec::new();
5023 for (path, entry_id, _) in changes {
5024 let worktree_id = worktree_handle.read(cx).id();
5025 let project_path = ProjectPath {
5026 worktree_id,
5027 path: path.clone(),
5028 };
5029
5030 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
5031 Some(&buffer_id) => buffer_id,
5032 None => match self.local_buffer_ids_by_path.get(&project_path) {
5033 Some(&buffer_id) => buffer_id,
5034 None => continue,
5035 },
5036 };
5037
5038 let open_buffer = self.opened_buffers.get(&buffer_id);
5039 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
5040 buffer
5041 } else {
5042 self.opened_buffers.remove(&buffer_id);
5043 self.local_buffer_ids_by_path.remove(&project_path);
5044 self.local_buffer_ids_by_entry_id.remove(entry_id);
5045 continue;
5046 };
5047
5048 buffer.update(cx, |buffer, cx| {
5049 if let Some(old_file) = File::from_dyn(buffer.file()) {
5050 if old_file.worktree != *worktree_handle {
5051 return;
5052 }
5053
5054 let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
5055 File {
5056 is_local: true,
5057 entry_id: entry.id,
5058 mtime: entry.mtime,
5059 path: entry.path.clone(),
5060 worktree: worktree_handle.clone(),
5061 is_deleted: false,
5062 }
5063 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
5064 File {
5065 is_local: true,
5066 entry_id: entry.id,
5067 mtime: entry.mtime,
5068 path: entry.path.clone(),
5069 worktree: worktree_handle.clone(),
5070 is_deleted: false,
5071 }
5072 } else {
5073 File {
5074 is_local: true,
5075 entry_id: old_file.entry_id,
5076 path: old_file.path().clone(),
5077 mtime: old_file.mtime(),
5078 worktree: worktree_handle.clone(),
5079 is_deleted: true,
5080 }
5081 };
5082
5083 let old_path = old_file.abs_path(cx);
5084 if new_file.abs_path(cx) != old_path {
5085 renamed_buffers.push((cx.handle(), old_file.clone()));
5086 self.local_buffer_ids_by_path.remove(&project_path);
5087 self.local_buffer_ids_by_path.insert(
5088 ProjectPath {
5089 worktree_id,
5090 path: path.clone(),
5091 },
5092 buffer_id,
5093 );
5094 }
5095
5096 if new_file.entry_id != *entry_id {
5097 self.local_buffer_ids_by_entry_id.remove(entry_id);
5098 self.local_buffer_ids_by_entry_id
5099 .insert(new_file.entry_id, buffer_id);
5100 }
5101
5102 if new_file != *old_file {
5103 if let Some(project_id) = self.remote_id() {
5104 self.client
5105 .send(proto::UpdateBufferFile {
5106 project_id,
5107 buffer_id: buffer_id as u64,
5108 file: Some(new_file.to_proto()),
5109 })
5110 .log_err();
5111 }
5112
5113 buffer.file_updated(Arc::new(new_file), cx).detach();
5114 }
5115 }
5116 });
5117 }
5118
5119 for (buffer, old_file) in renamed_buffers {
5120 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
5121 self.detect_language_for_buffer(&buffer, cx);
5122 self.register_buffer_with_language_servers(&buffer, cx);
5123 }
5124 }
5125
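    /// Forwards worktree entry changes to the language servers watching them.
    ///
    /// Each language server attached to the worktree receives a
    /// `workspace/didChangeWatchedFiles` notification containing the changed
    /// paths that match the glob patterns it registered, translated into LSP
    /// file change events.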
5126 fn update_local_worktree_language_servers(
5127 &mut self,
5128 worktree_handle: &ModelHandle<Worktree>,
5129 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
5130 cx: &mut ModelContext<Self>,
5131 ) {
5132 if changes.is_empty() {
5133 return;
5134 }
5135
5136 let worktree_id = worktree_handle.read(cx).id();
5137 let mut language_server_ids = self
5138 .language_server_ids
5139 .iter()
5140 .filter_map(|((server_worktree_id, _), server_id)| {
5141 (*server_worktree_id == worktree_id).then_some(*server_id)
5142 })
5143 .collect::<Vec<_>>();
5144 language_server_ids.sort();
5145 language_server_ids.dedup();
5146
5147 let abs_path = worktree_handle.read(cx).abs_path();
5148 for server_id in &language_server_ids {
5149 if let Some(server) = self.language_servers.get(server_id) {
5150 if let LanguageServerState::Running {
5151 server,
5152 watched_paths,
5153 ..
5154 } = server
5155 {
5156 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
5157 let params = lsp::DidChangeWatchedFilesParams {
5158 changes: changes
5159 .iter()
5160 .filter_map(|(path, _, change)| {
5161 if !watched_paths.is_match(&path) {
5162 return None;
5163 }
5164 let typ = match change {
5165 PathChange::Loaded => return None,
5166 PathChange::Added => lsp::FileChangeType::CREATED,
5167 PathChange::Removed => lsp::FileChangeType::DELETED,
5168 PathChange::Updated => lsp::FileChangeType::CHANGED,
5169 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
5170 };
5171 Some(lsp::FileEvent {
5172 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
5173 typ,
5174 })
5175 })
5176 .collect(),
5177 };
5178
5179 if !params.changes.is_empty() {
5180 server
5181 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
5182 .log_err();
5183 }
5184 }
5185 }
5186 }
5187 }
5188 }
5189
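    /// Refreshes git diff bases after repositories in a worktree have changed.
    ///
    /// Both buffers that are still loading and buffers that are already open
    /// are considered; for each one inside a changed repository, the index text
    /// is reloaded on a background thread, set as the buffer's diff base, and
    /// sent to collaborators when the project is shared.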
5190 fn update_local_worktree_buffers_git_repos(
5191 &mut self,
5192 worktree_handle: ModelHandle<Worktree>,
5193 changed_repos: &UpdatedGitRepositoriesSet,
5194 cx: &mut ModelContext<Self>,
5195 ) {
5196 debug_assert!(worktree_handle.read(cx).is_local());
5197
        // Identify the loading buffers whose containing repository has changed.
5199 let future_buffers = self
5200 .loading_buffers_by_path
5201 .iter()
5202 .filter_map(|(project_path, receiver)| {
5203 if project_path.worktree_id != worktree_handle.read(cx).id() {
5204 return None;
5205 }
5206 let path = &project_path.path;
5207 changed_repos
5208 .iter()
5209 .find(|(work_dir, _)| path.starts_with(work_dir))?;
5210 let receiver = receiver.clone();
5211 let path = path.clone();
5212 Some(async move {
5213 wait_for_loading_buffer(receiver)
5214 .await
5215 .ok()
5216 .map(|buffer| (buffer, path))
5217 })
5218 })
5219 .collect::<FuturesUnordered<_>>();
5220
5221 // Identify the current buffers whose containing repository has changed.
5222 let current_buffers = self
5223 .opened_buffers
5224 .values()
5225 .filter_map(|buffer| {
5226 let buffer = buffer.upgrade(cx)?;
5227 let file = File::from_dyn(buffer.read(cx).file())?;
5228 if file.worktree != worktree_handle {
5229 return None;
5230 }
5231 let path = file.path();
5232 changed_repos
5233 .iter()
5234 .find(|(work_dir, _)| path.starts_with(work_dir))?;
5235 Some((buffer, path.clone()))
5236 })
5237 .collect::<Vec<_>>();
5238
5239 if future_buffers.len() + current_buffers.len() == 0 {
5240 return;
5241 }
5242
5243 let remote_id = self.remote_id();
5244 let client = self.client.clone();
5245 cx.spawn_weak(move |_, mut cx| async move {
5246 // Wait for all of the buffers to load.
5247 let future_buffers = future_buffers.collect::<Vec<_>>().await;
5248
5249 // Reload the diff base for every buffer whose containing git repository has changed.
5250 let snapshot =
5251 worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
5252 let diff_bases_by_buffer = cx
5253 .background()
5254 .spawn(async move {
5255 future_buffers
5256 .into_iter()
5257 .filter_map(|e| e)
5258 .chain(current_buffers)
5259 .filter_map(|(buffer, path)| {
5260 let (work_directory, repo) =
5261 snapshot.repository_and_work_directory_for_path(&path)?;
5262 let repo = snapshot.get_local_repo(&repo)?;
5263 let relative_path = path.strip_prefix(&work_directory).ok()?;
5264 let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
5265 Some((buffer, base_text))
5266 })
5267 .collect::<Vec<_>>()
5268 })
5269 .await;
5270
            // Assign the new diff bases to all of the buffers.
5272 for (buffer, diff_base) in diff_bases_by_buffer {
5273 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
5274 buffer.set_diff_base(diff_base.clone(), cx);
5275 buffer.remote_id()
5276 });
5277 if let Some(project_id) = remote_id {
5278 client
5279 .send(proto::UpdateDiffBase {
5280 project_id,
5281 buffer_id,
5282 diff_base,
5283 })
5284 .log_err();
5285 }
5286 }
5287 })
5288 .detach();
5289 }
5290
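    /// Reacts to changes to local settings files within a worktree.
    ///
    /// For every changed path ending in the local settings file path, the file
    /// is reloaded (or cleared if it was removed), applied to the global
    /// `SettingsStore` for this worktree, and forwarded to collaborators via
    /// `proto::UpdateWorktreeSettings`.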
5291 fn update_local_worktree_settings(
5292 &mut self,
5293 worktree: &ModelHandle<Worktree>,
5294 changes: &UpdatedEntriesSet,
5295 cx: &mut ModelContext<Self>,
5296 ) {
5297 let project_id = self.remote_id();
5298 let worktree_id = worktree.id();
5299 let worktree = worktree.read(cx).as_local().unwrap();
5300 let remote_worktree_id = worktree.id();
5301
5302 let mut settings_contents = Vec::new();
5303 for (path, _, change) in changes.iter() {
5304 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
5305 let settings_dir = Arc::from(
5306 path.ancestors()
5307 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
5308 .unwrap(),
5309 );
5310 let fs = self.fs.clone();
5311 let removed = *change == PathChange::Removed;
5312 let abs_path = worktree.absolutize(path);
5313 settings_contents.push(async move {
5314 (settings_dir, (!removed).then_some(fs.load(&abs_path).await))
5315 });
5316 }
5317 }
5318
5319 if settings_contents.is_empty() {
5320 return;
5321 }
5322
5323 let client = self.client.clone();
5324 cx.spawn_weak(move |_, mut cx| async move {
5325 let settings_contents: Vec<(Arc<Path>, _)> =
5326 futures::future::join_all(settings_contents).await;
5327 cx.update(|cx| {
5328 cx.update_global::<SettingsStore, _, _>(|store, cx| {
5329 for (directory, file_content) in settings_contents {
5330 let file_content = file_content.and_then(|content| content.log_err());
5331 store
5332 .set_local_settings(
5333 worktree_id,
5334 directory.clone(),
5335 file_content.as_ref().map(String::as_str),
5336 cx,
5337 )
5338 .log_err();
5339 if let Some(remote_id) = project_id {
5340 client
5341 .send(proto::UpdateWorktreeSettings {
5342 project_id: remote_id,
5343 worktree_id: remote_worktree_id.to_proto(),
5344 path: directory.to_string_lossy().into_owned(),
5345 content: file_content,
5346 })
5347 .log_err();
5348 }
5349 }
5350 });
5351 });
5352 })
5353 .detach();
5354 }
5355
5356 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
5357 let new_active_entry = entry.and_then(|project_path| {
5358 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
5359 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
5360 Some(entry.id)
5361 });
5362 if new_active_entry != self.active_entry {
5363 self.active_entry = new_active_entry;
5364 cx.emit(Event::ActiveEntryChanged(new_active_entry));
5365 }
5366 }
5367
5368 pub fn language_servers_running_disk_based_diagnostics(
5369 &self,
5370 ) -> impl Iterator<Item = LanguageServerId> + '_ {
5371 self.language_server_statuses
5372 .iter()
5373 .filter_map(|(id, status)| {
5374 if status.has_pending_diagnostic_updates {
5375 Some(*id)
5376 } else {
5377 None
5378 }
5379 })
5380 }
5381
5382 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
5383 let mut summary = DiagnosticSummary::default();
5384 for (_, _, path_summary) in self.diagnostic_summaries(cx) {
5385 summary.error_count += path_summary.error_count;
5386 summary.warning_count += path_summary.warning_count;
5387 }
5388 summary
5389 }
5390
5391 pub fn diagnostic_summaries<'a>(
5392 &'a self,
5393 cx: &'a AppContext,
5394 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
5395 self.visible_worktrees(cx).flat_map(move |worktree| {
5396 let worktree = worktree.read(cx);
5397 let worktree_id = worktree.id();
5398 worktree
5399 .diagnostic_summaries()
5400 .map(move |(path, server_id, summary)| {
5401 (ProjectPath { worktree_id, path }, server_id, summary)
5402 })
5403 })
5404 }
5405
5406 pub fn disk_based_diagnostics_started(
5407 &mut self,
5408 language_server_id: LanguageServerId,
5409 cx: &mut ModelContext<Self>,
5410 ) {
5411 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
5412 }
5413
5414 pub fn disk_based_diagnostics_finished(
5415 &mut self,
5416 language_server_id: LanguageServerId,
5417 cx: &mut ModelContext<Self>,
5418 ) {
5419 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
5420 }
5421
5422 pub fn active_entry(&self) -> Option<ProjectEntryId> {
5423 self.active_entry
5424 }
5425
5426 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
5427 self.worktree_for_id(path.worktree_id, cx)?
5428 .read(cx)
5429 .entry_for_path(&path.path)
5430 .cloned()
5431 }
5432
5433 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
5434 let worktree = self.worktree_for_entry(entry_id, cx)?;
5435 let worktree = worktree.read(cx);
5436 let worktree_id = worktree.id();
5437 let path = worktree.entry_for_id(entry_id)?.path.clone();
5438 Some(ProjectPath { worktree_id, path })
5439 }
5440
5441 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
5442 let workspace_root = self
5443 .worktree_for_id(project_path.worktree_id, cx)?
5444 .read(cx)
5445 .abs_path();
5446 let project_path = project_path.path.as_ref();
5447
5448 Some(if project_path == Path::new("") {
5449 workspace_root.to_path_buf()
5450 } else {
5451 workspace_root.join(project_path)
5452 })
5453 }
5454
5455 // RPC message handlers
5456
5457 async fn handle_unshare_project(
5458 this: ModelHandle<Self>,
5459 _: TypedEnvelope<proto::UnshareProject>,
5460 _: Arc<Client>,
5461 mut cx: AsyncAppContext,
5462 ) -> Result<()> {
5463 this.update(&mut cx, |this, cx| {
5464 if this.is_local() {
5465 this.unshare(cx)?;
5466 } else {
5467 this.disconnected_from_host(cx);
5468 }
5469 Ok(())
5470 })
5471 }
5472
5473 async fn handle_add_collaborator(
5474 this: ModelHandle<Self>,
5475 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
5476 _: Arc<Client>,
5477 mut cx: AsyncAppContext,
5478 ) -> Result<()> {
5479 let collaborator = envelope
5480 .payload
5481 .collaborator
5482 .take()
5483 .ok_or_else(|| anyhow!("empty collaborator"))?;
5484
5485 let collaborator = Collaborator::from_proto(collaborator)?;
5486 this.update(&mut cx, |this, cx| {
5487 this.shared_buffers.remove(&collaborator.peer_id);
5488 this.collaborators
5489 .insert(collaborator.peer_id, collaborator);
5490 cx.notify();
5491 });
5492
5493 Ok(())
5494 }
5495
5496 async fn handle_update_project_collaborator(
5497 this: ModelHandle<Self>,
5498 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
5499 _: Arc<Client>,
5500 mut cx: AsyncAppContext,
5501 ) -> Result<()> {
5502 let old_peer_id = envelope
5503 .payload
5504 .old_peer_id
5505 .ok_or_else(|| anyhow!("missing old peer id"))?;
5506 let new_peer_id = envelope
5507 .payload
5508 .new_peer_id
5509 .ok_or_else(|| anyhow!("missing new peer id"))?;
5510 this.update(&mut cx, |this, cx| {
5511 let collaborator = this
5512 .collaborators
5513 .remove(&old_peer_id)
5514 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
5515 let is_host = collaborator.replica_id == 0;
5516 this.collaborators.insert(new_peer_id, collaborator);
5517
5518 let buffers = this.shared_buffers.remove(&old_peer_id);
5519 log::info!(
5520 "peer {} became {}. moving buffers {:?}",
5521 old_peer_id,
5522 new_peer_id,
5523 &buffers
5524 );
5525 if let Some(buffers) = buffers {
5526 this.shared_buffers.insert(new_peer_id, buffers);
5527 }
5528
5529 if is_host {
5530 this.opened_buffers
5531 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
5532 this.buffer_ordered_messages_tx
5533 .unbounded_send(BufferOrderedMessage::Resync)
5534 .unwrap();
5535 }
5536
5537 cx.emit(Event::CollaboratorUpdated {
5538 old_peer_id,
5539 new_peer_id,
5540 });
5541 cx.notify();
5542 Ok(())
5543 })
5544 }
5545
5546 async fn handle_remove_collaborator(
5547 this: ModelHandle<Self>,
5548 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
5549 _: Arc<Client>,
5550 mut cx: AsyncAppContext,
5551 ) -> Result<()> {
5552 this.update(&mut cx, |this, cx| {
5553 let peer_id = envelope
5554 .payload
5555 .peer_id
5556 .ok_or_else(|| anyhow!("invalid peer id"))?;
5557 let replica_id = this
5558 .collaborators
5559 .remove(&peer_id)
5560 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
5561 .replica_id;
5562 for buffer in this.opened_buffers.values() {
5563 if let Some(buffer) = buffer.upgrade(cx) {
5564 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
5565 }
5566 }
5567 this.shared_buffers.remove(&peer_id);
5568
5569 cx.emit(Event::CollaboratorLeft(peer_id));
5570 cx.notify();
5571 Ok(())
5572 })
5573 }
5574
5575 async fn handle_update_project(
5576 this: ModelHandle<Self>,
5577 envelope: TypedEnvelope<proto::UpdateProject>,
5578 _: Arc<Client>,
5579 mut cx: AsyncAppContext,
5580 ) -> Result<()> {
5581 this.update(&mut cx, |this, cx| {
            // Don't handle messages that were sent before the response to our request to join the project.
5583 if envelope.message_id > this.join_project_response_message_id {
5584 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
5585 }
5586 Ok(())
5587 })
5588 }
5589
5590 async fn handle_update_worktree(
5591 this: ModelHandle<Self>,
5592 envelope: TypedEnvelope<proto::UpdateWorktree>,
5593 _: Arc<Client>,
5594 mut cx: AsyncAppContext,
5595 ) -> Result<()> {
5596 this.update(&mut cx, |this, cx| {
5597 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5598 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
5599 worktree.update(cx, |worktree, _| {
5600 let worktree = worktree.as_remote_mut().unwrap();
5601 worktree.update_from_remote(envelope.payload);
5602 });
5603 }
5604 Ok(())
5605 })
5606 }
5607
5608 async fn handle_update_worktree_settings(
5609 this: ModelHandle<Self>,
5610 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
5611 _: Arc<Client>,
5612 mut cx: AsyncAppContext,
5613 ) -> Result<()> {
5614 this.update(&mut cx, |this, cx| {
5615 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5616 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
5617 cx.update_global::<SettingsStore, _, _>(|store, cx| {
5618 store
5619 .set_local_settings(
5620 worktree.id(),
5621 PathBuf::from(&envelope.payload.path).into(),
5622 envelope.payload.content.as_ref().map(String::as_str),
5623 cx,
5624 )
5625 .log_err();
5626 });
5627 }
5628 Ok(())
5629 })
5630 }
5631
5632 async fn handle_create_project_entry(
5633 this: ModelHandle<Self>,
5634 envelope: TypedEnvelope<proto::CreateProjectEntry>,
5635 _: Arc<Client>,
5636 mut cx: AsyncAppContext,
5637 ) -> Result<proto::ProjectEntryResponse> {
5638 let worktree = this.update(&mut cx, |this, cx| {
5639 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5640 this.worktree_for_id(worktree_id, cx)
5641 .ok_or_else(|| anyhow!("worktree not found"))
5642 })?;
5643 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5644 let entry = worktree
5645 .update(&mut cx, |worktree, cx| {
5646 let worktree = worktree.as_local_mut().unwrap();
5647 let path = PathBuf::from(envelope.payload.path);
5648 worktree.create_entry(path, envelope.payload.is_directory, cx)
5649 })
5650 .await?;
5651 Ok(proto::ProjectEntryResponse {
5652 entry: Some((&entry).into()),
5653 worktree_scan_id: worktree_scan_id as u64,
5654 })
5655 }
5656
5657 async fn handle_rename_project_entry(
5658 this: ModelHandle<Self>,
5659 envelope: TypedEnvelope<proto::RenameProjectEntry>,
5660 _: Arc<Client>,
5661 mut cx: AsyncAppContext,
5662 ) -> Result<proto::ProjectEntryResponse> {
5663 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5664 let worktree = this.read_with(&cx, |this, cx| {
5665 this.worktree_for_entry(entry_id, cx)
5666 .ok_or_else(|| anyhow!("worktree not found"))
5667 })?;
5668 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5669 let entry = worktree
5670 .update(&mut cx, |worktree, cx| {
5671 let new_path = PathBuf::from(envelope.payload.new_path);
5672 worktree
5673 .as_local_mut()
5674 .unwrap()
5675 .rename_entry(entry_id, new_path, cx)
5676 .ok_or_else(|| anyhow!("invalid entry"))
5677 })?
5678 .await?;
5679 Ok(proto::ProjectEntryResponse {
5680 entry: Some((&entry).into()),
5681 worktree_scan_id: worktree_scan_id as u64,
5682 })
5683 }
5684
5685 async fn handle_copy_project_entry(
5686 this: ModelHandle<Self>,
5687 envelope: TypedEnvelope<proto::CopyProjectEntry>,
5688 _: Arc<Client>,
5689 mut cx: AsyncAppContext,
5690 ) -> Result<proto::ProjectEntryResponse> {
5691 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5692 let worktree = this.read_with(&cx, |this, cx| {
5693 this.worktree_for_entry(entry_id, cx)
5694 .ok_or_else(|| anyhow!("worktree not found"))
5695 })?;
5696 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5697 let entry = worktree
5698 .update(&mut cx, |worktree, cx| {
5699 let new_path = PathBuf::from(envelope.payload.new_path);
5700 worktree
5701 .as_local_mut()
5702 .unwrap()
5703 .copy_entry(entry_id, new_path, cx)
5704 .ok_or_else(|| anyhow!("invalid entry"))
5705 })?
5706 .await?;
5707 Ok(proto::ProjectEntryResponse {
5708 entry: Some((&entry).into()),
5709 worktree_scan_id: worktree_scan_id as u64,
5710 })
5711 }
5712
5713 async fn handle_delete_project_entry(
5714 this: ModelHandle<Self>,
5715 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
5716 _: Arc<Client>,
5717 mut cx: AsyncAppContext,
5718 ) -> Result<proto::ProjectEntryResponse> {
5719 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5720
5721 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
5722
5723 let worktree = this.read_with(&cx, |this, cx| {
5724 this.worktree_for_entry(entry_id, cx)
5725 .ok_or_else(|| anyhow!("worktree not found"))
5726 })?;
5727 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5728 worktree
5729 .update(&mut cx, |worktree, cx| {
5730 worktree
5731 .as_local_mut()
5732 .unwrap()
5733 .delete_entry(entry_id, cx)
5734 .ok_or_else(|| anyhow!("invalid entry"))
5735 })?
5736 .await?;
5737 Ok(proto::ProjectEntryResponse {
5738 entry: None,
5739 worktree_scan_id: worktree_scan_id as u64,
5740 })
5741 }
5742
5743 async fn handle_expand_project_entry(
5744 this: ModelHandle<Self>,
5745 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
5746 _: Arc<Client>,
5747 mut cx: AsyncAppContext,
5748 ) -> Result<proto::ExpandProjectEntryResponse> {
5749 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5750 let worktree = this
5751 .read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
5752 .ok_or_else(|| anyhow!("invalid request"))?;
5753 worktree
5754 .update(&mut cx, |worktree, cx| {
5755 worktree
5756 .as_local_mut()
5757 .unwrap()
5758 .expand_entry(entry_id, cx)
5759 .ok_or_else(|| anyhow!("invalid entry"))
5760 })?
5761 .await?;
5762 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
5763 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
5764 }
5765
5766 async fn handle_update_diagnostic_summary(
5767 this: ModelHandle<Self>,
5768 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
5769 _: Arc<Client>,
5770 mut cx: AsyncAppContext,
5771 ) -> Result<()> {
5772 this.update(&mut cx, |this, cx| {
5773 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5774 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
5775 if let Some(summary) = envelope.payload.summary {
5776 let project_path = ProjectPath {
5777 worktree_id,
5778 path: Path::new(&summary.path).into(),
5779 };
5780 worktree.update(cx, |worktree, _| {
5781 worktree
5782 .as_remote_mut()
5783 .unwrap()
5784 .update_diagnostic_summary(project_path.path.clone(), &summary);
5785 });
5786 cx.emit(Event::DiagnosticsUpdated {
5787 language_server_id: LanguageServerId(summary.language_server_id as usize),
5788 path: project_path,
5789 });
5790 }
5791 }
5792 Ok(())
5793 })
5794 }
5795
5796 async fn handle_start_language_server(
5797 this: ModelHandle<Self>,
5798 envelope: TypedEnvelope<proto::StartLanguageServer>,
5799 _: Arc<Client>,
5800 mut cx: AsyncAppContext,
5801 ) -> Result<()> {
5802 let server = envelope
5803 .payload
5804 .server
5805 .ok_or_else(|| anyhow!("invalid server"))?;
5806 this.update(&mut cx, |this, cx| {
5807 this.language_server_statuses.insert(
5808 LanguageServerId(server.id as usize),
5809 LanguageServerStatus {
5810 name: server.name,
5811 pending_work: Default::default(),
5812 has_pending_diagnostic_updates: false,
5813 progress_tokens: Default::default(),
5814 },
5815 );
5816 cx.notify();
5817 });
5818 Ok(())
5819 }
5820
5821 async fn handle_update_language_server(
5822 this: ModelHandle<Self>,
5823 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
5824 _: Arc<Client>,
5825 mut cx: AsyncAppContext,
5826 ) -> Result<()> {
5827 this.update(&mut cx, |this, cx| {
5828 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
5829
5830 match envelope
5831 .payload
5832 .variant
5833 .ok_or_else(|| anyhow!("invalid variant"))?
5834 {
5835 proto::update_language_server::Variant::WorkStart(payload) => {
5836 this.on_lsp_work_start(
5837 language_server_id,
5838 payload.token,
5839 LanguageServerProgress {
5840 message: payload.message,
5841 percentage: payload.percentage.map(|p| p as usize),
5842 last_update_at: Instant::now(),
5843 },
5844 cx,
5845 );
5846 }
5847
5848 proto::update_language_server::Variant::WorkProgress(payload) => {
5849 this.on_lsp_work_progress(
5850 language_server_id,
5851 payload.token,
5852 LanguageServerProgress {
5853 message: payload.message,
5854 percentage: payload.percentage.map(|p| p as usize),
5855 last_update_at: Instant::now(),
5856 },
5857 cx,
5858 );
5859 }
5860
5861 proto::update_language_server::Variant::WorkEnd(payload) => {
5862 this.on_lsp_work_end(language_server_id, payload.token, cx);
5863 }
5864
5865 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
5866 this.disk_based_diagnostics_started(language_server_id, cx);
5867 }
5868
5869 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
5870 this.disk_based_diagnostics_finished(language_server_id, cx)
5871 }
5872 }
5873
5874 Ok(())
5875 })
5876 }
5877
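    // Applies buffer operations received from a peer. If the buffer is open, the operations
    // are applied directly; if it is only known by id on a guest, they are queued in
    // `OpenBuffer::Operations` until the buffer arrives; operations for weakly-held buffers
    // are dropped.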
5878 async fn handle_update_buffer(
5879 this: ModelHandle<Self>,
5880 envelope: TypedEnvelope<proto::UpdateBuffer>,
5881 _: Arc<Client>,
5882 mut cx: AsyncAppContext,
5883 ) -> Result<proto::Ack> {
5884 this.update(&mut cx, |this, cx| {
5885 let payload = envelope.payload.clone();
5886 let buffer_id = payload.buffer_id;
5887 let ops = payload
5888 .operations
5889 .into_iter()
5890 .map(language::proto::deserialize_operation)
5891 .collect::<Result<Vec<_>, _>>()?;
5892 let is_remote = this.is_remote();
5893 match this.opened_buffers.entry(buffer_id) {
5894 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
5895 OpenBuffer::Strong(buffer) => {
5896 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
5897 }
5898 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
5899 OpenBuffer::Weak(_) => {}
5900 },
5901 hash_map::Entry::Vacant(e) => {
5902 assert!(
5903 is_remote,
5904 "received buffer update from {:?}",
5905 envelope.original_sender_id
5906 );
5907 e.insert(OpenBuffer::Operations(ops));
5908 }
5909 }
5910 Ok(proto::Ack {})
5911 })
5912 }
5913
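    // Receives a buffer that the host streams to this peer: a `State` variant creates an
    // incomplete remote buffer, and `Chunk` variants apply batches of operations to it.
    // When the final chunk arrives, the buffer is registered with the project.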
5914 async fn handle_create_buffer_for_peer(
5915 this: ModelHandle<Self>,
5916 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
5917 _: Arc<Client>,
5918 mut cx: AsyncAppContext,
5919 ) -> Result<()> {
5920 this.update(&mut cx, |this, cx| {
5921 match envelope
5922 .payload
5923 .variant
5924 .ok_or_else(|| anyhow!("missing variant"))?
5925 {
5926 proto::create_buffer_for_peer::Variant::State(mut state) => {
5927 let mut buffer_file = None;
5928 if let Some(file) = state.file.take() {
5929 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5930 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5931 anyhow!("no worktree found for id {}", file.worktree_id)
5932 })?;
5933 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5934 as Arc<dyn language::File>);
5935 }
5936
5937 let buffer_id = state.id;
5938 let buffer = cx.add_model(|_| {
5939 Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
5940 });
5941 this.incomplete_remote_buffers
5942 .insert(buffer_id, Some(buffer));
5943 }
5944 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
5945 let buffer = this
5946 .incomplete_remote_buffers
5947 .get(&chunk.buffer_id)
5948 .cloned()
5949 .flatten()
5950 .ok_or_else(|| {
5951 anyhow!(
5952 "received chunk for buffer {} without initial state",
5953 chunk.buffer_id
5954 )
5955 })?;
5956 let operations = chunk
5957 .operations
5958 .into_iter()
5959 .map(language::proto::deserialize_operation)
5960 .collect::<Result<Vec<_>>>()?;
5961 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
5962
5963 if chunk.is_last {
5964 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
5965 this.register_buffer(&buffer, cx)?;
5966 }
5967 }
5968 }
5969
5970 Ok(())
5971 })
5972 }
5973
5974 async fn handle_update_diff_base(
5975 this: ModelHandle<Self>,
5976 envelope: TypedEnvelope<proto::UpdateDiffBase>,
5977 _: Arc<Client>,
5978 mut cx: AsyncAppContext,
5979 ) -> Result<()> {
5980 this.update(&mut cx, |this, cx| {
5981 let buffer_id = envelope.payload.buffer_id;
5982 let diff_base = envelope.payload.diff_base;
5983 if let Some(buffer) = this
5984 .opened_buffers
5985 .get_mut(&buffer_id)
5986 .and_then(|b| b.upgrade(cx))
5987 .or_else(|| {
5988 this.incomplete_remote_buffers
5989 .get(&buffer_id)
5990 .cloned()
5991 .flatten()
5992 })
5993 {
5994 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
5995 }
5996 Ok(())
5997 })
5998 }
5999
6000 async fn handle_update_buffer_file(
6001 this: ModelHandle<Self>,
6002 envelope: TypedEnvelope<proto::UpdateBufferFile>,
6003 _: Arc<Client>,
6004 mut cx: AsyncAppContext,
6005 ) -> Result<()> {
6006 let buffer_id = envelope.payload.buffer_id;
6007
6008 this.update(&mut cx, |this, cx| {
6009 let payload = envelope.payload.clone();
6010 if let Some(buffer) = this
6011 .opened_buffers
6012 .get(&buffer_id)
6013 .and_then(|b| b.upgrade(cx))
6014 .or_else(|| {
6015 this.incomplete_remote_buffers
6016 .get(&buffer_id)
6017 .cloned()
6018 .flatten()
6019 })
6020 {
6021 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
6022 let worktree = this
6023 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
6024 .ok_or_else(|| anyhow!("no such worktree"))?;
6025 let file = File::from_proto(file, worktree, cx)?;
6026 buffer.update(cx, |buffer, cx| {
6027 buffer.file_updated(Arc::new(file), cx).detach();
6028 });
6029 this.detect_language_for_buffer(&buffer, cx);
6030 }
6031 Ok(())
6032 })
6033 }
6034
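    // Saves a buffer on behalf of a peer, first waiting until this replica has seen the
    // version of the buffer that the peer asked to save.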
6035 async fn handle_save_buffer(
6036 this: ModelHandle<Self>,
6037 envelope: TypedEnvelope<proto::SaveBuffer>,
6038 _: Arc<Client>,
6039 mut cx: AsyncAppContext,
6040 ) -> Result<proto::BufferSaved> {
6041 let buffer_id = envelope.payload.buffer_id;
6042 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
6043 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
6044 let buffer = this
6045 .opened_buffers
6046 .get(&buffer_id)
6047 .and_then(|buffer| buffer.upgrade(cx))
6048 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
6049 anyhow::Ok((project_id, buffer))
6050 })?;
6051 buffer
6052 .update(&mut cx, |buffer, _| {
6053 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
6054 })
6055 .await?;
6056 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
6057
6058 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
6059 .await?;
6060 Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
6061 project_id,
6062 buffer_id,
6063 version: serialize_version(buffer.saved_version()),
6064 mtime: Some(buffer.saved_mtime().into()),
6065 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
6066 }))
6067 }
6068
6069 async fn handle_reload_buffers(
6070 this: ModelHandle<Self>,
6071 envelope: TypedEnvelope<proto::ReloadBuffers>,
6072 _: Arc<Client>,
6073 mut cx: AsyncAppContext,
6074 ) -> Result<proto::ReloadBuffersResponse> {
6075 let sender_id = envelope.original_sender_id()?;
6076 let reload = this.update(&mut cx, |this, cx| {
6077 let mut buffers = HashSet::default();
6078 for buffer_id in &envelope.payload.buffer_ids {
6079 buffers.insert(
6080 this.opened_buffers
6081 .get(buffer_id)
6082 .and_then(|buffer| buffer.upgrade(cx))
6083 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
6084 );
6085 }
6086 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
6087 })?;
6088
6089 let project_transaction = reload.await?;
6090 let project_transaction = this.update(&mut cx, |this, cx| {
6091 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
6092 });
6093 Ok(proto::ReloadBuffersResponse {
6094 transaction: Some(project_transaction),
6095 })
6096 }
6097
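    // Responds to a guest's request to synchronize its buffers (typically after a
    // reconnect): replies with the host's buffer versions and re-sends each shared
    // buffer's file, diff base, saved state, and any operations the guest is missing.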
6098 async fn handle_synchronize_buffers(
6099 this: ModelHandle<Self>,
6100 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
6101 _: Arc<Client>,
6102 mut cx: AsyncAppContext,
6103 ) -> Result<proto::SynchronizeBuffersResponse> {
6104 let project_id = envelope.payload.project_id;
6105 let mut response = proto::SynchronizeBuffersResponse {
6106 buffers: Default::default(),
6107 };
6108
6109 this.update(&mut cx, |this, cx| {
6110 let Some(guest_id) = envelope.original_sender_id else {
6111 error!("missing original_sender_id on SynchronizeBuffers request");
6112 return;
6113 };
6114
6115 this.shared_buffers.entry(guest_id).or_default().clear();
6116 for buffer in envelope.payload.buffers {
6117 let buffer_id = buffer.id;
6118 let remote_version = language::proto::deserialize_version(&buffer.version);
6119 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
6120 this.shared_buffers
6121 .entry(guest_id)
6122 .or_default()
6123 .insert(buffer_id);
6124
6125 let buffer = buffer.read(cx);
6126 response.buffers.push(proto::BufferVersion {
6127 id: buffer_id,
6128 version: language::proto::serialize_version(&buffer.version),
6129 });
6130
6131 let operations = buffer.serialize_ops(Some(remote_version), cx);
6132 let client = this.client.clone();
6133 if let Some(file) = buffer.file() {
6134 client
6135 .send(proto::UpdateBufferFile {
6136 project_id,
6137 buffer_id: buffer_id as u64,
6138 file: Some(file.to_proto()),
6139 })
6140 .log_err();
6141 }
6142
6143 client
6144 .send(proto::UpdateDiffBase {
6145 project_id,
6146 buffer_id: buffer_id as u64,
6147 diff_base: buffer.diff_base().map(Into::into),
6148 })
6149 .log_err();
6150
6151 client
6152 .send(proto::BufferReloaded {
6153 project_id,
6154 buffer_id,
6155 version: language::proto::serialize_version(buffer.saved_version()),
6156 mtime: Some(buffer.saved_mtime().into()),
6157 fingerprint: language::proto::serialize_fingerprint(
6158 buffer.saved_version_fingerprint(),
6159 ),
6160 line_ending: language::proto::serialize_line_ending(
6161 buffer.line_ending(),
6162 ) as i32,
6163 })
6164 .log_err();
6165
6166 cx.background()
6167 .spawn(
6168 async move {
6169 let operations = operations.await;
6170 for chunk in split_operations(operations) {
6171 client
6172 .request(proto::UpdateBuffer {
6173 project_id,
6174 buffer_id,
6175 operations: chunk,
6176 })
6177 .await?;
6178 }
6179 anyhow::Ok(())
6180 }
6181 .log_err(),
6182 )
6183 .detach();
6184 }
6185 }
6186 });
6187
6188 Ok(response)
6189 }
6190
6191 async fn handle_format_buffers(
6192 this: ModelHandle<Self>,
6193 envelope: TypedEnvelope<proto::FormatBuffers>,
6194 _: Arc<Client>,
6195 mut cx: AsyncAppContext,
6196 ) -> Result<proto::FormatBuffersResponse> {
6197 let sender_id = envelope.original_sender_id()?;
6198 let format = this.update(&mut cx, |this, cx| {
6199 let mut buffers = HashSet::default();
6200 for buffer_id in &envelope.payload.buffer_ids {
6201 buffers.insert(
6202 this.opened_buffers
6203 .get(buffer_id)
6204 .and_then(|buffer| buffer.upgrade(cx))
6205 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
6206 );
6207 }
6208 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
6209 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
6210 })?;
6211
6212 let project_transaction = format.await?;
6213 let project_transaction = this.update(&mut cx, |this, cx| {
6214 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
6215 });
6216 Ok(proto::FormatBuffersResponse {
6217 transaction: Some(project_transaction),
6218 })
6219 }
6220
6221 async fn handle_apply_additional_edits_for_completion(
6222 this: ModelHandle<Self>,
6223 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
6224 _: Arc<Client>,
6225 mut cx: AsyncAppContext,
6226 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
6227 let (buffer, completion) = this.update(&mut cx, |this, cx| {
6228 let buffer = this
6229 .opened_buffers
6230 .get(&envelope.payload.buffer_id)
6231 .and_then(|buffer| buffer.upgrade(cx))
6232 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
6233 let language = buffer.read(cx).language();
6234 let completion = language::proto::deserialize_completion(
6235 envelope
6236 .payload
6237 .completion
6238 .ok_or_else(|| anyhow!("invalid completion"))?,
6239 language.cloned(),
6240 );
6241 Ok::<_, anyhow::Error>((buffer, completion))
6242 })?;
6243
6244 let completion = completion.await?;
6245
6246 let apply_additional_edits = this.update(&mut cx, |this, cx| {
6247 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
6248 });
6249
6250 Ok(proto::ApplyCompletionAdditionalEditsResponse {
6251 transaction: apply_additional_edits
6252 .await?
6253 .as_ref()
6254 .map(language::proto::serialize_transaction),
6255 })
6256 }
6257
6258 async fn handle_apply_code_action(
6259 this: ModelHandle<Self>,
6260 envelope: TypedEnvelope<proto::ApplyCodeAction>,
6261 _: Arc<Client>,
6262 mut cx: AsyncAppContext,
6263 ) -> Result<proto::ApplyCodeActionResponse> {
6264 let sender_id = envelope.original_sender_id()?;
6265 let action = language::proto::deserialize_code_action(
6266 envelope
6267 .payload
6268 .action
6269 .ok_or_else(|| anyhow!("invalid action"))?,
6270 )?;
6271 let apply_code_action = this.update(&mut cx, |this, cx| {
6272 let buffer = this
6273 .opened_buffers
6274 .get(&envelope.payload.buffer_id)
6275 .and_then(|buffer| buffer.upgrade(cx))
6276 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
6277 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
6278 })?;
6279
6280 let project_transaction = apply_code_action.await?;
6281 let project_transaction = this.update(&mut cx, |this, cx| {
6282 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
6283 });
6284 Ok(proto::ApplyCodeActionResponse {
6285 transaction: Some(project_transaction),
6286 })
6287 }
6288
6289 async fn handle_on_type_formatting(
6290 this: ModelHandle<Self>,
6291 envelope: TypedEnvelope<proto::OnTypeFormatting>,
6292 _: Arc<Client>,
6293 mut cx: AsyncAppContext,
6294 ) -> Result<proto::OnTypeFormattingResponse> {
6295 let on_type_formatting = this.update(&mut cx, |this, cx| {
6296 let buffer = this
6297 .opened_buffers
6298 .get(&envelope.payload.buffer_id)
6299 .and_then(|buffer| buffer.upgrade(cx))
6300 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
6301 let position = envelope
6302 .payload
6303 .position
6304 .and_then(deserialize_anchor)
6305 .ok_or_else(|| anyhow!("invalid position"))?;
6306 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
6307 buffer,
6308 position,
6309 envelope.payload.trigger.clone(),
6310 cx,
6311 ))
6312 })?;
6313
6314 let transaction = on_type_formatting
6315 .await?
6316 .as_ref()
6317 .map(language::proto::serialize_transaction);
6318 Ok(proto::OnTypeFormattingResponse { transaction })
6319 }
6320
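    // Generic handler for LSP-backed requests forwarded from peers: deserializes the
    // request with `T::from_proto`, runs it against the local language server via
    // `request_lsp`, and serializes the response for the original sender.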
6321 async fn handle_lsp_command<T: LspCommand>(
6322 this: ModelHandle<Self>,
6323 envelope: TypedEnvelope<T::ProtoRequest>,
6324 _: Arc<Client>,
6325 mut cx: AsyncAppContext,
6326 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
6327 where
6328 <T::LspRequest as lsp::request::Request>::Result: Send,
6329 {
6330 let sender_id = envelope.original_sender_id()?;
6331 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
6332 let buffer_handle = this.read_with(&cx, |this, _| {
6333 this.opened_buffers
6334 .get(&buffer_id)
6335 .and_then(|buffer| buffer.upgrade(&cx))
6336 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
6337 })?;
6338 let request = T::from_proto(
6339 envelope.payload,
6340 this.clone(),
6341 buffer_handle.clone(),
6342 cx.clone(),
6343 )
6344 .await?;
6345 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
6346 let response = this
6347 .update(&mut cx, |this, cx| {
6348 this.request_lsp(buffer_handle, request, cx)
6349 })
6350 .await?;
6351 this.update(&mut cx, |this, cx| {
6352 Ok(T::response_to_proto(
6353 response,
6354 this,
6355 sender_id,
6356 &buffer_version,
6357 cx,
6358 ))
6359 })
6360 }
6361
6362 async fn handle_get_project_symbols(
6363 this: ModelHandle<Self>,
6364 envelope: TypedEnvelope<proto::GetProjectSymbols>,
6365 _: Arc<Client>,
6366 mut cx: AsyncAppContext,
6367 ) -> Result<proto::GetProjectSymbolsResponse> {
6368 let symbols = this
6369 .update(&mut cx, |this, cx| {
6370 this.symbols(&envelope.payload.query, cx)
6371 })
6372 .await?;
6373
6374 Ok(proto::GetProjectSymbolsResponse {
6375 symbols: symbols.iter().map(serialize_symbol).collect(),
6376 })
6377 }
6378
6379 async fn handle_search_project(
6380 this: ModelHandle<Self>,
6381 envelope: TypedEnvelope<proto::SearchProject>,
6382 _: Arc<Client>,
6383 mut cx: AsyncAppContext,
6384 ) -> Result<proto::SearchProjectResponse> {
6385 let peer_id = envelope.original_sender_id()?;
6386 let query = SearchQuery::from_proto(envelope.payload)?;
6387 let result = this
6388 .update(&mut cx, |this, cx| this.search(query, cx))
6389 .await?;
6390
6391 this.update(&mut cx, |this, cx| {
6392 let mut locations = Vec::new();
6393 for (buffer, ranges) in result {
6394 for range in ranges {
6395 let start = serialize_anchor(&range.start);
6396 let end = serialize_anchor(&range.end);
6397 let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
6398 locations.push(proto::Location {
6399 buffer_id,
6400 start: Some(start),
6401 end: Some(end),
6402 });
6403 }
6404 }
6405 Ok(proto::SearchProjectResponse { locations })
6406 })
6407 }
6408
6409 async fn handle_open_buffer_for_symbol(
6410 this: ModelHandle<Self>,
6411 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
6412 _: Arc<Client>,
6413 mut cx: AsyncAppContext,
6414 ) -> Result<proto::OpenBufferForSymbolResponse> {
6415 let peer_id = envelope.original_sender_id()?;
6416 let symbol = envelope
6417 .payload
6418 .symbol
6419 .ok_or_else(|| anyhow!("invalid symbol"))?;
6420 let symbol = this
6421 .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
6422 .await?;
6423 let symbol = this.read_with(&cx, |this, _| {
6424 let signature = this.symbol_signature(&symbol.path);
6425 if signature == symbol.signature {
6426 Ok(symbol)
6427 } else {
6428 Err(anyhow!("invalid symbol signature"))
6429 }
6430 })?;
6431 let buffer = this
6432 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
6433 .await?;
6434
6435 Ok(proto::OpenBufferForSymbolResponse {
6436 buffer_id: this.update(&mut cx, |this, cx| {
6437 this.create_buffer_for_peer(&buffer, peer_id, cx)
6438 }),
6439 })
6440 }
6441
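    // Hashes a project path together with this project's nonce. The resulting signature is
    // checked in `handle_open_buffer_for_symbol`, presumably so that a peer cannot open
    // arbitrary paths by forging a symbol.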
6442 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
6443 let mut hasher = Sha256::new();
6444 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
6445 hasher.update(project_path.path.to_string_lossy().as_bytes());
6446 hasher.update(self.nonce.to_be_bytes());
6447 hasher.finalize().as_slice().try_into().unwrap()
6448 }
6449
6450 async fn handle_open_buffer_by_id(
6451 this: ModelHandle<Self>,
6452 envelope: TypedEnvelope<proto::OpenBufferById>,
6453 _: Arc<Client>,
6454 mut cx: AsyncAppContext,
6455 ) -> Result<proto::OpenBufferResponse> {
6456 let peer_id = envelope.original_sender_id()?;
6457 let buffer = this
6458 .update(&mut cx, |this, cx| {
6459 this.open_buffer_by_id(envelope.payload.id, cx)
6460 })
6461 .await?;
6462 this.update(&mut cx, |this, cx| {
6463 Ok(proto::OpenBufferResponse {
6464 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
6465 })
6466 })
6467 }
6468
6469 async fn handle_open_buffer_by_path(
6470 this: ModelHandle<Self>,
6471 envelope: TypedEnvelope<proto::OpenBufferByPath>,
6472 _: Arc<Client>,
6473 mut cx: AsyncAppContext,
6474 ) -> Result<proto::OpenBufferResponse> {
6475 let peer_id = envelope.original_sender_id()?;
6476 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6477 let open_buffer = this.update(&mut cx, |this, cx| {
6478 this.open_buffer(
6479 ProjectPath {
6480 worktree_id,
6481 path: PathBuf::from(envelope.payload.path).into(),
6482 },
6483 cx,
6484 )
6485 });
6486
6487 let buffer = open_buffer.await?;
6488 this.update(&mut cx, |this, cx| {
6489 Ok(proto::OpenBufferResponse {
6490 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
6491 })
6492 })
6493 }
6494
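    // Converts a `ProjectTransaction` into protobuf form, registering each affected buffer
    // with the peer so that the buffer ids in the message are meaningful to them.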
6495 fn serialize_project_transaction_for_peer(
6496 &mut self,
6497 project_transaction: ProjectTransaction,
6498 peer_id: proto::PeerId,
6499 cx: &mut AppContext,
6500 ) -> proto::ProjectTransaction {
6501 let mut serialized_transaction = proto::ProjectTransaction {
6502 buffer_ids: Default::default(),
6503 transactions: Default::default(),
6504 };
6505 for (buffer, transaction) in project_transaction.0 {
6506 serialized_transaction
6507 .buffer_ids
6508 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
6509 serialized_transaction
6510 .transactions
6511 .push(language::proto::serialize_transaction(&transaction));
6512 }
6513 serialized_transaction
6514 }
6515
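    // Reconstructs a `ProjectTransaction` from a peer's message, waiting for each remote
    // buffer and the edits it references to arrive, and optionally pushing the transactions
    // onto the buffers' histories.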
6516 fn deserialize_project_transaction(
6517 &mut self,
6518 message: proto::ProjectTransaction,
6519 push_to_history: bool,
6520 cx: &mut ModelContext<Self>,
6521 ) -> Task<Result<ProjectTransaction>> {
6522 cx.spawn(|this, mut cx| async move {
6523 let mut project_transaction = ProjectTransaction::default();
6524 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
6525 {
6526 let buffer = this
6527 .update(&mut cx, |this, cx| {
6528 this.wait_for_remote_buffer(buffer_id, cx)
6529 })
6530 .await?;
6531 let transaction = language::proto::deserialize_transaction(transaction)?;
6532 project_transaction.0.insert(buffer, transaction);
6533 }
6534
6535 for (buffer, transaction) in &project_transaction.0 {
6536 buffer
6537 .update(&mut cx, |buffer, _| {
6538 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
6539 })
6540 .await?;
6541
6542 if push_to_history {
6543 buffer.update(&mut cx, |buffer, _| {
6544 buffer.push_transaction(transaction.clone(), Instant::now());
6545 });
6546 }
6547 }
6548
6549 Ok(project_transaction)
6550 })
6551 }
6552
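    // Returns the buffer's remote id and, when this project is shared locally, enqueues an
    // update so that the given peer will receive the buffer's state.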
6553 fn create_buffer_for_peer(
6554 &mut self,
6555 buffer: &ModelHandle<Buffer>,
6556 peer_id: proto::PeerId,
6557 cx: &mut AppContext,
6558 ) -> u64 {
6559 let buffer_id = buffer.read(cx).remote_id();
6560 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
6561 updates_tx
6562 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
6563 .ok();
6564 }
6565 buffer_id
6566 }
6567
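    // Waits until a buffer with the given remote id has been opened locally. The id is
    // recorded in `incomplete_remote_buffers` so that, if necessary, the host can be asked
    // to re-send the buffer during the next buffer synchronization.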
6568 fn wait_for_remote_buffer(
6569 &mut self,
6570 id: u64,
6571 cx: &mut ModelContext<Self>,
6572 ) -> Task<Result<ModelHandle<Buffer>>> {
6573 let mut opened_buffer_rx = self.opened_buffer.1.clone();
6574
6575 cx.spawn_weak(|this, mut cx| async move {
6576 let buffer = loop {
6577 let Some(this) = this.upgrade(&cx) else {
6578 return Err(anyhow!("project dropped"));
6579 };
6580
6581 let buffer = this.read_with(&cx, |this, cx| {
6582 this.opened_buffers
6583 .get(&id)
6584 .and_then(|buffer| buffer.upgrade(cx))
6585 });
6586
6587 if let Some(buffer) = buffer {
6588 break buffer;
6589 } else if this.read_with(&cx, |this, _| this.is_read_only()) {
6590 return Err(anyhow!("disconnected before buffer {} could be opened", id));
6591 }
6592
6593 this.update(&mut cx, |this, _| {
6594 this.incomplete_remote_buffers.entry(id).or_default();
6595 });
6596 drop(this);
6597
6598 opened_buffer_rx
6599 .next()
6600 .await
6601 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
6602 };
6603
6604 Ok(buffer)
6605 })
6606 }
6607
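    // Resynchronizes this guest's buffers with the host: reports the versions of all open
    // buffers, re-requests any incomplete ones, and sends back whichever operations the
    // host's reply shows it is still missing.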
6608 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
6609 let project_id = match self.client_state.as_ref() {
6610 Some(ProjectClientState::Remote {
6611 sharing_has_stopped,
6612 remote_id,
6613 ..
6614 }) => {
6615 if *sharing_has_stopped {
6616 return Task::ready(Err(anyhow!(
6617 "can't synchronize remote buffers on a readonly project"
6618 )));
6619 } else {
6620 *remote_id
6621 }
6622 }
6623 Some(ProjectClientState::Local { .. }) | None => {
6624 return Task::ready(Err(anyhow!(
6625 "can't synchronize remote buffers on a local project"
6626 )))
6627 }
6628 };
6629
6630 let client = self.client.clone();
6631 cx.spawn(|this, cx| async move {
6632 let (buffers, incomplete_buffer_ids) = this.read_with(&cx, |this, cx| {
6633 let buffers = this
6634 .opened_buffers
6635 .iter()
6636 .filter_map(|(id, buffer)| {
6637 let buffer = buffer.upgrade(cx)?;
6638 Some(proto::BufferVersion {
6639 id: *id,
6640 version: language::proto::serialize_version(&buffer.read(cx).version),
6641 })
6642 })
6643 .collect();
6644 let incomplete_buffer_ids = this
6645 .incomplete_remote_buffers
6646 .keys()
6647 .copied()
6648 .collect::<Vec<_>>();
6649
6650 (buffers, incomplete_buffer_ids)
6651 });
6652 let response = client
6653 .request(proto::SynchronizeBuffers {
6654 project_id,
6655 buffers,
6656 })
6657 .await?;
6658
6659 let send_updates_for_buffers = response.buffers.into_iter().map(|buffer| {
6660 let client = client.clone();
6661 let buffer_id = buffer.id;
6662 let remote_version = language::proto::deserialize_version(&buffer.version);
6663 this.read_with(&cx, |this, cx| {
6664 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
6665 let operations = buffer.read(cx).serialize_ops(Some(remote_version), cx);
6666 cx.background().spawn(async move {
6667 let operations = operations.await;
6668 for chunk in split_operations(operations) {
6669 client
6670 .request(proto::UpdateBuffer {
6671 project_id,
6672 buffer_id,
6673 operations: chunk,
6674 })
6675 .await?;
6676 }
6677 anyhow::Ok(())
6678 })
6679 } else {
6680 Task::ready(Ok(()))
6681 }
6682 })
6683 });
6684
6685            // Any incomplete buffers have open requests waiting. Ask the host to
6686            // re-create these buffers for us so that any waiting futures are unblocked.
6687 for id in incomplete_buffer_ids {
6688 cx.background()
6689 .spawn(client.request(proto::OpenBufferById { project_id, id }))
6690 .detach();
6691 }
6692
6693 futures::future::join_all(send_updates_for_buffers)
6694 .await
6695 .into_iter()
6696 .collect()
6697 })
6698 }
6699
6700 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
6701 self.worktrees(cx)
6702 .map(|worktree| {
6703 let worktree = worktree.read(cx);
6704 proto::WorktreeMetadata {
6705 id: worktree.id().to_proto(),
6706 root_name: worktree.root_name().into(),
6707 visible: worktree.is_visible(),
6708 abs_path: worktree.abs_path().to_string_lossy().into(),
6709 }
6710 })
6711 .collect()
6712 }
6713
6714 fn set_worktrees_from_proto(
6715 &mut self,
6716 worktrees: Vec<proto::WorktreeMetadata>,
6717 cx: &mut ModelContext<Project>,
6718 ) -> Result<()> {
6719 let replica_id = self.replica_id();
6720 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
6721
6722 let mut old_worktrees_by_id = self
6723 .worktrees
6724 .drain(..)
6725 .filter_map(|worktree| {
6726 let worktree = worktree.upgrade(cx)?;
6727 Some((worktree.read(cx).id(), worktree))
6728 })
6729 .collect::<HashMap<_, _>>();
6730
6731 for worktree in worktrees {
6732 if let Some(old_worktree) =
6733 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
6734 {
6735 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
6736 } else {
6737 let worktree =
6738 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
6739 let _ = self.add_worktree(&worktree, cx);
6740 }
6741 }
6742
6743 self.metadata_changed(cx);
6744 for id in old_worktrees_by_id.keys() {
6745 cx.emit(Event::WorktreeRemoved(*id));
6746 }
6747
6748 Ok(())
6749 }
6750
6751 fn set_collaborators_from_proto(
6752 &mut self,
6753 messages: Vec<proto::Collaborator>,
6754 cx: &mut ModelContext<Self>,
6755 ) -> Result<()> {
6756 let mut collaborators = HashMap::default();
6757 for message in messages {
6758 let collaborator = Collaborator::from_proto(message)?;
6759 collaborators.insert(collaborator.peer_id, collaborator);
6760 }
6761 for old_peer_id in self.collaborators.keys() {
6762 if !collaborators.contains_key(old_peer_id) {
6763 cx.emit(Event::CollaboratorLeft(*old_peer_id));
6764 }
6765 }
6766 self.collaborators = collaborators;
6767 Ok(())
6768 }
6769
6770 fn deserialize_symbol(
6771 &self,
6772 serialized_symbol: proto::Symbol,
6773 ) -> impl Future<Output = Result<Symbol>> {
6774 let languages = self.languages.clone();
6775 async move {
6776 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
6777 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
6778 let start = serialized_symbol
6779 .start
6780 .ok_or_else(|| anyhow!("invalid start"))?;
6781 let end = serialized_symbol
6782 .end
6783 .ok_or_else(|| anyhow!("invalid end"))?;
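            // The proto `kind` is transmuted into the symbol kind type, relying on the two
            // sharing the same integer representation.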
6784 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
6785 let path = ProjectPath {
6786 worktree_id,
6787 path: PathBuf::from(serialized_symbol.path).into(),
6788 };
6789 let language = languages
6790 .language_for_file(&path.path, None)
6791 .await
6792 .log_err();
6793 Ok(Symbol {
6794 language_server_name: LanguageServerName(
6795 serialized_symbol.language_server_name.into(),
6796 ),
6797 source_worktree_id,
6798 path,
6799 label: {
6800 match language {
6801 Some(language) => {
6802 language
6803 .label_for_symbol(&serialized_symbol.name, kind)
6804 .await
6805 }
6806 None => None,
6807 }
6808 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
6809 },
6810
6811 name: serialized_symbol.name,
6812 range: Unclipped(PointUtf16::new(start.row, start.column))
6813 ..Unclipped(PointUtf16::new(end.row, end.column)),
6814 kind,
6815 signature: serialized_symbol
6816 .signature
6817 .try_into()
6818 .map_err(|_| anyhow!("invalid signature"))?,
6819 })
6820 }
6821 }
6822
6823 async fn handle_buffer_saved(
6824 this: ModelHandle<Self>,
6825 envelope: TypedEnvelope<proto::BufferSaved>,
6826 _: Arc<Client>,
6827 mut cx: AsyncAppContext,
6828 ) -> Result<()> {
6829 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
6830 let version = deserialize_version(&envelope.payload.version);
6831 let mtime = envelope
6832 .payload
6833 .mtime
6834 .ok_or_else(|| anyhow!("missing mtime"))?
6835 .into();
6836
6837 this.update(&mut cx, |this, cx| {
6838 let buffer = this
6839 .opened_buffers
6840 .get(&envelope.payload.buffer_id)
6841 .and_then(|buffer| buffer.upgrade(cx))
6842 .or_else(|| {
6843 this.incomplete_remote_buffers
6844 .get(&envelope.payload.buffer_id)
6845 .and_then(|b| b.clone())
6846 });
6847 if let Some(buffer) = buffer {
6848 buffer.update(cx, |buffer, cx| {
6849 buffer.did_save(version, fingerprint, mtime, cx);
6850 });
6851 }
6852 Ok(())
6853 })
6854 }
6855
6856 async fn handle_buffer_reloaded(
6857 this: ModelHandle<Self>,
6858 envelope: TypedEnvelope<proto::BufferReloaded>,
6859 _: Arc<Client>,
6860 mut cx: AsyncAppContext,
6861 ) -> Result<()> {
6862 let payload = envelope.payload;
6863 let version = deserialize_version(&payload.version);
6864 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
6865 let line_ending = deserialize_line_ending(
6866 proto::LineEnding::from_i32(payload.line_ending)
6867 .ok_or_else(|| anyhow!("missing line ending"))?,
6868 );
6869 let mtime = payload
6870 .mtime
6871 .ok_or_else(|| anyhow!("missing mtime"))?
6872 .into();
6873 this.update(&mut cx, |this, cx| {
6874 let buffer = this
6875 .opened_buffers
6876 .get(&payload.buffer_id)
6877 .and_then(|buffer| buffer.upgrade(cx))
6878 .or_else(|| {
6879 this.incomplete_remote_buffers
6880 .get(&payload.buffer_id)
6881 .cloned()
6882 .flatten()
6883 });
6884 if let Some(buffer) = buffer {
6885 buffer.update(cx, |buffer, cx| {
6886 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
6887 });
6888 }
6889 Ok(())
6890 })
6891 }
6892
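    // Converts LSP text edits into anchored buffer edits against the snapshot the language
    // server was referring to, merging adjacent (or newline-separated) edits and diffing
    // multi-line replacements so that anchors in unchanged regions are preserved.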
6893 #[allow(clippy::type_complexity)]
6894 fn edits_from_lsp(
6895 &mut self,
6896 buffer: &ModelHandle<Buffer>,
6897 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
6898 server_id: LanguageServerId,
6899 version: Option<i32>,
6900 cx: &mut ModelContext<Self>,
6901 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
6902 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
6903 cx.background().spawn(async move {
6904 let snapshot = snapshot?;
6905 let mut lsp_edits = lsp_edits
6906 .into_iter()
6907 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
6908 .collect::<Vec<_>>();
6909 lsp_edits.sort_by_key(|(range, _)| range.start);
6910
6911 let mut lsp_edits = lsp_edits.into_iter().peekable();
6912 let mut edits = Vec::new();
6913 while let Some((range, mut new_text)) = lsp_edits.next() {
6914 // Clip invalid ranges provided by the language server.
6915 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
6916 ..snapshot.clip_point_utf16(range.end, Bias::Left);
6917
6918 // Combine any LSP edits that are adjacent.
6919 //
6920 // Also, combine LSP edits that are separated from each other by only
6921 // a newline. This is important because for some code actions,
6922 // Rust-analyzer rewrites the entire buffer via a series of edits that
6923 // are separated by unchanged newline characters.
6924 //
6925 // In order for the diffing logic below to work properly, any edits that
6926 // cancel each other out must be combined into one.
6927 while let Some((next_range, next_text)) = lsp_edits.peek() {
6928 if next_range.start.0 > range.end {
6929 if next_range.start.0.row > range.end.row + 1
6930 || next_range.start.0.column > 0
6931 || snapshot.clip_point_utf16(
6932 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
6933 Bias::Left,
6934 ) > range.end
6935 {
6936 break;
6937 }
6938 new_text.push('\n');
6939 }
6940 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
6941 new_text.push_str(next_text);
6942 lsp_edits.next();
6943 }
6944
6945 // For multiline edits, perform a diff of the old and new text so that
6946 // we can identify the changes more precisely, preserving the locations
6947 // of any anchors positioned in the unchanged regions.
6948 if range.end.row > range.start.row {
6949 let mut offset = range.start.to_offset(&snapshot);
6950 let old_text = snapshot.text_for_range(range).collect::<String>();
6951
6952 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
6953 let mut moved_since_edit = true;
6954 for change in diff.iter_all_changes() {
6955 let tag = change.tag();
6956 let value = change.value();
6957 match tag {
6958 ChangeTag::Equal => {
6959 offset += value.len();
6960 moved_since_edit = true;
6961 }
6962 ChangeTag::Delete => {
6963 let start = snapshot.anchor_after(offset);
6964 let end = snapshot.anchor_before(offset + value.len());
6965 if moved_since_edit {
6966 edits.push((start..end, String::new()));
6967 } else {
6968 edits.last_mut().unwrap().0.end = end;
6969 }
6970 offset += value.len();
6971 moved_since_edit = false;
6972 }
6973 ChangeTag::Insert => {
6974 if moved_since_edit {
6975 let anchor = snapshot.anchor_after(offset);
6976 edits.push((anchor..anchor, value.to_string()));
6977 } else {
6978 edits.last_mut().unwrap().1.push_str(value);
6979 }
6980 moved_since_edit = false;
6981 }
6982 }
6983 }
6984 } else if range.end == range.start {
6985 let anchor = snapshot.anchor_after(range.start);
6986 edits.push((anchor..anchor, new_text));
6987 } else {
6988 let edit_start = snapshot.anchor_after(range.start);
6989 let edit_end = snapshot.anchor_before(range.end);
6990 edits.push((edit_start..edit_end, new_text));
6991 }
6992 }
6993
6994 Ok(edits)
6995 })
6996 }
6997
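    // Returns the buffer snapshot matching the document version an LSP response refers to,
    // pruning snapshots older than `OLD_VERSIONS_TO_RETAIN`. Without a version, the current
    // text snapshot is used.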
6998 fn buffer_snapshot_for_lsp_version(
6999 &mut self,
7000 buffer: &ModelHandle<Buffer>,
7001 server_id: LanguageServerId,
7002 version: Option<i32>,
7003 cx: &AppContext,
7004 ) -> Result<TextBufferSnapshot> {
7005 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
7006
7007 if let Some(version) = version {
7008 let buffer_id = buffer.read(cx).remote_id();
7009 let snapshots = self
7010 .buffer_snapshots
7011 .get_mut(&buffer_id)
7012 .and_then(|m| m.get_mut(&server_id))
7013 .ok_or_else(|| {
7014 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
7015 })?;
7016
7017 let found_snapshot = snapshots
7018 .binary_search_by_key(&version, |e| e.version)
7019 .map(|ix| snapshots[ix].snapshot.clone())
7020 .map_err(|_| {
7021 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
7022 })?;
7023
7024 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
7025 Ok(found_snapshot)
7026 } else {
7027 Ok((buffer.read(cx)).text_snapshot())
7028 }
7029 }
7030
7031 pub fn language_servers(
7032 &self,
7033 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
7034 self.language_server_ids
7035 .iter()
7036 .map(|((worktree_id, server_name), server_id)| {
7037 (*server_id, server_name.clone(), *worktree_id)
7038 })
7039 }
7040
7041 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
7042 if let LanguageServerState::Running { server, .. } = self.language_servers.get(&id)? {
7043 Some(server.clone())
7044 } else {
7045 None
7046 }
7047 }
7048
7049 pub fn language_servers_for_buffer(
7050 &self,
7051 buffer: &Buffer,
7052 cx: &AppContext,
7053 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7054 self.language_server_ids_for_buffer(buffer, cx)
7055 .into_iter()
7056 .filter_map(|server_id| {
7057 let server = self.language_servers.get(&server_id)?;
7058 if let LanguageServerState::Running {
7059 adapter, server, ..
7060 } = server
7061 {
7062 Some((adapter, server))
7063 } else {
7064 None
7065 }
7066 })
7067 }
7068
7069 fn primary_language_servers_for_buffer(
7070 &self,
7071 buffer: &Buffer,
7072 cx: &AppContext,
7073 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7074 self.language_servers_for_buffer(buffer, cx).next()
7075 }
7076
7077 fn language_server_for_buffer(
7078 &self,
7079 buffer: &Buffer,
7080 server_id: LanguageServerId,
7081 cx: &AppContext,
7082 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
7083 self.language_servers_for_buffer(buffer, cx)
7084 .find(|(_, s)| s.server_id() == server_id)
7085 }
7086
7087 fn language_server_ids_for_buffer(
7088 &self,
7089 buffer: &Buffer,
7090 cx: &AppContext,
7091 ) -> Vec<LanguageServerId> {
7092 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
7093 let worktree_id = file.worktree_id(cx);
7094 language
7095 .lsp_adapters()
7096 .iter()
7097 .flat_map(|adapter| {
7098 let key = (worktree_id, adapter.name.clone());
7099 self.language_server_ids.get(&key).copied()
7100 })
7101 .collect()
7102 } else {
7103 Vec::new()
7104 }
7105 }
7106}
7107
7108impl WorktreeHandle {
7109 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
7110 match self {
7111 WorktreeHandle::Strong(handle) => Some(handle.clone()),
7112 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
7113 }
7114 }
7115
7116 pub fn handle_id(&self) -> usize {
7117 match self {
7118 WorktreeHandle::Strong(handle) => handle.id(),
7119 WorktreeHandle::Weak(handle) => handle.id(),
7120 }
7121 }
7122}
7123
7124impl OpenBuffer {
7125 pub fn upgrade(&self, cx: &impl BorrowAppContext) -> Option<ModelHandle<Buffer>> {
7126 match self {
7127 OpenBuffer::Strong(handle) => Some(handle.clone()),
7128 OpenBuffer::Weak(handle) => handle.upgrade(cx),
7129 OpenBuffer::Operations(_) => None,
7130 }
7131 }
7132}
7133
7134pub struct PathMatchCandidateSet {
7135 pub snapshot: Snapshot,
7136 pub include_ignored: bool,
7137 pub include_root_name: bool,
7138}
7139
7140impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
7141 type Candidates = PathMatchCandidateSetIter<'a>;
7142
7143 fn id(&self) -> usize {
7144 self.snapshot.id().to_usize()
7145 }
7146
7147 fn len(&self) -> usize {
7148 if self.include_ignored {
7149 self.snapshot.file_count()
7150 } else {
7151 self.snapshot.visible_file_count()
7152 }
7153 }
7154
7155 fn prefix(&self) -> Arc<str> {
7156 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
7157 self.snapshot.root_name().into()
7158 } else if self.include_root_name {
7159 format!("{}/", self.snapshot.root_name()).into()
7160 } else {
7161 "".into()
7162 }
7163 }
7164
7165 fn candidates(&'a self, start: usize) -> Self::Candidates {
7166 PathMatchCandidateSetIter {
7167 traversal: self.snapshot.files(self.include_ignored, start),
7168 }
7169 }
7170}
7171
7172pub struct PathMatchCandidateSetIter<'a> {
7173 traversal: Traversal<'a>,
7174}
7175
7176impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
7177 type Item = fuzzy::PathMatchCandidate<'a>;
7178
7179 fn next(&mut self) -> Option<Self::Item> {
7180 self.traversal.next().map(|entry| {
7181 if let EntryKind::File(char_bag) = entry.kind {
7182 fuzzy::PathMatchCandidate {
7183 path: &entry.path,
7184 char_bag,
7185 }
7186 } else {
7187 unreachable!()
7188 }
7189 })
7190 }
7191}
7192
7193impl Entity for Project {
7194 type Event = Event;
7195
7196 fn release(&mut self, cx: &mut gpui::AppContext) {
7197 match &self.client_state {
7198 Some(ProjectClientState::Local { .. }) => {
7199 let _ = self.unshare_internal(cx);
7200 }
7201 Some(ProjectClientState::Remote { remote_id, .. }) => {
7202 let _ = self.client.send(proto::LeaveProject {
7203 project_id: *remote_id,
7204 });
7205 self.disconnected_from_host_internal(cx);
7206 }
7207 _ => {}
7208 }
7209 }
7210
7211 fn app_will_quit(
7212 &mut self,
7213 _: &mut AppContext,
7214 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
7215 let shutdown_futures = self
7216 .language_servers
7217 .drain()
7218 .map(|(_, server_state)| async {
7219 match server_state {
7220 LanguageServerState::Running { server, .. } => server.shutdown()?.await,
7221 LanguageServerState::Starting(starting_server) => {
7222 starting_server.await?.shutdown()?.await
7223 }
7224 }
7225 })
7226 .collect::<Vec<_>>();
7227
7228 Some(
7229 async move {
7230 futures::future::join_all(shutdown_futures).await;
7231 }
7232 .boxed(),
7233 )
7234 }
7235}
7236
7237impl Collaborator {
7238 fn from_proto(message: proto::Collaborator) -> Result<Self> {
7239 Ok(Self {
7240 peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
7241 replica_id: message.replica_id as ReplicaId,
7242 })
7243 }
7244}
7245
7246impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
7247 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
7248 Self {
7249 worktree_id,
7250 path: path.as_ref().into(),
7251 }
7252 }
7253}
7254
7255impl ProjectLspAdapterDelegate {
7256 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
7257 Arc::new(Self {
7258 project: cx.handle(),
7259 http_client: project.client.http_client(),
7260 })
7261 }
7262}
7263
7264impl LspAdapterDelegate for ProjectLspAdapterDelegate {
7265 fn show_notification(&self, message: &str, cx: &mut AppContext) {
7266 self.project
7267 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
7268 }
7269
7270 fn http_client(&self) -> Arc<dyn HttpClient> {
7271 self.http_client.clone()
7272 }
7273}
7274
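// Splits operations into chunks of at most `CHUNK_SIZE` (100 normally, 5 under test) so they
// can be sent as separate `UpdateBuffer` messages; for example, 230 operations are yielded as
// chunks of 100, 100, and 30. Note that the iterator always ends with one empty chunk, and
// yields a single empty chunk when `operations` is empty.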
7275fn split_operations(
7276 mut operations: Vec<proto::Operation>,
7277) -> impl Iterator<Item = Vec<proto::Operation>> {
7278 #[cfg(any(test, feature = "test-support"))]
7279 const CHUNK_SIZE: usize = 5;
7280
7281 #[cfg(not(any(test, feature = "test-support")))]
7282 const CHUNK_SIZE: usize = 100;
7283
7284 let mut done = false;
7285 std::iter::from_fn(move || {
7286 if done {
7287 return None;
7288 }
7289
7290 let operations = operations
7291 .drain(..cmp::min(CHUNK_SIZE, operations.len()))
7292 .collect::<Vec<_>>();
7293 if operations.is_empty() {
7294 done = true;
7295 }
7296 Some(operations)
7297 })
7298}
7299
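// Converts a `Symbol` into its protobuf representation. As with deserialization, the symbol
// kind is transmuted on the assumption that it shares a representation with the proto integer.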
7300fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
7301 proto::Symbol {
7302 language_server_name: symbol.language_server_name.0.to_string(),
7303 source_worktree_id: symbol.source_worktree_id.to_proto(),
7304 worktree_id: symbol.path.worktree_id.to_proto(),
7305 path: symbol.path.path.to_string_lossy().to_string(),
7306 name: symbol.name.clone(),
7307 kind: unsafe { mem::transmute(symbol.kind) },
7308 start: Some(proto::PointUtf16 {
7309 row: symbol.range.start.0.row,
7310 column: symbol.range.start.0.column,
7311 }),
7312 end: Some(proto::PointUtf16 {
7313 row: symbol.range.end.0.row,
7314 column: symbol.range.end.0.column,
7315 }),
7316 signature: symbol.signature.to_vec(),
7317 }
7318}
7319
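// Computes `path` relative to `base`, emitting `..` components where the two diverge.
// Illustrative example: relativize_path(Path::new("/a/b"), Path::new("/a/c/d")) yields "../c/d".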
7320fn relativize_path(base: &Path, path: &Path) -> PathBuf {
7321 let mut path_components = path.components();
7322 let mut base_components = base.components();
7323 let mut components: Vec<Component> = Vec::new();
7324 loop {
7325 match (path_components.next(), base_components.next()) {
7326 (None, None) => break,
7327 (Some(a), None) => {
7328 components.push(a);
7329 components.extend(path_components.by_ref());
7330 break;
7331 }
7332 (None, _) => components.push(Component::ParentDir),
7333 (Some(a), Some(b)) if components.is_empty() && a == b => (),
7334 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
7335 (Some(a), Some(_)) => {
7336 components.push(Component::ParentDir);
7337 for _ in base_components {
7338 components.push(Component::ParentDir);
7339 }
7340 components.push(a);
7341 components.extend(path_components.by_ref());
7342 break;
7343 }
7344 }
7345 }
7346 components.iter().map(|c| c.as_os_str()).collect()
7347}
7348
7349impl Item for Buffer {
7350 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
7351 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
7352 }
7353
7354 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
7355 File::from_dyn(self.file()).map(|file| ProjectPath {
7356 worktree_id: file.worktree_id(cx),
7357 path: file.path().clone(),
7358 })
7359 }
7360}
7361
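// Waits on the watch channel associated with a buffer that is currently being loaded,
// returning a clone of the loaded buffer or of the error once loading completes.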
7362async fn wait_for_loading_buffer(
7363 mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
7364) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
7365 loop {
7366 if let Some(result) = receiver.borrow().as_ref() {
7367 match result {
7368 Ok(buffer) => return Ok(buffer.to_owned()),
7369 Err(e) => return Err(e.to_owned()),
7370 }
7371 }
7372 receiver.next().await;
7373 }
7374}