1mod ignore;
2mod lsp_command;
3mod project_settings;
4pub mod search;
5pub mod terminals;
6pub mod worktree;
7
8#[cfg(test)]
9mod project_tests;
10
11use anyhow::{anyhow, Context, Result};
12use client::{proto, Client, TypedEnvelope, UserStore};
13use clock::ReplicaId;
14use collections::{hash_map, BTreeMap, HashMap, HashSet};
15use copilot::Copilot;
16use futures::{
17 channel::mpsc::{self, UnboundedReceiver},
18 future::{try_join_all, Shared},
19 stream::FuturesUnordered,
20 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
21};
22use globset::{Glob, GlobSet, GlobSetBuilder};
23use gpui::{
24 AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity, ModelContext,
25 ModelHandle, Task, WeakModelHandle,
26};
27use language::{
28 language_settings::{all_language_settings, language_settings, FormatOnSave, Formatter},
29 point_to_lsp,
30 proto::{
31 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
32 serialize_anchor, serialize_version,
33 },
34 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
35 Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
36 Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch,
37 PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16,
38 Transaction, Unclipped,
39};
40use lsp::{
41 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
42 DocumentHighlightKind, LanguageServer, LanguageServerId,
43};
44use lsp_command::*;
45use postage::watch;
46use project_settings::ProjectSettings;
47use rand::prelude::*;
48use search::SearchQuery;
49use serde::Serialize;
50use settings::SettingsStore;
51use sha2::{Digest, Sha256};
52use similar::{ChangeTag, TextDiff};
53use std::{
54 cell::RefCell,
55 cmp::{self, Ordering},
56 convert::TryInto,
57 hash::Hash,
58 mem,
59 num::NonZeroU32,
60 ops::Range,
61 path::{Component, Path, PathBuf},
62 rc::Rc,
63 str,
64 sync::{
65 atomic::{AtomicUsize, Ordering::SeqCst},
66 Arc,
67 },
68 time::{Duration, Instant, SystemTime},
69};
70use terminals::Terminals;
71use util::{debug_panic, defer, merge_json_value_into, post_inc, ResultExt, TryFutureExt as _};
72
73pub use fs::*;
74pub use worktree::*;
75
76pub trait Item {
77 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
78 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
79}
80
// Language server state is stored across three collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState, which can either be a task for a
//         server in the process of starting, or a running server with adapter and language server arcs
//     language_server_ids => a mapping from worktree id and server name to the unique server id
//     language_server_statuses => a mapping from unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library, so language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, the id map is checked first to make sure a server isn't already
// running for that worktree. If there is one, the task finishes early. Otherwise, a new id is allocated
// and the Starting variant of LanguageServerState is stored in the language_servers map.
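//
// For example, resolving the running server for a given worktree and adapter follows roughly this
// shape (a sketch only; the real lookups live in methods such as `language_servers_for_buffer`
// further down in this file):
//
//     let server_id = self.language_server_ids.get(&(worktree_id, adapter.name.clone()))?;
//     match self.language_servers.get(server_id)? {
//         LanguageServerState::Running { server, .. } => Some(server.clone()),
//         LanguageServerState::Starting(_) => None,
//     }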
95pub struct Project {
96 worktrees: Vec<WorktreeHandle>,
97 active_entry: Option<ProjectEntryId>,
98 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
99 languages: Arc<LanguageRegistry>,
100 language_servers: HashMap<LanguageServerId, LanguageServerState>,
101 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
102 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
103 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
104 client: Arc<client::Client>,
105 next_entry_id: Arc<AtomicUsize>,
106 join_project_response_message_id: u32,
107 next_diagnostic_group_id: usize,
108 user_store: ModelHandle<UserStore>,
109 fs: Arc<dyn Fs>,
110 client_state: Option<ProjectClientState>,
111 collaborators: HashMap<proto::PeerId, Collaborator>,
112 client_subscriptions: Vec<client::Subscription>,
113 _subscriptions: Vec<gpui::Subscription>,
114 next_buffer_id: u64,
115 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
116 shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
117 #[allow(clippy::type_complexity)]
118 loading_buffers_by_path: HashMap<
119 ProjectPath,
120 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
121 >,
122 #[allow(clippy::type_complexity)]
123 loading_local_worktrees:
124 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
125 opened_buffers: HashMap<u64, OpenBuffer>,
126 local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
127 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A mapping from a buffer ID to `None` means that we've started waiting for that buffer but
    /// haven't finished loading it yet. Used for re-issuing buffer requests when peers temporarily
    /// disconnect.
130 incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
131 buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
132 buffers_being_formatted: HashSet<u64>,
133 nonce: u128,
134 _maintain_buffer_languages: Task<()>,
135 _maintain_workspace_config: Task<()>,
136 terminals: Terminals,
137 copilot_enabled: bool,
138}
139
140struct LspBufferSnapshot {
141 version: i32,
142 snapshot: TextBufferSnapshot,
143}
144
145/// Message ordered with respect to buffer operations
146enum BufferOrderedMessage {
147 Operation {
148 buffer_id: u64,
149 operation: proto::Operation,
150 },
151 LanguageServerUpdate {
152 language_server_id: LanguageServerId,
153 message: proto::update_language_server::Variant,
154 },
155 Resync,
156}
157
158enum LocalProjectUpdate {
159 WorktreesChanged,
160 CreateBufferForPeer {
161 peer_id: proto::PeerId,
162 buffer_id: u64,
163 },
164}
165
166enum OpenBuffer {
167 Strong(ModelHandle<Buffer>),
168 Weak(WeakModelHandle<Buffer>),
169 Operations(Vec<Operation>),
170}
171
172enum WorktreeHandle {
173 Strong(ModelHandle<Worktree>),
174 Weak(WeakModelHandle<Worktree>),
175}
176
177enum ProjectClientState {
178 Local {
179 remote_id: u64,
180 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
181 _send_updates: Task<()>,
182 },
183 Remote {
184 sharing_has_stopped: bool,
185 remote_id: u64,
186 replica_id: ReplicaId,
187 },
188}
189
190#[derive(Clone, Debug)]
191pub struct Collaborator {
192 pub peer_id: proto::PeerId,
193 pub replica_id: ReplicaId,
194}
195
196#[derive(Clone, Debug, PartialEq, Eq)]
197pub enum Event {
198 LanguageServerAdded(LanguageServerId),
199 LanguageServerRemoved(LanguageServerId),
200 ActiveEntryChanged(Option<ProjectEntryId>),
201 WorktreeAdded,
202 WorktreeRemoved(WorktreeId),
203 DiskBasedDiagnosticsStarted {
204 language_server_id: LanguageServerId,
205 },
206 DiskBasedDiagnosticsFinished {
207 language_server_id: LanguageServerId,
208 },
209 DiagnosticsUpdated {
210 path: ProjectPath,
211 language_server_id: LanguageServerId,
212 },
213 RemoteIdChanged(Option<u64>),
214 DisconnectedFromHost,
215 Closed,
216 DeletedEntry(ProjectEntryId),
217 CollaboratorUpdated {
218 old_peer_id: proto::PeerId,
219 new_peer_id: proto::PeerId,
220 },
221 CollaboratorLeft(proto::PeerId),
222}
223
224pub enum LanguageServerState {
225 Starting(Task<Option<Arc<LanguageServer>>>),
226 Running {
227 language: Arc<Language>,
228 adapter: Arc<CachedLspAdapter>,
229 server: Arc<LanguageServer>,
230 watched_paths: HashMap<WorktreeId, GlobSet>,
231 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
232 },
233}
234
235#[derive(Serialize)]
236pub struct LanguageServerStatus {
237 pub name: String,
238 pub pending_work: BTreeMap<String, LanguageServerProgress>,
239 pub has_pending_diagnostic_updates: bool,
240 progress_tokens: HashSet<String>,
241}
242
243#[derive(Clone, Debug, Serialize)]
244pub struct LanguageServerProgress {
245 pub message: Option<String>,
246 pub percentage: Option<usize>,
247 #[serde(skip_serializing)]
248 pub last_update_at: Instant,
249}
250
251#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
252pub struct ProjectPath {
253 pub worktree_id: WorktreeId,
254 pub path: Arc<Path>,
255}
256
257#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
258pub struct DiagnosticSummary {
259 pub error_count: usize,
260 pub warning_count: usize,
261}
262
263#[derive(Debug, Clone)]
264pub struct Location {
265 pub buffer: ModelHandle<Buffer>,
266 pub range: Range<language::Anchor>,
267}
268
269#[derive(Debug, Clone)]
270pub struct LocationLink {
271 pub origin: Option<Location>,
272 pub target: Location,
273}
274
275#[derive(Debug)]
276pub struct DocumentHighlight {
277 pub range: Range<language::Anchor>,
278 pub kind: DocumentHighlightKind,
279}
280
281#[derive(Clone, Debug)]
282pub struct Symbol {
283 pub language_server_name: LanguageServerName,
284 pub source_worktree_id: WorktreeId,
285 pub path: ProjectPath,
286 pub label: CodeLabel,
287 pub name: String,
288 pub kind: lsp::SymbolKind,
289 pub range: Range<Unclipped<PointUtf16>>,
290 pub signature: [u8; 32],
291}
292
293#[derive(Clone, Debug, PartialEq)]
294pub struct HoverBlock {
295 pub text: String,
296 pub kind: HoverBlockKind,
297}
298
299#[derive(Clone, Debug, PartialEq)]
300pub enum HoverBlockKind {
301 PlainText,
302 Markdown,
303 Code { language: String },
304}
305
306#[derive(Debug)]
307pub struct Hover {
308 pub contents: Vec<HoverBlock>,
309 pub range: Option<Range<language::Anchor>>,
310}
311
312#[derive(Default)]
313pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
314
315impl DiagnosticSummary {
316 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
317 let mut this = Self {
318 error_count: 0,
319 warning_count: 0,
320 };
321
322 for entry in diagnostics {
323 if entry.diagnostic.is_primary {
324 match entry.diagnostic.severity {
325 DiagnosticSeverity::ERROR => this.error_count += 1,
326 DiagnosticSeverity::WARNING => this.warning_count += 1,
327 _ => {}
328 }
329 }
330 }
331
332 this
333 }
334
335 pub fn is_empty(&self) -> bool {
336 self.error_count == 0 && self.warning_count == 0
337 }
338
339 pub fn to_proto(
340 &self,
341 language_server_id: LanguageServerId,
342 path: &Path,
343 ) -> proto::DiagnosticSummary {
344 proto::DiagnosticSummary {
345 path: path.to_string_lossy().to_string(),
346 language_server_id: language_server_id.0 as u64,
347 error_count: self.error_count as u32,
348 warning_count: self.warning_count as u32,
349 }
350 }
351}
352
353#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
354pub struct ProjectEntryId(usize);
355
356impl ProjectEntryId {
357 pub const MAX: Self = Self(usize::MAX);
358
359 pub fn new(counter: &AtomicUsize) -> Self {
360 Self(counter.fetch_add(1, SeqCst))
361 }
362
363 pub fn from_proto(id: u64) -> Self {
364 Self(id as usize)
365 }
366
367 pub fn to_proto(&self) -> u64 {
368 self.0 as u64
369 }
370
371 pub fn to_usize(&self) -> usize {
372 self.0
373 }
374}
375
376#[derive(Debug, Clone, Copy, PartialEq, Eq)]
377pub enum FormatTrigger {
378 Save,
379 Manual,
380}
381
382impl FormatTrigger {
383 fn from_proto(value: i32) -> FormatTrigger {
384 match value {
385 0 => FormatTrigger::Save,
386 1 => FormatTrigger::Manual,
387 _ => FormatTrigger::Save,
388 }
389 }
390}
391
392impl Project {
393 pub fn init_settings(cx: &mut AppContext) {
394 settings::register::<ProjectSettings>(cx);
395 }
396
397 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
398 Self::init_settings(cx);
399
400 client.add_model_message_handler(Self::handle_add_collaborator);
401 client.add_model_message_handler(Self::handle_update_project_collaborator);
402 client.add_model_message_handler(Self::handle_remove_collaborator);
403 client.add_model_message_handler(Self::handle_buffer_reloaded);
404 client.add_model_message_handler(Self::handle_buffer_saved);
405 client.add_model_message_handler(Self::handle_start_language_server);
406 client.add_model_message_handler(Self::handle_update_language_server);
407 client.add_model_message_handler(Self::handle_update_project);
408 client.add_model_message_handler(Self::handle_unshare_project);
409 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
410 client.add_model_message_handler(Self::handle_update_buffer_file);
411 client.add_model_request_handler(Self::handle_update_buffer);
412 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
413 client.add_model_message_handler(Self::handle_update_worktree);
414 client.add_model_request_handler(Self::handle_create_project_entry);
415 client.add_model_request_handler(Self::handle_rename_project_entry);
416 client.add_model_request_handler(Self::handle_copy_project_entry);
417 client.add_model_request_handler(Self::handle_delete_project_entry);
418 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
419 client.add_model_request_handler(Self::handle_apply_code_action);
420 client.add_model_request_handler(Self::handle_on_type_formatting);
421 client.add_model_request_handler(Self::handle_reload_buffers);
422 client.add_model_request_handler(Self::handle_synchronize_buffers);
423 client.add_model_request_handler(Self::handle_format_buffers);
424 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
425 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
426 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
427 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
428 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
429 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
430 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
431 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
432 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
433 client.add_model_request_handler(Self::handle_search_project);
434 client.add_model_request_handler(Self::handle_get_project_symbols);
435 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
436 client.add_model_request_handler(Self::handle_open_buffer_by_id);
437 client.add_model_request_handler(Self::handle_open_buffer_by_path);
438 client.add_model_request_handler(Self::handle_save_buffer);
439 client.add_model_message_handler(Self::handle_update_diff_base);
440 }
441
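    /// Creates a project backed by the local filesystem.
    ///
    /// A minimal usage sketch, mirroring what `Project::test` does below; `client`, `user_store`,
    /// `languages`, and `fs` are assumed to be constructed elsewhere, and the worktree path is a
    /// placeholder:
    ///
    /// ```ignore
    /// let project = cx.update(|cx| Project::local(client, user_store, languages, fs, cx));
    /// let (worktree, _) = project
    ///     .update(cx, |project, cx| {
    ///         project.find_or_create_local_worktree(Path::new("/path/to/repo"), true, cx)
    ///     })
    ///     .await?;
    /// ```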
442 pub fn local(
443 client: Arc<Client>,
444 user_store: ModelHandle<UserStore>,
445 languages: Arc<LanguageRegistry>,
446 fs: Arc<dyn Fs>,
447 cx: &mut AppContext,
448 ) -> ModelHandle<Self> {
449 cx.add_model(|cx: &mut ModelContext<Self>| {
450 let (tx, rx) = mpsc::unbounded();
451 cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
452 .detach();
453 Self {
454 worktrees: Default::default(),
455 buffer_ordered_messages_tx: tx,
456 collaborators: Default::default(),
457 next_buffer_id: 0,
458 opened_buffers: Default::default(),
459 shared_buffers: Default::default(),
460 incomplete_remote_buffers: Default::default(),
461 loading_buffers_by_path: Default::default(),
462 loading_local_worktrees: Default::default(),
463 local_buffer_ids_by_path: Default::default(),
464 local_buffer_ids_by_entry_id: Default::default(),
465 buffer_snapshots: Default::default(),
466 join_project_response_message_id: 0,
467 client_state: None,
468 opened_buffer: watch::channel(),
469 client_subscriptions: Vec::new(),
470 _subscriptions: vec![
471 cx.observe_global::<SettingsStore, _>(Self::on_settings_changed)
472 ],
473 _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
474 _maintain_workspace_config: Self::maintain_workspace_config(languages.clone(), cx),
475 active_entry: None,
476 languages,
477 client,
478 user_store,
479 fs,
480 next_entry_id: Default::default(),
481 next_diagnostic_group_id: Default::default(),
482 language_servers: Default::default(),
483 language_server_ids: Default::default(),
484 language_server_statuses: Default::default(),
485 last_workspace_edits_by_language_server: Default::default(),
486 buffers_being_formatted: Default::default(),
487 nonce: StdRng::from_entropy().gen(),
488 terminals: Terminals {
489 local_handles: Vec::new(),
490 },
491 copilot_enabled: Copilot::global(cx).is_some(),
492 }
493 })
494 }
495
496 pub async fn remote(
497 remote_id: u64,
498 client: Arc<Client>,
499 user_store: ModelHandle<UserStore>,
500 languages: Arc<LanguageRegistry>,
501 fs: Arc<dyn Fs>,
502 mut cx: AsyncAppContext,
503 ) -> Result<ModelHandle<Self>> {
504 client.authenticate_and_connect(true, &cx).await?;
505
506 let subscription = client.subscribe_to_entity(remote_id)?;
507 let response = client
508 .request_envelope(proto::JoinProject {
509 project_id: remote_id,
510 })
511 .await?;
512 let this = cx.add_model(|cx| {
513 let replica_id = response.payload.replica_id as ReplicaId;
514
515 let mut worktrees = Vec::new();
516 for worktree in response.payload.worktrees {
517 let worktree = cx.update(|cx| {
518 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
519 });
520 worktrees.push(worktree);
521 }
522
523 let (tx, rx) = mpsc::unbounded();
524 cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
525 .detach();
526 let mut this = Self {
527 worktrees: Vec::new(),
528 buffer_ordered_messages_tx: tx,
529 loading_buffers_by_path: Default::default(),
530 next_buffer_id: 0,
531 opened_buffer: watch::channel(),
532 shared_buffers: Default::default(),
533 incomplete_remote_buffers: Default::default(),
534 loading_local_worktrees: Default::default(),
535 local_buffer_ids_by_path: Default::default(),
536 local_buffer_ids_by_entry_id: Default::default(),
537 active_entry: None,
538 collaborators: Default::default(),
539 join_project_response_message_id: response.message_id,
540 _maintain_buffer_languages: Self::maintain_buffer_languages(&languages, cx),
541 _maintain_workspace_config: Self::maintain_workspace_config(languages.clone(), cx),
542 languages,
543 user_store: user_store.clone(),
544 fs,
545 next_entry_id: Default::default(),
546 next_diagnostic_group_id: Default::default(),
547 client_subscriptions: Default::default(),
548 _subscriptions: Default::default(),
549 client: client.clone(),
550 client_state: Some(ProjectClientState::Remote {
551 sharing_has_stopped: false,
552 remote_id,
553 replica_id,
554 }),
555 language_servers: Default::default(),
556 language_server_ids: Default::default(),
557 language_server_statuses: response
558 .payload
559 .language_servers
560 .into_iter()
561 .map(|server| {
562 (
563 LanguageServerId(server.id as usize),
564 LanguageServerStatus {
565 name: server.name,
566 pending_work: Default::default(),
567 has_pending_diagnostic_updates: false,
568 progress_tokens: Default::default(),
569 },
570 )
571 })
572 .collect(),
573 last_workspace_edits_by_language_server: Default::default(),
574 opened_buffers: Default::default(),
575 buffers_being_formatted: Default::default(),
576 buffer_snapshots: Default::default(),
577 nonce: StdRng::from_entropy().gen(),
578 terminals: Terminals {
579 local_handles: Vec::new(),
580 },
581 copilot_enabled: Copilot::global(cx).is_some(),
582 };
583 for worktree in worktrees {
584 let _ = this.add_worktree(&worktree, cx);
585 }
586 this
587 });
588 let subscription = subscription.set_model(&this, &mut cx);
589
590 let user_ids = response
591 .payload
592 .collaborators
593 .iter()
594 .map(|peer| peer.user_id)
595 .collect();
596 user_store
597 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
598 .await?;
599
600 this.update(&mut cx, |this, cx| {
601 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
602 this.client_subscriptions.push(subscription);
603 anyhow::Ok(())
604 })?;
605
606 Ok(this)
607 }
608
609 #[cfg(any(test, feature = "test-support"))]
610 pub async fn test(
611 fs: Arc<dyn Fs>,
612 root_paths: impl IntoIterator<Item = &Path>,
613 cx: &mut gpui::TestAppContext,
614 ) -> ModelHandle<Project> {
615 let mut languages = LanguageRegistry::test();
616 languages.set_executor(cx.background());
617 let http_client = util::http::FakeHttpClient::with_404_response();
618 let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
619 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
620 let project =
621 cx.update(|cx| Project::local(client, user_store, Arc::new(languages), fs, cx));
622 for path in root_paths {
623 let (tree, _) = project
624 .update(cx, |project, cx| {
625 project.find_or_create_local_worktree(path, true, cx)
626 })
627 .await
628 .unwrap();
629 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
630 .await;
631 }
632 project
633 }
634
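    /// Responds to a change in the global settings: starts language servers for languages that
    /// have just had them enabled, stops servers for languages that have just had them disabled,
    /// and registers already-open buffers with Copilot if it has become available since the last
    /// settings change.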
635 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
636 let settings = all_language_settings(cx);
637
638 let mut language_servers_to_start = Vec::new();
639 for buffer in self.opened_buffers.values() {
640 if let Some(buffer) = buffer.upgrade(cx) {
641 let buffer = buffer.read(cx);
642 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
643 {
644 if settings
645 .language(Some(&language.name()))
646 .enable_language_server
647 {
648 let worktree = file.worktree.read(cx);
649 language_servers_to_start.push((
650 worktree.id(),
651 worktree.as_local().unwrap().abs_path().clone(),
652 language.clone(),
653 ));
654 }
655 }
656 }
657 }
658
659 let mut language_servers_to_stop = Vec::new();
660 for language in self.languages.to_vec() {
661 for lsp_adapter in language.lsp_adapters() {
662 if !settings
663 .language(Some(&language.name()))
664 .enable_language_server
665 {
666 let lsp_name = &lsp_adapter.name;
667 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
668 if lsp_name == started_lsp_name {
669 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
670 }
671 }
672 }
673 }
674 }
675
676 // Stop all newly-disabled language servers.
677 for (worktree_id, adapter_name) in language_servers_to_stop {
678 self.stop_language_server(worktree_id, adapter_name, cx)
679 .detach();
680 }
681
682 // Start all the newly-enabled language servers.
683 for (worktree_id, worktree_path, language) in language_servers_to_start {
684 self.start_language_servers(worktree_id, worktree_path, language, cx);
685 }
686
687 if !self.copilot_enabled && Copilot::global(cx).is_some() {
688 self.copilot_enabled = true;
689 for buffer in self.opened_buffers.values() {
690 if let Some(buffer) = buffer.upgrade(cx) {
691 self.register_buffer_with_copilot(&buffer, cx);
692 }
693 }
694 }
695
696 cx.notify();
697 }
698
699 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
700 self.opened_buffers
701 .get(&remote_id)
702 .and_then(|buffer| buffer.upgrade(cx))
703 }
704
705 pub fn languages(&self) -> &Arc<LanguageRegistry> {
706 &self.languages
707 }
708
709 pub fn client(&self) -> Arc<Client> {
710 self.client.clone()
711 }
712
713 pub fn user_store(&self) -> ModelHandle<UserStore> {
714 self.user_store.clone()
715 }
716
717 #[cfg(any(test, feature = "test-support"))]
718 pub fn opened_buffers(&self, cx: &AppContext) -> Vec<ModelHandle<Buffer>> {
719 self.opened_buffers
720 .values()
721 .filter_map(|b| b.upgrade(cx))
722 .collect()
723 }
724
725 #[cfg(any(test, feature = "test-support"))]
726 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
727 let path = path.into();
728 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
729 self.opened_buffers.iter().any(|(_, buffer)| {
730 if let Some(buffer) = buffer.upgrade(cx) {
731 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
732 if file.worktree == worktree && file.path() == &path.path {
733 return true;
734 }
735 }
736 }
737 false
738 })
739 } else {
740 false
741 }
742 }
743
744 pub fn fs(&self) -> &Arc<dyn Fs> {
745 &self.fs
746 }
747
748 pub fn remote_id(&self) -> Option<u64> {
749 match self.client_state.as_ref()? {
750 ProjectClientState::Local { remote_id, .. }
751 | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
752 }
753 }
754
755 pub fn replica_id(&self) -> ReplicaId {
756 match &self.client_state {
757 Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
758 _ => 0,
759 }
760 }
761
762 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
763 if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
764 updates_tx
765 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
766 .ok();
767 }
768 cx.notify();
769 }
770
771 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
772 &self.collaborators
773 }
774
    /// Collects all worktrees, including ones that don't appear in the project panel.
776 pub fn worktrees<'a>(
777 &'a self,
778 cx: &'a AppContext,
779 ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
780 self.worktrees
781 .iter()
782 .filter_map(move |worktree| worktree.upgrade(cx))
783 }
784
    /// Collects all user-visible worktrees, i.e. the ones that appear in the project panel.
786 pub fn visible_worktrees<'a>(
787 &'a self,
788 cx: &'a AppContext,
789 ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
790 self.worktrees.iter().filter_map(|worktree| {
791 worktree.upgrade(cx).and_then(|worktree| {
792 if worktree.read(cx).is_visible() {
793 Some(worktree)
794 } else {
795 None
796 }
797 })
798 })
799 }
800
801 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
802 self.visible_worktrees(cx)
803 .map(|tree| tree.read(cx).root_name())
804 }
805
806 pub fn worktree_for_id(
807 &self,
808 id: WorktreeId,
809 cx: &AppContext,
810 ) -> Option<ModelHandle<Worktree>> {
811 self.worktrees(cx)
812 .find(|worktree| worktree.read(cx).id() == id)
813 }
814
815 pub fn worktree_for_entry(
816 &self,
817 entry_id: ProjectEntryId,
818 cx: &AppContext,
819 ) -> Option<ModelHandle<Worktree>> {
820 self.worktrees(cx)
821 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
822 }
823
824 pub fn worktree_id_for_entry(
825 &self,
826 entry_id: ProjectEntryId,
827 cx: &AppContext,
828 ) -> Option<WorktreeId> {
829 self.worktree_for_entry(entry_id, cx)
830 .map(|worktree| worktree.read(cx).id())
831 }
832
833 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
834 paths.iter().all(|path| self.contains_path(path, cx))
835 }
836
837 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
838 for worktree in self.worktrees(cx) {
839 let worktree = worktree.read(cx).as_local();
840 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
841 return true;
842 }
843 }
844 false
845 }
846
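    /// Creates a new file or directory entry in the worktree containing `project_path`, either
    /// directly for local projects or via an RPC request to the host for remote projects.
    /// Returns `None` if no worktree with the given id exists.
    ///
    /// A hedged usage sketch; `worktree_id` and the path are placeholders:
    ///
    /// ```ignore
    /// let entry = project
    ///     .update(cx, |project, cx| {
    ///         project.create_entry((worktree_id, Path::new("src/new_module.rs")), false, cx)
    ///     })
    ///     .unwrap()
    ///     .await?;
    /// ```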
847 pub fn create_entry(
848 &mut self,
849 project_path: impl Into<ProjectPath>,
850 is_directory: bool,
851 cx: &mut ModelContext<Self>,
852 ) -> Option<Task<Result<Entry>>> {
853 let project_path = project_path.into();
854 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
855 if self.is_local() {
856 Some(worktree.update(cx, |worktree, cx| {
857 worktree
858 .as_local_mut()
859 .unwrap()
860 .create_entry(project_path.path, is_directory, cx)
861 }))
862 } else {
863 let client = self.client.clone();
864 let project_id = self.remote_id().unwrap();
865 Some(cx.spawn_weak(|_, mut cx| async move {
866 let response = client
867 .request(proto::CreateProjectEntry {
868 worktree_id: project_path.worktree_id.to_proto(),
869 project_id,
870 path: project_path.path.to_string_lossy().into(),
871 is_directory,
872 })
873 .await?;
874 let entry = response
875 .entry
876 .ok_or_else(|| anyhow!("missing entry in response"))?;
877 worktree
878 .update(&mut cx, |worktree, cx| {
879 worktree.as_remote_mut().unwrap().insert_entry(
880 entry,
881 response.worktree_scan_id as usize,
882 cx,
883 )
884 })
885 .await
886 }))
887 }
888 }
889
890 pub fn copy_entry(
891 &mut self,
892 entry_id: ProjectEntryId,
893 new_path: impl Into<Arc<Path>>,
894 cx: &mut ModelContext<Self>,
895 ) -> Option<Task<Result<Entry>>> {
896 let worktree = self.worktree_for_entry(entry_id, cx)?;
897 let new_path = new_path.into();
898 if self.is_local() {
899 worktree.update(cx, |worktree, cx| {
900 worktree
901 .as_local_mut()
902 .unwrap()
903 .copy_entry(entry_id, new_path, cx)
904 })
905 } else {
906 let client = self.client.clone();
907 let project_id = self.remote_id().unwrap();
908
909 Some(cx.spawn_weak(|_, mut cx| async move {
910 let response = client
911 .request(proto::CopyProjectEntry {
912 project_id,
913 entry_id: entry_id.to_proto(),
914 new_path: new_path.to_string_lossy().into(),
915 })
916 .await?;
917 let entry = response
918 .entry
919 .ok_or_else(|| anyhow!("missing entry in response"))?;
920 worktree
921 .update(&mut cx, |worktree, cx| {
922 worktree.as_remote_mut().unwrap().insert_entry(
923 entry,
924 response.worktree_scan_id as usize,
925 cx,
926 )
927 })
928 .await
929 }))
930 }
931 }
932
933 pub fn rename_entry(
934 &mut self,
935 entry_id: ProjectEntryId,
936 new_path: impl Into<Arc<Path>>,
937 cx: &mut ModelContext<Self>,
938 ) -> Option<Task<Result<Entry>>> {
939 let worktree = self.worktree_for_entry(entry_id, cx)?;
940 let new_path = new_path.into();
941 if self.is_local() {
942 worktree.update(cx, |worktree, cx| {
943 worktree
944 .as_local_mut()
945 .unwrap()
946 .rename_entry(entry_id, new_path, cx)
947 })
948 } else {
949 let client = self.client.clone();
950 let project_id = self.remote_id().unwrap();
951
952 Some(cx.spawn_weak(|_, mut cx| async move {
953 let response = client
954 .request(proto::RenameProjectEntry {
955 project_id,
956 entry_id: entry_id.to_proto(),
957 new_path: new_path.to_string_lossy().into(),
958 })
959 .await?;
960 let entry = response
961 .entry
962 .ok_or_else(|| anyhow!("missing entry in response"))?;
963 worktree
964 .update(&mut cx, |worktree, cx| {
965 worktree.as_remote_mut().unwrap().insert_entry(
966 entry,
967 response.worktree_scan_id as usize,
968 cx,
969 )
970 })
971 .await
972 }))
973 }
974 }
975
976 pub fn delete_entry(
977 &mut self,
978 entry_id: ProjectEntryId,
979 cx: &mut ModelContext<Self>,
980 ) -> Option<Task<Result<()>>> {
981 let worktree = self.worktree_for_entry(entry_id, cx)?;
982
983 cx.emit(Event::DeletedEntry(entry_id));
984
985 if self.is_local() {
986 worktree.update(cx, |worktree, cx| {
987 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
988 })
989 } else {
990 let client = self.client.clone();
991 let project_id = self.remote_id().unwrap();
992 Some(cx.spawn_weak(|_, mut cx| async move {
993 let response = client
994 .request(proto::DeleteProjectEntry {
995 project_id,
996 entry_id: entry_id.to_proto(),
997 })
998 .await?;
999 worktree
1000 .update(&mut cx, move |worktree, cx| {
1001 worktree.as_remote_mut().unwrap().delete_entry(
1002 entry_id,
1003 response.worktree_scan_id as usize,
1004 cx,
1005 )
1006 })
1007 .await
1008 }))
1009 }
1010 }
1011
1012 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1013 if self.client_state.is_some() {
1014 return Err(anyhow!("project was already shared"));
1015 }
1016 self.client_subscriptions.push(
1017 self.client
1018 .subscribe_to_entity(project_id)?
1019 .set_model(&cx.handle(), &mut cx.to_async()),
1020 );
1021
1022 for open_buffer in self.opened_buffers.values_mut() {
1023 match open_buffer {
1024 OpenBuffer::Strong(_) => {}
1025 OpenBuffer::Weak(buffer) => {
1026 if let Some(buffer) = buffer.upgrade(cx) {
1027 *open_buffer = OpenBuffer::Strong(buffer);
1028 }
1029 }
1030 OpenBuffer::Operations(_) => unreachable!(),
1031 }
1032 }
1033
1034 for worktree_handle in self.worktrees.iter_mut() {
1035 match worktree_handle {
1036 WorktreeHandle::Strong(_) => {}
1037 WorktreeHandle::Weak(worktree) => {
1038 if let Some(worktree) = worktree.upgrade(cx) {
1039 *worktree_handle = WorktreeHandle::Strong(worktree);
1040 }
1041 }
1042 }
1043 }
1044
1045 for (server_id, status) in &self.language_server_statuses {
1046 self.client
1047 .send(proto::StartLanguageServer {
1048 project_id,
1049 server: Some(proto::LanguageServer {
1050 id: server_id.0 as u64,
1051 name: status.name.clone(),
1052 }),
1053 })
1054 .log_err();
1055 }
1056
1057 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1058 let client = self.client.clone();
1059 self.client_state = Some(ProjectClientState::Local {
1060 remote_id: project_id,
1061 updates_tx,
1062 _send_updates: cx.spawn_weak(move |this, mut cx| async move {
1063 while let Some(update) = updates_rx.next().await {
1064 let Some(this) = this.upgrade(&cx) else { break };
1065
1066 match update {
1067 LocalProjectUpdate::WorktreesChanged => {
1068 let worktrees = this
1069 .read_with(&cx, |this, cx| this.worktrees(cx).collect::<Vec<_>>());
1070 let update_project = this
1071 .read_with(&cx, |this, cx| {
1072 this.client.request(proto::UpdateProject {
1073 project_id,
1074 worktrees: this.worktree_metadata_protos(cx),
1075 })
1076 })
1077 .await;
1078 if update_project.is_ok() {
1079 for worktree in worktrees {
1080 worktree.update(&mut cx, |worktree, cx| {
1081 let worktree = worktree.as_local_mut().unwrap();
1082 worktree.share(project_id, cx).detach_and_log_err(cx)
1083 });
1084 }
1085 }
1086 }
1087 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1088 let buffer = this.update(&mut cx, |this, _| {
1089 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1090 let shared_buffers =
1091 this.shared_buffers.entry(peer_id).or_default();
1092 if shared_buffers.insert(buffer_id) {
1093 if let OpenBuffer::Strong(buffer) = buffer {
1094 Some(buffer.clone())
1095 } else {
1096 None
1097 }
1098 } else {
1099 None
1100 }
1101 });
1102
1103 let Some(buffer) = buffer else { continue };
1104 let operations =
1105 buffer.read_with(&cx, |b, cx| b.serialize_ops(None, cx));
1106 let operations = operations.await;
1107 let state = buffer.read_with(&cx, |buffer, _| buffer.to_proto());
1108
1109 let initial_state = proto::CreateBufferForPeer {
1110 project_id,
1111 peer_id: Some(peer_id),
1112 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1113 };
1114 if client.send(initial_state).log_err().is_some() {
1115 let client = client.clone();
1116 cx.background()
1117 .spawn(async move {
1118 let mut chunks = split_operations(operations).peekable();
1119 while let Some(chunk) = chunks.next() {
1120 let is_last = chunks.peek().is_none();
1121 client.send(proto::CreateBufferForPeer {
1122 project_id,
1123 peer_id: Some(peer_id),
1124 variant: Some(
1125 proto::create_buffer_for_peer::Variant::Chunk(
1126 proto::BufferChunk {
1127 buffer_id,
1128 operations: chunk,
1129 is_last,
1130 },
1131 ),
1132 ),
1133 })?;
1134 }
1135 anyhow::Ok(())
1136 })
1137 .await
1138 .log_err();
1139 }
1140 }
1141 }
1142 }
1143 }),
1144 });
1145
1146 self.metadata_changed(cx);
1147 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1148 cx.notify();
1149 Ok(())
1150 }
1151
1152 pub fn reshared(
1153 &mut self,
1154 message: proto::ResharedProject,
1155 cx: &mut ModelContext<Self>,
1156 ) -> Result<()> {
1157 self.shared_buffers.clear();
1158 self.set_collaborators_from_proto(message.collaborators, cx)?;
1159 self.metadata_changed(cx);
1160 Ok(())
1161 }
1162
1163 pub fn rejoined(
1164 &mut self,
1165 message: proto::RejoinedProject,
1166 message_id: u32,
1167 cx: &mut ModelContext<Self>,
1168 ) -> Result<()> {
1169 self.join_project_response_message_id = message_id;
1170 self.set_worktrees_from_proto(message.worktrees, cx)?;
1171 self.set_collaborators_from_proto(message.collaborators, cx)?;
1172 self.language_server_statuses = message
1173 .language_servers
1174 .into_iter()
1175 .map(|server| {
1176 (
1177 LanguageServerId(server.id as usize),
1178 LanguageServerStatus {
1179 name: server.name,
1180 pending_work: Default::default(),
1181 has_pending_diagnostic_updates: false,
1182 progress_tokens: Default::default(),
1183 },
1184 )
1185 })
1186 .collect();
1187 self.buffer_ordered_messages_tx
1188 .unbounded_send(BufferOrderedMessage::Resync)
1189 .unwrap();
1190 cx.notify();
1191 Ok(())
1192 }
1193
1194 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1195 self.unshare_internal(cx)?;
1196 self.metadata_changed(cx);
1197 cx.notify();
1198 Ok(())
1199 }
1200
1201 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1202 if self.is_remote() {
1203 return Err(anyhow!("attempted to unshare a remote project"));
1204 }
1205
1206 if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
1207 self.collaborators.clear();
1208 self.shared_buffers.clear();
1209 self.client_subscriptions.clear();
1210
1211 for worktree_handle in self.worktrees.iter_mut() {
1212 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1213 let is_visible = worktree.update(cx, |worktree, _| {
1214 worktree.as_local_mut().unwrap().unshare();
1215 worktree.is_visible()
1216 });
1217 if !is_visible {
1218 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1219 }
1220 }
1221 }
1222
1223 for open_buffer in self.opened_buffers.values_mut() {
1224 // Wake up any tasks waiting for peers' edits to this buffer.
1225 if let Some(buffer) = open_buffer.upgrade(cx) {
1226 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1227 }
1228
1229 if let OpenBuffer::Strong(buffer) = open_buffer {
1230 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1231 }
1232 }
1233
1234 self.client.send(proto::UnshareProject {
1235 project_id: remote_id,
1236 })?;
1237
1238 Ok(())
1239 } else {
1240 Err(anyhow!("attempted to unshare an unshared project"))
1241 }
1242 }
1243
1244 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1245 self.disconnected_from_host_internal(cx);
1246 cx.emit(Event::DisconnectedFromHost);
1247 cx.notify();
1248 }
1249
1250 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1251 if let Some(ProjectClientState::Remote {
1252 sharing_has_stopped,
1253 ..
1254 }) = &mut self.client_state
1255 {
1256 *sharing_has_stopped = true;
1257
1258 self.collaborators.clear();
1259
1260 for worktree in &self.worktrees {
1261 if let Some(worktree) = worktree.upgrade(cx) {
1262 worktree.update(cx, |worktree, _| {
1263 if let Some(worktree) = worktree.as_remote_mut() {
1264 worktree.disconnected_from_host();
1265 }
1266 });
1267 }
1268 }
1269
1270 for open_buffer in self.opened_buffers.values_mut() {
1271 // Wake up any tasks waiting for peers' edits to this buffer.
1272 if let Some(buffer) = open_buffer.upgrade(cx) {
1273 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1274 }
1275
1276 if let OpenBuffer::Strong(buffer) = open_buffer {
1277 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1278 }
1279 }
1280
1281 // Wake up all futures currently waiting on a buffer to get opened,
1282 // to give them a chance to fail now that we've disconnected.
1283 *self.opened_buffer.0.borrow_mut() = ();
1284 }
1285 }
1286
1287 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1288 cx.emit(Event::Closed);
1289 }
1290
1291 pub fn is_read_only(&self) -> bool {
1292 match &self.client_state {
1293 Some(ProjectClientState::Remote {
1294 sharing_has_stopped,
1295 ..
1296 }) => *sharing_has_stopped,
1297 _ => false,
1298 }
1299 }
1300
1301 pub fn is_local(&self) -> bool {
1302 match &self.client_state {
1303 Some(ProjectClientState::Remote { .. }) => false,
1304 _ => true,
1305 }
1306 }
1307
1308 pub fn is_remote(&self) -> bool {
1309 !self.is_local()
1310 }
1311
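    /// Creates a new in-memory buffer with the given text and optional language (plain text when
    /// `None`). Only local projects can create buffers; guests receive an error.
    ///
    /// A minimal sketch, assuming it is called from within an update on the project model:
    ///
    /// ```ignore
    /// let buffer = project.create_buffer("", None, cx)?;
    /// ```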
1312 pub fn create_buffer(
1313 &mut self,
1314 text: &str,
1315 language: Option<Arc<Language>>,
1316 cx: &mut ModelContext<Self>,
1317 ) -> Result<ModelHandle<Buffer>> {
1318 if self.is_remote() {
1319 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1320 }
1321
1322 let buffer = cx.add_model(|cx| {
1323 Buffer::new(self.replica_id(), text, cx)
1324 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1325 });
1326 self.register_buffer(&buffer, cx)?;
1327 Ok(buffer)
1328 }
1329
1330 pub fn open_path(
1331 &mut self,
1332 path: impl Into<ProjectPath>,
1333 cx: &mut ModelContext<Self>,
1334 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1335 let task = self.open_buffer(path, cx);
1336 cx.spawn_weak(|_, cx| async move {
1337 let buffer = task.await?;
1338 let project_entry_id = buffer
1339 .read_with(&cx, |buffer, cx| {
1340 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1341 })
1342 .ok_or_else(|| anyhow!("no project entry"))?;
1343
1344 let buffer: &AnyModelHandle = &buffer;
1345 Ok((project_entry_id, buffer.clone()))
1346 })
1347 }
1348
1349 pub fn open_local_buffer(
1350 &mut self,
1351 abs_path: impl AsRef<Path>,
1352 cx: &mut ModelContext<Self>,
1353 ) -> Task<Result<ModelHandle<Buffer>>> {
1354 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1355 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1356 } else {
1357 Task::ready(Err(anyhow!("no such path")))
1358 }
1359 }
1360
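    /// Opens a buffer for the given project path, reusing an already-open buffer or joining an
    /// in-flight load of the same path whenever possible.
    ///
    /// A usage sketch; `worktree_id` and the relative path are placeholders:
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| {
    ///         project.open_buffer((worktree_id, Path::new("src/main.rs")), cx)
    ///     })
    ///     .await?;
    /// ```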
1361 pub fn open_buffer(
1362 &mut self,
1363 path: impl Into<ProjectPath>,
1364 cx: &mut ModelContext<Self>,
1365 ) -> Task<Result<ModelHandle<Buffer>>> {
1366 let project_path = path.into();
1367 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1368 worktree
1369 } else {
1370 return Task::ready(Err(anyhow!("no such worktree")));
1371 };
1372
1373 // If there is already a buffer for the given path, then return it.
1374 let existing_buffer = self.get_open_buffer(&project_path, cx);
1375 if let Some(existing_buffer) = existing_buffer {
1376 return Task::ready(Ok(existing_buffer));
1377 }
1378
1379 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1380 // If the given path is already being loaded, then wait for that existing
1381 // task to complete and return the same buffer.
1382 hash_map::Entry::Occupied(e) => e.get().clone(),
1383
1384 // Otherwise, record the fact that this path is now being loaded.
1385 hash_map::Entry::Vacant(entry) => {
1386 let (mut tx, rx) = postage::watch::channel();
1387 entry.insert(rx.clone());
1388
1389 let load_buffer = if worktree.read(cx).is_local() {
1390 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1391 } else {
1392 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1393 };
1394
1395 cx.spawn(move |this, mut cx| async move {
1396 let load_result = load_buffer.await;
1397 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1398 // Record the fact that the buffer is no longer loading.
1399 this.loading_buffers_by_path.remove(&project_path);
1400 let buffer = load_result.map_err(Arc::new)?;
1401 Ok(buffer)
1402 }));
1403 })
1404 .detach();
1405 rx
1406 }
1407 };
1408
1409 cx.foreground().spawn(async move {
1410 pump_loading_buffer_reciever(loading_watch)
1411 .await
1412 .map_err(|error| anyhow!("{}", error))
1413 })
1414 }
1415
1416 fn open_local_buffer_internal(
1417 &mut self,
1418 path: &Arc<Path>,
1419 worktree: &ModelHandle<Worktree>,
1420 cx: &mut ModelContext<Self>,
1421 ) -> Task<Result<ModelHandle<Buffer>>> {
1422 let buffer_id = post_inc(&mut self.next_buffer_id);
1423 let load_buffer = worktree.update(cx, |worktree, cx| {
1424 let worktree = worktree.as_local_mut().unwrap();
1425 worktree.load_buffer(buffer_id, path, cx)
1426 });
1427 cx.spawn(|this, mut cx| async move {
1428 let buffer = load_buffer.await?;
1429 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1430 Ok(buffer)
1431 })
1432 }
1433
1434 fn open_remote_buffer_internal(
1435 &mut self,
1436 path: &Arc<Path>,
1437 worktree: &ModelHandle<Worktree>,
1438 cx: &mut ModelContext<Self>,
1439 ) -> Task<Result<ModelHandle<Buffer>>> {
1440 let rpc = self.client.clone();
1441 let project_id = self.remote_id().unwrap();
1442 let remote_worktree_id = worktree.read(cx).id();
1443 let path = path.clone();
1444 let path_string = path.to_string_lossy().to_string();
1445 cx.spawn(|this, mut cx| async move {
1446 let response = rpc
1447 .request(proto::OpenBufferByPath {
1448 project_id,
1449 worktree_id: remote_worktree_id.to_proto(),
1450 path: path_string,
1451 })
1452 .await?;
1453 this.update(&mut cx, |this, cx| {
1454 this.wait_for_remote_buffer(response.buffer_id, cx)
1455 })
1456 .await
1457 })
1458 }
1459
    /// `LanguageServerName` is owned because it is inserted into a map.
1461 fn open_local_buffer_via_lsp(
1462 &mut self,
1463 abs_path: lsp::Url,
1464 language_server_id: LanguageServerId,
1465 language_server_name: LanguageServerName,
1466 cx: &mut ModelContext<Self>,
1467 ) -> Task<Result<ModelHandle<Buffer>>> {
1468 cx.spawn(|this, mut cx| async move {
1469 let abs_path = abs_path
1470 .to_file_path()
1471 .map_err(|_| anyhow!("can't convert URI to path"))?;
1472 let (worktree, relative_path) = if let Some(result) =
1473 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1474 {
1475 result
1476 } else {
1477 let worktree = this
1478 .update(&mut cx, |this, cx| {
1479 this.create_local_worktree(&abs_path, false, cx)
1480 })
1481 .await?;
1482 this.update(&mut cx, |this, cx| {
1483 this.language_server_ids.insert(
1484 (worktree.read(cx).id(), language_server_name),
1485 language_server_id,
1486 );
1487 });
1488 (worktree, PathBuf::new())
1489 };
1490
1491 let project_path = ProjectPath {
1492 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1493 path: relative_path.into(),
1494 };
1495 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1496 .await
1497 })
1498 }
1499
1500 pub fn open_buffer_by_id(
1501 &mut self,
1502 id: u64,
1503 cx: &mut ModelContext<Self>,
1504 ) -> Task<Result<ModelHandle<Buffer>>> {
1505 if let Some(buffer) = self.buffer_for_id(id, cx) {
1506 Task::ready(Ok(buffer))
1507 } else if self.is_local() {
1508 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1509 } else if let Some(project_id) = self.remote_id() {
1510 let request = self
1511 .client
1512 .request(proto::OpenBufferById { project_id, id });
1513 cx.spawn(|this, mut cx| async move {
1514 let buffer_id = request.await?.buffer_id;
1515 this.update(&mut cx, |this, cx| {
1516 this.wait_for_remote_buffer(buffer_id, cx)
1517 })
1518 .await
1519 })
1520 } else {
1521 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1522 }
1523 }
1524
1525 pub fn save_buffers(
1526 &self,
1527 buffers: HashSet<ModelHandle<Buffer>>,
1528 cx: &mut ModelContext<Self>,
1529 ) -> Task<Result<()>> {
1530 cx.spawn(|this, mut cx| async move {
1531 let save_tasks = buffers
1532 .into_iter()
1533 .map(|buffer| this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx)));
1534 try_join_all(save_tasks).await?;
1535 Ok(())
1536 })
1537 }
1538
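    /// Saves a single buffer through its worktree, returning the saved version, the content
    /// fingerprint, and the file's new mtime.
    ///
    /// A sketch of awaiting the returned task:
    ///
    /// ```ignore
    /// let (version, fingerprint, mtime) = project
    ///     .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
    ///     .await?;
    /// ```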
1539 pub fn save_buffer(
1540 &self,
1541 buffer: ModelHandle<Buffer>,
1542 cx: &mut ModelContext<Self>,
1543 ) -> Task<Result<(clock::Global, RopeFingerprint, SystemTime)>> {
1544 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1545 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1546 };
1547 let worktree = file.worktree.clone();
1548 let path = file.path.clone();
1549 worktree.update(cx, |worktree, cx| match worktree {
1550 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1551 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1552 })
1553 }
1554
1555 pub fn save_buffer_as(
1556 &mut self,
1557 buffer: ModelHandle<Buffer>,
1558 abs_path: PathBuf,
1559 cx: &mut ModelContext<Self>,
1560 ) -> Task<Result<()>> {
1561 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1562 let old_file = File::from_dyn(buffer.read(cx).file())
1563 .filter(|f| f.is_local())
1564 .cloned();
1565 cx.spawn(|this, mut cx| async move {
1566 if let Some(old_file) = &old_file {
1567 this.update(&mut cx, |this, cx| {
1568 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1569 });
1570 }
1571 let (worktree, path) = worktree_task.await?;
1572 worktree
1573 .update(&mut cx, |worktree, cx| match worktree {
1574 Worktree::Local(worktree) => {
1575 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1576 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1578 })
1579 .await?;
1580 this.update(&mut cx, |this, cx| {
1581 this.detect_language_for_buffer(&buffer, cx);
1582 this.register_buffer_with_language_servers(&buffer, cx);
1583 });
1584 Ok(())
1585 })
1586 }
1587
1588 pub fn get_open_buffer(
1589 &mut self,
1590 path: &ProjectPath,
1591 cx: &mut ModelContext<Self>,
1592 ) -> Option<ModelHandle<Buffer>> {
1593 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1594 self.opened_buffers.values().find_map(|buffer| {
1595 let buffer = buffer.upgrade(cx)?;
1596 let file = File::from_dyn(buffer.read(cx).file())?;
1597 if file.worktree == worktree && file.path() == &path.path {
1598 Some(buffer)
1599 } else {
1600 None
1601 }
1602 })
1603 }
1604
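    /// Tracks a newly-opened buffer in this project: stores a strong or weak handle depending on
    /// whether the project is remote or shared, applies any operations that arrived before the
    /// buffer finished opening, subscribes to buffer events, and registers the buffer with the
    /// relevant language servers and Copilot.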
1605 fn register_buffer(
1606 &mut self,
1607 buffer: &ModelHandle<Buffer>,
1608 cx: &mut ModelContext<Self>,
1609 ) -> Result<()> {
1610 buffer.update(cx, |buffer, _| {
1611 buffer.set_language_registry(self.languages.clone())
1612 });
1613
1614 let remote_id = buffer.read(cx).remote_id();
1615 let is_remote = self.is_remote();
1616 let open_buffer = if is_remote || self.is_shared() {
1617 OpenBuffer::Strong(buffer.clone())
1618 } else {
1619 OpenBuffer::Weak(buffer.downgrade())
1620 };
1621
1622 match self.opened_buffers.entry(remote_id) {
1623 hash_map::Entry::Vacant(entry) => {
1624 entry.insert(open_buffer);
1625 }
1626 hash_map::Entry::Occupied(mut entry) => {
1627 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1628 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
1629 } else if entry.get().upgrade(cx).is_some() {
1630 if is_remote {
1631 return Ok(());
1632 } else {
1633 debug_panic!("buffer {} was already registered", remote_id);
1634 Err(anyhow!("buffer {} was already registered", remote_id))?;
1635 }
1636 }
1637 entry.insert(open_buffer);
1638 }
1639 }
1640 cx.subscribe(buffer, |this, buffer, event, cx| {
1641 this.on_buffer_event(buffer, event, cx);
1642 })
1643 .detach();
1644
1645 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1646 if file.is_local {
1647 self.local_buffer_ids_by_path.insert(
1648 ProjectPath {
1649 worktree_id: file.worktree_id(cx),
1650 path: file.path.clone(),
1651 },
1652 remote_id,
1653 );
1654
1655 self.local_buffer_ids_by_entry_id
1656 .insert(file.entry_id, remote_id);
1657 }
1658 }
1659
1660 self.detect_language_for_buffer(buffer, cx);
1661 self.register_buffer_with_language_servers(buffer, cx);
1662 self.register_buffer_with_copilot(buffer, cx);
1663 cx.observe_release(buffer, |this, buffer, cx| {
1664 if let Some(file) = File::from_dyn(buffer.file()) {
1665 if file.is_local() {
1666 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1667 for server in this.language_servers_for_buffer(buffer, cx) {
1668 server
1669 .1
1670 .notify::<lsp::notification::DidCloseTextDocument>(
1671 lsp::DidCloseTextDocumentParams {
1672 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1673 },
1674 )
1675 .log_err();
1676 }
1677 }
1678 }
1679 })
1680 .detach();
1681
1682 *self.opened_buffer.0.borrow_mut() = ();
1683 Ok(())
1684 }
1685
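    /// Announces a local buffer to every running language server configured for its language:
    /// sends `textDocument/didOpen`, seeds the buffer's completion trigger characters, and records
    /// a version-0 snapshot so subsequent edits can be reported as incremental
    /// `textDocument/didChange` events.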
1686 fn register_buffer_with_language_servers(
1687 &mut self,
1688 buffer_handle: &ModelHandle<Buffer>,
1689 cx: &mut ModelContext<Self>,
1690 ) {
1691 let buffer = buffer_handle.read(cx);
1692 let buffer_id = buffer.remote_id();
1693
1694 if let Some(file) = File::from_dyn(buffer.file()) {
1695 if !file.is_local() {
1696 return;
1697 }
1698
1699 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1700 let initial_snapshot = buffer.text_snapshot();
1701 let language = buffer.language().cloned();
1702 let worktree_id = file.worktree_id(cx);
1703
1704 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1705 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
1706 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
1707 .log_err();
1708 }
1709 }
1710
1711 if let Some(language) = language {
1712 for adapter in language.lsp_adapters() {
1713 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
1714 let server = self
1715 .language_server_ids
1716 .get(&(worktree_id, adapter.name.clone()))
1717 .and_then(|id| self.language_servers.get(id))
1718 .and_then(|server_state| {
1719 if let LanguageServerState::Running { server, .. } = server_state {
1720 Some(server.clone())
1721 } else {
1722 None
1723 }
1724 });
1725 let server = match server {
1726 Some(server) => server,
1727 None => continue,
1728 };
1729
1730 server
1731 .notify::<lsp::notification::DidOpenTextDocument>(
1732 lsp::DidOpenTextDocumentParams {
1733 text_document: lsp::TextDocumentItem::new(
1734 uri.clone(),
1735 language_id.unwrap_or_default(),
1736 0,
1737 initial_snapshot.text(),
1738 ),
1739 },
1740 )
1741 .log_err();
1742
1743 buffer_handle.update(cx, |buffer, cx| {
1744 buffer.set_completion_triggers(
1745 server
1746 .capabilities()
1747 .completion_provider
1748 .as_ref()
1749 .and_then(|provider| provider.trigger_characters.clone())
1750 .unwrap_or_default(),
1751 cx,
1752 );
1753 });
1754
1755 let snapshot = LspBufferSnapshot {
1756 version: 0,
1757 snapshot: initial_snapshot.clone(),
1758 };
1759 self.buffer_snapshots
1760 .entry(buffer_id)
1761 .or_default()
1762 .insert(server.server_id(), vec![snapshot]);
1763 }
1764 }
1765 }
1766 }
1767
1768 fn unregister_buffer_from_language_servers(
1769 &mut self,
1770 buffer: &ModelHandle<Buffer>,
1771 old_file: &File,
1772 cx: &mut ModelContext<Self>,
1773 ) {
1774 let old_path = match old_file.as_local() {
1775 Some(local) => local.abs_path(cx),
1776 None => return,
1777 };
1778
1779 buffer.update(cx, |buffer, cx| {
1780 let worktree_id = old_file.worktree_id(cx);
1781 let ids = &self.language_server_ids;
1782
1783 let language = buffer.language().cloned();
1784 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
1785 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
1786 buffer.update_diagnostics(server_id, Default::default(), cx);
1787 }
1788
1789 self.buffer_snapshots.remove(&buffer.remote_id());
1790 let file_url = lsp::Url::from_file_path(old_path).unwrap();
1791 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
1792 language_server
1793 .notify::<lsp::notification::DidCloseTextDocument>(
1794 lsp::DidCloseTextDocumentParams {
1795 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
1796 },
1797 )
1798 .log_err();
1799 }
1800 });
1801 }
1802
1803 fn register_buffer_with_copilot(
1804 &self,
1805 buffer_handle: &ModelHandle<Buffer>,
1806 cx: &mut ModelContext<Self>,
1807 ) {
1808 if let Some(copilot) = Copilot::global(cx) {
1809 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
1810 }
1811 }
1812
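    /// Processes messages sent on `buffer_ordered_messages_tx` in batches of up to
    /// `MAX_BATCH_SIZE`, forwarding buffer operations and language server updates over the RPC
    /// client in the order they were produced. Pending operation batches are flushed before any
    /// language server update is sent, and a failed flush on a guest marks the project as needing
    /// a resync with the host.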
1813 async fn send_buffer_ordered_messages(
1814 this: WeakModelHandle<Self>,
1815 rx: UnboundedReceiver<BufferOrderedMessage>,
1816 mut cx: AsyncAppContext,
1817 ) -> Option<()> {
1818 const MAX_BATCH_SIZE: usize = 128;
1819
1820 let mut operations_by_buffer_id = HashMap::default();
1821 async fn flush_operations(
1822 this: &ModelHandle<Project>,
1823 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
1824 needs_resync_with_host: &mut bool,
1825 is_local: bool,
1826 cx: &AsyncAppContext,
1827 ) {
1828 for (buffer_id, operations) in operations_by_buffer_id.drain() {
1829 let request = this.read_with(cx, |this, _| {
1830 let project_id = this.remote_id()?;
1831 Some(this.client.request(proto::UpdateBuffer {
1832 buffer_id,
1833 project_id,
1834 operations,
1835 }))
1836 });
1837 if let Some(request) = request {
1838 if request.await.is_err() && !is_local {
1839 *needs_resync_with_host = true;
1840 break;
1841 }
1842 }
1843 }
1844 }
1845
1846 let mut needs_resync_with_host = false;
1847 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
1848
1849 while let Some(changes) = changes.next().await {
1850 let this = this.upgrade(&mut cx)?;
1851 let is_local = this.read_with(&cx, |this, _| this.is_local());
1852
1853 for change in changes {
1854 match change {
1855 BufferOrderedMessage::Operation {
1856 buffer_id,
1857 operation,
1858 } => {
1859 if needs_resync_with_host {
1860 continue;
1861 }
1862
1863 operations_by_buffer_id
1864 .entry(buffer_id)
1865 .or_default()
1866 .push(operation);
1867 }
1868
1869 BufferOrderedMessage::Resync => {
1870 operations_by_buffer_id.clear();
1871 if this
1872 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
1873 .await
1874 .is_ok()
1875 {
1876 needs_resync_with_host = false;
1877 }
1878 }
1879
1880 BufferOrderedMessage::LanguageServerUpdate {
1881 language_server_id,
1882 message,
1883 } => {
1884 flush_operations(
1885 &this,
1886 &mut operations_by_buffer_id,
1887 &mut needs_resync_with_host,
1888 is_local,
1889 &cx,
1890 )
1891 .await;
1892
1893 this.read_with(&cx, |this, _| {
1894 if let Some(project_id) = this.remote_id() {
1895 this.client
1896 .send(proto::UpdateLanguageServer {
1897 project_id,
1898 language_server_id: language_server_id.0 as u64,
1899 variant: Some(message),
1900 })
1901 .log_err();
1902 }
1903 });
1904 }
1905 }
1906 }
1907
1908 flush_operations(
1909 &this,
1910 &mut operations_by_buffer_id,
1911 &mut needs_resync_with_host,
1912 is_local,
1913 &cx,
1914 )
1915 .await;
1916 }
1917
1918 None
1919 }
1920
1921 fn on_buffer_event(
1922 &mut self,
1923 buffer: ModelHandle<Buffer>,
1924 event: &BufferEvent,
1925 cx: &mut ModelContext<Self>,
1926 ) -> Option<()> {
1927 match event {
1928 BufferEvent::Operation(operation) => {
1929 self.buffer_ordered_messages_tx
1930 .unbounded_send(BufferOrderedMessage::Operation {
1931 buffer_id: buffer.read(cx).remote_id(),
1932 operation: language::proto::serialize_operation(operation),
1933 })
1934 .ok();
1935 }
1936
1937 BufferEvent::Edited { .. } => {
1938 let buffer = buffer.read(cx);
1939 let file = File::from_dyn(buffer.file())?;
1940 let abs_path = file.as_local()?.abs_path(cx);
1941 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1942 let next_snapshot = buffer.text_snapshot();
1943
1944 let language_servers: Vec<_> = self
1945 .language_servers_for_buffer(buffer, cx)
1946 .map(|i| i.1.clone())
1947 .collect();
1948
1949 for language_server in language_servers {
1952 let buffer_snapshots = self
1953 .buffer_snapshots
1954 .get_mut(&buffer.remote_id())
1955 .and_then(|m| m.get_mut(&language_server.server_id()))?;
1956 let previous_snapshot = buffer_snapshots.last()?;
1957 let next_version = previous_snapshot.version + 1;
1958
1959 let content_changes = buffer
1960 .edits_since::<(PointUtf16, usize)>(previous_snapshot.snapshot.version())
1961 .map(|edit| {
1962 let edit_start = edit.new.start.0;
1963 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1964 let new_text = next_snapshot
1965 .text_for_range(edit.new.start.1..edit.new.end.1)
1966 .collect();
1967 lsp::TextDocumentContentChangeEvent {
1968 range: Some(lsp::Range::new(
1969 point_to_lsp(edit_start),
1970 point_to_lsp(edit_end),
1971 )),
1972 range_length: None,
1973 text: new_text,
1974 }
1975 })
1976 .collect();
1977
1978 buffer_snapshots.push(LspBufferSnapshot {
1979 version: next_version,
1980 snapshot: next_snapshot.clone(),
1981 });
1982
1983 language_server
1984 .notify::<lsp::notification::DidChangeTextDocument>(
1985 lsp::DidChangeTextDocumentParams {
1986 text_document: lsp::VersionedTextDocumentIdentifier::new(
1987 uri.clone(),
1988 next_version,
1989 ),
1990 content_changes,
1991 },
1992 )
1993 .log_err();
1994 }
1995 }
1996
1997 BufferEvent::Saved => {
1998 let file = File::from_dyn(buffer.read(cx).file())?;
1999 let worktree_id = file.worktree_id(cx);
2000 let abs_path = file.as_local()?.abs_path(cx);
2001 let text_document = lsp::TextDocumentIdentifier {
2002 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2003 };
2004
2005 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2006 server
2007 .notify::<lsp::notification::DidSaveTextDocument>(
2008 lsp::DidSaveTextDocumentParams {
2009 text_document: text_document.clone(),
2010 text: None,
2011 },
2012 )
2013 .log_err();
2014 }
2015
2016 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2017 for language_server_id in language_server_ids {
2018 if let Some(LanguageServerState::Running {
2019 adapter,
2020 simulate_disk_based_diagnostics_completion,
2021 ..
2022 }) = self.language_servers.get_mut(&language_server_id)
2023 {
2024 // After saving a buffer using a language server that doesn't provide
2025 // a disk-based progress token, kick off a timer that will reset every
2026 // time the buffer is saved. If the timer eventually fires, simulate
2027 // disk-based diagnostics being finished so that other pieces of UI
2028 // (e.g., project diagnostics view, diagnostic status bar) can update.
2029 // We don't emit an event right away because the language server might take
2030 // some time to publish diagnostics.
2031 if adapter.disk_based_diagnostics_progress_token.is_none() {
2032 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2033 Duration::from_secs(1);
2034
2035 let task = cx.spawn_weak(|this, mut cx| async move {
2036 cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2037 if let Some(this) = this.upgrade(&cx) {
2038 this.update(&mut cx, |this, cx| {
2039 this.disk_based_diagnostics_finished(
2040 language_server_id,
2041 cx,
2042 );
2043 this.buffer_ordered_messages_tx
2044 .unbounded_send(
2045 BufferOrderedMessage::LanguageServerUpdate {
2046 language_server_id,
2047 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2048 },
2049 )
2050 .ok();
2051 });
2052 }
2053 });
2054 *simulate_disk_based_diagnostics_completion = Some(task);
2055 }
2056 }
2057 }
2058 }
2059
2060 _ => {}
2061 }
2062
2063 None
2064 }
2065
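// Returns the running language servers that were started for the given worktree,
// along with their adapters and languages.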
2066 fn language_servers_for_worktree(
2067 &self,
2068 worktree_id: WorktreeId,
2069 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2070 self.language_server_ids
2071 .iter()
2072 .filter_map(move |((language_server_worktree_id, _), id)| {
2073 if *language_server_worktree_id == worktree_id {
2074 if let Some(LanguageServerState::Running {
2075 adapter,
2076 language,
2077 server,
2078 ..
2079 }) = self.language_servers.get(id)
2080 {
2081 return Some((adapter, language, server));
2082 }
2083 }
2084 None
2085 })
2086 }
2087
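// Whenever the set of available languages changes, re-detect the language of
// plain-text buffers and reparse buffers containing unknown injections.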
2088 fn maintain_buffer_languages(
2089 languages: &LanguageRegistry,
2090 cx: &mut ModelContext<Project>,
2091 ) -> Task<()> {
2092 let mut subscription = languages.subscribe();
2093 cx.spawn_weak(|project, mut cx| async move {
2094 while let Some(()) = subscription.next().await {
2095 if let Some(project) = project.upgrade(&cx) {
2096 project.update(&mut cx, |project, cx| {
2097 let mut plain_text_buffers = Vec::new();
2098 let mut buffers_with_unknown_injections = Vec::new();
2099 for buffer in project.opened_buffers.values() {
2100 if let Some(handle) = buffer.upgrade(cx) {
2101 let buffer = &handle.read(cx);
2102 if buffer.language().is_none()
2103 || buffer.language() == Some(&*language::PLAIN_TEXT)
2104 {
2105 plain_text_buffers.push(handle);
2106 } else if buffer.contains_unknown_injections() {
2107 buffers_with_unknown_injections.push(handle);
2108 }
2109 }
2110 }
2111
2112 for buffer in plain_text_buffers {
2113 project.detect_language_for_buffer(&buffer, cx);
2114 project.register_buffer_with_language_servers(&buffer, cx);
2115 }
2116
2117 for buffer in buffers_with_unknown_injections {
2118 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2119 }
2120 });
2121 }
2122 }
2123 })
2124 }
2125
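// Observes settings changes and pushes the latest workspace configuration to every
// running language server via a `workspace/didChangeConfiguration` notification.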
2126 fn maintain_workspace_config(
2127 languages: Arc<LanguageRegistry>,
2128 cx: &mut ModelContext<Project>,
2129 ) -> Task<()> {
2130 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2131 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2132
2133 let settings_observation = cx.observe_global::<SettingsStore, _>(move |_, _| {
2134 *settings_changed_tx.borrow_mut() = ();
2135 });
2136 cx.spawn_weak(|this, mut cx| async move {
2137 while let Some(_) = settings_changed_rx.next().await {
2138 let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await;
2139 if let Some(this) = this.upgrade(&cx) {
2140 this.read_with(&cx, |this, _| {
2141 for server_state in this.language_servers.values() {
2142 if let LanguageServerState::Running { server, .. } = server_state {
2143 server
2144 .notify::<lsp::notification::DidChangeConfiguration>(
2145 lsp::DidChangeConfigurationParams {
2146 settings: workspace_config.clone(),
2147 },
2148 )
2149 .ok();
2150 }
2151 }
2152 })
2153 } else {
2154 break;
2155 }
2156 }
2157
2158 drop(settings_observation);
2159 })
2160 }
2161
2162 fn detect_language_for_buffer(
2163 &mut self,
2164 buffer_handle: &ModelHandle<Buffer>,
2165 cx: &mut ModelContext<Self>,
2166 ) -> Option<()> {
2167 // If the buffer has a language, set it and start the language server if we haven't already.
2168 let buffer = buffer_handle.read(cx);
2169 let full_path = buffer.file()?.full_path(cx);
2170 let content = buffer.as_rope();
2171 let new_language = self
2172 .languages
2173 .language_for_file(&full_path, Some(content))
2174 .now_or_never()?
2175 .ok()?;
2176 self.set_language_for_buffer(buffer_handle, new_language, cx);
2177 None
2178 }
2179
2180 pub fn set_language_for_buffer(
2181 &mut self,
2182 buffer: &ModelHandle<Buffer>,
2183 new_language: Arc<Language>,
2184 cx: &mut ModelContext<Self>,
2185 ) {
2186 buffer.update(cx, |buffer, cx| {
2187 if buffer.language().map_or(true, |old_language| {
2188 !Arc::ptr_eq(old_language, &new_language)
2189 }) {
2190 buffer.set_language(Some(new_language.clone()), cx);
2191 }
2192 });
2193
2194 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2195 if let Some(worktree) = file.worktree.read(cx).as_local() {
2196 let worktree_id = worktree.id();
2197 let worktree_abs_path = worktree.abs_path().clone();
2198 self.start_language_servers(worktree_id, worktree_abs_path, new_language, cx);
2199 }
2200 }
2201 }
2202
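// Starts a language server for each of the language's adapters in the given worktree,
// unless one is already registered under the (worktree, adapter name) key. Adapter
// initialization options are merged with any user-provided overrides.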
2203 fn start_language_servers(
2204 &mut self,
2205 worktree_id: WorktreeId,
2206 worktree_path: Arc<Path>,
2207 language: Arc<Language>,
2208 cx: &mut ModelContext<Self>,
2209 ) {
2210 if !language_settings(Some(&language.name()), cx).enable_language_server {
2211 return;
2212 }
2213
2214 for adapter in language.lsp_adapters() {
2215 let key = (worktree_id, adapter.name.clone());
2216 if self.language_server_ids.contains_key(&key) {
2217 continue;
2218 }
2219
2220 let pending_server = match self.languages.start_language_server(
2221 language.clone(),
2222 adapter.clone(),
2223 worktree_path.clone(),
2224 self.client.http_client(),
2225 cx,
2226 ) {
2227 Some(pending_server) => pending_server,
2228 None => continue,
2229 };
2230
2231 let lsp = settings::get::<ProjectSettings>(cx)
2232 .lsp
2233 .get(&adapter.name.0);
2234 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2235
2236 let mut initialization_options = adapter.initialization_options.clone();
2237 match (&mut initialization_options, override_options) {
2238 (Some(initialization_options), Some(override_options)) => {
2239 merge_json_value_into(override_options, initialization_options);
2240 }
2241 (None, override_options) => initialization_options = override_options,
2242 _ => {}
2243 }
2244
2245 let server_id = pending_server.server_id;
2246 let state = self.setup_pending_language_server(
2247 initialization_options,
2248 pending_server,
2249 adapter.clone(),
2250 language.clone(),
2251 key.clone(),
2252 cx,
2253 );
2254 self.language_servers.insert(server_id, state);
2255 self.language_server_ids.insert(key.clone(), server_id);
2256 }
2257 }
2258
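// Spawns the task that finishes bringing up a pending language server: it initializes
// the server, installs its notification and request handlers, sends the workspace
// configuration, records the Running state, and opens every matching buffer with it.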
2259 fn setup_pending_language_server(
2260 &mut self,
2261 initialization_options: Option<serde_json::Value>,
2262 pending_server: PendingLanguageServer,
2263 adapter: Arc<CachedLspAdapter>,
2264 language: Arc<Language>,
2265 key: (WorktreeId, LanguageServerName),
2266 cx: &mut ModelContext<Project>,
2267 ) -> LanguageServerState {
2268 let server_id = pending_server.server_id;
2269 let languages = self.languages.clone();
2270
2271 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2272 let workspace_config = cx.update(|cx| languages.workspace_configuration(cx)).await;
2273 let language_server = pending_server.task.await.log_err()?;
2274 let language_server = language_server
2275 .initialize(initialization_options)
2276 .await
2277 .log_err()?;
2278 let this = this.upgrade(&cx)?;
2279
2280 language_server
2281 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2282 let this = this.downgrade();
2283 let adapter = adapter.clone();
2284 move |mut params, cx| {
2285 let this = this;
2286 let adapter = adapter.clone();
2287 cx.spawn(|mut cx| async move {
2288 adapter.process_diagnostics(&mut params).await;
2289 if let Some(this) = this.upgrade(&cx) {
2290 this.update(&mut cx, |this, cx| {
2291 this.update_diagnostics(
2292 server_id,
2293 params,
2294 &adapter.disk_based_diagnostic_sources,
2295 cx,
2296 )
2297 .log_err();
2298 });
2299 }
2300 })
2301 .detach();
2302 }
2303 })
2304 .detach();
2305
2306 language_server
2307 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2308 let languages = languages.clone();
2309 move |params, mut cx| {
2310 let languages = languages.clone();
2311 async move {
2312 let workspace_config =
2313 cx.update(|cx| languages.workspace_configuration(cx)).await;
2314 Ok(params
2315 .items
2316 .into_iter()
2317 .map(|item| {
2318 if let Some(section) = &item.section {
2319 workspace_config
2320 .get(section)
2321 .cloned()
2322 .unwrap_or(serde_json::Value::Null)
2323 } else {
2324 workspace_config.clone()
2325 }
2326 })
2327 .collect())
2328 }
2329 }
2330 })
2331 .detach();
2332
2333 // Even though we don't fully handle these requests, respond to them to avoid
2334 // stalling language servers like `gopls`, which wait for a response to these
2335 // requests while initializing.
2336 language_server
2337 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2338 let this = this.downgrade();
2339 move |params, mut cx| async move {
2340 if let Some(this) = this.upgrade(&cx) {
2341 this.update(&mut cx, |this, _| {
2342 if let Some(status) =
2343 this.language_server_statuses.get_mut(&server_id)
2344 {
2345 if let lsp::NumberOrString::String(token) = params.token {
2346 status.progress_tokens.insert(token);
2347 }
2348 }
2349 });
2350 }
2351 Ok(())
2352 }
2353 })
2354 .detach();
2355 language_server
2356 .on_request::<lsp::request::RegisterCapability, _, _>({
2357 let this = this.downgrade();
2358 move |params, mut cx| async move {
2359 let this = this
2360 .upgrade(&cx)
2361 .ok_or_else(|| anyhow!("project dropped"))?;
2362 for reg in params.registrations {
2363 if reg.method == "workspace/didChangeWatchedFiles" {
2364 if let Some(options) = reg.register_options {
2365 let options = serde_json::from_value(options)?;
2366 this.update(&mut cx, |this, cx| {
2367 this.on_lsp_did_change_watched_files(
2368 server_id, options, cx,
2369 );
2370 });
2371 }
2372 }
2373 }
2374 Ok(())
2375 }
2376 })
2377 .detach();
2378
2379 language_server
2380 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2381 let this = this.downgrade();
2382 let adapter = adapter.clone();
2383 let language_server = language_server.clone();
2384 move |params, cx| {
2385 Self::on_lsp_workspace_edit(
2386 this,
2387 params,
2388 server_id,
2389 adapter.clone(),
2390 language_server.clone(),
2391 cx,
2392 )
2393 }
2394 })
2395 .detach();
2396
2397 let disk_based_diagnostics_progress_token =
2398 adapter.disk_based_diagnostics_progress_token.clone();
2399
2400 language_server
2401 .on_notification::<lsp::notification::Progress, _>({
2402 let this = this.downgrade();
2403 move |params, mut cx| {
2404 if let Some(this) = this.upgrade(&cx) {
2405 this.update(&mut cx, |this, cx| {
2406 this.on_lsp_progress(
2407 params,
2408 server_id,
2409 disk_based_diagnostics_progress_token.clone(),
2410 cx,
2411 );
2412 });
2413 }
2414 }
2415 })
2416 .detach();
2417
2418 language_server
2419 .notify::<lsp::notification::DidChangeConfiguration>(
2420 lsp::DidChangeConfigurationParams {
2421 settings: workspace_config,
2422 },
2423 )
2424 .ok();
2425
2426 this.update(&mut cx, |this, cx| {
2427 // If the id registered for this key no longer matches this server's id, don't store
2428 // the server; dropping it here will kill the server process.
2429 if this
2430 .language_server_ids
2431 .get(&key)
2432 .map(|id| id != &server_id)
2433 .unwrap_or(false)
2434 {
2435 return None;
2436 }
2437
2438 // Record the server in the language_servers collection as the Running variant of
2439 // LanguageServerState, indicating that it is initialized and ready.
2440 this.language_servers.insert(
2441 server_id,
2442 LanguageServerState::Running {
2443 adapter: adapter.clone(),
2444 language: language.clone(),
2445 watched_paths: Default::default(),
2446 server: language_server.clone(),
2447 simulate_disk_based_diagnostics_completion: None,
2448 },
2449 );
2450 this.language_server_statuses.insert(
2451 server_id,
2452 LanguageServerStatus {
2453 name: language_server.name().to_string(),
2454 pending_work: Default::default(),
2455 has_pending_diagnostic_updates: false,
2456 progress_tokens: Default::default(),
2457 },
2458 );
2459
2460 if let Some(project_id) = this.remote_id() {
2461 this.client
2462 .send(proto::StartLanguageServer {
2463 project_id,
2464 server: Some(proto::LanguageServer {
2465 id: server_id.0 as u64,
2466 name: language_server.name().to_string(),
2467 }),
2468 })
2469 .log_err();
2470 }
2471
2472 // Tell the language server about every open buffer in the worktree that matches the language.
2473 for buffer in this.opened_buffers.values() {
2474 if let Some(buffer_handle) = buffer.upgrade(cx) {
2475 let buffer = buffer_handle.read(cx);
2476 let file = match File::from_dyn(buffer.file()) {
2477 Some(file) => file,
2478 None => continue,
2479 };
2480 let language = match buffer.language() {
2481 Some(language) => language,
2482 None => continue,
2483 };
2484
2485 if file.worktree.read(cx).id() != key.0
2486 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
2487 {
2488 continue;
2489 }
2490
2491 let file = file.as_local()?;
2492 let versions = this
2493 .buffer_snapshots
2494 .entry(buffer.remote_id())
2495 .or_default()
2496 .entry(server_id)
2497 .or_insert_with(|| {
2498 vec![LspBufferSnapshot {
2499 version: 0,
2500 snapshot: buffer.text_snapshot(),
2501 }]
2502 });
2503
2504 let snapshot = versions.last().unwrap();
2505 let version = snapshot.version;
2506 let initial_snapshot = &snapshot.snapshot;
2507 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2508 language_server
2509 .notify::<lsp::notification::DidOpenTextDocument>(
2510 lsp::DidOpenTextDocumentParams {
2511 text_document: lsp::TextDocumentItem::new(
2512 uri,
2513 adapter
2514 .language_ids
2515 .get(language.name().as_ref())
2516 .cloned()
2517 .unwrap_or_default(),
2518 version,
2519 initial_snapshot.text(),
2520 ),
2521 },
2522 )
2523 .log_err()?;
2524 buffer_handle.update(cx, |buffer, cx| {
2525 buffer.set_completion_triggers(
2526 language_server
2527 .capabilities()
2528 .completion_provider
2529 .as_ref()
2530 .and_then(|provider| provider.trigger_characters.clone())
2531 .unwrap_or_default(),
2532 cx,
2533 )
2534 });
2535 }
2536 }
2537
2538 cx.notify();
2539 Some(language_server)
2540 })
2541 }))
2542 }
2543
2544 // Stops the language server for the given worktree and adapter, returning the stopped
2545 // server's root path and all worktrees that no longer have a language server.
2546 fn stop_language_server(
2547 &mut self,
2548 worktree_id: WorktreeId,
2549 adapter_name: LanguageServerName,
2550 cx: &mut ModelContext<Self>,
2551 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
2552 let key = (worktree_id, adapter_name);
2553 if let Some(server_id) = self.language_server_ids.remove(&key) {
2554 // Remove other entries for this language server as well
2555 let mut orphaned_worktrees = vec![worktree_id];
2556 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
2557 for other_key in other_keys {
2558 if self.language_server_ids.get(&other_key) == Some(&server_id) {
2559 self.language_server_ids.remove(&other_key);
2560 orphaned_worktrees.push(other_key.0);
2561 }
2562 }
2563
2564 for buffer in self.opened_buffers.values() {
2565 if let Some(buffer) = buffer.upgrade(cx) {
2566 buffer.update(cx, |buffer, cx| {
2567 buffer.update_diagnostics(server_id, Default::default(), cx);
2568 });
2569 }
2570 }
2571 for worktree in &self.worktrees {
2572 if let Some(worktree) = worktree.upgrade(cx) {
2573 worktree.update(cx, |worktree, cx| {
2574 if let Some(worktree) = worktree.as_local_mut() {
2575 worktree.clear_diagnostics_for_language_server(server_id, cx);
2576 }
2577 });
2578 }
2579 }
2580
2581 self.language_server_statuses.remove(&server_id);
2582 cx.notify();
2583
2584 let server_state = self.language_servers.remove(&server_id);
2585 cx.spawn_weak(|this, mut cx| async move {
2586 let mut root_path = None;
2587
2588 let server = match server_state {
2589 Some(LanguageServerState::Starting(started_language_server)) => {
2590 started_language_server.await
2591 }
2592 Some(LanguageServerState::Running { server, .. }) => Some(server),
2593 None => None,
2594 };
2595
2596 if let Some(server) = server {
2597 root_path = Some(server.root_path().clone());
2598 if let Some(shutdown) = server.shutdown() {
2599 shutdown.await;
2600 }
2601 }
2602
2603 if let Some(this) = this.upgrade(&cx) {
2604 this.update(&mut cx, |this, cx| {
2605 this.language_server_statuses.remove(&server_id);
2606 cx.notify();
2607 });
2608 }
2609
2610 (root_path, orphaned_worktrees)
2611 })
2612 } else {
2613 Task::ready((None, Vec::new()))
2614 }
2615 }
2616
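// Restarts the language servers associated with the given buffers, grouped by the
// buffers' worktrees and languages.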
2617 pub fn restart_language_servers_for_buffers(
2618 &mut self,
2619 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2620 cx: &mut ModelContext<Self>,
2621 ) -> Option<()> {
2622 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, Arc<Language>)> = buffers
2623 .into_iter()
2624 .filter_map(|buffer| {
2625 let buffer = buffer.read(cx);
2626 let file = File::from_dyn(buffer.file())?;
2627 let worktree = file.worktree.read(cx).as_local()?;
2628 let full_path = file.full_path(cx);
2629 let language = self
2630 .languages
2631 .language_for_file(&full_path, Some(buffer.as_rope()))
2632 .now_or_never()?
2633 .ok()?;
2634 Some((worktree.id(), worktree.abs_path().clone(), language))
2635 })
2636 .collect();
2637 for (worktree_id, worktree_abs_path, language) in language_server_lookup_info {
2638 self.restart_language_servers(worktree_id, worktree_abs_path, language, cx);
2639 }
2640
2641 None
2642 }
2643
2644 // TODO: This will break when the adapter's root paths and worktrees are not equal.
2645 fn restart_language_servers(
2646 &mut self,
2647 worktree_id: WorktreeId,
2648 fallback_path: Arc<Path>,
2649 language: Arc<Language>,
2650 cx: &mut ModelContext<Self>,
2651 ) {
2652 let mut stops = Vec::new();
2653 for adapter in language.lsp_adapters() {
2654 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
2655 }
2656
2657 if stops.is_empty() {
2658 return;
2659 }
2660 let mut stops = stops.into_iter();
2661
2662 cx.spawn_weak(|this, mut cx| async move {
2663 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
2664 for stop in stops {
2665 let (_, worktrees) = stop.await;
2666 orphaned_worktrees.extend_from_slice(&worktrees);
2667 }
2668
2669 let this = match this.upgrade(&cx) {
2670 Some(this) => this,
2671 None => return,
2672 };
2673
2674 this.update(&mut cx, |this, cx| {
2675 // Attempt to restart using the original server's root path, falling back to the
2676 // passed-in path if the root path could not be retrieved.
2677 let root_path = original_root_path
2678 .map(|path_buf| Arc::from(path_buf.as_path()))
2679 .unwrap_or(fallback_path);
2680
2681 this.start_language_servers(worktree_id, root_path, language.clone(), cx);
2682
2683 // Look up the new server ids and assign them to each orphaned worktree.
2684 for adapter in language.lsp_adapters() {
2685 if let Some(new_server_id) = this
2686 .language_server_ids
2687 .get(&(worktree_id, adapter.name.clone()))
2688 .cloned()
2689 {
2690 for &orphaned_worktree in &orphaned_worktrees {
2691 this.language_server_ids
2692 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
2693 }
2694 }
2695 }
2696 });
2697 })
2698 .detach();
2699 }
2700
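// Handles `$/progress` notifications from a language server, treating progress on the
// adapter's disk-based diagnostics token specially and forwarding work start, progress,
// and end events to collaborators.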
2701 fn on_lsp_progress(
2702 &mut self,
2703 progress: lsp::ProgressParams,
2704 language_server_id: LanguageServerId,
2705 disk_based_diagnostics_progress_token: Option<String>,
2706 cx: &mut ModelContext<Self>,
2707 ) {
2708 let token = match progress.token {
2709 lsp::NumberOrString::String(token) => token,
2710 lsp::NumberOrString::Number(token) => {
2711 log::info!("skipping numeric progress token {}", token);
2712 return;
2713 }
2714 };
2715 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
2716 let language_server_status =
2717 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2718 status
2719 } else {
2720 return;
2721 };
2722
2723 if !language_server_status.progress_tokens.contains(&token) {
2724 return;
2725 }
2726
2727 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
2728 .as_ref()
2729 .map_or(false, |disk_based_token| {
2730 token.starts_with(disk_based_token)
2731 });
2732
2733 match progress {
2734 lsp::WorkDoneProgress::Begin(report) => {
2735 if is_disk_based_diagnostics_progress {
2736 language_server_status.has_pending_diagnostic_updates = true;
2737 self.disk_based_diagnostics_started(language_server_id, cx);
2738 self.buffer_ordered_messages_tx
2739 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2740 language_server_id,
2741 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
2742 })
2743 .ok();
2744 } else {
2745 self.on_lsp_work_start(
2746 language_server_id,
2747 token.clone(),
2748 LanguageServerProgress {
2749 message: report.message.clone(),
2750 percentage: report.percentage.map(|p| p as usize),
2751 last_update_at: Instant::now(),
2752 },
2753 cx,
2754 );
2755 self.buffer_ordered_messages_tx
2756 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2757 language_server_id,
2758 message: proto::update_language_server::Variant::WorkStart(
2759 proto::LspWorkStart {
2760 token,
2761 message: report.message,
2762 percentage: report.percentage.map(|p| p as u32),
2763 },
2764 ),
2765 })
2766 .ok();
2767 }
2768 }
2769 lsp::WorkDoneProgress::Report(report) => {
2770 if !is_disk_based_diagnostics_progress {
2771 self.on_lsp_work_progress(
2772 language_server_id,
2773 token.clone(),
2774 LanguageServerProgress {
2775 message: report.message.clone(),
2776 percentage: report.percentage.map(|p| p as usize),
2777 last_update_at: Instant::now(),
2778 },
2779 cx,
2780 );
2781 self.buffer_ordered_messages_tx
2782 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2783 language_server_id,
2784 message: proto::update_language_server::Variant::WorkProgress(
2785 proto::LspWorkProgress {
2786 token,
2787 message: report.message,
2788 percentage: report.percentage.map(|p| p as u32),
2789 },
2790 ),
2791 })
2792 .ok();
2793 }
2794 }
2795 lsp::WorkDoneProgress::End(_) => {
2796 language_server_status.progress_tokens.remove(&token);
2797
2798 if is_disk_based_diagnostics_progress {
2799 language_server_status.has_pending_diagnostic_updates = false;
2800 self.disk_based_diagnostics_finished(language_server_id, cx);
2801 self.buffer_ordered_messages_tx
2802 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2803 language_server_id,
2804 message:
2805 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2806 Default::default(),
2807 ),
2808 })
2809 .ok();
2810 } else {
2811 self.on_lsp_work_end(language_server_id, token.clone(), cx);
2812 self.buffer_ordered_messages_tx
2813 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
2814 language_server_id,
2815 message: proto::update_language_server::Variant::WorkEnd(
2816 proto::LspWorkEnd { token },
2817 ),
2818 })
2819 .ok();
2820 }
2821 }
2822 }
2823 }
2824
2825 fn on_lsp_work_start(
2826 &mut self,
2827 language_server_id: LanguageServerId,
2828 token: String,
2829 progress: LanguageServerProgress,
2830 cx: &mut ModelContext<Self>,
2831 ) {
2832 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2833 status.pending_work.insert(token, progress);
2834 cx.notify();
2835 }
2836 }
2837
2838 fn on_lsp_work_progress(
2839 &mut self,
2840 language_server_id: LanguageServerId,
2841 token: String,
2842 progress: LanguageServerProgress,
2843 cx: &mut ModelContext<Self>,
2844 ) {
2845 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2846 let entry = status
2847 .pending_work
2848 .entry(token)
2849 .or_insert(LanguageServerProgress {
2850 message: Default::default(),
2851 percentage: Default::default(),
2852 last_update_at: progress.last_update_at,
2853 });
2854 if progress.message.is_some() {
2855 entry.message = progress.message;
2856 }
2857 if progress.percentage.is_some() {
2858 entry.percentage = progress.percentage;
2859 }
2860 entry.last_update_at = progress.last_update_at;
2861 cx.notify();
2862 }
2863 }
2864
2865 fn on_lsp_work_end(
2866 &mut self,
2867 language_server_id: LanguageServerId,
2868 token: String,
2869 cx: &mut ModelContext<Self>,
2870 ) {
2871 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2872 status.pending_work.remove(&token);
2873 cx.notify();
2874 }
2875 }
2876
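// Translates the server's requested file-watching globs into per-worktree glob sets by
// stripping each worktree's absolute path from the registered patterns.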
2877 fn on_lsp_did_change_watched_files(
2878 &mut self,
2879 language_server_id: LanguageServerId,
2880 params: DidChangeWatchedFilesRegistrationOptions,
2881 cx: &mut ModelContext<Self>,
2882 ) {
2883 if let Some(LanguageServerState::Running { watched_paths, .. }) =
2884 self.language_servers.get_mut(&language_server_id)
2885 {
2886 let mut builders = HashMap::default();
2887 for watcher in params.watchers {
2888 for worktree in &self.worktrees {
2889 if let Some(worktree) = worktree.upgrade(cx) {
2890 let worktree = worktree.read(cx);
2891 if let Some(abs_path) = worktree.abs_path().to_str() {
2892 if let Some(suffix) = watcher
2893 .glob_pattern
2894 .strip_prefix(abs_path)
2895 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
2896 {
2897 if let Some(glob) = Glob::new(suffix).log_err() {
2898 builders
2899 .entry(worktree.id())
2900 .or_insert_with(GlobSetBuilder::new)
2901 .add(glob);
2902 }
2903 break;
2904 }
2905 }
2906 }
2907 }
2908 }
2909
2910 watched_paths.clear();
2911 for (worktree_id, builder) in builders {
2912 if let Ok(globset) = builder.build() {
2913 watched_paths.insert(worktree_id, globset);
2914 }
2915 }
2916
2917 cx.notify();
2918 }
2919 }
2920
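// Applies a workspace edit requested by a language server, recording the resulting
// project transaction for that server, and reports the edit as applied.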
2921 async fn on_lsp_workspace_edit(
2922 this: WeakModelHandle<Self>,
2923 params: lsp::ApplyWorkspaceEditParams,
2924 server_id: LanguageServerId,
2925 adapter: Arc<CachedLspAdapter>,
2926 language_server: Arc<LanguageServer>,
2927 mut cx: AsyncAppContext,
2928 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2929 let this = this
2930 .upgrade(&cx)
2931 .ok_or_else(|| anyhow!("project closed"))?;
2932 let transaction = Self::deserialize_workspace_edit(
2933 this.clone(),
2934 params.edit,
2935 true,
2936 adapter.clone(),
2937 language_server.clone(),
2938 &mut cx,
2939 )
2940 .await
2941 .log_err();
2942 this.update(&mut cx, |this, _| {
2943 if let Some(transaction) = transaction {
2944 this.last_workspace_edits_by_language_server
2945 .insert(server_id, transaction);
2946 }
2947 });
2948 Ok(lsp::ApplyWorkspaceEditResponse {
2949 applied: true,
2950 failed_change: None,
2951 failure_reason: None,
2952 })
2953 }
2954
2955 pub fn language_server_statuses(
2956 &self,
2957 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2958 self.language_server_statuses.values()
2959 }
2960
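// Converts a `textDocument/publishDiagnostics` payload into diagnostic entries,
// grouping related information with its primary diagnostic and marking entries that
// come from disk-based sources.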
2961 pub fn update_diagnostics(
2962 &mut self,
2963 language_server_id: LanguageServerId,
2964 mut params: lsp::PublishDiagnosticsParams,
2965 disk_based_sources: &[String],
2966 cx: &mut ModelContext<Self>,
2967 ) -> Result<()> {
2968 let abs_path = params
2969 .uri
2970 .to_file_path()
2971 .map_err(|_| anyhow!("URI is not a file"))?;
2972 let mut diagnostics = Vec::default();
2973 let mut primary_diagnostic_group_ids = HashMap::default();
2974 let mut sources_by_group_id = HashMap::default();
2975 let mut supporting_diagnostics = HashMap::default();
2976
2977 // Ensure that primary diagnostics are always the most severe
2978 params.diagnostics.sort_by_key(|item| item.severity);
2979
2980 for diagnostic in ¶ms.diagnostics {
2981 let source = diagnostic.source.as_ref();
2982 let code = diagnostic.code.as_ref().map(|code| match code {
2983 lsp::NumberOrString::Number(code) => code.to_string(),
2984 lsp::NumberOrString::String(code) => code.clone(),
2985 });
2986 let range = range_from_lsp(diagnostic.range);
2987 let is_supporting = diagnostic
2988 .related_information
2989 .as_ref()
2990 .map_or(false, |infos| {
2991 infos.iter().any(|info| {
2992 primary_diagnostic_group_ids.contains_key(&(
2993 source,
2994 code.clone(),
2995 range_from_lsp(info.location.range),
2996 ))
2997 })
2998 });
2999
3000 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3001 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3002 });
3003
3004 if is_supporting {
3005 supporting_diagnostics.insert(
3006 (source, code.clone(), range),
3007 (diagnostic.severity, is_unnecessary),
3008 );
3009 } else {
3010 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3011 let is_disk_based =
3012 source.map_or(false, |source| disk_based_sources.contains(source));
3013
3014 sources_by_group_id.insert(group_id, source);
3015 primary_diagnostic_group_ids
3016 .insert((source, code.clone(), range.clone()), group_id);
3017
3018 diagnostics.push(DiagnosticEntry {
3019 range,
3020 diagnostic: Diagnostic {
3021 source: diagnostic.source.clone(),
3022 code: code.clone(),
3023 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3024 message: diagnostic.message.clone(),
3025 group_id,
3026 is_primary: true,
3027 is_valid: true,
3028 is_disk_based,
3029 is_unnecessary,
3030 },
3031 });
3032 if let Some(infos) = &diagnostic.related_information {
3033 for info in infos {
3034 if info.location.uri == params.uri && !info.message.is_empty() {
3035 let range = range_from_lsp(info.location.range);
3036 diagnostics.push(DiagnosticEntry {
3037 range,
3038 diagnostic: Diagnostic {
3039 source: diagnostic.source.clone(),
3040 code: code.clone(),
3041 severity: DiagnosticSeverity::INFORMATION,
3042 message: info.message.clone(),
3043 group_id,
3044 is_primary: false,
3045 is_valid: true,
3046 is_disk_based,
3047 is_unnecessary: false,
3048 },
3049 });
3050 }
3051 }
3052 }
3053 }
3054 }
3055
3056 for entry in &mut diagnostics {
3057 let diagnostic = &mut entry.diagnostic;
3058 if !diagnostic.is_primary {
3059 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3060 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3061 source,
3062 diagnostic.code.clone(),
3063 entry.range.clone(),
3064 )) {
3065 if let Some(severity) = severity {
3066 diagnostic.severity = severity;
3067 }
3068 diagnostic.is_unnecessary = is_unnecessary;
3069 }
3070 }
3071 }
3072
3073 self.update_diagnostic_entries(
3074 language_server_id,
3075 abs_path,
3076 params.version,
3077 diagnostics,
3078 cx,
3079 )?;
3080 Ok(())
3081 }
3082
3083 pub fn update_diagnostic_entries(
3084 &mut self,
3085 server_id: LanguageServerId,
3086 abs_path: PathBuf,
3087 version: Option<i32>,
3088 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3089 cx: &mut ModelContext<Project>,
3090 ) -> Result<(), anyhow::Error> {
3091 let (worktree, relative_path) = self
3092 .find_local_worktree(&abs_path, cx)
3093 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
3094
3095 let project_path = ProjectPath {
3096 worktree_id: worktree.read(cx).id(),
3097 path: relative_path.into(),
3098 };
3099
3100 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3101 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3102 }
3103
3104 let updated = worktree.update(cx, |worktree, cx| {
3105 worktree
3106 .as_local_mut()
3107 .ok_or_else(|| anyhow!("not a local worktree"))?
3108 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3109 })?;
3110 if updated {
3111 cx.emit(Event::DiagnosticsUpdated {
3112 language_server_id: server_id,
3113 path: project_path,
3114 });
3115 }
3116 Ok(())
3117 }
3118
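// Applies diagnostics to an open buffer, translating ranges for disk-based diagnostics
// through any unsaved edits and clipping all ranges to valid positions in the buffer.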
3119 fn update_buffer_diagnostics(
3120 &mut self,
3121 buffer: &ModelHandle<Buffer>,
3122 server_id: LanguageServerId,
3123 version: Option<i32>,
3124 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3125 cx: &mut ModelContext<Self>,
3126 ) -> Result<()> {
3127 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3128 Ordering::Equal
3129 .then_with(|| b.is_primary.cmp(&a.is_primary))
3130 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3131 .then_with(|| a.severity.cmp(&b.severity))
3132 .then_with(|| a.message.cmp(&b.message))
3133 }
3134
3135 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3136
3137 diagnostics.sort_unstable_by(|a, b| {
3138 Ordering::Equal
3139 .then_with(|| a.range.start.cmp(&b.range.start))
3140 .then_with(|| b.range.end.cmp(&a.range.end))
3141 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3142 });
3143
3144 let mut sanitized_diagnostics = Vec::new();
3145 let edits_since_save = Patch::new(
3146 snapshot
3147 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
3148 .collect(),
3149 );
3150 for entry in diagnostics {
3151 let start;
3152 let end;
3153 if entry.diagnostic.is_disk_based {
3154 // Some diagnostics are based on files on disk instead of buffers'
3155 // current contents. Adjust these diagnostics' ranges to reflect
3156 // any unsaved edits.
3157 start = edits_since_save.old_to_new(entry.range.start);
3158 end = edits_since_save.old_to_new(entry.range.end);
3159 } else {
3160 start = entry.range.start;
3161 end = entry.range.end;
3162 }
3163
3164 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
3165 ..snapshot.clip_point_utf16(end, Bias::Right);
3166
3167 // Expand empty ranges by one codepoint
3168 if range.start == range.end {
3169 // This will move to the next boundary when clipped.
3170 range.end.column += 1;
3171 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
3172 if range.start == range.end && range.end.column > 0 {
3173 range.start.column -= 1;
3174 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
3175 }
3176 }
3177
3178 sanitized_diagnostics.push(DiagnosticEntry {
3179 range,
3180 diagnostic: entry.diagnostic,
3181 });
3182 }
3183 drop(edits_since_save);
3184
3185 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
3186 buffer.update(cx, |buffer, cx| {
3187 buffer.update_diagnostics(server_id, set, cx)
3188 });
3189 Ok(())
3190 }
3191
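// Reloads any dirty buffers from disk, returning the resulting project transaction.
// Remote buffers are reloaded by the host.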
3192 pub fn reload_buffers(
3193 &self,
3194 buffers: HashSet<ModelHandle<Buffer>>,
3195 push_to_history: bool,
3196 cx: &mut ModelContext<Self>,
3197 ) -> Task<Result<ProjectTransaction>> {
3198 let mut local_buffers = Vec::new();
3199 let mut remote_buffers = None;
3200 for buffer_handle in buffers {
3201 let buffer = buffer_handle.read(cx);
3202 if buffer.is_dirty() {
3203 if let Some(file) = File::from_dyn(buffer.file()) {
3204 if file.is_local() {
3205 local_buffers.push(buffer_handle);
3206 } else {
3207 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
3208 }
3209 }
3210 }
3211 }
3212
3213 let remote_buffers = self.remote_id().zip(remote_buffers);
3214 let client = self.client.clone();
3215
3216 cx.spawn(|this, mut cx| async move {
3217 let mut project_transaction = ProjectTransaction::default();
3218
3219 if let Some((project_id, remote_buffers)) = remote_buffers {
3220 let response = client
3221 .request(proto::ReloadBuffers {
3222 project_id,
3223 buffer_ids: remote_buffers
3224 .iter()
3225 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
3226 .collect(),
3227 })
3228 .await?
3229 .transaction
3230 .ok_or_else(|| anyhow!("missing transaction"))?;
3231 project_transaction = this
3232 .update(&mut cx, |this, cx| {
3233 this.deserialize_project_transaction(response, push_to_history, cx)
3234 })
3235 .await?;
3236 }
3237
3238 for buffer in local_buffers {
3239 let transaction = buffer
3240 .update(&mut cx, |buffer, cx| buffer.reload(cx))
3241 .await?;
3242 buffer.update(&mut cx, |buffer, cx| {
3243 if let Some(transaction) = transaction {
3244 if !push_to_history {
3245 buffer.forget_transaction(transaction.id);
3246 }
3247 project_transaction.0.insert(cx.handle(), transaction);
3248 }
3249 });
3250 }
3251
3252 Ok(project_transaction)
3253 })
3254 }
3255
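// Formats the given buffers: whitespace fixes are applied first, then language-specific
// formatting via the primary language server or an external command, all grouped into a
// single undo transaction per buffer. For remote projects the request is sent to the host.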
3256 pub fn format(
3257 &self,
3258 buffers: HashSet<ModelHandle<Buffer>>,
3259 push_to_history: bool,
3260 trigger: FormatTrigger,
3261 cx: &mut ModelContext<Project>,
3262 ) -> Task<Result<ProjectTransaction>> {
3263 if self.is_local() {
3264 let mut buffers_with_paths_and_servers = buffers
3265 .into_iter()
3266 .filter_map(|buffer_handle| {
3267 let buffer = buffer_handle.read(cx);
3268 let file = File::from_dyn(buffer.file())?;
3269 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3270 let server = self
3271 .primary_language_servers_for_buffer(buffer, cx)
3272 .map(|s| s.1.clone());
3273 Some((buffer_handle, buffer_abs_path, server))
3274 })
3275 .collect::<Vec<_>>();
3276
3277 cx.spawn(|this, mut cx| async move {
3278 // Do not allow multiple concurrent formatting requests for the
3279 // same buffer.
3280 this.update(&mut cx, |this, cx| {
3281 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
3282 this.buffers_being_formatted
3283 .insert(buffer.read(cx).remote_id())
3284 });
3285 });
3286
3287 let _cleanup = defer({
3288 let this = this.clone();
3289 let mut cx = cx.clone();
3290 let buffers = &buffers_with_paths_and_servers;
3291 move || {
3292 this.update(&mut cx, |this, cx| {
3293 for (buffer, _, _) in buffers {
3294 this.buffers_being_formatted
3295 .remove(&buffer.read(cx).remote_id());
3296 }
3297 });
3298 }
3299 });
3300
3301 let mut project_transaction = ProjectTransaction::default();
3302 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
3303 let settings = buffer.read_with(&cx, |buffer, cx| {
3304 let language_name = buffer.language().map(|language| language.name());
3305 language_settings(language_name.as_deref(), cx).clone()
3306 });
3307
3308 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
3309 let ensure_final_newline = settings.ensure_final_newline_on_save;
3310 let format_on_save = settings.format_on_save.clone();
3311 let formatter = settings.formatter.clone();
3312 let tab_size = settings.tab_size;
3313
3314 // First, format the buffer's whitespace according to the settings.
3315 let trailing_whitespace_diff = if remove_trailing_whitespace {
3316 Some(
3317 buffer
3318 .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
3319 .await,
3320 )
3321 } else {
3322 None
3323 };
3324 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
3325 buffer.finalize_last_transaction();
3326 buffer.start_transaction();
3327 if let Some(diff) = trailing_whitespace_diff {
3328 buffer.apply_diff(diff, cx);
3329 }
3330 if ensure_final_newline {
3331 buffer.ensure_final_newline(cx);
3332 }
3333 buffer.end_transaction(cx)
3334 });
3335
3336 // Currently, formatting operations are represented differently depending on
3337 // whether they come from a language server or an external command.
3338 enum FormatOperation {
3339 Lsp(Vec<(Range<Anchor>, String)>),
3340 External(Diff),
3341 }
3342
3343 // Apply language-specific formatting using either a language server
3344 // or external command.
3345 let mut format_operation = None;
3346 match (formatter, format_on_save) {
3347 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
3348
3349 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
3350 | (_, FormatOnSave::LanguageServer) => {
3351 if let Some((language_server, buffer_abs_path)) =
3352 language_server.as_ref().zip(buffer_abs_path.as_ref())
3353 {
3354 format_operation = Some(FormatOperation::Lsp(
3355 Self::format_via_lsp(
3356 &this,
3357 &buffer,
3358 buffer_abs_path,
3359 &language_server,
3360 tab_size,
3361 &mut cx,
3362 )
3363 .await
3364 .context("failed to format via language server")?,
3365 ));
3366 }
3367 }
3368
3369 (
3370 Formatter::External { command, arguments },
3371 FormatOnSave::On | FormatOnSave::Off,
3372 )
3373 | (_, FormatOnSave::External { command, arguments }) => {
3374 if let Some(buffer_abs_path) = buffer_abs_path {
3375 format_operation = Self::format_via_external_command(
3376 &buffer,
3377 &buffer_abs_path,
3378 &command,
3379 &arguments,
3380 &mut cx,
3381 )
3382 .await
3383 .context(format!(
3384 "failed to format via external command {:?}",
3385 command
3386 ))?
3387 .map(FormatOperation::External);
3388 }
3389 }
3390 };
3391
3392 buffer.update(&mut cx, |b, cx| {
3393 // If the buffer had its whitespace formatted and was edited while the language-specific
3394 // formatting was being computed, avoid applying the language-specific formatting, because
3395 // it can't be grouped with the whitespace formatting in the undo history.
3396 if let Some(transaction_id) = whitespace_transaction_id {
3397 if b.peek_undo_stack()
3398 .map_or(true, |e| e.transaction_id() != transaction_id)
3399 {
3400 format_operation.take();
3401 }
3402 }
3403
3404 // Apply any language-specific formatting, and group the two formatting operations
3405 // in the buffer's undo history.
3406 if let Some(operation) = format_operation {
3407 match operation {
3408 FormatOperation::Lsp(edits) => {
3409 b.edit(edits, None, cx);
3410 }
3411 FormatOperation::External(diff) => {
3412 b.apply_diff(diff, cx);
3413 }
3414 }
3415
3416 if let Some(transaction_id) = whitespace_transaction_id {
3417 b.group_until_transaction(transaction_id);
3418 }
3419 }
3420
3421 if let Some(transaction) = b.finalize_last_transaction().cloned() {
3422 if !push_to_history {
3423 b.forget_transaction(transaction.id);
3424 }
3425 project_transaction.0.insert(buffer.clone(), transaction);
3426 }
3427 });
3428 }
3429
3430 Ok(project_transaction)
3431 })
3432 } else {
3433 let remote_id = self.remote_id();
3434 let client = self.client.clone();
3435 cx.spawn(|this, mut cx| async move {
3436 let mut project_transaction = ProjectTransaction::default();
3437 if let Some(project_id) = remote_id {
3438 let response = client
3439 .request(proto::FormatBuffers {
3440 project_id,
3441 trigger: trigger as i32,
3442 buffer_ids: buffers
3443 .iter()
3444 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
3445 .collect(),
3446 })
3447 .await?
3448 .transaction
3449 .ok_or_else(|| anyhow!("missing transaction"))?;
3450 project_transaction = this
3451 .update(&mut cx, |this, cx| {
3452 this.deserialize_project_transaction(response, push_to_history, cx)
3453 })
3454 .await?;
3455 }
3456 Ok(project_transaction)
3457 })
3458 }
3459 }
3460
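// Requests whole-document formatting from the language server, falling back to range
// formatting over the entire buffer, and converts the returned edits into anchor ranges.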
3461 async fn format_via_lsp(
3462 this: &ModelHandle<Self>,
3463 buffer: &ModelHandle<Buffer>,
3464 abs_path: &Path,
3465 language_server: &Arc<LanguageServer>,
3466 tab_size: NonZeroU32,
3467 cx: &mut AsyncAppContext,
3468 ) -> Result<Vec<(Range<Anchor>, String)>> {
3469 let text_document =
3470 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(abs_path).unwrap());
3471 let capabilities = &language_server.capabilities();
3472 let lsp_edits = if capabilities
3473 .document_formatting_provider
3474 .as_ref()
3475 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
3476 {
3477 language_server
3478 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
3479 text_document,
3480 options: lsp_command::lsp_formatting_options(tab_size.get()),
3481 work_done_progress_params: Default::default(),
3482 })
3483 .await?
3484 } else if capabilities
3485 .document_range_formatting_provider
3486 .as_ref()
3487 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
3488 {
3489 let buffer_start = lsp::Position::new(0, 0);
3490 let buffer_end =
3491 buffer.read_with(cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
3492 language_server
3493 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
3494 text_document,
3495 range: lsp::Range::new(buffer_start, buffer_end),
3496 options: lsp_command::lsp_formatting_options(tab_size.get()),
3497 work_done_progress_params: Default::default(),
3498 })
3499 .await?
3500 } else {
3501 None
3502 };
3503
3504 if let Some(lsp_edits) = lsp_edits {
3505 this.update(cx, |this, cx| {
3506 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
3507 })
3508 .await
3509 } else {
3510 Ok(Default::default())
3511 }
3512 }
3513
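// Runs the configured external formatter with the buffer's contents on stdin, replacing
// `{buffer_path}` in its arguments with the buffer's path, and returns a diff between the
// buffer and the command's stdout.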
3514 async fn format_via_external_command(
3515 buffer: &ModelHandle<Buffer>,
3516 buffer_abs_path: &Path,
3517 command: &str,
3518 arguments: &[String],
3519 cx: &mut AsyncAppContext,
3520 ) -> Result<Option<Diff>> {
3521 let working_dir_path = buffer.read_with(cx, |buffer, cx| {
3522 let file = File::from_dyn(buffer.file())?;
3523 let worktree = file.worktree.read(cx).as_local()?;
3524 let mut worktree_path = worktree.abs_path().to_path_buf();
3525 if worktree.root_entry()?.is_file() {
3526 worktree_path.pop();
3527 }
3528 Some(worktree_path)
3529 });
3530
3531 if let Some(working_dir_path) = working_dir_path {
3532 let mut child =
3533 smol::process::Command::new(command)
3534 .args(arguments.iter().map(|arg| {
3535 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
3536 }))
3537 .current_dir(&working_dir_path)
3538 .stdin(smol::process::Stdio::piped())
3539 .stdout(smol::process::Stdio::piped())
3540 .stderr(smol::process::Stdio::piped())
3541 .spawn()?;
3542 let stdin = child
3543 .stdin
3544 .as_mut()
3545 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
3546 let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
3547 for chunk in text.chunks() {
3548 stdin.write_all(chunk.as_bytes()).await?;
3549 }
3550 stdin.flush().await?;
3551
3552 let output = child.output().await?;
3553 if !output.status.success() {
3554 return Err(anyhow!(
3555 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
3556 output.status.code(),
3557 String::from_utf8_lossy(&output.stdout),
3558 String::from_utf8_lossy(&output.stderr),
3559 ));
3560 }
3561
3562 let stdout = String::from_utf8(output.stdout)?;
3563 Ok(Some(
3564 buffer
3565 .read_with(cx, |buffer, cx| buffer.diff(stdout, cx))
3566 .await,
3567 ))
3568 } else {
3569 Ok(None)
3570 }
3571 }
3572
3573 pub fn definition<T: ToPointUtf16>(
3574 &self,
3575 buffer: &ModelHandle<Buffer>,
3576 position: T,
3577 cx: &mut ModelContext<Self>,
3578 ) -> Task<Result<Vec<LocationLink>>> {
3579 let position = position.to_point_utf16(buffer.read(cx));
3580 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3581 }
3582
3583 pub fn type_definition<T: ToPointUtf16>(
3584 &self,
3585 buffer: &ModelHandle<Buffer>,
3586 position: T,
3587 cx: &mut ModelContext<Self>,
3588 ) -> Task<Result<Vec<LocationLink>>> {
3589 let position = position.to_point_utf16(buffer.read(cx));
3590 self.request_lsp(buffer.clone(), GetTypeDefinition { position }, cx)
3591 }
3592
3593 pub fn references<T: ToPointUtf16>(
3594 &self,
3595 buffer: &ModelHandle<Buffer>,
3596 position: T,
3597 cx: &mut ModelContext<Self>,
3598 ) -> Task<Result<Vec<Location>>> {
3599 let position = position.to_point_utf16(buffer.read(cx));
3600 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3601 }
3602
3603 pub fn document_highlights<T: ToPointUtf16>(
3604 &self,
3605 buffer: &ModelHandle<Buffer>,
3606 position: T,
3607 cx: &mut ModelContext<Self>,
3608 ) -> Task<Result<Vec<DocumentHighlight>>> {
3609 let position = position.to_point_utf16(buffer.read(cx));
3610 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3611 }
3612
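// Collects workspace symbols matching the query from every running language server, or
// from the host for remote projects, resolving each symbol's worktree-relative path,
// language, and label.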
3613 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3614 if self.is_local() {
3615 let mut requests = Vec::new();
3616 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
3617 let worktree_id = *worktree_id;
3618 if let Some(worktree) = self
3619 .worktree_for_id(worktree_id, cx)
3620 .and_then(|worktree| worktree.read(cx).as_local())
3621 {
3622 if let Some(LanguageServerState::Running {
3623 adapter,
3624 language,
3625 server,
3626 ..
3627 }) = self.language_servers.get(server_id)
3628 {
3629 let adapter = adapter.clone();
3630 let language = language.clone();
3631 let worktree_abs_path = worktree.abs_path().clone();
3632 requests.push(
3633 server
3634 .request::<lsp::request::WorkspaceSymbol>(
3635 lsp::WorkspaceSymbolParams {
3636 query: query.to_string(),
3637 ..Default::default()
3638 },
3639 )
3640 .log_err()
3641 .map(move |response| {
3642 (
3643 adapter,
3644 language,
3645 worktree_id,
3646 worktree_abs_path,
3647 response.unwrap_or_default(),
3648 )
3649 }),
3650 );
3651 }
3652 }
3653 }
3654
3655 cx.spawn_weak(|this, cx| async move {
3656 let responses = futures::future::join_all(requests).await;
3657 let this = if let Some(this) = this.upgrade(&cx) {
3658 this
3659 } else {
3660 return Ok(Default::default());
3661 };
3662 let symbols = this.read_with(&cx, |this, cx| {
3663 let mut symbols = Vec::new();
3664 for (
3665 adapter,
3666 adapter_language,
3667 source_worktree_id,
3668 worktree_abs_path,
3669 response,
3670 ) in responses
3671 {
3672 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3673 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3674 let mut worktree_id = source_worktree_id;
3675 let path;
3676 if let Some((worktree, rel_path)) =
3677 this.find_local_worktree(&abs_path, cx)
3678 {
3679 worktree_id = worktree.read(cx).id();
3680 path = rel_path;
3681 } else {
3682 path = relativize_path(&worktree_abs_path, &abs_path);
3683 }
3684
3685 let project_path = ProjectPath {
3686 worktree_id,
3687 path: path.into(),
3688 };
3689 let signature = this.symbol_signature(&project_path);
3690 let adapter_language = adapter_language.clone();
3691 let language = this
3692 .languages
3693 .language_for_file(&project_path.path, None)
3694 .unwrap_or_else(move |_| adapter_language);
3695 let language_server_name = adapter.name.clone();
3696 Some(async move {
3697 let language = language.await;
3698 let label = language
3699 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3700 .await;
3701
3702 Symbol {
3703 language_server_name,
3704 source_worktree_id,
3705 path: project_path,
3706 label: label.unwrap_or_else(|| {
3707 CodeLabel::plain(lsp_symbol.name.clone(), None)
3708 }),
3709 kind: lsp_symbol.kind,
3710 name: lsp_symbol.name,
3711 range: range_from_lsp(lsp_symbol.location.range),
3712 signature,
3713 }
3714 })
3715 }));
3716 }
3717 symbols
3718 });
3719 Ok(futures::future::join_all(symbols).await)
3720 })
3721 } else if let Some(project_id) = self.remote_id() {
3722 let request = self.client.request(proto::GetProjectSymbols {
3723 project_id,
3724 query: query.to_string(),
3725 });
3726 cx.spawn_weak(|this, cx| async move {
3727 let response = request.await?;
3728 let mut symbols = Vec::new();
3729 if let Some(this) = this.upgrade(&cx) {
3730 let new_symbols = this.read_with(&cx, |this, _| {
3731 response
3732 .symbols
3733 .into_iter()
3734 .map(|symbol| this.deserialize_symbol(symbol))
3735 .collect::<Vec<_>>()
3736 });
3737 symbols = futures::future::join_all(new_symbols)
3738 .await
3739 .into_iter()
3740 .filter_map(|symbol| symbol.log_err())
3741 .collect::<Vec<_>>();
3742 }
3743 Ok(symbols)
3744 })
3745 } else {
3746 Task::ready(Ok(Default::default()))
3747 }
3748 }
3749
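// Opens the buffer containing the given symbol, either via the symbol's language server
// locally or by asking the host when the project is remote.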
3750 pub fn open_buffer_for_symbol(
3751 &mut self,
3752 symbol: &Symbol,
3753 cx: &mut ModelContext<Self>,
3754 ) -> Task<Result<ModelHandle<Buffer>>> {
3755 if self.is_local() {
3756 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
3757 symbol.source_worktree_id,
3758 symbol.language_server_name.clone(),
3759 )) {
3760 *id
3761 } else {
3762 return Task::ready(Err(anyhow!(
3763 "language server for worktree and language not found"
3764 )));
3765 };
3766
3767 let worktree_abs_path = if let Some(worktree_abs_path) = self
3768 .worktree_for_id(symbol.path.worktree_id, cx)
3769 .and_then(|worktree| worktree.read(cx).as_local())
3770 .map(|local_worktree| local_worktree.abs_path())
3771 {
3772 worktree_abs_path
3773 } else {
3774 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3775 };
3776 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
3777 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3778 uri
3779 } else {
3780 return Task::ready(Err(anyhow!("invalid symbol path")));
3781 };
3782
3783 self.open_local_buffer_via_lsp(
3784 symbol_uri,
3785 language_server_id,
3786 symbol.language_server_name.clone(),
3787 cx,
3788 )
3789 } else if let Some(project_id) = self.remote_id() {
3790 let request = self.client.request(proto::OpenBufferForSymbol {
3791 project_id,
3792 symbol: Some(serialize_symbol(symbol)),
3793 });
3794 cx.spawn(|this, mut cx| async move {
3795 let response = request.await?;
3796 this.update(&mut cx, |this, cx| {
3797 this.wait_for_remote_buffer(response.buffer_id, cx)
3798 })
3799 .await
3800 })
3801 } else {
3802 Task::ready(Err(anyhow!("project does not have a remote id")))
3803 }
3804 }
3805
3806 pub fn hover<T: ToPointUtf16>(
3807 &self,
3808 buffer: &ModelHandle<Buffer>,
3809 position: T,
3810 cx: &mut ModelContext<Self>,
3811 ) -> Task<Result<Option<Hover>>> {
3812 let position = position.to_point_utf16(buffer.read(cx));
3813 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3814 }
3815
3816 pub fn completions<T: ToPointUtf16>(
3817 &self,
3818 buffer: &ModelHandle<Buffer>,
3819 position: T,
3820 cx: &mut ModelContext<Self>,
3821 ) -> Task<Result<Vec<Completion>>> {
3822 let position = position.to_point_utf16(buffer.read(cx));
3823 self.request_lsp(buffer.clone(), GetCompletions { position }, cx)
3824 }
3825
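// Resolves the completion with its language server and applies any additional text
// edits it returns, skipping edits that overlap the primary completion range. The
// resulting transaction is dropped from history unless `push_to_history` is true.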
3826 pub fn apply_additional_edits_for_completion(
3827 &self,
3828 buffer_handle: ModelHandle<Buffer>,
3829 completion: Completion,
3830 push_to_history: bool,
3831 cx: &mut ModelContext<Self>,
3832 ) -> Task<Result<Option<Transaction>>> {
3833 let buffer = buffer_handle.read(cx);
3834 let buffer_id = buffer.remote_id();
3835
3836 if self.is_local() {
3837 let lang_server = match self.primary_language_servers_for_buffer(buffer, cx) {
3838 Some((_, server)) => server.clone(),
3839 _ => return Task::ready(Ok(Default::default())),
3840 };
3841
3842 cx.spawn(|this, mut cx| async move {
3843 let resolved_completion = lang_server
3844 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3845 .await?;
3846
3847 if let Some(edits) = resolved_completion.additional_text_edits {
3848 let edits = this
3849 .update(&mut cx, |this, cx| {
3850 this.edits_from_lsp(
3851 &buffer_handle,
3852 edits,
3853 lang_server.server_id(),
3854 None,
3855 cx,
3856 )
3857 })
3858 .await?;
3859
3860 buffer_handle.update(&mut cx, |buffer, cx| {
3861 buffer.finalize_last_transaction();
3862 buffer.start_transaction();
3863
3864 for (range, text) in edits {
3865 let primary = &completion.old_range;
3866 let start_within = primary.start.cmp(&range.start, buffer).is_le()
3867 && primary.end.cmp(&range.start, buffer).is_ge();
3868 let end_within = range.start.cmp(&primary.end, buffer).is_le()
3869 && range.end.cmp(&primary.end, buffer).is_ge();
3870
3871 // Skip additional edits that overlap with the primary completion edit
3872 // https://github.com/zed-industries/zed/pull/1871
3873 if !start_within && !end_within {
3874 buffer.edit([(range, text)], None, cx);
3875 }
3876 }
3877
3878 let transaction = if buffer.end_transaction(cx).is_some() {
3879 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3880 if !push_to_history {
3881 buffer.forget_transaction(transaction.id);
3882 }
3883 Some(transaction)
3884 } else {
3885 None
3886 };
3887 Ok(transaction)
3888 })
3889 } else {
3890 Ok(None)
3891 }
3892 })
3893 } else if let Some(project_id) = self.remote_id() {
3894 let client = self.client.clone();
3895 cx.spawn(|_, mut cx| async move {
3896 let response = client
3897 .request(proto::ApplyCompletionAdditionalEdits {
3898 project_id,
3899 buffer_id,
3900 completion: Some(language::proto::serialize_completion(&completion)),
3901 })
3902 .await?;
3903
3904 if let Some(transaction) = response.transaction {
3905 let transaction = language::proto::deserialize_transaction(transaction)?;
3906 buffer_handle
3907 .update(&mut cx, |buffer, _| {
3908 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3909 })
3910 .await?;
3911 if push_to_history {
3912 buffer_handle.update(&mut cx, |buffer, _| {
3913 buffer.push_transaction(transaction.clone(), Instant::now());
3914 });
3915 }
3916 Ok(Some(transaction))
3917 } else {
3918 Ok(None)
3919 }
3920 })
3921 } else {
3922 Task::ready(Err(anyhow!("project does not have a remote id")))
3923 }
3924 }
3925
3926 pub fn code_actions<T: Clone + ToOffset>(
3927 &self,
3928 buffer_handle: &ModelHandle<Buffer>,
3929 range: Range<T>,
3930 cx: &mut ModelContext<Self>,
3931 ) -> Task<Result<Vec<CodeAction>>> {
3932 let buffer = buffer_handle.read(cx);
3933 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3934 self.request_lsp(buffer_handle.clone(), GetCodeActions { range }, cx)
3935 }
3936
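// Applies a code action. Locally, the action is resolved first (or re-requested when it
// carries no resolve data), then its workspace edit is applied, or its command is
// executed and any workspace edits reported during execution are returned.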
3937 pub fn apply_code_action(
3938 &self,
3939 buffer_handle: ModelHandle<Buffer>,
3940 mut action: CodeAction,
3941 push_to_history: bool,
3942 cx: &mut ModelContext<Self>,
3943 ) -> Task<Result<ProjectTransaction>> {
3944 if self.is_local() {
3945 let buffer = buffer_handle.read(cx);
3946 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
3947 self.language_server_for_buffer(buffer, action.server_id, cx)
3948 {
3949 (adapter.clone(), server.clone())
3950 } else {
3951 return Task::ready(Ok(Default::default()));
3952 };
3953 let range = action.range.to_point_utf16(buffer);
3954
3955 cx.spawn(|this, mut cx| async move {
3956 if let Some(lsp_range) = action
3957 .lsp_action
3958 .data
3959 .as_mut()
3960 .and_then(|d| d.get_mut("codeActionParams"))
3961 .and_then(|d| d.get_mut("range"))
3962 {
3963 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3964 action.lsp_action = lang_server
3965 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3966 .await?;
3967 } else {
3968 let actions = this
3969 .update(&mut cx, |this, cx| {
3970 this.code_actions(&buffer_handle, action.range, cx)
3971 })
3972 .await?;
3973 action.lsp_action = actions
3974 .into_iter()
3975 .find(|a| a.lsp_action.title == action.lsp_action.title)
3976 .ok_or_else(|| anyhow!("code action is outdated"))?
3977 .lsp_action;
3978 }
3979
3980 if let Some(edit) = action.lsp_action.edit {
3981 if edit.changes.is_some() || edit.document_changes.is_some() {
3982 return Self::deserialize_workspace_edit(
3983 this,
3984 edit,
3985 push_to_history,
3986 lsp_adapter.clone(),
3987 lang_server.clone(),
3988 &mut cx,
3989 )
3990 .await;
3991 }
3992 }
3993
3994 if let Some(command) = action.lsp_action.command {
3995 this.update(&mut cx, |this, _| {
3996 this.last_workspace_edits_by_language_server
3997 .remove(&lang_server.server_id());
3998 });
3999 lang_server
4000 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
4001 command: command.command,
4002 arguments: command.arguments.unwrap_or_default(),
4003 ..Default::default()
4004 })
4005 .await?;
4006 return Ok(this.update(&mut cx, |this, _| {
4007 this.last_workspace_edits_by_language_server
4008 .remove(&lang_server.server_id())
4009 .unwrap_or_default()
4010 }));
4011 }
4012
4013 Ok(ProjectTransaction::default())
4014 })
4015 } else if let Some(project_id) = self.remote_id() {
4016 let client = self.client.clone();
4017 let request = proto::ApplyCodeAction {
4018 project_id,
4019 buffer_id: buffer_handle.read(cx).remote_id(),
4020 action: Some(language::proto::serialize_code_action(&action)),
4021 };
4022 cx.spawn(|this, mut cx| async move {
4023 let response = client
4024 .request(request)
4025 .await?
4026 .transaction
4027 .ok_or_else(|| anyhow!("missing transaction"))?;
4028 this.update(&mut cx, |this, cx| {
4029 this.deserialize_project_transaction(response, push_to_history, cx)
4030 })
4031 .await
4032 })
4033 } else {
4034 Task::ready(Err(anyhow!("project does not have a remote id")))
4035 }
4036 }
4037
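// Performs on-type formatting at the given position, ensuring only one formatting
// request runs per buffer at a time and waiting for the anchor's edit to arrive before
// issuing the request.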
4038 fn apply_on_type_formatting(
4039 &self,
4040 buffer: ModelHandle<Buffer>,
4041 position: Anchor,
4042 trigger: String,
4043 push_to_history: bool,
4044 cx: &mut ModelContext<Self>,
4045 ) -> Task<Result<ProjectTransaction>> {
4046 if self.is_local() {
4047 cx.spawn(|this, mut cx| async move {
4048 // Do not allow multiple concurrent formatting requests for the
4049 // same buffer.
4050 this.update(&mut cx, |this, cx| {
4051 this.buffers_being_formatted
4052 .insert(buffer.read(cx).remote_id())
4053 });
4054
4055 let _cleanup = defer({
4056 let this = this.clone();
4057 let mut cx = cx.clone();
4058 let closure_buffer = buffer.clone();
4059 move || {
4060 this.update(&mut cx, |this, cx| {
4061 this.buffers_being_formatted
4062 .remove(&closure_buffer.read(cx).remote_id());
4063 });
4064 }
4065 });
4066
4067 buffer
4068 .update(&mut cx, |buffer, _| {
4069 buffer.wait_for_edits(Some(position.timestamp))
4070 })
4071 .await?;
4072 this.update(&mut cx, |this, cx| {
4073 let position = position.to_point_utf16(buffer.read(cx));
4074 this.on_type_format(buffer, position, trigger, cx)
4075 })
4076 .await
4077 })
4078 } else if let Some(project_id) = self.remote_id() {
4079 let client = self.client.clone();
4080 let request = proto::OnTypeFormatting {
4081 project_id,
4082 buffer_id: buffer.read(cx).remote_id(),
4083 position: Some(serialize_anchor(&position)),
4084 trigger,
4085 version: serialize_version(&buffer.read(cx).version()),
4086 };
4087 cx.spawn(|this, mut cx| async move {
4088 let response = client
4089 .request(request)
4090 .await?
4091 .transaction
4092 .ok_or_else(|| anyhow!("missing transaction"))?;
4093 this.update(&mut cx, |this, cx| {
4094 this.deserialize_project_transaction(response, push_to_history, cx)
4095 })
4096 .await
4097 })
4098 } else {
4099 Task::ready(Err(anyhow!("project does not have a remote id")))
4100 }
4101 }
4102
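// Applies a flat list of LSP text edits to a single buffer within one transaction and
// wraps the result in a `ProjectTransaction`.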
4103 async fn deserialize_edits(
4104 this: ModelHandle<Self>,
4105 buffer_to_edit: ModelHandle<Buffer>,
4106 edits: Vec<lsp::TextEdit>,
4107 push_to_history: bool,
4108 _: Arc<CachedLspAdapter>,
4109 language_server: Arc<LanguageServer>,
4110 cx: &mut AsyncAppContext,
4111 ) -> Result<ProjectTransaction> {
4112 let edits = this
4113 .update(cx, |this, cx| {
4114 this.edits_from_lsp(
4115 &buffer_to_edit,
4116 edits,
4117 language_server.server_id(),
4118 None,
4119 cx,
4120 )
4121 })
4122 .await?;
4123
4124 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
4125 buffer.finalize_last_transaction();
4126 buffer.start_transaction();
4127 for (range, text) in edits {
4128 buffer.edit([(range, text)], None, cx);
4129 }
4130
4131 if buffer.end_transaction(cx).is_some() {
4132 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4133 if !push_to_history {
4134 buffer.forget_transaction(transaction.id);
4135 }
4136 Some(transaction)
4137 } else {
4138 None
4139 }
4140 });
4141
4142 let mut project_transaction = ProjectTransaction::default();
4143 if let Some(transaction) = transaction {
4144 project_transaction.0.insert(buffer_to_edit, transaction);
4145 }
4146
4147 Ok(project_transaction)
4148 }
4149
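// Applies an LSP workspace edit: resource create/rename/delete operations go through
// the filesystem, while text document edits are applied by opening each affected buffer
// and recording its transaction in the returned `ProjectTransaction`.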
4150 async fn deserialize_workspace_edit(
4151 this: ModelHandle<Self>,
4152 edit: lsp::WorkspaceEdit,
4153 push_to_history: bool,
4154 lsp_adapter: Arc<CachedLspAdapter>,
4155 language_server: Arc<LanguageServer>,
4156 cx: &mut AsyncAppContext,
4157 ) -> Result<ProjectTransaction> {
4158 let fs = this.read_with(cx, |this, _| this.fs.clone());
4159 let mut operations = Vec::new();
4160 if let Some(document_changes) = edit.document_changes {
4161 match document_changes {
4162 lsp::DocumentChanges::Edits(edits) => {
4163 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
4164 }
4165 lsp::DocumentChanges::Operations(ops) => operations = ops,
4166 }
4167 } else if let Some(changes) = edit.changes {
4168 operations.extend(changes.into_iter().map(|(uri, edits)| {
4169 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
4170 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
4171 uri,
4172 version: None,
4173 },
4174 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
4175 })
4176 }));
4177 }
4178
4179 let mut project_transaction = ProjectTransaction::default();
4180 for operation in operations {
4181 match operation {
4182 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
4183 let abs_path = op
4184 .uri
4185 .to_file_path()
4186 .map_err(|_| anyhow!("can't convert URI to path"))?;
4187
4188 if let Some(parent_path) = abs_path.parent() {
4189 fs.create_dir(parent_path).await?;
4190 }
4191 if abs_path.ends_with("/") {
4192 fs.create_dir(&abs_path).await?;
4193 } else {
4194 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
4195 .await?;
4196 }
4197 }
4198
4199 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
4200 let source_abs_path = op
4201 .old_uri
4202 .to_file_path()
4203 .map_err(|_| anyhow!("can't convert URI to path"))?;
4204 let target_abs_path = op
4205 .new_uri
4206 .to_file_path()
4207 .map_err(|_| anyhow!("can't convert URI to path"))?;
4208 fs.rename(
4209 &source_abs_path,
4210 &target_abs_path,
4211 op.options.map(Into::into).unwrap_or_default(),
4212 )
4213 .await?;
4214 }
4215
4216 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
4217 let abs_path = op
4218 .uri
4219 .to_file_path()
4220 .map_err(|_| anyhow!("can't convert URI to path"))?;
4221 let options = op.options.map(Into::into).unwrap_or_default();
4222 if abs_path.ends_with("/") {
4223 fs.remove_dir(&abs_path, options).await?;
4224 } else {
4225 fs.remove_file(&abs_path, options).await?;
4226 }
4227 }
4228
4229 lsp::DocumentChangeOperation::Edit(op) => {
4230 let buffer_to_edit = this
4231 .update(cx, |this, cx| {
4232 this.open_local_buffer_via_lsp(
4233 op.text_document.uri,
4234 language_server.server_id(),
4235 lsp_adapter.name.clone(),
4236 cx,
4237 )
4238 })
4239 .await?;
4240
4241 let edits = this
4242 .update(cx, |this, cx| {
4243 let edits = op.edits.into_iter().map(|edit| match edit {
4244 lsp::OneOf::Left(edit) => edit,
4245 lsp::OneOf::Right(edit) => edit.text_edit,
4246 });
4247 this.edits_from_lsp(
4248 &buffer_to_edit,
4249 edits,
4250 language_server.server_id(),
4251 op.text_document.version,
4252 cx,
4253 )
4254 })
4255 .await?;
4256
4257 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
4258 buffer.finalize_last_transaction();
4259 buffer.start_transaction();
4260 for (range, text) in edits {
4261 buffer.edit([(range, text)], None, cx);
4262 }
4263 let transaction = if buffer.end_transaction(cx).is_some() {
4264 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4265 if !push_to_history {
4266 buffer.forget_transaction(transaction.id);
4267 }
4268 Some(transaction)
4269 } else {
4270 None
4271 };
4272
4273 transaction
4274 });
4275 if let Some(transaction) = transaction {
4276 project_transaction.0.insert(buffer_to_edit, transaction);
4277 }
4278 }
4279 }
4280 }
4281
4282 Ok(project_transaction)
4283 }
4284
4285 pub fn prepare_rename<T: ToPointUtf16>(
4286 &self,
4287 buffer: ModelHandle<Buffer>,
4288 position: T,
4289 cx: &mut ModelContext<Self>,
4290 ) -> Task<Result<Option<Range<Anchor>>>> {
4291 let position = position.to_point_utf16(buffer.read(cx));
4292 self.request_lsp(buffer, PrepareRename { position }, cx)
4293 }
4294
4295 pub fn perform_rename<T: ToPointUtf16>(
4296 &self,
4297 buffer: ModelHandle<Buffer>,
4298 position: T,
4299 new_name: String,
4300 push_to_history: bool,
4301 cx: &mut ModelContext<Self>,
4302 ) -> Task<Result<ProjectTransaction>> {
4303 let position = position.to_point_utf16(buffer.read(cx));
4304 self.request_lsp(
4305 buffer,
4306 PerformRename {
4307 position,
4308 new_name,
4309 push_to_history,
4310 },
4311 cx,
4312 )
4313 }
4314
4315 pub fn on_type_format<T: ToPointUtf16>(
4316 &self,
4317 buffer: ModelHandle<Buffer>,
4318 position: T,
4319 trigger: String,
4320 cx: &mut ModelContext<Self>,
4321 ) -> Task<Result<ProjectTransaction>> {
4322 let tab_size = buffer.read_with(cx, |buffer, cx| {
4323 let language_name = buffer.language().map(|language| language.name());
4324 language_settings(language_name.as_deref(), cx).tab_size
4325 });
4326 let position = position.to_point_utf16(buffer.read(cx));
4327 self.request_lsp(
4328 buffer.clone(),
4329 OnTypeFormatting {
4330 position,
4331 trigger,
4332 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
4333 push_to_history: true,
4334 },
4335 cx,
4336 )
4337 }
4338
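// Searches the project for the given query. Locally, the visible worktrees' paths are
// split across background workers that scan files for candidate matches; matching files
// (plus all already-open buffers) are then searched for match ranges. Remotely, the
// query is forwarded to the host.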
4339 #[allow(clippy::type_complexity)]
4340 pub fn search(
4341 &self,
4342 query: SearchQuery,
4343 cx: &mut ModelContext<Self>,
4344 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
4345 if self.is_local() {
4346 let snapshots = self
4347 .visible_worktrees(cx)
4348 .filter_map(|tree| {
4349 let tree = tree.read(cx).as_local()?;
4350 Some(tree.snapshot())
4351 })
4352 .collect::<Vec<_>>();
4353
4354 let background = cx.background().clone();
4355 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
4356 if path_count == 0 {
4357 return Task::ready(Ok(Default::default()));
4358 }
4359 let workers = background.num_cpus().min(path_count);
4360 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
4361 cx.background()
4362 .spawn({
4363 let fs = self.fs.clone();
4364 let background = cx.background().clone();
4365 let query = query.clone();
4366 async move {
4367 let fs = &fs;
4368 let query = &query;
4369 let matching_paths_tx = &matching_paths_tx;
4370 let paths_per_worker = (path_count + workers - 1) / workers;
4371 let snapshots = &snapshots;
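// Each worker scans a contiguous slice of the concatenated visible-file lists of all
// snapshots, sending candidate paths on `matching_paths_tx`.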
4372 background
4373 .scoped(|scope| {
4374 for worker_ix in 0..workers {
4375 let worker_start_ix = worker_ix * paths_per_worker;
4376 let worker_end_ix = worker_start_ix + paths_per_worker;
4377 scope.spawn(async move {
4378 let mut snapshot_start_ix = 0;
4379 let mut abs_path = PathBuf::new();
4380 for snapshot in snapshots {
4381 let snapshot_end_ix =
4382 snapshot_start_ix + snapshot.visible_file_count();
4383 if worker_end_ix <= snapshot_start_ix {
4384 break;
4385 } else if worker_start_ix > snapshot_end_ix {
4386 snapshot_start_ix = snapshot_end_ix;
4387 continue;
4388 } else {
4389 let start_in_snapshot = worker_start_ix
4390 .saturating_sub(snapshot_start_ix);
4391 let end_in_snapshot =
4392 cmp::min(worker_end_ix, snapshot_end_ix)
4393 - snapshot_start_ix;
4394
4395 for entry in snapshot
4396 .files(false, start_in_snapshot)
4397 .take(end_in_snapshot - start_in_snapshot)
4398 {
4399 if matching_paths_tx.is_closed() {
4400 break;
4401 }
4402 let matches = if query
4403 .file_matches(Some(&entry.path))
4404 {
4405 abs_path.clear();
4406 abs_path.push(&snapshot.abs_path());
4407 abs_path.push(&entry.path);
4408 if let Some(file) =
4409 fs.open_sync(&abs_path).await.log_err()
4410 {
4411 query.detect(file).unwrap_or(false)
4412 } else {
4413 false
4414 }
4415 } else {
4416 false
4417 };
4418
4419 if matches {
4420 let project_path =
4421 (snapshot.id(), entry.path.clone());
4422 if matching_paths_tx
4423 .send(project_path)
4424 .await
4425 .is_err()
4426 {
4427 break;
4428 }
4429 }
4430 }
4431
4432 snapshot_start_ix = snapshot_end_ix;
4433 }
4434 }
4435 });
4436 }
4437 })
4438 .await;
4439 }
4440 })
4441 .detach();
4442
4443 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
4444 let open_buffers = self
4445 .opened_buffers
4446 .values()
4447 .filter_map(|b| b.upgrade(cx))
4448 .collect::<HashSet<_>>();
4449 cx.spawn(|this, cx| async move {
4450 for buffer in &open_buffers {
4451 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4452 buffers_tx.send((buffer.clone(), snapshot)).await?;
4453 }
4454
4455 let open_buffers = Rc::new(RefCell::new(open_buffers));
4456 while let Some(project_path) = matching_paths_rx.next().await {
4457 if buffers_tx.is_closed() {
4458 break;
4459 }
4460
4461 let this = this.clone();
4462 let open_buffers = open_buffers.clone();
4463 let buffers_tx = buffers_tx.clone();
4464 cx.spawn(|mut cx| async move {
4465 if let Some(buffer) = this
4466 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
4467 .await
4468 .log_err()
4469 {
4470 if open_buffers.borrow_mut().insert(buffer.clone()) {
4471 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
4472 buffers_tx.send((buffer, snapshot)).await?;
4473 }
4474 }
4475
4476 Ok::<_, anyhow::Error>(())
4477 })
4478 .detach();
4479 }
4480
4481 Ok::<_, anyhow::Error>(())
4482 })
4483 .detach_and_log_err(cx);
4484
4485 let background = cx.background().clone();
4486 cx.background().spawn(async move {
4487 let query = &query;
4488 let mut matched_buffers = Vec::new();
4489 for _ in 0..workers {
4490 matched_buffers.push(HashMap::default());
4491 }
4492 background
4493 .scoped(|scope| {
4494 for worker_matched_buffers in matched_buffers.iter_mut() {
4495 let mut buffers_rx = buffers_rx.clone();
4496 scope.spawn(async move {
4497 while let Some((buffer, snapshot)) = buffers_rx.next().await {
4498 let buffer_matches = if query.file_matches(
4499 snapshot.file().map(|file| file.path().as_ref()),
4500 ) {
4501 query
4502 .search(snapshot.as_rope())
4503 .await
4504 .iter()
4505 .map(|range| {
4506 snapshot.anchor_before(range.start)
4507 ..snapshot.anchor_after(range.end)
4508 })
4509 .collect()
4510 } else {
4511 Vec::new()
4512 };
4513 if !buffer_matches.is_empty() {
4514 worker_matched_buffers
4515 .insert(buffer.clone(), buffer_matches);
4516 }
4517 }
4518 });
4519 }
4520 })
4521 .await;
4522 Ok(matched_buffers.into_iter().flatten().collect())
4523 })
4524 } else if let Some(project_id) = self.remote_id() {
4525 let request = self.client.request(query.to_proto(project_id));
4526 cx.spawn(|this, mut cx| async move {
4527 let response = request.await?;
4528 let mut result = HashMap::default();
4529 for location in response.locations {
4530 let target_buffer = this
4531 .update(&mut cx, |this, cx| {
4532 this.wait_for_remote_buffer(location.buffer_id, cx)
4533 })
4534 .await?;
4535 let start = location
4536 .start
4537 .and_then(deserialize_anchor)
4538 .ok_or_else(|| anyhow!("missing target start"))?;
4539 let end = location
4540 .end
4541 .and_then(deserialize_anchor)
4542 .ok_or_else(|| anyhow!("missing target end"))?;
4543 result
4544 .entry(target_buffer)
4545 .or_insert(Vec::new())
4546 .push(start..end)
4547 }
4548 Ok(result)
4549 })
4550 } else {
4551 Task::ready(Ok(Default::default()))
4552 }
4553 }
4554
4555 // TODO: Wire this up to allow selecting a server?
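// Issues an LSP request for the buffer. Locally it goes to the buffer's primary
// language server (returning a default response when the server lacks the capability);
// remotely it is proxied to the host over RPC.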
4556 fn request_lsp<R: LspCommand>(
4557 &self,
4558 buffer_handle: ModelHandle<Buffer>,
4559 request: R,
4560 cx: &mut ModelContext<Self>,
4561 ) -> Task<Result<R::Response>>
4562 where
4563 <R::LspRequest as lsp::request::Request>::Result: Send,
4564 {
4565 let buffer = buffer_handle.read(cx);
4566 if self.is_local() {
4567 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4568 if let Some((file, language_server)) = file.zip(
4569 self.primary_language_servers_for_buffer(buffer, cx)
4570 .map(|(_, server)| server.clone()),
4571 ) {
4572 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
4573 return cx.spawn(|this, cx| async move {
4574 if !request.check_capabilities(language_server.capabilities()) {
4575 return Ok(Default::default());
4576 }
4577
4578 let response = language_server
4579 .request::<R::LspRequest>(lsp_params)
4580 .await
4581 .context("lsp request failed")?;
4582 request
4583 .response_from_lsp(
4584 response,
4585 this,
4586 buffer_handle,
4587 language_server.server_id(),
4588 cx,
4589 )
4590 .await
4591 });
4592 }
4593 } else if let Some(project_id) = self.remote_id() {
4594 let rpc = self.client.clone();
4595 let message = request.to_proto(project_id, buffer);
4596 return cx.spawn_weak(|this, cx| async move {
4597 // Ensure the project is still alive by the time the task
4598 // is scheduled.
4599 this.upgrade(&cx)
4600 .ok_or_else(|| anyhow!("project dropped"))?;
4601
4602 let response = rpc.request(message).await?;
4603
4604 let this = this
4605 .upgrade(&cx)
4606 .ok_or_else(|| anyhow!("project dropped"))?;
4607 if this.read_with(&cx, |this, _| this.is_read_only()) {
4608 Err(anyhow!("disconnected before completing request"))
4609 } else {
4610 request
4611 .response_from_proto(response, this, buffer_handle, cx)
4612 .await
4613 }
4614 });
4615 }
4616 Task::ready(Ok(Default::default()))
4617 }
4618
4619 pub fn find_or_create_local_worktree(
4620 &mut self,
4621 abs_path: impl AsRef<Path>,
4622 visible: bool,
4623 cx: &mut ModelContext<Self>,
4624 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4625 let abs_path = abs_path.as_ref();
4626 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4627 Task::ready(Ok((tree, relative_path)))
4628 } else {
4629 let worktree = self.create_local_worktree(abs_path, visible, cx);
4630 cx.foreground()
4631 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4632 }
4633 }
4634
4635 pub fn find_local_worktree(
4636 &self,
4637 abs_path: &Path,
4638 cx: &AppContext,
4639 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4640 for tree in &self.worktrees {
4641 if let Some(tree) = tree.upgrade(cx) {
4642 if let Some(relative_path) = tree
4643 .read(cx)
4644 .as_local()
4645 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4646 {
4647 return Some((tree.clone(), relative_path.into()));
4648 }
4649 }
4650 }
4651 None
4652 }
4653
4654 pub fn is_shared(&self) -> bool {
4655 match &self.client_state {
4656 Some(ProjectClientState::Local { .. }) => true,
4657 _ => false,
4658 }
4659 }
4660
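// Creates a local worktree for the given path, reusing an in-flight load if one exists.
// The loading task is shared so concurrent calls for the same path resolve to the same
// worktree.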
4661 fn create_local_worktree(
4662 &mut self,
4663 abs_path: impl AsRef<Path>,
4664 visible: bool,
4665 cx: &mut ModelContext<Self>,
4666 ) -> Task<Result<ModelHandle<Worktree>>> {
4667 let fs = self.fs.clone();
4668 let client = self.client.clone();
4669 let next_entry_id = self.next_entry_id.clone();
4670 let path: Arc<Path> = abs_path.as_ref().into();
4671 let task = self
4672 .loading_local_worktrees
4673 .entry(path.clone())
4674 .or_insert_with(|| {
4675 cx.spawn(|project, mut cx| {
4676 async move {
4677 let worktree = Worktree::local(
4678 client.clone(),
4679 path.clone(),
4680 visible,
4681 fs,
4682 next_entry_id,
4683 &mut cx,
4684 )
4685 .await;
4686
4687 project.update(&mut cx, |project, _| {
4688 project.loading_local_worktrees.remove(&path);
4689 });
4690
4691 let worktree = worktree?;
4692 project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
4693 Ok(worktree)
4694 }
4695 .map_err(Arc::new)
4696 })
4697 .shared()
4698 })
4699 .clone();
4700 cx.foreground().spawn(async move {
4701 match task.await {
4702 Ok(worktree) => Ok(worktree),
4703 Err(err) => Err(anyhow!("{}", err)),
4704 }
4705 })
4706 }
4707
4708 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4709 self.worktrees.retain(|worktree| {
4710 if let Some(worktree) = worktree.upgrade(cx) {
4711 let id = worktree.read(cx).id();
4712 if id == id_to_remove {
4713 cx.emit(Event::WorktreeRemoved(id));
4714 false
4715 } else {
4716 true
4717 }
4718 } else {
4719 false
4720 }
4721 });
4722 self.metadata_changed(cx);
4723 }
4724
4725 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4726 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
4727 if worktree.read(cx).is_local() {
4728 cx.subscribe(worktree, |this, worktree, event, cx| match event {
4729 worktree::Event::UpdatedEntries(changes) => {
4730 this.update_local_worktree_buffers(&worktree, &changes, cx);
4731 this.update_local_worktree_language_servers(&worktree, changes, cx);
4732 }
4733 worktree::Event::UpdatedGitRepositories(updated_repos) => {
4734 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
4735 }
4736 })
4737 .detach();
4738 }
4739
4740 let push_strong_handle = {
4741 let worktree = worktree.read(cx);
4742 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4743 };
4744 if push_strong_handle {
4745 self.worktrees
4746 .push(WorktreeHandle::Strong(worktree.clone()));
4747 } else {
4748 self.worktrees
4749 .push(WorktreeHandle::Weak(worktree.downgrade()));
4750 }
4751
4752 cx.observe_release(worktree, |this, worktree, cx| {
4753 let _ = this.remove_worktree(worktree.id(), cx);
4754 })
4755 .detach();
4756
4757 cx.emit(Event::WorktreeAdded);
4758 self.metadata_changed(cx);
4759 }
4760
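// Keeps open buffers in sync with filesystem changes in a local worktree: each affected
// buffer's `File` (path, entry id, mtime, deletion state) is updated, collaborators are
// notified, and renamed buffers are re-registered with the language servers.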
4761 fn update_local_worktree_buffers(
4762 &mut self,
4763 worktree_handle: &ModelHandle<Worktree>,
4764 changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
4765 cx: &mut ModelContext<Self>,
4766 ) {
4767 let snapshot = worktree_handle.read(cx).snapshot();
4768
4769 let mut renamed_buffers = Vec::new();
4770 for (path, entry_id) in changes.keys() {
4771 let worktree_id = worktree_handle.read(cx).id();
4772 let project_path = ProjectPath {
4773 worktree_id,
4774 path: path.clone(),
4775 };
4776
4777 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
4778 Some(&buffer_id) => buffer_id,
4779 None => match self.local_buffer_ids_by_path.get(&project_path) {
4780 Some(&buffer_id) => buffer_id,
4781 None => continue,
4782 },
4783 };
4784
4785 let open_buffer = self.opened_buffers.get(&buffer_id);
4786 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
4787 buffer
4788 } else {
4789 self.opened_buffers.remove(&buffer_id);
4790 self.local_buffer_ids_by_path.remove(&project_path);
4791 self.local_buffer_ids_by_entry_id.remove(entry_id);
4792 continue;
4793 };
4794
4795 buffer.update(cx, |buffer, cx| {
4796 if let Some(old_file) = File::from_dyn(buffer.file()) {
4797 if old_file.worktree != *worktree_handle {
4798 return;
4799 }
4800
4801 let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
4802 File {
4803 is_local: true,
4804 entry_id: entry.id,
4805 mtime: entry.mtime,
4806 path: entry.path.clone(),
4807 worktree: worktree_handle.clone(),
4808 is_deleted: false,
4809 }
4810 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
4811 File {
4812 is_local: true,
4813 entry_id: entry.id,
4814 mtime: entry.mtime,
4815 path: entry.path.clone(),
4816 worktree: worktree_handle.clone(),
4817 is_deleted: false,
4818 }
4819 } else {
4820 File {
4821 is_local: true,
4822 entry_id: old_file.entry_id,
4823 path: old_file.path().clone(),
4824 mtime: old_file.mtime(),
4825 worktree: worktree_handle.clone(),
4826 is_deleted: true,
4827 }
4828 };
4829
4830 let old_path = old_file.abs_path(cx);
4831 if new_file.abs_path(cx) != old_path {
4832 renamed_buffers.push((cx.handle(), old_file.clone()));
4833 self.local_buffer_ids_by_path.remove(&project_path);
4834 self.local_buffer_ids_by_path.insert(
4835 ProjectPath {
4836 worktree_id,
4837 path: path.clone(),
4838 },
4839 buffer_id,
4840 );
4841 }
4842
4843 if new_file.entry_id != *entry_id {
4844 self.local_buffer_ids_by_entry_id.remove(entry_id);
4845 self.local_buffer_ids_by_entry_id
4846 .insert(new_file.entry_id, buffer_id);
4847 }
4848
4849 if new_file != *old_file {
4850 if let Some(project_id) = self.remote_id() {
4851 self.client
4852 .send(proto::UpdateBufferFile {
4853 project_id,
4854 buffer_id: buffer_id as u64,
4855 file: Some(new_file.to_proto()),
4856 })
4857 .log_err();
4858 }
4859
4860 buffer.file_updated(Arc::new(new_file), cx).detach();
4861 }
4862 }
4863 });
4864 }
4865
4866 for (buffer, old_file) in renamed_buffers {
4867 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
4868 self.detect_language_for_buffer(&buffer, cx);
4869 self.register_buffer_with_language_servers(&buffer, cx);
4870 }
4871 }
4872
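// Forwards filesystem changes in a worktree to the language servers attached to it, as
// `workspace/didChangeWatchedFiles` notifications filtered by each server's watched paths.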
4873 fn update_local_worktree_language_servers(
4874 &mut self,
4875 worktree_handle: &ModelHandle<Worktree>,
4876 changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
4877 cx: &mut ModelContext<Self>,
4878 ) {
4879 if changes.is_empty() {
4880 return;
4881 }
4882
4883 let worktree_id = worktree_handle.read(cx).id();
4884 let mut language_server_ids = self
4885 .language_server_ids
4886 .iter()
4887 .filter_map(|((server_worktree_id, _), server_id)| {
4888 (*server_worktree_id == worktree_id).then_some(*server_id)
4889 })
4890 .collect::<Vec<_>>();
4891 language_server_ids.sort();
4892 language_server_ids.dedup();
4893
4894 let abs_path = worktree_handle.read(cx).abs_path();
4895 for server_id in &language_server_ids {
4896 if let Some(server) = self.language_servers.get(server_id) {
4897 if let LanguageServerState::Running {
4898 server,
4899 watched_paths,
4900 ..
4901 } = server
4902 {
4903 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
4904 let params = lsp::DidChangeWatchedFilesParams {
4905 changes: changes
4906 .iter()
4907 .filter_map(|((path, _), change)| {
4908 if watched_paths.is_match(&path) {
4909 Some(lsp::FileEvent {
4910 uri: lsp::Url::from_file_path(abs_path.join(path))
4911 .unwrap(),
4912 typ: match change {
4913 PathChange::Added => lsp::FileChangeType::CREATED,
4914 PathChange::Removed => lsp::FileChangeType::DELETED,
4915 PathChange::Updated
4916 | PathChange::AddedOrUpdated => {
4917 lsp::FileChangeType::CHANGED
4918 }
4919 },
4920 })
4921 } else {
4922 None
4923 }
4924 })
4925 .collect(),
4926 };
4927
4928 if !params.changes.is_empty() {
4929 server
4930 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
4931 .log_err();
4932 }
4933 }
4934 }
4935 }
4936 }
4937 }
4938
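// Recomputes diff bases from the git index for buffers, both loading and already open,
// whose paths live inside the updated repositories.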
4939 fn update_local_worktree_buffers_git_repos(
4940 &mut self,
4941 worktree_handle: ModelHandle<Worktree>,
4942 repos: &HashMap<Arc<Path>, LocalRepositoryEntry>,
4943 cx: &mut ModelContext<Self>,
4944 ) {
4945 debug_assert!(worktree_handle.read(cx).is_local());
4946
4947 // Set up diff bases for the buffers that are still loading
4948 let future_buffers = self
4949 .loading_buffers_by_path
4950 .iter()
4951 .filter_map(|(path, receiver)| {
4952 let path = &path.path;
4953 let (work_directory, repo) = repos
4954 .iter()
4955 .find(|(work_directory, _)| path.starts_with(work_directory))?;
4956
4957 let repo_relative_path = path.strip_prefix(work_directory).log_err()?;
4958
4959 let receiver = receiver.clone();
4960 let repo_ptr = repo.repo_ptr.clone();
4961 let repo_relative_path = repo_relative_path.to_owned();
4962 Some(async move {
4963 pump_loading_buffer_reciever(receiver)
4964 .await
4965 .ok()
4966 .map(|buffer| (buffer, repo_relative_path, repo_ptr))
4967 })
4968 })
4969 .collect::<FuturesUnordered<_>>()
4970 .filter_map(|result| async move {
4971 let (buffer_handle, repo_relative_path, repo_ptr) = result?;
4972
4973 let lock = repo_ptr.lock();
4974 lock.load_index_text(&repo_relative_path)
4975 .map(|diff_base| (diff_base, buffer_handle))
4976 });
4977
4978 let update_diff_base_fn = update_diff_base(self);
4979 cx.spawn(|_, mut cx| async move {
4980 let diff_base_tasks = cx
4981 .background()
4982 .spawn(future_buffers.collect::<Vec<_>>())
4983 .await;
4984
4985 for (diff_base, buffer) in diff_base_tasks.into_iter() {
4986 update_diff_base_fn(Some(diff_base), buffer, &mut cx);
4987 }
4988 })
4989 .detach();
4990
4991 // Then update the diff bases of the buffers that are already open
4992 for (_, buffer) in &self.opened_buffers {
4993 if let Some(buffer) = buffer.upgrade(cx) {
4994 let file = match File::from_dyn(buffer.read(cx).file()) {
4995 Some(file) => file,
4996 None => continue,
4997 };
4998 if file.worktree != worktree_handle {
4999 continue;
5000 }
5001
5002 let path = file.path().clone();
5003
5004 let worktree = worktree_handle.read(cx);
5005
5006 let (work_directory, repo) = match repos
5007 .iter()
5008 .find(|(work_directory, _)| path.starts_with(work_directory))
5009 {
5010 Some(repo) => repo.clone(),
5011 None => continue,
5012 };
5013
5014 let relative_repo = match path.strip_prefix(work_directory).log_err() {
5015 Some(relative_repo) => relative_repo.to_owned(),
5016 None => continue,
5017 };
5018
5019 drop(worktree);
5020
5021 let update_diff_base_fn = update_diff_base(self);
5022 let git_ptr = repo.repo_ptr.clone();
5023 let diff_base_task = cx
5024 .background()
5025 .spawn(async move { git_ptr.lock().load_index_text(&relative_repo) });
5026
5027 cx.spawn(|_, mut cx| async move {
5028 let diff_base = diff_base_task.await;
5029 update_diff_base_fn(diff_base, buffer, &mut cx);
5030 })
5031 .detach();
5032 }
5033 }
5034 }
5035
5036 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
5037 let new_active_entry = entry.and_then(|project_path| {
5038 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
5039 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
5040 Some(entry.id)
5041 });
5042 if new_active_entry != self.active_entry {
5043 self.active_entry = new_active_entry;
5044 cx.emit(Event::ActiveEntryChanged(new_active_entry));
5045 }
5046 }
5047
5048 pub fn language_servers_running_disk_based_diagnostics(
5049 &self,
5050 ) -> impl Iterator<Item = LanguageServerId> + '_ {
5051 self.language_server_statuses
5052 .iter()
5053 .filter_map(|(id, status)| {
5054 if status.has_pending_diagnostic_updates {
5055 Some(*id)
5056 } else {
5057 None
5058 }
5059 })
5060 }
5061
5062 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
5063 let mut summary = DiagnosticSummary::default();
5064 for (_, _, path_summary) in self.diagnostic_summaries(cx) {
5065 summary.error_count += path_summary.error_count;
5066 summary.warning_count += path_summary.warning_count;
5067 }
5068 summary
5069 }
5070
5071 pub fn diagnostic_summaries<'a>(
5072 &'a self,
5073 cx: &'a AppContext,
5074 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
5075 self.visible_worktrees(cx).flat_map(move |worktree| {
5076 let worktree = worktree.read(cx);
5077 let worktree_id = worktree.id();
5078 worktree
5079 .diagnostic_summaries()
5080 .map(move |(path, server_id, summary)| {
5081 (ProjectPath { worktree_id, path }, server_id, summary)
5082 })
5083 })
5084 }
5085
5086 pub fn disk_based_diagnostics_started(
5087 &mut self,
5088 language_server_id: LanguageServerId,
5089 cx: &mut ModelContext<Self>,
5090 ) {
5091 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
5092 }
5093
5094 pub fn disk_based_diagnostics_finished(
5095 &mut self,
5096 language_server_id: LanguageServerId,
5097 cx: &mut ModelContext<Self>,
5098 ) {
5099 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
5100 }
5101
5102 pub fn active_entry(&self) -> Option<ProjectEntryId> {
5103 self.active_entry
5104 }
5105
5106 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
5107 self.worktree_for_id(path.worktree_id, cx)?
5108 .read(cx)
5109 .entry_for_path(&path.path)
5110 .cloned()
5111 }
5112
5113 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
5114 let worktree = self.worktree_for_entry(entry_id, cx)?;
5115 let worktree = worktree.read(cx);
5116 let worktree_id = worktree.id();
5117 let path = worktree.entry_for_id(entry_id)?.path.clone();
5118 Some(ProjectPath { worktree_id, path })
5119 }
5120
5121 // RPC message handlers
5122
5123 async fn handle_unshare_project(
5124 this: ModelHandle<Self>,
5125 _: TypedEnvelope<proto::UnshareProject>,
5126 _: Arc<Client>,
5127 mut cx: AsyncAppContext,
5128 ) -> Result<()> {
5129 this.update(&mut cx, |this, cx| {
5130 if this.is_local() {
5131 this.unshare(cx)?;
5132 } else {
5133 this.disconnected_from_host(cx);
5134 }
5135 Ok(())
5136 })
5137 }
5138
5139 async fn handle_add_collaborator(
5140 this: ModelHandle<Self>,
5141 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
5142 _: Arc<Client>,
5143 mut cx: AsyncAppContext,
5144 ) -> Result<()> {
5145 let collaborator = envelope
5146 .payload
5147 .collaborator
5148 .take()
5149 .ok_or_else(|| anyhow!("empty collaborator"))?;
5150
5151 let collaborator = Collaborator::from_proto(collaborator)?;
5152 this.update(&mut cx, |this, cx| {
5153 this.shared_buffers.remove(&collaborator.peer_id);
5154 this.collaborators
5155 .insert(collaborator.peer_id, collaborator);
5156 cx.notify();
5157 });
5158
5159 Ok(())
5160 }
5161
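// Handles a collaborator reconnecting under a new peer id: their shared buffers are
// moved to the new id, and if the collaborator is the host, buffers held only as queued
// operations are dropped and a resync is requested.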
5162 async fn handle_update_project_collaborator(
5163 this: ModelHandle<Self>,
5164 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
5165 _: Arc<Client>,
5166 mut cx: AsyncAppContext,
5167 ) -> Result<()> {
5168 let old_peer_id = envelope
5169 .payload
5170 .old_peer_id
5171 .ok_or_else(|| anyhow!("missing old peer id"))?;
5172 let new_peer_id = envelope
5173 .payload
5174 .new_peer_id
5175 .ok_or_else(|| anyhow!("missing new peer id"))?;
5176 this.update(&mut cx, |this, cx| {
5177 let collaborator = this
5178 .collaborators
5179 .remove(&old_peer_id)
5180 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
5181 let is_host = collaborator.replica_id == 0;
5182 this.collaborators.insert(new_peer_id, collaborator);
5183
5184 let buffers = this.shared_buffers.remove(&old_peer_id);
5185 log::info!(
5186 "peer {} became {}. moving buffers {:?}",
5187 old_peer_id,
5188 new_peer_id,
5189 &buffers
5190 );
5191 if let Some(buffers) = buffers {
5192 this.shared_buffers.insert(new_peer_id, buffers);
5193 }
5194
5195 if is_host {
5196 this.opened_buffers
5197 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
5198 this.buffer_ordered_messages_tx
5199 .unbounded_send(BufferOrderedMessage::Resync)
5200 .unwrap();
5201 }
5202
5203 cx.emit(Event::CollaboratorUpdated {
5204 old_peer_id,
5205 new_peer_id,
5206 });
5207 cx.notify();
5208 Ok(())
5209 })
5210 }
5211
5212 async fn handle_remove_collaborator(
5213 this: ModelHandle<Self>,
5214 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
5215 _: Arc<Client>,
5216 mut cx: AsyncAppContext,
5217 ) -> Result<()> {
5218 this.update(&mut cx, |this, cx| {
5219 let peer_id = envelope
5220 .payload
5221 .peer_id
5222 .ok_or_else(|| anyhow!("invalid peer id"))?;
5223 let replica_id = this
5224 .collaborators
5225 .remove(&peer_id)
5226 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
5227 .replica_id;
5228 for buffer in this.opened_buffers.values() {
5229 if let Some(buffer) = buffer.upgrade(cx) {
5230 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
5231 }
5232 }
5233 this.shared_buffers.remove(&peer_id);
5234
5235 cx.emit(Event::CollaboratorLeft(peer_id));
5236 cx.notify();
5237 Ok(())
5238 })
5239 }
5240
5241 async fn handle_update_project(
5242 this: ModelHandle<Self>,
5243 envelope: TypedEnvelope<proto::UpdateProject>,
5244 _: Arc<Client>,
5245 mut cx: AsyncAppContext,
5246 ) -> Result<()> {
5247 this.update(&mut cx, |this, cx| {
5248 // Ignore messages that were sent before the response to our request to join the project
5249 if envelope.message_id > this.join_project_response_message_id {
5250 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
5251 }
5252 Ok(())
5253 })
5254 }
5255
5256 async fn handle_update_worktree(
5257 this: ModelHandle<Self>,
5258 envelope: TypedEnvelope<proto::UpdateWorktree>,
5259 _: Arc<Client>,
5260 mut cx: AsyncAppContext,
5261 ) -> Result<()> {
5262 this.update(&mut cx, |this, cx| {
5263 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5264 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
5265 worktree.update(cx, |worktree, _| {
5266 let worktree = worktree.as_remote_mut().unwrap();
5267 worktree.update_from_remote(envelope.payload);
5268 });
5269 }
5270 Ok(())
5271 })
5272 }
5273
5274 async fn handle_create_project_entry(
5275 this: ModelHandle<Self>,
5276 envelope: TypedEnvelope<proto::CreateProjectEntry>,
5277 _: Arc<Client>,
5278 mut cx: AsyncAppContext,
5279 ) -> Result<proto::ProjectEntryResponse> {
5280 let worktree = this.update(&mut cx, |this, cx| {
5281 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5282 this.worktree_for_id(worktree_id, cx)
5283 .ok_or_else(|| anyhow!("worktree not found"))
5284 })?;
5285 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5286 let entry = worktree
5287 .update(&mut cx, |worktree, cx| {
5288 let worktree = worktree.as_local_mut().unwrap();
5289 let path = PathBuf::from(envelope.payload.path);
5290 worktree.create_entry(path, envelope.payload.is_directory, cx)
5291 })
5292 .await?;
5293 Ok(proto::ProjectEntryResponse {
5294 entry: Some((&entry).into()),
5295 worktree_scan_id: worktree_scan_id as u64,
5296 })
5297 }
5298
5299 async fn handle_rename_project_entry(
5300 this: ModelHandle<Self>,
5301 envelope: TypedEnvelope<proto::RenameProjectEntry>,
5302 _: Arc<Client>,
5303 mut cx: AsyncAppContext,
5304 ) -> Result<proto::ProjectEntryResponse> {
5305 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5306 let worktree = this.read_with(&cx, |this, cx| {
5307 this.worktree_for_entry(entry_id, cx)
5308 .ok_or_else(|| anyhow!("worktree not found"))
5309 })?;
5310 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5311 let entry = worktree
5312 .update(&mut cx, |worktree, cx| {
5313 let new_path = PathBuf::from(envelope.payload.new_path);
5314 worktree
5315 .as_local_mut()
5316 .unwrap()
5317 .rename_entry(entry_id, new_path, cx)
5318 .ok_or_else(|| anyhow!("invalid entry"))
5319 })?
5320 .await?;
5321 Ok(proto::ProjectEntryResponse {
5322 entry: Some((&entry).into()),
5323 worktree_scan_id: worktree_scan_id as u64,
5324 })
5325 }
5326
5327 async fn handle_copy_project_entry(
5328 this: ModelHandle<Self>,
5329 envelope: TypedEnvelope<proto::CopyProjectEntry>,
5330 _: Arc<Client>,
5331 mut cx: AsyncAppContext,
5332 ) -> Result<proto::ProjectEntryResponse> {
5333 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5334 let worktree = this.read_with(&cx, |this, cx| {
5335 this.worktree_for_entry(entry_id, cx)
5336 .ok_or_else(|| anyhow!("worktree not found"))
5337 })?;
5338 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5339 let entry = worktree
5340 .update(&mut cx, |worktree, cx| {
5341 let new_path = PathBuf::from(envelope.payload.new_path);
5342 worktree
5343 .as_local_mut()
5344 .unwrap()
5345 .copy_entry(entry_id, new_path, cx)
5346 .ok_or_else(|| anyhow!("invalid entry"))
5347 })?
5348 .await?;
5349 Ok(proto::ProjectEntryResponse {
5350 entry: Some((&entry).into()),
5351 worktree_scan_id: worktree_scan_id as u64,
5352 })
5353 }
5354
5355 async fn handle_delete_project_entry(
5356 this: ModelHandle<Self>,
5357 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
5358 _: Arc<Client>,
5359 mut cx: AsyncAppContext,
5360 ) -> Result<proto::ProjectEntryResponse> {
5361 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
5362
5363 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
5364
5365 let worktree = this.read_with(&cx, |this, cx| {
5366 this.worktree_for_entry(entry_id, cx)
5367 .ok_or_else(|| anyhow!("worktree not found"))
5368 })?;
5369 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
5370 worktree
5371 .update(&mut cx, |worktree, cx| {
5372 worktree
5373 .as_local_mut()
5374 .unwrap()
5375 .delete_entry(entry_id, cx)
5376 .ok_or_else(|| anyhow!("invalid entry"))
5377 })?
5378 .await?;
5379 Ok(proto::ProjectEntryResponse {
5380 entry: None,
5381 worktree_scan_id: worktree_scan_id as u64,
5382 })
5383 }
5384
5385 async fn handle_update_diagnostic_summary(
5386 this: ModelHandle<Self>,
5387 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
5388 _: Arc<Client>,
5389 mut cx: AsyncAppContext,
5390 ) -> Result<()> {
5391 this.update(&mut cx, |this, cx| {
5392 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5393 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
5394 if let Some(summary) = envelope.payload.summary {
5395 let project_path = ProjectPath {
5396 worktree_id,
5397 path: Path::new(&summary.path).into(),
5398 };
5399 worktree.update(cx, |worktree, _| {
5400 worktree
5401 .as_remote_mut()
5402 .unwrap()
5403 .update_diagnostic_summary(project_path.path.clone(), &summary);
5404 });
5405 cx.emit(Event::DiagnosticsUpdated {
5406 language_server_id: LanguageServerId(summary.language_server_id as usize),
5407 path: project_path,
5408 });
5409 }
5410 }
5411 Ok(())
5412 })
5413 }
5414
5415 async fn handle_start_language_server(
5416 this: ModelHandle<Self>,
5417 envelope: TypedEnvelope<proto::StartLanguageServer>,
5418 _: Arc<Client>,
5419 mut cx: AsyncAppContext,
5420 ) -> Result<()> {
5421 let server = envelope
5422 .payload
5423 .server
5424 .ok_or_else(|| anyhow!("invalid server"))?;
5425 this.update(&mut cx, |this, cx| {
5426 this.language_server_statuses.insert(
5427 LanguageServerId(server.id as usize),
5428 LanguageServerStatus {
5429 name: server.name,
5430 pending_work: Default::default(),
5431 has_pending_diagnostic_updates: false,
5432 progress_tokens: Default::default(),
5433 },
5434 );
5435 cx.notify();
5436 });
5437 Ok(())
5438 }
5439
5440 async fn handle_update_language_server(
5441 this: ModelHandle<Self>,
5442 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
5443 _: Arc<Client>,
5444 mut cx: AsyncAppContext,
5445 ) -> Result<()> {
5446 this.update(&mut cx, |this, cx| {
5447 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
5448
5449 match envelope
5450 .payload
5451 .variant
5452 .ok_or_else(|| anyhow!("invalid variant"))?
5453 {
5454 proto::update_language_server::Variant::WorkStart(payload) => {
5455 this.on_lsp_work_start(
5456 language_server_id,
5457 payload.token,
5458 LanguageServerProgress {
5459 message: payload.message,
5460 percentage: payload.percentage.map(|p| p as usize),
5461 last_update_at: Instant::now(),
5462 },
5463 cx,
5464 );
5465 }
5466
5467 proto::update_language_server::Variant::WorkProgress(payload) => {
5468 this.on_lsp_work_progress(
5469 language_server_id,
5470 payload.token,
5471 LanguageServerProgress {
5472 message: payload.message,
5473 percentage: payload.percentage.map(|p| p as usize),
5474 last_update_at: Instant::now(),
5475 },
5476 cx,
5477 );
5478 }
5479
5480 proto::update_language_server::Variant::WorkEnd(payload) => {
5481 this.on_lsp_work_end(language_server_id, payload.token, cx);
5482 }
5483
5484 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
5485 this.disk_based_diagnostics_started(language_server_id, cx);
5486 }
5487
5488 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
5489 this.disk_based_diagnostics_finished(language_server_id, cx)
5490 }
5491 }
5492
5493 Ok(())
5494 })
5495 }
5496
5497 async fn handle_update_buffer(
5498 this: ModelHandle<Self>,
5499 envelope: TypedEnvelope<proto::UpdateBuffer>,
5500 _: Arc<Client>,
5501 mut cx: AsyncAppContext,
5502 ) -> Result<proto::Ack> {
5503 this.update(&mut cx, |this, cx| {
5504 let payload = envelope.payload.clone();
5505 let buffer_id = payload.buffer_id;
5506 let ops = payload
5507 .operations
5508 .into_iter()
5509 .map(language::proto::deserialize_operation)
5510 .collect::<Result<Vec<_>, _>>()?;
5511 let is_remote = this.is_remote();
5512 match this.opened_buffers.entry(buffer_id) {
5513 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
5514 OpenBuffer::Strong(buffer) => {
5515 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
5516 }
5517 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
5518 OpenBuffer::Weak(_) => {}
5519 },
5520 hash_map::Entry::Vacant(e) => {
5521 assert!(
5522 is_remote,
5523 "received buffer update from {:?}",
5524 envelope.original_sender_id
5525 );
5526 e.insert(OpenBuffer::Operations(ops));
5527 }
5528 }
5529 Ok(proto::Ack {})
5530 })
5531 }
5532
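// Handles a buffer being streamed to this project in chunks: the `State` variant
// creates an incomplete remote buffer, and `Chunk` variants apply its operations; the
// final chunk registers the buffer with the project.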
5533 async fn handle_create_buffer_for_peer(
5534 this: ModelHandle<Self>,
5535 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
5536 _: Arc<Client>,
5537 mut cx: AsyncAppContext,
5538 ) -> Result<()> {
5539 this.update(&mut cx, |this, cx| {
5540 match envelope
5541 .payload
5542 .variant
5543 .ok_or_else(|| anyhow!("missing variant"))?
5544 {
5545 proto::create_buffer_for_peer::Variant::State(mut state) => {
5546 let mut buffer_file = None;
5547 if let Some(file) = state.file.take() {
5548 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5549 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5550 anyhow!("no worktree found for id {}", file.worktree_id)
5551 })?;
5552 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5553 as Arc<dyn language::File>);
5554 }
5555
5556 let buffer_id = state.id;
5557 let buffer = cx.add_model(|_| {
5558 Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
5559 });
5560 this.incomplete_remote_buffers
5561 .insert(buffer_id, Some(buffer));
5562 }
5563 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
5564 let buffer = this
5565 .incomplete_remote_buffers
5566 .get(&chunk.buffer_id)
5567 .cloned()
5568 .flatten()
5569 .ok_or_else(|| {
5570 anyhow!(
5571 "received chunk for buffer {} without initial state",
5572 chunk.buffer_id
5573 )
5574 })?;
5575 let operations = chunk
5576 .operations
5577 .into_iter()
5578 .map(language::proto::deserialize_operation)
5579 .collect::<Result<Vec<_>>>()?;
5580 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
5581
5582 if chunk.is_last {
5583 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
5584 this.register_buffer(&buffer, cx)?;
5585 }
5586 }
5587 }
5588
5589 Ok(())
5590 })
5591 }
5592
5593 async fn handle_update_diff_base(
5594 this: ModelHandle<Self>,
5595 envelope: TypedEnvelope<proto::UpdateDiffBase>,
5596 _: Arc<Client>,
5597 mut cx: AsyncAppContext,
5598 ) -> Result<()> {
5599 this.update(&mut cx, |this, cx| {
5600 let buffer_id = envelope.payload.buffer_id;
5601 let diff_base = envelope.payload.diff_base;
5602 if let Some(buffer) = this
5603 .opened_buffers
5604 .get_mut(&buffer_id)
5605 .and_then(|b| b.upgrade(cx))
5606 .or_else(|| {
5607 this.incomplete_remote_buffers
5608 .get(&buffer_id)
5609 .cloned()
5610 .flatten()
5611 })
5612 {
5613 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
5614 }
5615 Ok(())
5616 })
5617 }
5618
5619 async fn handle_update_buffer_file(
5620 this: ModelHandle<Self>,
5621 envelope: TypedEnvelope<proto::UpdateBufferFile>,
5622 _: Arc<Client>,
5623 mut cx: AsyncAppContext,
5624 ) -> Result<()> {
5625 let buffer_id = envelope.payload.buffer_id;
5626
5627 this.update(&mut cx, |this, cx| {
5628 let payload = envelope.payload.clone();
5629 if let Some(buffer) = this
5630 .opened_buffers
5631 .get(&buffer_id)
5632 .and_then(|b| b.upgrade(cx))
5633 .or_else(|| {
5634 this.incomplete_remote_buffers
5635 .get(&buffer_id)
5636 .cloned()
5637 .flatten()
5638 })
5639 {
5640 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
5641 let worktree = this
5642 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
5643 .ok_or_else(|| anyhow!("no such worktree"))?;
5644 let file = File::from_proto(file, worktree, cx)?;
5645 buffer.update(cx, |buffer, cx| {
5646 buffer.file_updated(Arc::new(file), cx).detach();
5647 });
5648 this.detect_language_for_buffer(&buffer, cx);
5649 }
5650 Ok(())
5651 })
5652 }
5653
5654 async fn handle_save_buffer(
5655 this: ModelHandle<Self>,
5656 envelope: TypedEnvelope<proto::SaveBuffer>,
5657 _: Arc<Client>,
5658 mut cx: AsyncAppContext,
5659 ) -> Result<proto::BufferSaved> {
5660 let buffer_id = envelope.payload.buffer_id;
5661 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
5662 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
5663 let buffer = this
5664 .opened_buffers
5665 .get(&buffer_id)
5666 .and_then(|buffer| buffer.upgrade(cx))
5667 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
5668 anyhow::Ok((project_id, buffer))
5669 })?;
5670 buffer
5671 .update(&mut cx, |buffer, _| {
5672 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
5673 })
5674 .await?;
5675 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
5676
5677 let (saved_version, fingerprint, mtime) = this
5678 .update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
5679 .await?;
5680 Ok(proto::BufferSaved {
5681 project_id,
5682 buffer_id,
5683 version: serialize_version(&saved_version),
5684 mtime: Some(mtime.into()),
5685 fingerprint: language::proto::serialize_fingerprint(fingerprint),
5686 })
5687 }
5688
5689 async fn handle_reload_buffers(
5690 this: ModelHandle<Self>,
5691 envelope: TypedEnvelope<proto::ReloadBuffers>,
5692 _: Arc<Client>,
5693 mut cx: AsyncAppContext,
5694 ) -> Result<proto::ReloadBuffersResponse> {
5695 let sender_id = envelope.original_sender_id()?;
5696 let reload = this.update(&mut cx, |this, cx| {
5697 let mut buffers = HashSet::default();
5698 for buffer_id in &envelope.payload.buffer_ids {
5699 buffers.insert(
5700 this.opened_buffers
5701 .get(buffer_id)
5702 .and_then(|buffer| buffer.upgrade(cx))
5703 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
5704 );
5705 }
5706 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
5707 })?;
5708
5709 let project_transaction = reload.await?;
5710 let project_transaction = this.update(&mut cx, |this, cx| {
5711 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5712 });
5713 Ok(proto::ReloadBuffersResponse {
5714 transaction: Some(project_transaction),
5715 })
5716 }
5717
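    // Handles a guest's SynchronizeBuffers request: resets the set of buffers shared with that
    // guest, reports this replica's version of each buffer the guest sent, and re-sends file
    // metadata, diff bases, saved state, and any missing operations in chunked background tasks.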
5718 async fn handle_synchronize_buffers(
5719 this: ModelHandle<Self>,
5720 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
5721 _: Arc<Client>,
5722 mut cx: AsyncAppContext,
5723 ) -> Result<proto::SynchronizeBuffersResponse> {
5724 let project_id = envelope.payload.project_id;
5725 let mut response = proto::SynchronizeBuffersResponse {
5726 buffers: Default::default(),
5727 };
5728
5729 this.update(&mut cx, |this, cx| {
5730 let Some(guest_id) = envelope.original_sender_id else {
5731 log::error!("missing original_sender_id on SynchronizeBuffers request");
5732 return;
5733 };
5734
5735 this.shared_buffers.entry(guest_id).or_default().clear();
5736 for buffer in envelope.payload.buffers {
5737 let buffer_id = buffer.id;
5738 let remote_version = language::proto::deserialize_version(&buffer.version);
5739 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
5740 this.shared_buffers
5741 .entry(guest_id)
5742 .or_default()
5743 .insert(buffer_id);
5744
5745 let buffer = buffer.read(cx);
5746 response.buffers.push(proto::BufferVersion {
5747 id: buffer_id,
5748 version: language::proto::serialize_version(&buffer.version),
5749 });
5750
5751 let operations = buffer.serialize_ops(Some(remote_version), cx);
5752 let client = this.client.clone();
5753 if let Some(file) = buffer.file() {
5754 client
5755 .send(proto::UpdateBufferFile {
5756 project_id,
5757 buffer_id: buffer_id as u64,
5758 file: Some(file.to_proto()),
5759 })
5760 .log_err();
5761 }
5762
5763 client
5764 .send(proto::UpdateDiffBase {
5765 project_id,
5766 buffer_id: buffer_id as u64,
5767 diff_base: buffer.diff_base().map(Into::into),
5768 })
5769 .log_err();
5770
5771 client
5772 .send(proto::BufferReloaded {
5773 project_id,
5774 buffer_id,
5775 version: language::proto::serialize_version(buffer.saved_version()),
5776 mtime: Some(buffer.saved_mtime().into()),
5777 fingerprint: language::proto::serialize_fingerprint(
5778 buffer.saved_version_fingerprint(),
5779 ),
5780 line_ending: language::proto::serialize_line_ending(
5781 buffer.line_ending(),
5782 ) as i32,
5783 })
5784 .log_err();
5785
5786 cx.background()
5787 .spawn(
5788 async move {
5789 let operations = operations.await;
5790 for chunk in split_operations(operations) {
5791 client
5792 .request(proto::UpdateBuffer {
5793 project_id,
5794 buffer_id,
5795 operations: chunk,
5796 })
5797 .await?;
5798 }
5799 anyhow::Ok(())
5800 }
5801 .log_err(),
5802 )
5803 .detach();
5804 }
5805 }
5806 });
5807
5808 Ok(response)
5809 }
5810
5811 async fn handle_format_buffers(
5812 this: ModelHandle<Self>,
5813 envelope: TypedEnvelope<proto::FormatBuffers>,
5814 _: Arc<Client>,
5815 mut cx: AsyncAppContext,
5816 ) -> Result<proto::FormatBuffersResponse> {
5817 let sender_id = envelope.original_sender_id()?;
5818 let format = this.update(&mut cx, |this, cx| {
5819 let mut buffers = HashSet::default();
5820 for buffer_id in &envelope.payload.buffer_ids {
5821 buffers.insert(
5822 this.opened_buffers
5823 .get(buffer_id)
5824 .and_then(|buffer| buffer.upgrade(cx))
5825 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
5826 );
5827 }
5828 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
5829 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
5830 })?;
5831
5832 let project_transaction = format.await?;
5833 let project_transaction = this.update(&mut cx, |this, cx| {
5834 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5835 });
5836 Ok(proto::FormatBuffersResponse {
5837 transaction: Some(project_transaction),
5838 })
5839 }
5840
5841 async fn handle_apply_additional_edits_for_completion(
5842 this: ModelHandle<Self>,
5843 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
5844 _: Arc<Client>,
5845 mut cx: AsyncAppContext,
5846 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
5847 let (buffer, completion) = this.update(&mut cx, |this, cx| {
5848 let buffer = this
5849 .opened_buffers
5850 .get(&envelope.payload.buffer_id)
5851 .and_then(|buffer| buffer.upgrade(cx))
5852 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5853 let language = buffer.read(cx).language();
5854 let completion = language::proto::deserialize_completion(
5855 envelope
5856 .payload
5857 .completion
5858 .ok_or_else(|| anyhow!("invalid completion"))?,
5859 language.cloned(),
5860 );
5861 Ok::<_, anyhow::Error>((buffer, completion))
5862 })?;
5863
5864 let completion = completion.await?;
5865
5866 let apply_additional_edits = this.update(&mut cx, |this, cx| {
5867 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
5868 });
5869
5870 Ok(proto::ApplyCompletionAdditionalEditsResponse {
5871 transaction: apply_additional_edits
5872 .await?
5873 .as_ref()
5874 .map(language::proto::serialize_transaction),
5875 })
5876 }
5877
5878 async fn handle_apply_code_action(
5879 this: ModelHandle<Self>,
5880 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5881 _: Arc<Client>,
5882 mut cx: AsyncAppContext,
5883 ) -> Result<proto::ApplyCodeActionResponse> {
5884 let sender_id = envelope.original_sender_id()?;
5885 let action = language::proto::deserialize_code_action(
5886 envelope
5887 .payload
5888 .action
5889 .ok_or_else(|| anyhow!("invalid action"))?,
5890 )?;
5891 let apply_code_action = this.update(&mut cx, |this, cx| {
5892 let buffer = this
5893 .opened_buffers
5894 .get(&envelope.payload.buffer_id)
5895 .and_then(|buffer| buffer.upgrade(cx))
5896 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5897 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5898 })?;
5899
5900 let project_transaction = apply_code_action.await?;
5901 let project_transaction = this.update(&mut cx, |this, cx| {
5902 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5903 });
5904 Ok(proto::ApplyCodeActionResponse {
5905 transaction: Some(project_transaction),
5906 })
5907 }
5908
5909 async fn handle_on_type_formatting(
5910 this: ModelHandle<Self>,
5911 envelope: TypedEnvelope<proto::OnTypeFormatting>,
5912 _: Arc<Client>,
5913 mut cx: AsyncAppContext,
5914 ) -> Result<proto::OnTypeFormattingResponse> {
5915 let sender_id = envelope.original_sender_id()?;
5916 let on_type_formatting = this.update(&mut cx, |this, cx| {
5917 let buffer = this
5918 .opened_buffers
5919 .get(&envelope.payload.buffer_id)
5920 .and_then(|buffer| buffer.upgrade(cx))
5921 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5922 let position = envelope
5923 .payload
5924 .position
5925 .and_then(deserialize_anchor)
5926 .ok_or_else(|| anyhow!("invalid position"))?;
5927 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
5928 buffer,
5929 position,
5930 envelope.payload.trigger.clone(),
5931 false,
5932 cx,
5933 ))
5934 })?;
5935
5936 let project_transaction = on_type_formatting.await?;
5937 let project_transaction = this.update(&mut cx, |this, cx| {
5938 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5939 });
5940 Ok(proto::OnTypeFormattingResponse {
5941 transaction: Some(project_transaction),
5942 })
5943 }
5944
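    // Generic handler for proto requests that map onto an LSP request: deserializes the request
    // for the target buffer, forwards it to a language server via `request_lsp`, and serializes
    // the response back for the original sender.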
5945 async fn handle_lsp_command<T: LspCommand>(
5946 this: ModelHandle<Self>,
5947 envelope: TypedEnvelope<T::ProtoRequest>,
5948 _: Arc<Client>,
5949 mut cx: AsyncAppContext,
5950 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5951 where
5952 <T::LspRequest as lsp::request::Request>::Result: Send,
5953 {
5954 let sender_id = envelope.original_sender_id()?;
5955 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5956 let buffer_handle = this.read_with(&cx, |this, _| {
5957 this.opened_buffers
5958 .get(&buffer_id)
5959 .and_then(|buffer| buffer.upgrade(&cx))
5960 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5961 })?;
5962 let request = T::from_proto(
5963 envelope.payload,
5964 this.clone(),
5965 buffer_handle.clone(),
5966 cx.clone(),
5967 )
5968 .await?;
5969 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5970 let response = this
5971 .update(&mut cx, |this, cx| {
5972 this.request_lsp(buffer_handle, request, cx)
5973 })
5974 .await?;
5975 this.update(&mut cx, |this, cx| {
5976 Ok(T::response_to_proto(
5977 response,
5978 this,
5979 sender_id,
5980 &buffer_version,
5981 cx,
5982 ))
5983 })
5984 }
5985
5986 async fn handle_get_project_symbols(
5987 this: ModelHandle<Self>,
5988 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5989 _: Arc<Client>,
5990 mut cx: AsyncAppContext,
5991 ) -> Result<proto::GetProjectSymbolsResponse> {
5992 let symbols = this
5993 .update(&mut cx, |this, cx| {
5994 this.symbols(&envelope.payload.query, cx)
5995 })
5996 .await?;
5997
5998 Ok(proto::GetProjectSymbolsResponse {
5999 symbols: symbols.iter().map(serialize_symbol).collect(),
6000 })
6001 }
6002
6003 async fn handle_search_project(
6004 this: ModelHandle<Self>,
6005 envelope: TypedEnvelope<proto::SearchProject>,
6006 _: Arc<Client>,
6007 mut cx: AsyncAppContext,
6008 ) -> Result<proto::SearchProjectResponse> {
6009 let peer_id = envelope.original_sender_id()?;
6010 let query = SearchQuery::from_proto(envelope.payload)?;
6011 let result = this
6012 .update(&mut cx, |this, cx| this.search(query, cx))
6013 .await?;
6014
6015 this.update(&mut cx, |this, cx| {
6016 let mut locations = Vec::new();
6017 for (buffer, ranges) in result {
6018 for range in ranges {
6019 let start = serialize_anchor(&range.start);
6020 let end = serialize_anchor(&range.end);
6021 let buffer_id = this.create_buffer_for_peer(&buffer, peer_id, cx);
6022 locations.push(proto::Location {
6023 buffer_id,
6024 start: Some(start),
6025 end: Some(end),
6026 });
6027 }
6028 }
6029 Ok(proto::SearchProjectResponse { locations })
6030 })
6031 }
6032
6033 async fn handle_open_buffer_for_symbol(
6034 this: ModelHandle<Self>,
6035 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
6036 _: Arc<Client>,
6037 mut cx: AsyncAppContext,
6038 ) -> Result<proto::OpenBufferForSymbolResponse> {
6039 let peer_id = envelope.original_sender_id()?;
6040 let symbol = envelope
6041 .payload
6042 .symbol
6043 .ok_or_else(|| anyhow!("invalid symbol"))?;
6044 let symbol = this
6045 .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
6046 .await?;
6047 let symbol = this.read_with(&cx, |this, _| {
6048 let signature = this.symbol_signature(&symbol.path);
6049 if signature == symbol.signature {
6050 Ok(symbol)
6051 } else {
6052 Err(anyhow!("invalid symbol signature"))
6053 }
6054 })?;
6055 let buffer = this
6056 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
6057 .await?;
6058
6059 Ok(proto::OpenBufferForSymbolResponse {
6060 buffer_id: this.update(&mut cx, |this, cx| {
6061 this.create_buffer_for_peer(&buffer, peer_id, cx)
6062 }),
6063 })
6064 }
6065
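    // Computes a SHA-256 over the symbol's worktree id, path, and this project's private nonce.
    // `handle_open_buffer_for_symbol` uses it to verify that a symbol sent by a peer was
    // originally handed out by this project.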
6066 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
6067 let mut hasher = Sha256::new();
6068 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
6069 hasher.update(project_path.path.to_string_lossy().as_bytes());
6070 hasher.update(self.nonce.to_be_bytes());
6071 hasher.finalize().as_slice().try_into().unwrap()
6072 }
6073
6074 async fn handle_open_buffer_by_id(
6075 this: ModelHandle<Self>,
6076 envelope: TypedEnvelope<proto::OpenBufferById>,
6077 _: Arc<Client>,
6078 mut cx: AsyncAppContext,
6079 ) -> Result<proto::OpenBufferResponse> {
6080 let peer_id = envelope.original_sender_id()?;
6081 let buffer = this
6082 .update(&mut cx, |this, cx| {
6083 this.open_buffer_by_id(envelope.payload.id, cx)
6084 })
6085 .await?;
6086 this.update(&mut cx, |this, cx| {
6087 Ok(proto::OpenBufferResponse {
6088 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
6089 })
6090 })
6091 }
6092
6093 async fn handle_open_buffer_by_path(
6094 this: ModelHandle<Self>,
6095 envelope: TypedEnvelope<proto::OpenBufferByPath>,
6096 _: Arc<Client>,
6097 mut cx: AsyncAppContext,
6098 ) -> Result<proto::OpenBufferResponse> {
6099 let peer_id = envelope.original_sender_id()?;
6100 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6101 let open_buffer = this.update(&mut cx, |this, cx| {
6102 this.open_buffer(
6103 ProjectPath {
6104 worktree_id,
6105 path: PathBuf::from(envelope.payload.path).into(),
6106 },
6107 cx,
6108 )
6109 });
6110
6111 let buffer = open_buffer.await?;
6112 this.update(&mut cx, |this, cx| {
6113 Ok(proto::OpenBufferResponse {
6114 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
6115 })
6116 })
6117 }
6118
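    // Converts a `ProjectTransaction` into its wire form for a specific peer, registering each
    // affected buffer with that peer so the buffer ids in the message are resolvable on their side.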
6119 fn serialize_project_transaction_for_peer(
6120 &mut self,
6121 project_transaction: ProjectTransaction,
6122 peer_id: proto::PeerId,
6123 cx: &mut AppContext,
6124 ) -> proto::ProjectTransaction {
6125 let mut serialized_transaction = proto::ProjectTransaction {
6126 buffer_ids: Default::default(),
6127 transactions: Default::default(),
6128 };
6129 for (buffer, transaction) in project_transaction.0 {
6130 serialized_transaction
6131 .buffer_ids
6132 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
6133 serialized_transaction
6134 .transactions
6135 .push(language::proto::serialize_transaction(&transaction));
6136 }
6137 serialized_transaction
6138 }
6139
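    // Inverse of `serialize_project_transaction_for_peer`: waits for each referenced remote
    // buffer to be created locally, waits for its transaction's edits to be applied, and
    // optionally pushes each transaction onto the buffer's history.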
6140 fn deserialize_project_transaction(
6141 &mut self,
6142 message: proto::ProjectTransaction,
6143 push_to_history: bool,
6144 cx: &mut ModelContext<Self>,
6145 ) -> Task<Result<ProjectTransaction>> {
6146 cx.spawn(|this, mut cx| async move {
6147 let mut project_transaction = ProjectTransaction::default();
6148 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
6149 {
6150 let buffer = this
6151 .update(&mut cx, |this, cx| {
6152 this.wait_for_remote_buffer(buffer_id, cx)
6153 })
6154 .await?;
6155 let transaction = language::proto::deserialize_transaction(transaction)?;
6156 project_transaction.0.insert(buffer, transaction);
6157 }
6158
6159 for (buffer, transaction) in &project_transaction.0 {
6160 buffer
6161 .update(&mut cx, |buffer, _| {
6162 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
6163 })
6164 .await?;
6165
6166 if push_to_history {
6167 buffer.update(&mut cx, |buffer, _| {
6168 buffer.push_transaction(transaction.clone(), Instant::now());
6169 });
6170 }
6171 }
6172
6173 Ok(project_transaction)
6174 })
6175 }
6176
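    // Returns the buffer's remote id; when this project is the sharing (`Local`) side, it also
    // queues a `CreateBufferForPeer` update for that peer.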
6177 fn create_buffer_for_peer(
6178 &mut self,
6179 buffer: &ModelHandle<Buffer>,
6180 peer_id: proto::PeerId,
6181 cx: &mut AppContext,
6182 ) -> u64 {
6183 let buffer_id = buffer.read(cx).remote_id();
6184 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
6185 updates_tx
6186 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
6187 .ok();
6188 }
6189 buffer_id
6190 }
6191
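    // Waits until the buffer with the given remote id has been opened locally. The id is
    // registered in `incomplete_remote_buffers` while waiting, and the future fails if the
    // project is dropped or becomes read-only before the buffer arrives.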
6192 fn wait_for_remote_buffer(
6193 &mut self,
6194 id: u64,
6195 cx: &mut ModelContext<Self>,
6196 ) -> Task<Result<ModelHandle<Buffer>>> {
6197 let mut opened_buffer_rx = self.opened_buffer.1.clone();
6198
6199 cx.spawn_weak(|this, mut cx| async move {
6200 let buffer = loop {
6201 let Some(this) = this.upgrade(&cx) else {
6202 return Err(anyhow!("project dropped"));
6203 };
6204 let buffer = this.read_with(&cx, |this, cx| {
6205 this.opened_buffers
6206 .get(&id)
6207 .and_then(|buffer| buffer.upgrade(cx))
6208 });
6209 if let Some(buffer) = buffer {
6210 break buffer;
6211 } else if this.read_with(&cx, |this, _| this.is_read_only()) {
6212 return Err(anyhow!("disconnected before buffer {} could be opened", id));
6213 }
6214
6215 this.update(&mut cx, |this, _| {
6216 this.incomplete_remote_buffers.entry(id).or_default();
6217 });
6218 drop(this);
6219 opened_buffer_rx
6220 .next()
6221 .await
6222 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
6223 };
6224 buffer.update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx));
6225 Ok(buffer)
6226 })
6227 }
6228
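    // Guest-side counterpart of `handle_synchronize_buffers`: reports the versions of all
    // locally open buffers to the host, pushes any local operations the host is missing (in
    // chunks), and re-requests, via `OpenBufferById`, any buffers that never finished opening.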
6229 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
6230 let project_id = match self.client_state.as_ref() {
6231 Some(ProjectClientState::Remote {
6232 sharing_has_stopped,
6233 remote_id,
6234 ..
6235 }) => {
6236 if *sharing_has_stopped {
6237 return Task::ready(Err(anyhow!(
6238 "can't synchronize remote buffers on a readonly project"
6239 )));
6240 } else {
6241 *remote_id
6242 }
6243 }
6244 Some(ProjectClientState::Local { .. }) | None => {
6245 return Task::ready(Err(anyhow!(
6246 "can't synchronize remote buffers on a local project"
6247 )))
6248 }
6249 };
6250
6251 let client = self.client.clone();
6252 cx.spawn(|this, cx| async move {
6253 let (buffers, incomplete_buffer_ids) = this.read_with(&cx, |this, cx| {
6254 let buffers = this
6255 .opened_buffers
6256 .iter()
6257 .filter_map(|(id, buffer)| {
6258 let buffer = buffer.upgrade(cx)?;
6259 Some(proto::BufferVersion {
6260 id: *id,
6261 version: language::proto::serialize_version(&buffer.read(cx).version),
6262 })
6263 })
6264 .collect();
6265 let incomplete_buffer_ids = this
6266 .incomplete_remote_buffers
6267 .keys()
6268 .copied()
6269 .collect::<Vec<_>>();
6270
6271 (buffers, incomplete_buffer_ids)
6272 });
6273 let response = client
6274 .request(proto::SynchronizeBuffers {
6275 project_id,
6276 buffers,
6277 })
6278 .await?;
6279
6280 let send_updates_for_buffers = response.buffers.into_iter().map(|buffer| {
6281 let client = client.clone();
6282 let buffer_id = buffer.id;
6283 let remote_version = language::proto::deserialize_version(&buffer.version);
6284 this.read_with(&cx, |this, cx| {
6285 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
6286 let operations = buffer.read(cx).serialize_ops(Some(remote_version), cx);
6287 cx.background().spawn(async move {
6288 let operations = operations.await;
6289 for chunk in split_operations(operations) {
6290 client
6291 .request(proto::UpdateBuffer {
6292 project_id,
6293 buffer_id,
6294 operations: chunk,
6295 })
6296 .await?;
6297 }
6298 anyhow::Ok(())
6299 })
6300 } else {
6301 Task::ready(Ok(()))
6302 }
6303 })
6304 });
6305
6306            // Any incomplete buffers have open requests waiting. Request that the host
6307            // re-create these buffers for us to unblock any waiting futures.
6308 for id in incomplete_buffer_ids {
6309 cx.background()
6310 .spawn(client.request(proto::OpenBufferById { project_id, id }))
6311 .detach();
6312 }
6313
6314 futures::future::join_all(send_updates_for_buffers)
6315 .await
6316 .into_iter()
6317 .collect()
6318 })
6319 }
6320
6321 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
6322 self.worktrees(cx)
6323 .map(|worktree| {
6324 let worktree = worktree.read(cx);
6325 proto::WorktreeMetadata {
6326 id: worktree.id().to_proto(),
6327 root_name: worktree.root_name().into(),
6328 visible: worktree.is_visible(),
6329 abs_path: worktree.abs_path().to_string_lossy().into(),
6330 }
6331 })
6332 .collect()
6333 }
6334
6335 fn set_worktrees_from_proto(
6336 &mut self,
6337 worktrees: Vec<proto::WorktreeMetadata>,
6338 cx: &mut ModelContext<Project>,
6339 ) -> Result<()> {
6340 let replica_id = self.replica_id();
6341 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
6342
6343 let mut old_worktrees_by_id = self
6344 .worktrees
6345 .drain(..)
6346 .filter_map(|worktree| {
6347 let worktree = worktree.upgrade(cx)?;
6348 Some((worktree.read(cx).id(), worktree))
6349 })
6350 .collect::<HashMap<_, _>>();
6351
6352 for worktree in worktrees {
6353 if let Some(old_worktree) =
6354 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
6355 {
6356 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
6357 } else {
6358 let worktree =
6359 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
6360 let _ = self.add_worktree(&worktree, cx);
6361 }
6362 }
6363
6364 self.metadata_changed(cx);
6365 for (id, _) in old_worktrees_by_id {
6366 cx.emit(Event::WorktreeRemoved(id));
6367 }
6368
6369 Ok(())
6370 }
6371
6372 fn set_collaborators_from_proto(
6373 &mut self,
6374 messages: Vec<proto::Collaborator>,
6375 cx: &mut ModelContext<Self>,
6376 ) -> Result<()> {
6377 let mut collaborators = HashMap::default();
6378 for message in messages {
6379 let collaborator = Collaborator::from_proto(message)?;
6380 collaborators.insert(collaborator.peer_id, collaborator);
6381 }
6382 for old_peer_id in self.collaborators.keys() {
6383 if !collaborators.contains_key(old_peer_id) {
6384 cx.emit(Event::CollaboratorLeft(*old_peer_id));
6385 }
6386 }
6387 self.collaborators = collaborators;
6388 Ok(())
6389 }
6390
6391 fn deserialize_symbol(
6392 &self,
6393 serialized_symbol: proto::Symbol,
6394 ) -> impl Future<Output = Result<Symbol>> {
6395 let languages = self.languages.clone();
6396 async move {
6397 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
6398 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
6399 let start = serialized_symbol
6400 .start
6401 .ok_or_else(|| anyhow!("invalid start"))?;
6402 let end = serialized_symbol
6403 .end
6404 .ok_or_else(|| anyhow!("invalid end"))?;
6405 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
6406 let path = ProjectPath {
6407 worktree_id,
6408 path: PathBuf::from(serialized_symbol.path).into(),
6409 };
6410 let language = languages
6411 .language_for_file(&path.path, None)
6412 .await
6413 .log_err();
6414 Ok(Symbol {
6415 language_server_name: LanguageServerName(
6416 serialized_symbol.language_server_name.into(),
6417 ),
6418 source_worktree_id,
6419 path,
6420 label: {
6421 match language {
6422 Some(language) => {
6423 language
6424 .label_for_symbol(&serialized_symbol.name, kind)
6425 .await
6426 }
6427 None => None,
6428 }
6429 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
6430 },
6431
6432 name: serialized_symbol.name,
6433 range: Unclipped(PointUtf16::new(start.row, start.column))
6434 ..Unclipped(PointUtf16::new(end.row, end.column)),
6435 kind,
6436 signature: serialized_symbol
6437 .signature
6438 .try_into()
6439 .map_err(|_| anyhow!("invalid signature"))?,
6440 })
6441 }
6442 }
6443
6444 async fn handle_buffer_saved(
6445 this: ModelHandle<Self>,
6446 envelope: TypedEnvelope<proto::BufferSaved>,
6447 _: Arc<Client>,
6448 mut cx: AsyncAppContext,
6449 ) -> Result<()> {
6450 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
6451 let version = deserialize_version(&envelope.payload.version);
6452 let mtime = envelope
6453 .payload
6454 .mtime
6455 .ok_or_else(|| anyhow!("missing mtime"))?
6456 .into();
6457
6458 this.update(&mut cx, |this, cx| {
6459 let buffer = this
6460 .opened_buffers
6461 .get(&envelope.payload.buffer_id)
6462 .and_then(|buffer| buffer.upgrade(cx))
6463 .or_else(|| {
6464 this.incomplete_remote_buffers
6465 .get(&envelope.payload.buffer_id)
6466 .and_then(|b| b.clone())
6467 });
6468 if let Some(buffer) = buffer {
6469 buffer.update(cx, |buffer, cx| {
6470 buffer.did_save(version, fingerprint, mtime, cx);
6471 });
6472 }
6473 Ok(())
6474 })
6475 }
6476
6477 async fn handle_buffer_reloaded(
6478 this: ModelHandle<Self>,
6479 envelope: TypedEnvelope<proto::BufferReloaded>,
6480 _: Arc<Client>,
6481 mut cx: AsyncAppContext,
6482 ) -> Result<()> {
6483 let payload = envelope.payload;
6484 let version = deserialize_version(&payload.version);
6485 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
6486 let line_ending = deserialize_line_ending(
6487 proto::LineEnding::from_i32(payload.line_ending)
6488 .ok_or_else(|| anyhow!("missing line ending"))?,
6489 );
6490 let mtime = payload
6491 .mtime
6492 .ok_or_else(|| anyhow!("missing mtime"))?
6493 .into();
6494 this.update(&mut cx, |this, cx| {
6495 let buffer = this
6496 .opened_buffers
6497 .get(&payload.buffer_id)
6498 .and_then(|buffer| buffer.upgrade(cx))
6499 .or_else(|| {
6500 this.incomplete_remote_buffers
6501 .get(&payload.buffer_id)
6502 .cloned()
6503 .flatten()
6504 });
6505 if let Some(buffer) = buffer {
6506 buffer.update(cx, |buffer, cx| {
6507 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
6508 });
6509 }
6510 Ok(())
6511 })
6512 }
6513
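    // Converts a batch of LSP `TextEdit`s into anchored edits against the buffer snapshot that
    // was sent to the server at `version` (falling back to the current snapshot). Adjacent and
    // newline-separated edits are merged, and multiline replacements are diffed line by line so
    // that anchors in unchanged regions are preserved.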
6514 #[allow(clippy::type_complexity)]
6515 fn edits_from_lsp(
6516 &mut self,
6517 buffer: &ModelHandle<Buffer>,
6518 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
6519 server_id: LanguageServerId,
6520 version: Option<i32>,
6521 cx: &mut ModelContext<Self>,
6522 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
6523 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
6524 cx.background().spawn(async move {
6525 let snapshot = snapshot?;
6526 let mut lsp_edits = lsp_edits
6527 .into_iter()
6528 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
6529 .collect::<Vec<_>>();
6530 lsp_edits.sort_by_key(|(range, _)| range.start);
6531
6532 let mut lsp_edits = lsp_edits.into_iter().peekable();
6533 let mut edits = Vec::new();
6534 while let Some((range, mut new_text)) = lsp_edits.next() {
6535 // Clip invalid ranges provided by the language server.
6536 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
6537 ..snapshot.clip_point_utf16(range.end, Bias::Left);
6538
6539 // Combine any LSP edits that are adjacent.
6540 //
6541 // Also, combine LSP edits that are separated from each other by only
6542 // a newline. This is important because for some code actions,
6543 // Rust-analyzer rewrites the entire buffer via a series of edits that
6544 // are separated by unchanged newline characters.
6545 //
6546 // In order for the diffing logic below to work properly, any edits that
6547 // cancel each other out must be combined into one.
6548 while let Some((next_range, next_text)) = lsp_edits.peek() {
6549 if next_range.start.0 > range.end {
6550 if next_range.start.0.row > range.end.row + 1
6551 || next_range.start.0.column > 0
6552 || snapshot.clip_point_utf16(
6553 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
6554 Bias::Left,
6555 ) > range.end
6556 {
6557 break;
6558 }
6559 new_text.push('\n');
6560 }
6561 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
6562 new_text.push_str(next_text);
6563 lsp_edits.next();
6564 }
6565
6566 // For multiline edits, perform a diff of the old and new text so that
6567 // we can identify the changes more precisely, preserving the locations
6568 // of any anchors positioned in the unchanged regions.
6569 if range.end.row > range.start.row {
6570 let mut offset = range.start.to_offset(&snapshot);
6571 let old_text = snapshot.text_for_range(range).collect::<String>();
6572
6573 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
6574 let mut moved_since_edit = true;
6575 for change in diff.iter_all_changes() {
6576 let tag = change.tag();
6577 let value = change.value();
6578 match tag {
6579 ChangeTag::Equal => {
6580 offset += value.len();
6581 moved_since_edit = true;
6582 }
6583 ChangeTag::Delete => {
6584 let start = snapshot.anchor_after(offset);
6585 let end = snapshot.anchor_before(offset + value.len());
6586 if moved_since_edit {
6587 edits.push((start..end, String::new()));
6588 } else {
6589 edits.last_mut().unwrap().0.end = end;
6590 }
6591 offset += value.len();
6592 moved_since_edit = false;
6593 }
6594 ChangeTag::Insert => {
6595 if moved_since_edit {
6596 let anchor = snapshot.anchor_after(offset);
6597 edits.push((anchor..anchor, value.to_string()));
6598 } else {
6599 edits.last_mut().unwrap().1.push_str(value);
6600 }
6601 moved_since_edit = false;
6602 }
6603 }
6604 }
6605 } else if range.end == range.start {
6606 let anchor = snapshot.anchor_after(range.start);
6607 edits.push((anchor..anchor, new_text));
6608 } else {
6609 let edit_start = snapshot.anchor_after(range.start);
6610 let edit_end = snapshot.anchor_before(range.end);
6611 edits.push((edit_start..edit_end, new_text));
6612 }
6613 }
6614
6615 Ok(edits)
6616 })
6617 }
6618
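    // Returns the text snapshot that was current when the given LSP document `version` was sent
    // to `server_id`, discarding snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
    // the requested one. With no version, the buffer's latest snapshot is used.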
6619 fn buffer_snapshot_for_lsp_version(
6620 &mut self,
6621 buffer: &ModelHandle<Buffer>,
6622 server_id: LanguageServerId,
6623 version: Option<i32>,
6624 cx: &AppContext,
6625 ) -> Result<TextBufferSnapshot> {
6626 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
6627
6628 if let Some(version) = version {
6629 let buffer_id = buffer.read(cx).remote_id();
6630 let snapshots = self
6631 .buffer_snapshots
6632 .get_mut(&buffer_id)
6633 .and_then(|m| m.get_mut(&server_id))
6634 .ok_or_else(|| {
6635 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
6636 })?;
6637
6638 let found_snapshot = snapshots
6639 .binary_search_by_key(&version, |e| e.version)
6640 .map(|ix| snapshots[ix].snapshot.clone())
6641 .map_err(|_| {
6642 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
6643 })?;
6644
6645 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
6646 Ok(found_snapshot)
6647 } else {
6648 Ok((buffer.read(cx)).text_snapshot())
6649 }
6650 }
6651
6652 pub fn language_servers(
6653 &self,
6654 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
6655 self.language_server_ids
6656 .iter()
6657 .map(|((worktree_id, server_name), server_id)| {
6658 (*server_id, server_name.clone(), *worktree_id)
6659 })
6660 }
6661
6662 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
6663 if let LanguageServerState::Running { server, .. } = self.language_servers.get(&id)? {
6664 Some(server.clone())
6665 } else {
6666 None
6667 }
6668 }
6669
6670 pub fn language_servers_for_buffer(
6671 &self,
6672 buffer: &Buffer,
6673 cx: &AppContext,
6674 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
6675 self.language_server_ids_for_buffer(buffer, cx)
6676 .into_iter()
6677 .filter_map(|server_id| {
6678 let server = self.language_servers.get(&server_id)?;
6679 if let LanguageServerState::Running {
6680 adapter, server, ..
6681 } = server
6682 {
6683 Some((adapter, server))
6684 } else {
6685 None
6686 }
6687 })
6688 }
6689
6690 fn primary_language_servers_for_buffer(
6691 &self,
6692 buffer: &Buffer,
6693 cx: &AppContext,
6694 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
6695 self.language_servers_for_buffer(buffer, cx).next()
6696 }
6697
6698 fn language_server_for_buffer(
6699 &self,
6700 buffer: &Buffer,
6701 server_id: LanguageServerId,
6702 cx: &AppContext,
6703 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
6704 self.language_servers_for_buffer(buffer, cx)
6705 .find(|(_, s)| s.server_id() == server_id)
6706 }
6707
6708 fn language_server_ids_for_buffer(
6709 &self,
6710 buffer: &Buffer,
6711 cx: &AppContext,
6712 ) -> Vec<LanguageServerId> {
6713 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
6714 let worktree_id = file.worktree_id(cx);
6715 language
6716 .lsp_adapters()
6717 .iter()
6718 .flat_map(|adapter| {
6719 let key = (worktree_id, adapter.name.clone());
6720 self.language_server_ids.get(&key).copied()
6721 })
6722 .collect()
6723 } else {
6724 Vec::new()
6725 }
6726 }
6727}
6728
6729impl WorktreeHandle {
6730 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
6731 match self {
6732 WorktreeHandle::Strong(handle) => Some(handle.clone()),
6733 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
6734 }
6735 }
6736}
6737
6738impl OpenBuffer {
6739 pub fn upgrade(&self, cx: &impl BorrowAppContext) -> Option<ModelHandle<Buffer>> {
6740 match self {
6741 OpenBuffer::Strong(handle) => Some(handle.clone()),
6742 OpenBuffer::Weak(handle) => handle.upgrade(cx),
6743 OpenBuffer::Operations(_) => None,
6744 }
6745 }
6746}
6747
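// Exposes a worktree `Snapshot` to the `fuzzy` crate as a set of path-match candidates,
// optionally including ignored files and prefixing candidates with the worktree's root name.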
6748pub struct PathMatchCandidateSet {
6749 pub snapshot: Snapshot,
6750 pub include_ignored: bool,
6751 pub include_root_name: bool,
6752}
6753
6754impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
6755 type Candidates = PathMatchCandidateSetIter<'a>;
6756
6757 fn id(&self) -> usize {
6758 self.snapshot.id().to_usize()
6759 }
6760
6761 fn len(&self) -> usize {
6762 if self.include_ignored {
6763 self.snapshot.file_count()
6764 } else {
6765 self.snapshot.visible_file_count()
6766 }
6767 }
6768
6769 fn prefix(&self) -> Arc<str> {
6770 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
6771 self.snapshot.root_name().into()
6772 } else if self.include_root_name {
6773 format!("{}/", self.snapshot.root_name()).into()
6774 } else {
6775 "".into()
6776 }
6777 }
6778
6779 fn candidates(&'a self, start: usize) -> Self::Candidates {
6780 PathMatchCandidateSetIter {
6781 traversal: self.snapshot.files(self.include_ignored, start),
6782 }
6783 }
6784}
6785
6786pub struct PathMatchCandidateSetIter<'a> {
6787 traversal: Traversal<'a>,
6788}
6789
6790impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
6791 type Item = fuzzy::PathMatchCandidate<'a>;
6792
6793 fn next(&mut self) -> Option<Self::Item> {
6794 self.traversal.next().map(|entry| {
6795 if let EntryKind::File(char_bag) = entry.kind {
6796 fuzzy::PathMatchCandidate {
6797 path: &entry.path,
6798 char_bag,
6799 }
6800 } else {
6801 unreachable!()
6802 }
6803 })
6804 }
6805}
6806
6807impl Entity for Project {
6808 type Event = Event;
6809
6810 fn release(&mut self, cx: &mut gpui::AppContext) {
6811 match &self.client_state {
6812 Some(ProjectClientState::Local { .. }) => {
6813 let _ = self.unshare_internal(cx);
6814 }
6815 Some(ProjectClientState::Remote { remote_id, .. }) => {
6816 let _ = self.client.send(proto::LeaveProject {
6817 project_id: *remote_id,
6818 });
6819 self.disconnected_from_host_internal(cx);
6820 }
6821 _ => {}
6822 }
6823 }
6824
6825 fn app_will_quit(
6826 &mut self,
6827 _: &mut AppContext,
6828 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
6829 let shutdown_futures = self
6830 .language_servers
6831 .drain()
6832 .map(|(_, server_state)| async {
6833 match server_state {
6834 LanguageServerState::Running { server, .. } => server.shutdown()?.await,
6835 LanguageServerState::Starting(starting_server) => {
6836 starting_server.await?.shutdown()?.await
6837 }
6838 }
6839 })
6840 .collect::<Vec<_>>();
6841
6842 Some(
6843 async move {
6844 futures::future::join_all(shutdown_futures).await;
6845 }
6846 .boxed(),
6847 )
6848 }
6849}
6850
6851impl Collaborator {
6852 fn from_proto(message: proto::Collaborator) -> Result<Self> {
6853 Ok(Self {
6854 peer_id: message.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?,
6855 replica_id: message.replica_id as ReplicaId,
6856 })
6857 }
6858}
6859
6860impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
6861 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
6862 Self {
6863 worktree_id,
6864 path: path.as_ref().into(),
6865 }
6866 }
6867}
6868
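// Splits a list of operations into chunks of at most CHUNK_SIZE (lowered under test to exercise
// the chunking). The final yielded chunk is always empty, so callers send at least one message
// even when there are no operations; e.g. with the test CHUNK_SIZE of 5, seven operations are
// yielded as chunks of 5, 2, and then an empty chunk (illustrative only).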
6869fn split_operations(
6870 mut operations: Vec<proto::Operation>,
6871) -> impl Iterator<Item = Vec<proto::Operation>> {
6872 #[cfg(any(test, feature = "test-support"))]
6873 const CHUNK_SIZE: usize = 5;
6874
6875 #[cfg(not(any(test, feature = "test-support")))]
6876 const CHUNK_SIZE: usize = 100;
6877
6878 let mut done = false;
6879 std::iter::from_fn(move || {
6880 if done {
6881 return None;
6882 }
6883
6884 let operations = operations
6885 .drain(..cmp::min(CHUNK_SIZE, operations.len()))
6886 .collect::<Vec<_>>();
6887 if operations.is_empty() {
6888 done = true;
6889 }
6890 Some(operations)
6891 })
6892}
6893
6894fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
6895 proto::Symbol {
6896 language_server_name: symbol.language_server_name.0.to_string(),
6897 source_worktree_id: symbol.source_worktree_id.to_proto(),
6898 worktree_id: symbol.path.worktree_id.to_proto(),
6899 path: symbol.path.path.to_string_lossy().to_string(),
6900 name: symbol.name.clone(),
6901 kind: unsafe { mem::transmute(symbol.kind) },
6902 start: Some(proto::PointUtf16 {
6903 row: symbol.range.start.0.row,
6904 column: symbol.range.start.0.column,
6905 }),
6906 end: Some(proto::PointUtf16 {
6907 row: symbol.range.end.0.row,
6908 column: symbol.range.end.0.column,
6909 }),
6910 signature: symbol.signature.to_vec(),
6911 }
6912}
6913
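// Computes `path` relative to `base` by walking both component lists; e.g. (illustrative only)
// a base of "/a/b" and a path of "/a/c/d" yield "../c/d".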
6914fn relativize_path(base: &Path, path: &Path) -> PathBuf {
6915 let mut path_components = path.components();
6916 let mut base_components = base.components();
6917 let mut components: Vec<Component> = Vec::new();
6918 loop {
6919 match (path_components.next(), base_components.next()) {
6920 (None, None) => break,
6921 (Some(a), None) => {
6922 components.push(a);
6923 components.extend(path_components.by_ref());
6924 break;
6925 }
6926 (None, _) => components.push(Component::ParentDir),
6927 (Some(a), Some(b)) if components.is_empty() && a == b => (),
6928 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
6929 (Some(a), Some(_)) => {
6930 components.push(Component::ParentDir);
6931 for _ in base_components {
6932 components.push(Component::ParentDir);
6933 }
6934 components.push(a);
6935 components.extend(path_components.by_ref());
6936 break;
6937 }
6938 }
6939 }
6940 components.iter().map(|c| c.as_os_str()).collect()
6941}
6942
6943impl Item for Buffer {
6944 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
6945 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
6946 }
6947
6948 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
6949 File::from_dyn(self.file()).map(|file| ProjectPath {
6950 worktree_id: file.worktree_id(cx),
6951 path: file.path().clone(),
6952 })
6953 }
6954}
6955
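// Polls the watch channel until the in-flight buffer load completes, then returns a clone of
// the result.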
6956async fn pump_loading_buffer_reciever(
6957 mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
6958) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
6959 loop {
6960 if let Some(result) = receiver.borrow().as_ref() {
6961 match result {
6962 Ok(buffer) => return Ok(buffer.to_owned()),
6963 Err(e) => return Err(e.to_owned()),
6964 }
6965 }
6966 receiver.next().await;
6967 }
6968}
6969
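// Builds a callback that applies a new git diff base to a buffer; when the project has a remote
// id, the new diff base is also sent to peers as an `UpdateDiffBase` message.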
6970fn update_diff_base(
6971 project: &Project,
6972) -> impl Fn(Option<String>, ModelHandle<Buffer>, &mut AsyncAppContext) {
6973 let remote_id = project.remote_id();
6974 let client = project.client().clone();
6975 move |diff_base, buffer, cx| {
6976 let buffer_id = buffer.update(cx, |buffer, cx| {
6977 buffer.set_diff_base(diff_base.clone(), cx);
6978 buffer.remote_id()
6979 });
6980
6981 if let Some(project_id) = remote_id {
6982 client
6983 .send(proto::UpdateDiffBase {
6984 project_id,
6985 buffer_id: buffer_id as u64,
6986 diff_base,
6987 })
6988 .log_err();
6989 }
6990 }
6991}