mod ignore;
mod lsp_command;
pub mod project_settings;
pub mod search;
pub mod terminals;
pub mod worktree;

#[cfg(test)]
mod project_tests;
#[cfg(test)]
mod worktree_tests;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
use copilot::Copilot;
use futures::{
    channel::{
        mpsc::{self, UnboundedReceiver},
        oneshot,
    },
    future::{self, try_join_all, Shared},
    stream::FuturesUnordered,
    AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
    executor::Background, AnyModelHandle, AppContext, AsyncAppContext, BorrowAppContext, Entity,
    ModelContext, ModelHandle, Task, WeakModelHandle,
};
use itertools::Itertools;
use language::{
    language_settings::{
        language_settings, FormatOnSave, Formatter, InlayHintKind, LanguageSettings,
    },
    point_to_lsp,
    proto::{
        deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
        serialize_anchor, serialize_version, split_operations,
    },
    range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
    CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
    File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
    OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
    ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
    DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
};
use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
use postage::watch;
use prettier::Prettier;
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::SettingsStore;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use smol::{
    channel::{Receiver, Sender},
    lock::Semaphore,
};
use std::{
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    num::NonZeroU32,
    ops::{ControlFlow, Range},
    path::{self, Component, Path, PathBuf},
    process::Stdio,
    str,
    sync::{
        atomic::{AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::{Duration, Instant},
};
use terminals::Terminals;
use text::Anchor;
use util::{
    debug_panic, defer,
    http::HttpClient,
    merge_json_value_into,
    paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
    post_inc, ResultExt, TryFutureExt as _,
};

pub use fs::*;
#[cfg(any(test, feature = "test-support"))]
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::*;

const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;

pub trait Item {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
}

// Language server state is stored across 3 collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState which can either be a task for a
//         server in the process of starting, or a running server with adapter and language server arcs
//     language_server_ids => a mapping from WorktreeId and server name to the unique server id
//     language_server_statuses => a mapping from unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, the id map is checked first to make sure a server isn't already
// available for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated
// and the Starting variant of LanguageServerState is stored in the language_servers map.
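//
// For example, starting rust-analyzer for a worktree roughly follows this lookup:
//     language_server_ids.get(&(worktree_id, LanguageServerName("rust-analyzer".into())))
//         => Some(id): reuse the already-running (or starting) server with that id
//         => None: allocate a new id, store LanguageServerState::Starting(task) in
//            language_servers, and record the new id in language_server_ids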
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
    languages: Arc<LanguageRegistry>,
    supplementary_language_servers:
        HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
    language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
    last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    join_project_response_message_id: u32,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: Option<ProjectClientState>,
    collaborators: HashMap<proto::PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    next_buffer_id: u64,
    opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
    shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
    #[allow(clippy::type_complexity)]
    loading_buffers_by_path: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    #[allow(clippy::type_complexity)]
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A buffer ID mapped to `None` means that we've started waiting for that buffer but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
    incomplete_remote_buffers: HashMap<u64, Option<ModelHandle<Buffer>>>,
    buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
    buffers_being_formatted: HashSet<u64>,
    buffers_needing_diff: HashSet<WeakModelHandle<Buffer>>,
    git_diff_debouncer: DelayedDebounced,
    nonce: u128,
    _maintain_buffer_languages: Task<()>,
    _maintain_workspace_config: Task<()>,
    terminals: Terminals,
    copilot_lsp_subscription: Option<gpui::Subscription>,
    copilot_log_subscription: Option<lsp::Subscription>,
    current_lsp_settings: HashMap<Arc<str>, LspSettings>,
    node: Option<Arc<dyn NodeRuntime>>,
    default_prettier: DefaultPrettier,
    prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
    prettier_instances: HashMap<PathBuf, PrettierInstance>,
}

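/// A shareable handle to a prettier server that is either running or has stopped
/// after some number of start attempts.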
type PrettierInstance = Shared<Task<PrettierProcess>>;

#[derive(Clone)]
enum PrettierProcess {
    Running(Arc<Prettier>),
    Stopped { start_attempts: usize },
}

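/// The project's default prettier: its installation state and the set of
/// prettier plugins that have been installed alongside it.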
struct DefaultPrettier {
    prettier: PrettierInstallation,
    installed_plugins: HashSet<&'static str>,
}

enum PrettierInstallation {
    NotInstalled {
        attempts: usize,
        installation_process: Option<Shared<Task<Result<(), Arc<anyhow::Error>>>>>,
    },
    Installed(PrettierInstance),
}

impl Default for DefaultPrettier {
    fn default() -> Self {
        Self {
            prettier: PrettierInstallation::NotInstalled {
                attempts: 0,
                installation_process: None,
            },
            installed_plugins: HashSet::default(),
        }
    }
}

impl DefaultPrettier {
    fn instance(&self) -> Option<&PrettierInstance> {
        if let PrettierInstallation::Installed(instance) = &self.prettier {
            Some(instance)
        } else {
            None
        }
    }
}

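/// Debounces a unit of work: each call to `fire_new` cancels the previously
/// scheduled run (via the cancel channel) and schedules `func` to run after
/// `delay`, once any previously spawned task has finished.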
struct DelayedDebounced {
    task: Option<Task<()>>,
    cancel_channel: Option<oneshot::Sender<()>>,
}

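/// Identifies which language server a request should be routed to: the buffer's
/// primary server, or a specific server by id.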
enum LanguageServerToQuery {
    Primary,
    Other(LanguageServerId),
}

impl DelayedDebounced {
    fn new() -> DelayedDebounced {
        DelayedDebounced {
            task: None,
            cancel_channel: None,
        }
    }

    fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
    where
        F: 'static + FnOnCe(&mut Project, &mut ModelContext<Project>) -> Task<()>,
    {
        if let Some(channel) = self.cancel_channel.take() {
            _ = channel.send(());
        }

        let (sender, mut receiver) = oneshot::channel::<()>();
        self.cancel_channel = Some(sender);

        let previous_task = self.task.take();
        self.task = Some(cx.spawn(|workspace, mut cx| async move {
            let mut timer = cx.background().timer(delay).fuse();
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }

            futures::select_biased! {
                _ = receiver => return,
                _ = timer => {}
            }

            workspace
                .update(&mut cx, |workspace, cx| (func)(workspace, cx))
                .await;
        }));
    }
}

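/// A snapshot of a buffer's text paired with the document version that was
/// reported to a language server, kept per server so positions can be
/// interpreted against the version that server last saw.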
struct LspBufferSnapshot {
    version: i32,
    snapshot: TextBufferSnapshot,
}

/// Message ordered with respect to buffer operations
enum BufferOrderedMessage {
    Operation {
        buffer_id: u64,
        operation: proto::Operation,
    },
    LanguageServerUpdate {
        language_server_id: LanguageServerId,
        message: proto::update_language_server::Variant,
    },
    Resync,
}

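/// Updates that a shared (host-side) project streams to collaborators via the
/// `_send_updates` task.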
enum LocalProjectUpdate {
    WorktreesChanged,
    CreateBufferForPeer {
        peer_id: proto::PeerId,
        buffer_id: u64,
    },
}

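/// A buffer tracked by the project. Shared projects hold buffers strongly and
/// unshared projects hold them weakly; `Operations` accumulates remote operations
/// received for a buffer that isn't currently open.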
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Operations(Vec<Operation>),
}

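/// Worktrees are held strongly while the project is shared (or the worktree is
/// visible) and weakly otherwise, so hidden worktrees can be dropped when unused.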
#[derive(Clone)]
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

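/// Whether this project is the host of a shared project (`Local`) or a guest
/// that joined someone else's project (`Remote`).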
enum ProjectClientState {
    Local {
        remote_id: u64,
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<()>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    LanguageServerAdded(LanguageServerId),
    LanguageServerRemoved(LanguageServerId),
    LanguageServerLog(LanguageServerId, String),
    Notification(String),
    ActiveEntryChanged(Option<ProjectEntryId>),
    ActivateProjectPanel,
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    DiskBasedDiagnosticsStarted {
        language_server_id: LanguageServerId,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: LanguageServerId,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: LanguageServerId,
    },
    RemoteIdChanged(Option<u64>),
    DisconnectedFromHost,
    Closed,
    DeletedEntry(ProjectEntryId),
    CollaboratorUpdated {
        old_peer_id: proto::PeerId,
        new_peer_id: proto::PeerId,
    },
    CollaboratorJoined(proto::PeerId),
    CollaboratorLeft(proto::PeerId),
    RefreshInlayHints,
}

pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),

    Running {
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server: Arc<LanguageServer>,
        watched_paths: HashMap<WorktreeId, GlobSet>,
        simulate_disk_based_diagnostics_completion: Option<Task<()>>,
    },
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub position: language::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    pub padding_left: bool,
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    pub resolve_state: ResolveState,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    CanResolve(LanguageServerId, Option<lsp::LSPAny>),
    Resolving,
}

impl InlayHint {
    pub fn text(&self) -> String {
        match &self.label {
            InlayHintLabel::String(s) => s.to_owned(),
            InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
    String(String),
    LabelParts(Vec<InlayHintLabelPart>),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
    pub value: String,
    pub tooltip: Option<InlayHintLabelPartTooltip>,
    pub location: Option<(LanguageServerId, lsp::Location)>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
    pub kind: HoverBlockKind,
    pub value: String,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub language_server_name: LanguageServerName,
    pub source_worktree_id: WorktreeId,
    pub path: ProjectPath,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<Unclipped<PointUtf16>>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub kind: HoverBlockKind,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
    PlainText,
    Markdown,
    Code { language: String },
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
    pub language: Option<Arc<Language>>,
}

impl Hover {
    pub fn is_empty(&self) -> bool {
        self.contents.iter().all(|block| block.text.is_empty())
    }
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(
        &self,
        language_server_id: LanguageServerId,
        path: &Path,
    ) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: language_server_id.0 as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatTrigger {
    Save,
    Manual,
}

struct ProjectLspAdapterDelegate {
    project: ModelHandle<Project>,
    http_client: Arc<dyn HttpClient>,
}

// Currently, formatting operations are represented differently depending on
// whether they come from a language server or an external command.
enum FormatOperation {
    Lsp(Vec<(Range<Anchor>, String)>),
    External(Diff),
    Prettier(Diff),
}

impl FormatTrigger {
    fn from_proto(value: i32) -> FormatTrigger {
        match value {
            0 => FormatTrigger::Save,
            1 => FormatTrigger::Manual,
            _ => FormatTrigger::Save,
        }
    }
}
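
/// A candidate for a project-wide search: either an already-open buffer
/// (which may be unsaved and have no path) or an on-disk path in a worktree.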
#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
    OpenBuffer {
        buffer: ModelHandle<Buffer>,
        // This might be an unnamed file without a representation on the filesystem
        path: Option<Arc<Path>>,
    },
    Path {
        worktree_id: WorktreeId,
        is_ignored: bool,
        path: Arc<Path>,
    },
}

type SearchMatchCandidateIndex = usize;
impl SearchMatchCandidate {
    fn path(&self) -> Option<Arc<Path>> {
        match self {
            SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
            SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
        }
    }
}

impl Project {
    pub fn init_settings(cx: &mut AppContext) {
        settings::register::<ProjectSettings>(cx);
    }

    pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
        Self::init_settings(cx);

        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_update_project_collaborator);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_create_buffer_for_peer);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_request_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_message_handler(Self::handle_update_worktree_settings);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_on_type_formatting);
        client.add_model_request_handler(Self::handle_inlay_hints);
        client.add_model_request_handler(Self::handle_resolve_completion_documentation);
        client.add_model_request_handler(Self::handle_resolve_inlay_hint);
        client.add_model_request_handler(Self::handle_refresh_inlay_hints);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_synchronize_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
        client.add_model_message_handler(Self::handle_update_diff_base);
    }

    pub fn local(
        client: Arc<Client>,
        node: Arc<dyn NodeRuntime>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (tx, rx) = mpsc::unbounded();
            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            Self {
                worktrees: Default::default(),
                buffer_ordered_messages_tx: tx,
                collaborators: Default::default(),
                next_buffer_id: 0,
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_buffers_by_path: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                buffer_snapshots: Default::default(),
                join_project_response_message_id: 0,
                client_state: None,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![
                    cx.observe_global::<SettingsStore, _>(Self::on_settings_changed)
                ],
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
                node: Some(node),
                default_prettier: DefaultPrettier::default(),
                prettiers_per_worktree: HashMap::default(),
                prettier_instances: HashMap::default(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let subscription = client.subscribe_to_entity(remote_id)?;
        let response = client
            .request_envelope(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.add_model(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            let mut worktrees = Vec::new();
            for worktree in response.payload.worktrees {
                let worktree = cx.update(|cx| {
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx)
                });
                worktrees.push(worktree);
            }

            let (tx, rx) = mpsc::unbounded();
            cx.spawn_weak(|this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            let mut this = Self {
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                loading_buffers_by_path: Default::default(),
                next_buffer_id: 0,
                opened_buffer: watch::channel(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                join_project_response_message_id: response.message_id,
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: Default::default(),
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: Some(ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                }),
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: response
                    .payload
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            LanguageServerId(server.id as usize),
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                opened_buffers: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: settings::get::<ProjectSettings>(cx).lsp.clone(),
                node: None,
                default_prettier: DefaultPrettier::default(),
                prettiers_per_worktree: HashMap::default(),
                prettier_instances: HashMap::default(),
            };
            for worktree in worktrees {
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        });
        let subscription = subscription.set_model(&this, &mut cx);

        let user_ids = response
            .payload
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
            this.client_subscriptions.push(subscription);
            anyhow::Ok(())
        })?;

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        let mut languages = LanguageRegistry::test();
        languages.set_executor(cx.background());
        let http_client = util::http::FakeHttpClient::with_404_response();
        let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                node_runtime::FakeNodeRuntime::new(),
                user_store,
                Arc::new(languages),
                fs,
                cx,
            )
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

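    // Reconciles running language servers and formatter installations with updated
    // settings: starts servers that became enabled, stops ones that were disabled,
    // and restarts servers whose LSP settings changed.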
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let mut language_servers_to_start = Vec::new();
        let mut language_formatters_to_check = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                let buffer_file = File::from_dyn(buffer.file());
                let buffer_language = buffer.language();
                let settings = language_settings(buffer_language, buffer.file(), cx);
                if let Some(language) = buffer_language {
                    if settings.enable_language_server {
                        if let Some(file) = buffer_file {
                            language_servers_to_start
                                .push((file.worktree.clone(), Arc::clone(language)));
                        }
                    }
                    language_formatters_to_check.push((
                        buffer_file.map(|f| f.worktree_id(cx)),
                        Arc::clone(language),
                        settings.clone(),
                    ));
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

        let new_lsp_settings = settings::get::<ProjectSettings>(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            let language = languages.iter().find_map(|l| {
                let adapter = l
                    .lsp_adapters()
                    .iter()
                    .find(|adapter| &adapter.name == started_lsp_name)?;
                Some((l, adapter))
            });
            if let Some((language, adapter)) = language {
                let worktree = self.worktree_for_id(*worktree_id, cx);
                let file = worktree.as_ref().and_then(|tree| {
                    tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
                });
                if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
                    language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                } else if let Some(worktree) = worktree {
                    let server_name = &adapter.name.0;
                    match (
                        current_lsp_settings.get(server_name),
                        new_lsp_settings.get(server_name),
                    ) {
                        (None, None) => {}
                        (Some(_), None) | (None, Some(_)) => {
                            language_servers_to_restart.push((worktree, Arc::clone(language)));
                        }
                        (Some(current_lsp_settings), Some(new_lsp_settings)) => {
                            if current_lsp_settings != new_lsp_settings {
                                language_servers_to_restart.push((worktree, Arc::clone(language)));
                            }
                        }
                    }
                }
            }
        }
        self.current_lsp_settings = new_lsp_settings;

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        for (worktree, language, settings) in language_formatters_to_check {
            self.install_default_formatters(worktree, &language, &settings, cx);
        }

        // Start all the newly-enabled language servers.
        for (worktree, language) in language_servers_to_start {
            let worktree_path = worktree.read(cx).abs_path();
            self.start_language_servers(&worktree, worktree_path, language, cx);
        }

        // Restart all language servers with changed initialization options.
        for (worktree, language) in language_servers_to_restart {
            self.restart_language_servers(worktree, language, cx);
        }

        if self.copilot_lsp_subscription.is_none() {
            if let Some(copilot) = Copilot::global(cx) {
                for buffer in self.opened_buffers.values() {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        self.register_buffer_with_copilot(&buffer, cx);
                    }
                }
                self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
            }
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    pub fn opened_buffers(&self, cx: &AppContext) -> Vec<ModelHandle<Buffer>> {
        self.opened_buffers
            .values()
            .filter_map(|b| b.upgrade(cx))
            .collect()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn remote_id(&self) -> Option<u64> {
        match self.client_state.as_ref()? {
            ProjectClientState::Local { remote_id, .. }
            | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
            _ => 0,
        }
    }

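    // Notifies observers that project metadata (e.g. the worktree list) changed and,
    // when this project is shared, queues a `WorktreesChanged` update for collaborators.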
    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
            updates_tx
                .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                .ok();
        }
        cx.notify();
    }

    pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn host(&self) -> Option<&Collaborator> {
        self.collaborators.values().find(|c| c.replica_id == 0)
    }

    /// Collect all worktrees, including ones that don't appear in the project panel
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    /// Collect all user-visible worktrees, the ones that appear in the project panel
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees(cx) {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.to_string_lossy().into(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;

        cx.emit(Event::DeletedEntry(entry_id));

        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn expand_entry(
        &mut self,
        worktree_id: WorktreeId,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_id(worktree_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
            })
        } else {
            let worktree = worktree.downgrade();
            let request = self.client.request(proto::ExpandProjectEntry {
                project_id: self.remote_id().unwrap(),
                entry_id: entry_id.to_proto(),
            });
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = request.await?;
                if let Some(worktree) = worktree.upgrade(&cx) {
                    worktree
                        .update(&mut cx, |worktree, _| {
                            worktree
                                .as_remote_mut()
                                .unwrap()
                                .wait_for_snapshot(response.worktree_scan_id as usize)
                        })
                        .await?;
                }
                Ok(())
            }))
        }
    }

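    // Shares this local project under the given remote id: upgrades worktree and
    // buffer handles to strong references, replays current language server statuses
    // and local settings to collaborators, and spawns the task that streams
    // subsequent updates to them.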
    pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
        if self.client_state.is_some() {
            return Err(anyhow!("project was already shared"));
        }
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
                .set_model(&cx.handle(), &mut cx.to_async()),
        );

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Operations(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: server_id.0 as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees(cx) {
            let worktree_id = worktree.read(cx).id().to_proto();
            for (path, content) in store.local_settings(worktree.id()) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
                        worktree_id,
                        path: path.to_string_lossy().into(),
                        content: Some(content),
                    })
                    .log_err();
            }
        }

        let (updates_tx, mut updates_rx) = mpsc::unbounded();
        let client = self.client.clone();
        self.client_state = Some(ProjectClientState::Local {
            remote_id: project_id,
            updates_tx,
            _send_updates: cx.spawn_weak(move |this, mut cx| async move {
                while let Some(update) = updates_rx.next().await {
                    let Some(this) = this.upgrade(&cx) else { break };

                    match update {
                        LocalProjectUpdate::WorktreesChanged => {
                            let worktrees = this
                                .read_with(&cx, |this, cx| this.worktrees(cx).collect::<Vec<_>>());
                            let update_project = this
                                .read_with(&cx, |this, cx| {
                                    this.client.request(proto::UpdateProject {
                                        project_id,
                                        worktrees: this.worktree_metadata_protos(cx),
                                    })
                                })
                                .await;
                            if update_project.is_ok() {
                                for worktree in worktrees {
                                    worktree.update(&mut cx, |worktree, cx| {
                                        let worktree = worktree.as_local_mut().unwrap();
                                        worktree.share(project_id, cx).detach_and_log_err(cx)
                                    });
                                }
                            }
                        }
                        LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
                            let buffer = this.update(&mut cx, |this, _| {
                                let buffer = this.opened_buffers.get(&buffer_id).unwrap();
                                let shared_buffers =
                                    this.shared_buffers.entry(peer_id).or_default();
                                if shared_buffers.insert(buffer_id) {
                                    if let OpenBuffer::Strong(buffer) = buffer {
                                        Some(buffer.clone())
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            });

                            let Some(buffer) = buffer else { continue };
                            let operations =
                                buffer.read_with(&cx, |b, cx| b.serialize_ops(None, cx));
                            let operations = operations.await;
                            let state = buffer.read_with(&cx, |buffer, _| buffer.to_proto());

                            let initial_state = proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
                            };
                            if client.send(initial_state).log_err().is_some() {
                                let client = client.clone();
                                cx.background()
                                    .spawn(async move {
                                        let mut chunks = split_operations(operations).peekable();
                                        while let Some(chunk) = chunks.next() {
                                            let is_last = chunks.peek().is_none();
                                            client.send(proto::CreateBufferForPeer {
                                                project_id,
                                                peer_id: Some(peer_id),
                                                variant: Some(
                                                    proto::create_buffer_for_peer::Variant::Chunk(
                                                        proto::BufferChunk {
                                                            buffer_id,
                                                            operations: chunk,
                                                            is_last,
                                                        },
                                                    ),
                                                ),
                                            })?;
                                        }
                                        anyhow::Ok(())
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                    }
                }
            }),
        });

        self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();
        Ok(())
    }

    pub fn reshared(
        &mut self,
        message: proto::ResharedProject,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.shared_buffers.clear();
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.metadata_changed(cx);
        Ok(())
    }

    pub fn rejoined(
        &mut self,
        message: proto::RejoinedProject,
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        cx.update_global::<SettingsStore, _, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
                    .log_err();
            }
        });

        self.join_project_response_message_id = message_id;
        self.set_worktrees_from_proto(message.worktrees, cx)?;
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.language_server_statuses = message
            .language_servers
            .into_iter()
            .map(|server| {
                (
                    LanguageServerId(server.id as usize),
                    LanguageServerStatus {
                        name: server.name,
                        pending_work: Default::default(),
                        has_pending_diagnostic_updates: false,
                        progress_tokens: Default::default(),
                    },
                )
            })
            .collect();
        self.buffer_ordered_messages_tx
            .unbounded_send(BufferOrderedMessage::Resync)
            .unwrap();
        cx.notify();
        Ok(())
    }

    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
        self.unshare_internal(cx)?;
        self.metadata_changed(cx);
        cx.notify();
        Ok(())
    }

    fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
        if self.is_remote() {
            return Err(anyhow!("attempted to unshare a remote project"));
        }

        if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
            self.collaborators.clear();
            self.shared_buffers.clear();
            self.client_subscriptions.clear();

            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            self.client.send(proto::UnshareProject {
                project_id: remote_id,
            })?;

            Ok(())
        } else {
            Err(anyhow!("attempted to unshare an unshared project"))
        }
    }

    pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
        self.disconnected_from_host_internal(cx);
        cx.emit(Event::DisconnectedFromHost);
        cx.notify();
    }

    fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
        if let Some(ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        }) = &mut self.client_state
        {
            *sharing_has_stopped = true;

            self.collaborators.clear();

            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade(cx) {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            *self.opened_buffer.0.borrow_mut() = ();
        }
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            Some(ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            }) => *sharing_has_stopped,
            _ => false,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            Some(ProjectClientState::Remote { .. }) => false,
            _ => true,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }
        let id = post_inc(&mut self.next_buffer_id);
        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), id, text)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;

            let buffer: &AnyModelHandle = &buffer;
            Ok((project_entry_id, buffer.clone()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                let project_path = project_path.clone();
                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers_by_path.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            wait_for_loading_buffer(loading_watch)
                .await
                .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let buffer_id = post_inc(&mut self.next_buffer_id);
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(buffer_id, path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(response.buffer_id, cx)
            })
            .await
        })
    }

1826 /// LanguageServerName is owned, because it is inserted into a map
1827 pub fn open_local_buffer_via_lsp(
1828 &mut self,
1829 abs_path: lsp::Url,
1830 language_server_id: LanguageServerId,
1831 language_server_name: LanguageServerName,
1832 cx: &mut ModelContext<Self>,
1833 ) -> Task<Result<ModelHandle<Buffer>>> {
1834 cx.spawn(|this, mut cx| async move {
1835 let abs_path = abs_path
1836 .to_file_path()
1837 .map_err(|_| anyhow!("can't convert URI to path"))?;
1838 let (worktree, relative_path) = if let Some(result) =
1839 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1840 {
1841 result
1842 } else {
1843 let worktree = this
1844 .update(&mut cx, |this, cx| {
1845 this.create_local_worktree(&abs_path, false, cx)
1846 })
1847 .await?;
1848 this.update(&mut cx, |this, cx| {
1849 this.language_server_ids.insert(
1850 (worktree.read(cx).id(), language_server_name),
1851 language_server_id,
1852 );
1853 });
1854 (worktree, PathBuf::new())
1855 };
1856
1857 let project_path = ProjectPath {
1858 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1859 path: relative_path.into(),
1860 };
1861 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1862 .await
1863 })
1864 }
1865
1866 pub fn open_buffer_by_id(
1867 &mut self,
1868 id: u64,
1869 cx: &mut ModelContext<Self>,
1870 ) -> Task<Result<ModelHandle<Buffer>>> {
1871 if let Some(buffer) = self.buffer_for_id(id, cx) {
1872 Task::ready(Ok(buffer))
1873 } else if self.is_local() {
1874 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1875 } else if let Some(project_id) = self.remote_id() {
1876 let request = self
1877 .client
1878 .request(proto::OpenBufferById { project_id, id });
1879 cx.spawn(|this, mut cx| async move {
1880 let buffer_id = request.await?.buffer_id;
1881 this.update(&mut cx, |this, cx| {
1882 this.wait_for_remote_buffer(buffer_id, cx)
1883 })
1884 .await
1885 })
1886 } else {
1887 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1888 }
1889 }
1890
1891 pub fn save_buffers(
1892 &self,
1893 buffers: HashSet<ModelHandle<Buffer>>,
1894 cx: &mut ModelContext<Self>,
1895 ) -> Task<Result<()>> {
1896 cx.spawn(|this, mut cx| async move {
1897 let save_tasks = buffers
1898 .into_iter()
1899 .map(|buffer| this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx)));
1900 try_join_all(save_tasks).await?;
1901 Ok(())
1902 })
1903 }
1904
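    // Saves a single buffer through its worktree, dispatching to the local or
    // remote worktree implementation.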
1905 pub fn save_buffer(
1906 &self,
1907 buffer: ModelHandle<Buffer>,
1908 cx: &mut ModelContext<Self>,
1909 ) -> Task<Result<()>> {
1910 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1911 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1912 };
1913 let worktree = file.worktree.clone();
1914 let path = file.path.clone();
1915 worktree.update(cx, |worktree, cx| match worktree {
1916 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1917 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1918 })
1919 }
1920
1921 pub fn save_buffer_as(
1922 &mut self,
1923 buffer: ModelHandle<Buffer>,
1924 abs_path: PathBuf,
1925 cx: &mut ModelContext<Self>,
1926 ) -> Task<Result<()>> {
1927 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1928 let old_file = File::from_dyn(buffer.read(cx).file())
1929 .filter(|f| f.is_local())
1930 .cloned();
1931 cx.spawn(|this, mut cx| async move {
1932 if let Some(old_file) = &old_file {
1933 this.update(&mut cx, |this, cx| {
1934 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1935 });
1936 }
1937 let (worktree, path) = worktree_task.await?;
1938 worktree
1939 .update(&mut cx, |worktree, cx| match worktree {
1940 Worktree::Local(worktree) => {
1941 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1942 }
1943                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1944 })
1945 .await?;
1946
1947 this.update(&mut cx, |this, cx| {
1948 this.detect_language_for_buffer(&buffer, cx);
1949 this.register_buffer_with_language_servers(&buffer, cx);
1950 });
1951 Ok(())
1952 })
1953 }
1954
1955 pub fn get_open_buffer(
1956 &mut self,
1957 path: &ProjectPath,
1958 cx: &mut ModelContext<Self>,
1959 ) -> Option<ModelHandle<Buffer>> {
1960 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1961 self.opened_buffers.values().find_map(|buffer| {
1962 let buffer = buffer.upgrade(cx)?;
1963 let file = File::from_dyn(buffer.read(cx).file())?;
1964 if file.worktree == worktree && file.path() == &path.path {
1965 Some(buffer)
1966 } else {
1967 None
1968 }
1969 })
1970 }
1971
1972 fn register_buffer(
1973 &mut self,
1974 buffer: &ModelHandle<Buffer>,
1975 cx: &mut ModelContext<Self>,
1976 ) -> Result<()> {
1977 self.request_buffer_diff_recalculation(buffer, cx);
1978 buffer.update(cx, |buffer, _| {
1979 buffer.set_language_registry(self.languages.clone())
1980 });
1981
1982 let remote_id = buffer.read(cx).remote_id();
1983 let is_remote = self.is_remote();
1984 let open_buffer = if is_remote || self.is_shared() {
1985 OpenBuffer::Strong(buffer.clone())
1986 } else {
1987 OpenBuffer::Weak(buffer.downgrade())
1988 };
1989
1990 match self.opened_buffers.entry(remote_id) {
1991 hash_map::Entry::Vacant(entry) => {
1992 entry.insert(open_buffer);
1993 }
1994 hash_map::Entry::Occupied(mut entry) => {
1995 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1996 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
1997 } else if entry.get().upgrade(cx).is_some() {
1998 if is_remote {
1999 return Ok(());
2000 } else {
2001 debug_panic!("buffer {} was already registered", remote_id);
2002 Err(anyhow!("buffer {} was already registered", remote_id))?;
2003 }
2004 }
2005 entry.insert(open_buffer);
2006 }
2007 }
2008 cx.subscribe(buffer, |this, buffer, event, cx| {
2009 this.on_buffer_event(buffer, event, cx);
2010 })
2011 .detach();
2012
2013 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2014 if file.is_local {
2015 self.local_buffer_ids_by_path.insert(
2016 ProjectPath {
2017 worktree_id: file.worktree_id(cx),
2018 path: file.path.clone(),
2019 },
2020 remote_id,
2021 );
2022
2023 self.local_buffer_ids_by_entry_id
2024 .insert(file.entry_id, remote_id);
2025 }
2026 }
2027
2028 self.detect_language_for_buffer(buffer, cx);
2029 self.register_buffer_with_language_servers(buffer, cx);
2030 self.register_buffer_with_copilot(buffer, cx);
2031 cx.observe_release(buffer, |this, buffer, cx| {
2032 if let Some(file) = File::from_dyn(buffer.file()) {
2033 if file.is_local() {
2034 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2035 for server in this.language_servers_for_buffer(buffer, cx) {
2036 server
2037 .1
2038 .notify::<lsp::notification::DidCloseTextDocument>(
2039 lsp::DidCloseTextDocumentParams {
2040 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2041 },
2042 )
2043 .log_err();
2044 }
2045 }
2046 }
2047 })
2048 .detach();
2049
2050 *self.opened_buffer.0.borrow_mut() = ();
2051 Ok(())
2052 }
2053
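    // Sends `textDocument/didOpen` to every running language server that matches
    // the buffer's language, seeds the buffer's completion triggers from the
    // server capabilities, and records an initial snapshot per server that later
    // incremental `didChange` updates are built against.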
2054 fn register_buffer_with_language_servers(
2055 &mut self,
2056 buffer_handle: &ModelHandle<Buffer>,
2057 cx: &mut ModelContext<Self>,
2058 ) {
2059 let buffer = buffer_handle.read(cx);
2060 let buffer_id = buffer.remote_id();
2061
2062 if let Some(file) = File::from_dyn(buffer.file()) {
2063 if !file.is_local() {
2064 return;
2065 }
2066
2067 let abs_path = file.abs_path(cx);
2068 let uri = lsp::Url::from_file_path(&abs_path)
2069 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2070 let initial_snapshot = buffer.text_snapshot();
2071 let language = buffer.language().cloned();
2072 let worktree_id = file.worktree_id(cx);
2073
2074 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2075 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2076 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2077 .log_err();
2078 }
2079 }
2080
2081 if let Some(language) = language {
2082 for adapter in language.lsp_adapters() {
2083 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2084 let server = self
2085 .language_server_ids
2086 .get(&(worktree_id, adapter.name.clone()))
2087 .and_then(|id| self.language_servers.get(id))
2088 .and_then(|server_state| {
2089 if let LanguageServerState::Running { server, .. } = server_state {
2090 Some(server.clone())
2091 } else {
2092 None
2093 }
2094 });
2095 let server = match server {
2096 Some(server) => server,
2097 None => continue,
2098 };
2099
2100 server
2101 .notify::<lsp::notification::DidOpenTextDocument>(
2102 lsp::DidOpenTextDocumentParams {
2103 text_document: lsp::TextDocumentItem::new(
2104 uri.clone(),
2105 language_id.unwrap_or_default(),
2106 0,
2107 initial_snapshot.text(),
2108 ),
2109 },
2110 )
2111 .log_err();
2112
2113 buffer_handle.update(cx, |buffer, cx| {
2114 buffer.set_completion_triggers(
2115 server
2116 .capabilities()
2117 .completion_provider
2118 .as_ref()
2119 .and_then(|provider| provider.trigger_characters.clone())
2120 .unwrap_or_default(),
2121 cx,
2122 );
2123 });
2124
2125 let snapshot = LspBufferSnapshot {
2126 version: 0,
2127 snapshot: initial_snapshot.clone(),
2128 };
2129 self.buffer_snapshots
2130 .entry(buffer_id)
2131 .or_default()
2132 .insert(server.server_id(), vec![snapshot]);
2133 }
2134 }
2135 }
2136 }
2137
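    // Clears per-server diagnostics and buffer snapshots for the buffer's old path
    // and sends `textDocument/didClose` to every language server attached to it.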
2138 fn unregister_buffer_from_language_servers(
2139 &mut self,
2140 buffer: &ModelHandle<Buffer>,
2141 old_file: &File,
2142 cx: &mut ModelContext<Self>,
2143 ) {
2144 let old_path = match old_file.as_local() {
2145 Some(local) => local.abs_path(cx),
2146 None => return,
2147 };
2148
2149 buffer.update(cx, |buffer, cx| {
2150 let worktree_id = old_file.worktree_id(cx);
2151 let ids = &self.language_server_ids;
2152
2153 let language = buffer.language().cloned();
2154 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2155 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2156 buffer.update_diagnostics(server_id, Default::default(), cx);
2157 }
2158
2159 self.buffer_snapshots.remove(&buffer.remote_id());
2160 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2161 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2162 language_server
2163 .notify::<lsp::notification::DidCloseTextDocument>(
2164 lsp::DidCloseTextDocumentParams {
2165 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2166 },
2167 )
2168 .log_err();
2169 }
2170 });
2171 }
2172
2173 fn register_buffer_with_copilot(
2174 &self,
2175 buffer_handle: &ModelHandle<Buffer>,
2176 cx: &mut ModelContext<Self>,
2177 ) {
2178 if let Some(copilot) = Copilot::global(cx) {
2179 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2180 }
2181 }
2182
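    // Drains buffer-ordered messages in batches of up to MAX_BATCH_SIZE, relaying
    // buffer operations and language server updates to the remote peer in order;
    // pending operations are always flushed before a language server update is sent.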
2183 async fn send_buffer_ordered_messages(
2184 this: WeakModelHandle<Self>,
2185 rx: UnboundedReceiver<BufferOrderedMessage>,
2186 mut cx: AsyncAppContext,
2187 ) -> Option<()> {
2188 const MAX_BATCH_SIZE: usize = 128;
2189
2190 let mut operations_by_buffer_id = HashMap::default();
2191 async fn flush_operations(
2192 this: &ModelHandle<Project>,
2193 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2194 needs_resync_with_host: &mut bool,
2195 is_local: bool,
2196 cx: &AsyncAppContext,
2197 ) {
2198 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2199 let request = this.read_with(cx, |this, _| {
2200 let project_id = this.remote_id()?;
2201 Some(this.client.request(proto::UpdateBuffer {
2202 buffer_id,
2203 project_id,
2204 operations,
2205 }))
2206 });
2207 if let Some(request) = request {
2208 if request.await.is_err() && !is_local {
2209 *needs_resync_with_host = true;
2210 break;
2211 }
2212 }
2213 }
2214 }
2215
2216 let mut needs_resync_with_host = false;
2217 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2218
2219 while let Some(changes) = changes.next().await {
2220 let this = this.upgrade(&mut cx)?;
2221 let is_local = this.read_with(&cx, |this, _| this.is_local());
2222
2223 for change in changes {
2224 match change {
2225 BufferOrderedMessage::Operation {
2226 buffer_id,
2227 operation,
2228 } => {
2229 if needs_resync_with_host {
2230 continue;
2231 }
2232
2233 operations_by_buffer_id
2234 .entry(buffer_id)
2235 .or_insert(Vec::new())
2236 .push(operation);
2237 }
2238
2239 BufferOrderedMessage::Resync => {
2240 operations_by_buffer_id.clear();
2241 if this
2242 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))
2243 .await
2244 .is_ok()
2245 {
2246 needs_resync_with_host = false;
2247 }
2248 }
2249
2250 BufferOrderedMessage::LanguageServerUpdate {
2251 language_server_id,
2252 message,
2253 } => {
2254 flush_operations(
2255 &this,
2256 &mut operations_by_buffer_id,
2257 &mut needs_resync_with_host,
2258 is_local,
2259 &cx,
2260 )
2261 .await;
2262
2263 this.read_with(&cx, |this, _| {
2264 if let Some(project_id) = this.remote_id() {
2265 this.client
2266 .send(proto::UpdateLanguageServer {
2267 project_id,
2268 language_server_id: language_server_id.0 as u64,
2269 variant: Some(message),
2270 })
2271 .log_err();
2272 }
2273 });
2274 }
2275 }
2276 }
2277
2278 flush_operations(
2279 &this,
2280 &mut operations_by_buffer_id,
2281 &mut needs_resync_with_host,
2282 is_local,
2283 &cx,
2284 )
2285 .await;
2286 }
2287
2288 None
2289 }
2290
2291 fn on_buffer_event(
2292 &mut self,
2293 buffer: ModelHandle<Buffer>,
2294 event: &BufferEvent,
2295 cx: &mut ModelContext<Self>,
2296 ) -> Option<()> {
2297 if matches!(
2298 event,
2299 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2300 ) {
2301 self.request_buffer_diff_recalculation(&buffer, cx);
2302 }
2303
2304 match event {
2305 BufferEvent::Operation(operation) => {
2306 self.buffer_ordered_messages_tx
2307 .unbounded_send(BufferOrderedMessage::Operation {
2308 buffer_id: buffer.read(cx).remote_id(),
2309 operation: language::proto::serialize_operation(operation),
2310 })
2311 .ok();
2312 }
2313
2314 BufferEvent::Edited { .. } => {
2315 let buffer = buffer.read(cx);
2316 let file = File::from_dyn(buffer.file())?;
2317 let abs_path = file.as_local()?.abs_path(cx);
2318 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2319 let next_snapshot = buffer.text_snapshot();
2320
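                // Notify each matching language server of the edit, using incremental
                // content changes when the server supports them and a full-text update
                // when it only supports full synchronization.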
2321 let language_servers: Vec<_> = self
2322 .language_servers_for_buffer(buffer, cx)
2323 .map(|i| i.1.clone())
2324 .collect();
2325
2326 for language_server in language_servers {
2327 let language_server = language_server.clone();
2328
2329 let buffer_snapshots = self
2330 .buffer_snapshots
2331 .get_mut(&buffer.remote_id())
2332 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2333 let previous_snapshot = buffer_snapshots.last()?;
2334
2335 let build_incremental_change = || {
2336 buffer
2337 .edits_since::<(PointUtf16, usize)>(
2338 previous_snapshot.snapshot.version(),
2339 )
2340 .map(|edit| {
2341 let edit_start = edit.new.start.0;
2342 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2343 let new_text = next_snapshot
2344 .text_for_range(edit.new.start.1..edit.new.end.1)
2345 .collect();
2346 lsp::TextDocumentContentChangeEvent {
2347 range: Some(lsp::Range::new(
2348 point_to_lsp(edit_start),
2349 point_to_lsp(edit_end),
2350 )),
2351 range_length: None,
2352 text: new_text,
2353 }
2354 })
2355 .collect()
2356 };
2357
2358 let document_sync_kind = language_server
2359 .capabilities()
2360 .text_document_sync
2361 .as_ref()
2362 .and_then(|sync| match sync {
2363 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2364 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2365 });
2366
2367 let content_changes: Vec<_> = match document_sync_kind {
2368 Some(lsp::TextDocumentSyncKind::FULL) => {
2369 vec![lsp::TextDocumentContentChangeEvent {
2370 range: None,
2371 range_length: None,
2372 text: next_snapshot.text(),
2373 }]
2374 }
2375 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2376 _ => {
2377 #[cfg(any(test, feature = "test-support"))]
2378 {
2379 build_incremental_change()
2380 }
2381
2382 #[cfg(not(any(test, feature = "test-support")))]
2383 {
2384 continue;
2385 }
2386 }
2387 };
2388
2389 let next_version = previous_snapshot.version + 1;
2390
2391 buffer_snapshots.push(LspBufferSnapshot {
2392 version: next_version,
2393 snapshot: next_snapshot.clone(),
2394 });
2395
2396 language_server
2397 .notify::<lsp::notification::DidChangeTextDocument>(
2398 lsp::DidChangeTextDocumentParams {
2399 text_document: lsp::VersionedTextDocumentIdentifier::new(
2400 uri.clone(),
2401 next_version,
2402 ),
2403 content_changes,
2404 },
2405 )
2406 .log_err();
2407 }
2408 }
2409
2410 BufferEvent::Saved => {
2411 let file = File::from_dyn(buffer.read(cx).file())?;
2412 let worktree_id = file.worktree_id(cx);
2413 let abs_path = file.as_local()?.abs_path(cx);
2414 let text_document = lsp::TextDocumentIdentifier {
2415 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2416 };
2417
2418 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2419 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2420
2421 server
2422 .notify::<lsp::notification::DidSaveTextDocument>(
2423 lsp::DidSaveTextDocumentParams {
2424 text_document: text_document.clone(),
2425 text,
2426 },
2427 )
2428 .log_err();
2429 }
2430
2431 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2432 for language_server_id in language_server_ids {
2433 if let Some(LanguageServerState::Running {
2434 adapter,
2435 simulate_disk_based_diagnostics_completion,
2436 ..
2437 }) = self.language_servers.get_mut(&language_server_id)
2438 {
2439 // After saving a buffer using a language server that doesn't provide
2440 // a disk-based progress token, kick off a timer that will reset every
2441 // time the buffer is saved. If the timer eventually fires, simulate
2442 // disk-based diagnostics being finished so that other pieces of UI
2443 // (e.g., project diagnostics view, diagnostic status bar) can update.
2444 // We don't emit an event right away because the language server might take
2445 // some time to publish diagnostics.
2446 if adapter.disk_based_diagnostics_progress_token.is_none() {
2447 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2448 Duration::from_secs(1);
2449
2450 let task = cx.spawn_weak(|this, mut cx| async move {
2451 cx.background().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2452 if let Some(this) = this.upgrade(&cx) {
2453 this.update(&mut cx, |this, cx| {
2454 this.disk_based_diagnostics_finished(
2455 language_server_id,
2456 cx,
2457 );
2458 this.buffer_ordered_messages_tx
2459 .unbounded_send(
2460 BufferOrderedMessage::LanguageServerUpdate {
2461 language_server_id,
2462                                                    message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2463 },
2464 )
2465 .ok();
2466 });
2467 }
2468 });
2469 *simulate_disk_based_diagnostics_completion = Some(task);
2470 }
2471 }
2472 }
2473 }
2474 BufferEvent::FileHandleChanged => {
2475 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2476 return None;
2477 };
2478
2479 match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
2480 Some(_) => {
2481 return None;
2482 }
2483 None => {
2484 let remote_id = buffer.read(cx).remote_id();
2485 self.local_buffer_ids_by_entry_id
2486 .insert(file.entry_id, remote_id);
2487
2488 self.local_buffer_ids_by_path.insert(
2489 ProjectPath {
2490 worktree_id: file.worktree_id(cx),
2491 path: file.path.clone(),
2492 },
2493 remote_id,
2494 );
2495 }
2496 }
2497 }
2498 _ => {}
2499 }
2500
2501 None
2502 }
2503
2504 fn request_buffer_diff_recalculation(
2505 &mut self,
2506 buffer: &ModelHandle<Buffer>,
2507 cx: &mut ModelContext<Self>,
2508 ) {
2509 self.buffers_needing_diff.insert(buffer.downgrade());
2510 let first_insertion = self.buffers_needing_diff.len() == 1;
2511
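        // When no git gutter debounce is configured, recalculate on the next turn of
        // the event loop (scheduled only when the first buffer is added to the pending
        // set); otherwise debounce using the configured delay, clamped to a 50ms minimum.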
2512 let settings = settings::get::<ProjectSettings>(cx);
2513 let delay = if let Some(delay) = settings.git.gutter_debounce {
2514 delay
2515 } else {
2516 if first_insertion {
2517 let this = cx.weak_handle();
2518 cx.defer(move |cx| {
2519 if let Some(this) = this.upgrade(cx) {
2520 this.update(cx, |this, cx| {
2521 this.recalculate_buffer_diffs(cx).detach();
2522 });
2523 }
2524 });
2525 }
2526 return;
2527 };
2528
2529 const MIN_DELAY: u64 = 50;
2530 let delay = delay.max(MIN_DELAY);
2531 let duration = Duration::from_millis(delay);
2532
2533 self.git_diff_debouncer
2534 .fire_new(duration, cx, move |this, cx| {
2535 this.recalculate_buffer_diffs(cx)
2536 });
2537 }
2538
2539 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2540 cx.spawn(|this, mut cx| async move {
2541 let buffers: Vec<_> = this.update(&mut cx, |this, _| {
2542 this.buffers_needing_diff.drain().collect()
2543 });
2544
2545 let tasks: Vec<_> = this.update(&mut cx, |_, cx| {
2546 buffers
2547 .iter()
2548 .filter_map(|buffer| {
2549 let buffer = buffer.upgrade(cx)?;
2550 buffer.update(cx, |buffer, cx| buffer.git_diff_recalc(cx))
2551 })
2552 .collect()
2553 });
2554
2555 futures::future::join_all(tasks).await;
2556
2557 this.update(&mut cx, |this, cx| {
2558 if !this.buffers_needing_diff.is_empty() {
2559 this.recalculate_buffer_diffs(cx).detach();
2560 } else {
2561 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2562 for buffer in buffers {
2563 if let Some(buffer) = buffer.upgrade(cx) {
2564 buffer.update(cx, |_, cx| cx.notify());
2565 }
2566 }
2567 }
2568 });
2569 })
2570 }
2571
2572 fn language_servers_for_worktree(
2573 &self,
2574 worktree_id: WorktreeId,
2575 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2576 self.language_server_ids
2577 .iter()
2578 .filter_map(move |((language_server_worktree_id, _), id)| {
2579 if *language_server_worktree_id == worktree_id {
2580 if let Some(LanguageServerState::Running {
2581 adapter,
2582 language,
2583 server,
2584 ..
2585 }) = self.language_servers.get(id)
2586 {
2587 return Some((adapter, language, server));
2588 }
2589 }
2590 None
2591 })
2592 }
2593
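    // Keeps buffer languages in sync with the language registry: when the registry
    // reloads, languages are cleared and re-detected; plain-text buffers are
    // re-checked for a language and buffers with unknown injections are reparsed.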
2594 fn maintain_buffer_languages(
2595 languages: Arc<LanguageRegistry>,
2596 cx: &mut ModelContext<Project>,
2597 ) -> Task<()> {
2598 let mut subscription = languages.subscribe();
2599 let mut prev_reload_count = languages.reload_count();
2600 cx.spawn_weak(|project, mut cx| async move {
2601 while let Some(()) = subscription.next().await {
2602 if let Some(project) = project.upgrade(&cx) {
2603 // If the language registry has been reloaded, then remove and
2604 // re-assign the languages on all open buffers.
2605 let reload_count = languages.reload_count();
2606 if reload_count > prev_reload_count {
2607 prev_reload_count = reload_count;
2608 project.update(&mut cx, |this, cx| {
2609 let buffers = this
2610 .opened_buffers
2611 .values()
2612 .filter_map(|b| b.upgrade(cx))
2613 .collect::<Vec<_>>();
2614 for buffer in buffers {
2615 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() {
2616 this.unregister_buffer_from_language_servers(&buffer, &f, cx);
2617 buffer.update(cx, |buffer, cx| buffer.set_language(None, cx));
2618 }
2619 }
2620 });
2621 }
2622
2623 project.update(&mut cx, |project, cx| {
2624 let mut plain_text_buffers = Vec::new();
2625 let mut buffers_with_unknown_injections = Vec::new();
2626 for buffer in project.opened_buffers.values() {
2627 if let Some(handle) = buffer.upgrade(cx) {
2628 let buffer = &handle.read(cx);
2629 if buffer.language().is_none()
2630 || buffer.language() == Some(&*language::PLAIN_TEXT)
2631 {
2632 plain_text_buffers.push(handle);
2633 } else if buffer.contains_unknown_injections() {
2634 buffers_with_unknown_injections.push(handle);
2635 }
2636 }
2637 }
2638
2639 for buffer in plain_text_buffers {
2640 project.detect_language_for_buffer(&buffer, cx);
2641 project.register_buffer_with_language_servers(&buffer, cx);
2642 }
2643
2644 for buffer in buffers_with_unknown_injections {
2645 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2646 }
2647 });
2648 }
2649 }
2650 })
2651 }
2652
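    // Observes the global settings store and pushes a
    // `workspace/didChangeConfiguration` notification to every running language
    // server whenever settings change.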
2653 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<()> {
2654 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2655 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2656
2657 let settings_observation = cx.observe_global::<SettingsStore, _>(move |_, _| {
2658 *settings_changed_tx.borrow_mut() = ();
2659 });
2660
2661 cx.spawn_weak(|this, mut cx| async move {
2662 while let Some(_) = settings_changed_rx.next().await {
2663 let Some(this) = this.upgrade(&cx) else {
2664 break;
2665 };
2666
2667 let servers: Vec<_> = this.read_with(&cx, |this, _| {
2668 this.language_servers
2669 .values()
2670 .filter_map(|state| match state {
2671 LanguageServerState::Starting(_) => None,
2672 LanguageServerState::Running {
2673 adapter, server, ..
2674 } => Some((adapter.clone(), server.clone())),
2675 })
2676 .collect()
2677 });
2678
2679 for (adapter, server) in servers {
2680 let workspace_config =
2681 cx.update(|cx| adapter.workspace_configuration(cx)).await;
2682 server
2683 .notify::<lsp::notification::DidChangeConfiguration>(
2684 lsp::DidChangeConfigurationParams {
2685 settings: workspace_config.clone(),
2686 },
2687 )
2688 .ok();
2689 }
2690 }
2691
2692 drop(settings_observation);
2693 })
2694 }
2695
2696 fn detect_language_for_buffer(
2697 &mut self,
2698 buffer_handle: &ModelHandle<Buffer>,
2699 cx: &mut ModelContext<Self>,
2700 ) -> Option<()> {
2701 // If the buffer has a language, set it and start the language server if we haven't already.
2702 let buffer = buffer_handle.read(cx);
2703 let full_path = buffer.file()?.full_path(cx);
2704 let content = buffer.as_rope();
2705 let new_language = self
2706 .languages
2707 .language_for_file(&full_path, Some(content))
2708 .now_or_never()?
2709 .ok()?;
2710 self.set_language_for_buffer(buffer_handle, new_language, cx);
2711 None
2712 }
2713
2714 pub fn set_language_for_buffer(
2715 &mut self,
2716 buffer: &ModelHandle<Buffer>,
2717 new_language: Arc<Language>,
2718 cx: &mut ModelContext<Self>,
2719 ) {
2720 buffer.update(cx, |buffer, cx| {
2721 if buffer.language().map_or(true, |old_language| {
2722 !Arc::ptr_eq(old_language, &new_language)
2723 }) {
2724 buffer.set_language(Some(new_language.clone()), cx);
2725 }
2726 });
2727
2728 let buffer_file = buffer.read(cx).file().cloned();
2729 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2730 let buffer_file = File::from_dyn(buffer_file.as_ref());
2731 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2732
2733 self.install_default_formatters(worktree, &new_language, &settings, cx);
2734 if let Some(file) = buffer_file {
2735 let worktree = file.worktree.clone();
2736 if let Some(tree) = worktree.read(cx).as_local() {
2737 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2738 }
2739 }
2740 }
2741
2742 fn start_language_servers(
2743 &mut self,
2744 worktree: &ModelHandle<Worktree>,
2745 worktree_path: Arc<Path>,
2746 language: Arc<Language>,
2747 cx: &mut ModelContext<Self>,
2748 ) {
2749 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2750 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2751 if !settings.enable_language_server {
2752 return;
2753 }
2754
2755 let worktree_id = worktree.read(cx).id();
2756 for adapter in language.lsp_adapters() {
2757 self.start_language_server(
2758 worktree_id,
2759 worktree_path.clone(),
2760 adapter.clone(),
2761 language.clone(),
2762 cx,
2763 );
2764 }
2765 }
2766
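    // Starts a language server for the given worktree and adapter, unless one is
    // already tracked for that (worktree, server name) key or the adapter has hit
    // the reinstallation limit. Startup runs as a `Starting` task; on failure, the
    // server is considered for reinstallation via `check_errored_server`.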
2767 fn start_language_server(
2768 &mut self,
2769 worktree_id: WorktreeId,
2770 worktree_path: Arc<Path>,
2771 adapter: Arc<CachedLspAdapter>,
2772 language: Arc<Language>,
2773 cx: &mut ModelContext<Self>,
2774 ) {
2775 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2776 return;
2777 }
2778
2779 let key = (worktree_id, adapter.name.clone());
2780 if self.language_server_ids.contains_key(&key) {
2781 return;
2782 }
2783
2784 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2785 let pending_server = match self.languages.create_pending_language_server(
2786 stderr_capture.clone(),
2787 language.clone(),
2788 adapter.clone(),
2789 worktree_path,
2790 ProjectLspAdapterDelegate::new(self, cx),
2791 cx,
2792 ) {
2793 Some(pending_server) => pending_server,
2794 None => return,
2795 };
2796
2797 let project_settings = settings::get::<ProjectSettings>(cx);
2798 let lsp = project_settings.lsp.get(&adapter.name.0);
2799 let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
2800
2801 let server_id = pending_server.server_id;
2802 let container_dir = pending_server.container_dir.clone();
2803 let state = LanguageServerState::Starting({
2804 let adapter = adapter.clone();
2805 let server_name = adapter.name.0.clone();
2806 let language = language.clone();
2807 let key = key.clone();
2808
2809 cx.spawn_weak(|this, mut cx| async move {
2810 let result = Self::setup_and_insert_language_server(
2811 this,
2812 override_options,
2813 pending_server,
2814 adapter.clone(),
2815 language.clone(),
2816 server_id,
2817 key,
2818 &mut cx,
2819 )
2820 .await;
2821
2822 match result {
2823 Ok(server) => {
2824 stderr_capture.lock().take();
2825 Some(server)
2826 }
2827
2828 Err(err) => {
2829 log::error!("failed to start language server {server_name:?}: {err}");
2830 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2831
2832 let this = this.upgrade(&cx)?;
2833 let container_dir = container_dir?;
2834
2835 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2836 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2837 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2838 log::error!(
2839 "Hit {max} max reinstallation attempts for {server_name:?}"
2840 );
2841 return None;
2842 }
2843
2844 let installation_test_binary = adapter
2845 .installation_test_binary(container_dir.to_path_buf())
2846 .await;
2847
2848 this.update(&mut cx, |_, cx| {
2849 Self::check_errored_server(
2850 language,
2851 adapter,
2852 server_id,
2853 installation_test_binary,
2854 cx,
2855 )
2856 });
2857
2858 None
2859 }
2860 }
2861 })
2862 });
2863
2864 self.language_servers.insert(server_id, state);
2865 self.language_server_ids.insert(key, server_id);
2866 }
2867
2868 fn reinstall_language_server(
2869 &mut self,
2870 language: Arc<Language>,
2871 adapter: Arc<CachedLspAdapter>,
2872 server_id: LanguageServerId,
2873 cx: &mut ModelContext<Self>,
2874 ) -> Option<Task<()>> {
2875 log::info!("beginning to reinstall server");
2876
2877 let existing_server = match self.language_servers.remove(&server_id) {
2878 Some(LanguageServerState::Running { server, .. }) => Some(server),
2879 _ => None,
2880 };
2881
2882 for worktree in &self.worktrees {
2883 if let Some(worktree) = worktree.upgrade(cx) {
2884 let key = (worktree.read(cx).id(), adapter.name.clone());
2885 self.language_server_ids.remove(&key);
2886 }
2887 }
2888
2889 Some(cx.spawn(move |this, mut cx| async move {
2890 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2891 log::info!("shutting down existing server");
2892 task.await;
2893 }
2894
2895            // TODO: This is race-safe with regard to preventing new instances from
2896            // starting while the container is being deleted, but existing instances in
2897            // other projects will be left in a broken state.
2898 this.update(&mut cx, |this, cx| {
2899 this.languages.delete_server_container(adapter.clone(), cx)
2900 })
2901 .await;
2902
2903 this.update(&mut cx, |this, mut cx| {
2904 let worktrees = this.worktrees.clone();
2905 for worktree in worktrees {
2906 let worktree = match worktree.upgrade(cx) {
2907 Some(worktree) => worktree.read(cx),
2908 None => continue,
2909 };
2910 let worktree_id = worktree.id();
2911 let root_path = worktree.abs_path();
2912
2913 this.start_language_server(
2914 worktree_id,
2915 root_path,
2916 adapter.clone(),
2917 language.clone(),
2918 &mut cx,
2919 );
2920 }
2921 })
2922 }))
2923 }
2924
2925 async fn setup_and_insert_language_server(
2926 this: WeakModelHandle<Self>,
2927 override_initialization_options: Option<serde_json::Value>,
2928 pending_server: PendingLanguageServer,
2929 adapter: Arc<CachedLspAdapter>,
2930 language: Arc<Language>,
2931 server_id: LanguageServerId,
2932 key: (WorktreeId, LanguageServerName),
2933 cx: &mut AsyncAppContext,
2934 ) -> Result<Arc<LanguageServer>> {
2935 let language_server = Self::setup_pending_language_server(
2936 this,
2937 override_initialization_options,
2938 pending_server,
2939 adapter.clone(),
2940 server_id,
2941 cx,
2942 )
2943 .await?;
2944
2945 let this = match this.upgrade(cx) {
2946 Some(this) => this,
2947 None => return Err(anyhow!("failed to upgrade project handle")),
2948 };
2949
2950 this.update(cx, |this, cx| {
2951 this.insert_newly_running_language_server(
2952 language,
2953 adapter,
2954 language_server.clone(),
2955 server_id,
2956 key,
2957 cx,
2958 )
2959 })?;
2960
2961 Ok(language_server)
2962 }
2963
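    // Waits for the pending server binary, wires up handlers for diagnostics,
    // workspace configuration, capability registration, workspace edits, inlay
    // hint refreshes, and progress notifications, and then initializes the server
    // with any user-provided initialization options merged in.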
2964 async fn setup_pending_language_server(
2965 this: WeakModelHandle<Self>,
2966 override_options: Option<serde_json::Value>,
2967 pending_server: PendingLanguageServer,
2968 adapter: Arc<CachedLspAdapter>,
2969 server_id: LanguageServerId,
2970 cx: &mut AsyncAppContext,
2971 ) -> Result<Arc<LanguageServer>> {
2972 let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx)).await;
2973 let language_server = pending_server.task.await?;
2974
2975 language_server
2976 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2977 let adapter = adapter.clone();
2978 move |mut params, mut cx| {
2979 let this = this;
2980 let adapter = adapter.clone();
2981 if let Some(this) = this.upgrade(&cx) {
2982 adapter.process_diagnostics(&mut params);
2983 this.update(&mut cx, |this, cx| {
2984 this.update_diagnostics(
2985 server_id,
2986 params,
2987 &adapter.disk_based_diagnostic_sources,
2988 cx,
2989 )
2990 .log_err();
2991 });
2992 }
2993 }
2994 })
2995 .detach();
2996
2997 language_server
2998 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2999 let adapter = adapter.clone();
3000 move |params, mut cx| {
3001 let adapter = adapter.clone();
3002 async move {
3003 let workspace_config =
3004 cx.update(|cx| adapter.workspace_configuration(cx)).await;
3005 Ok(params
3006 .items
3007 .into_iter()
3008 .map(|item| {
3009 if let Some(section) = &item.section {
3010 workspace_config
3011 .get(section)
3012 .cloned()
3013 .unwrap_or(serde_json::Value::Null)
3014 } else {
3015 workspace_config.clone()
3016 }
3017 })
3018 .collect())
3019 }
3020 }
3021 })
3022 .detach();
3023
3024 // Even though we don't have handling for these requests, respond to them to
3025 // avoid stalling any language server like `gopls` which waits for a response
3026 // to these requests when initializing.
3027 language_server
3028 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>(
3029 move |params, mut cx| async move {
3030 if let Some(this) = this.upgrade(&cx) {
3031 this.update(&mut cx, |this, _| {
3032 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3033 {
3034 if let lsp::NumberOrString::String(token) = params.token {
3035 status.progress_tokens.insert(token);
3036 }
3037 }
3038 });
3039 }
3040 Ok(())
3041 },
3042 )
3043 .detach();
3044
3045 language_server
3046 .on_request::<lsp::request::RegisterCapability, _, _>({
3047 move |params, mut cx| async move {
3048 let this = this
3049 .upgrade(&cx)
3050 .ok_or_else(|| anyhow!("project dropped"))?;
3051 for reg in params.registrations {
3052 if reg.method == "workspace/didChangeWatchedFiles" {
3053 if let Some(options) = reg.register_options {
3054 let options = serde_json::from_value(options)?;
3055 this.update(&mut cx, |this, cx| {
3056 this.on_lsp_did_change_watched_files(server_id, options, cx);
3057 });
3058 }
3059 }
3060 }
3061 Ok(())
3062 }
3063 })
3064 .detach();
3065
3066 language_server
3067 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3068 let adapter = adapter.clone();
3069 move |params, cx| {
3070 Self::on_lsp_workspace_edit(this, params, server_id, adapter.clone(), cx)
3071 }
3072 })
3073 .detach();
3074
3075 language_server
3076 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3077 move |(), mut cx| async move {
3078 let this = this
3079 .upgrade(&cx)
3080 .ok_or_else(|| anyhow!("project dropped"))?;
3081 this.update(&mut cx, |project, cx| {
3082 cx.emit(Event::RefreshInlayHints);
3083 project.remote_id().map(|project_id| {
3084 project.client.send(proto::RefreshInlayHints { project_id })
3085 })
3086 })
3087 .transpose()?;
3088 Ok(())
3089 }
3090 })
3091 .detach();
3092
3093 let disk_based_diagnostics_progress_token =
3094 adapter.disk_based_diagnostics_progress_token.clone();
3095
3096 language_server
3097 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3098 if let Some(this) = this.upgrade(&cx) {
3099 this.update(&mut cx, |this, cx| {
3100 this.on_lsp_progress(
3101 params,
3102 server_id,
3103 disk_based_diagnostics_progress_token.clone(),
3104 cx,
3105 );
3106 });
3107 }
3108 })
3109 .detach();
3110
3111 let mut initialization_options = adapter.adapter.initialization_options().await;
3112 match (&mut initialization_options, override_options) {
3113 (Some(initialization_options), Some(override_options)) => {
3114 merge_json_value_into(override_options, initialization_options);
3115 }
3116 (None, override_options) => initialization_options = override_options,
3117 _ => {}
3118 }
3119
3120 let language_server = language_server.initialize(initialization_options).await?;
3121
3122 language_server
3123 .notify::<lsp::notification::DidChangeConfiguration>(
3124 lsp::DidChangeConfigurationParams {
3125 settings: workspace_config,
3126 },
3127 )
3128 .ok();
3129
3130 Ok(language_server)
3131 }
3132
3133 fn insert_newly_running_language_server(
3134 &mut self,
3135 language: Arc<Language>,
3136 adapter: Arc<CachedLspAdapter>,
3137 language_server: Arc<LanguageServer>,
3138 server_id: LanguageServerId,
3139 key: (WorktreeId, LanguageServerName),
3140 cx: &mut ModelContext<Self>,
3141 ) -> Result<()> {
3142        // If the language server registered for this key doesn't match this server's id,
3143        // don't store the server; it will be dropped, killing the process.
3144 if self
3145 .language_server_ids
3146 .get(&key)
3147 .map(|id| id != &server_id)
3148 .unwrap_or(false)
3149 {
3150 return Ok(());
3151 }
3152
3153        // Update the language_servers collection with the Running variant of
3154        // LanguageServerState, indicating that the server is up and ready.
3155 self.language_servers.insert(
3156 server_id,
3157 LanguageServerState::Running {
3158 adapter: adapter.clone(),
3159 language: language.clone(),
3160 watched_paths: Default::default(),
3161 server: language_server.clone(),
3162 simulate_disk_based_diagnostics_completion: None,
3163 },
3164 );
3165
3166 self.language_server_statuses.insert(
3167 server_id,
3168 LanguageServerStatus {
3169 name: language_server.name().to_string(),
3170 pending_work: Default::default(),
3171 has_pending_diagnostic_updates: false,
3172 progress_tokens: Default::default(),
3173 },
3174 );
3175
3176 cx.emit(Event::LanguageServerAdded(server_id));
3177
3178 if let Some(project_id) = self.remote_id() {
3179 self.client.send(proto::StartLanguageServer {
3180 project_id,
3181 server: Some(proto::LanguageServer {
3182 id: server_id.0 as u64,
3183 name: language_server.name().to_string(),
3184 }),
3185 })?;
3186 }
3187
3188 // Tell the language server about every open buffer in the worktree that matches the language.
3189 for buffer in self.opened_buffers.values() {
3190 if let Some(buffer_handle) = buffer.upgrade(cx) {
3191 let buffer = buffer_handle.read(cx);
3192 let file = match File::from_dyn(buffer.file()) {
3193 Some(file) => file,
3194 None => continue,
3195 };
3196 let language = match buffer.language() {
3197 Some(language) => language,
3198 None => continue,
3199 };
3200
3201 if file.worktree.read(cx).id() != key.0
3202 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3203 {
3204 continue;
3205 }
3206
3207 let file = match file.as_local() {
3208 Some(file) => file,
3209 None => continue,
3210 };
3211
3212 let versions = self
3213 .buffer_snapshots
3214 .entry(buffer.remote_id())
3215 .or_default()
3216 .entry(server_id)
3217 .or_insert_with(|| {
3218 vec![LspBufferSnapshot {
3219 version: 0,
3220 snapshot: buffer.text_snapshot(),
3221 }]
3222 });
3223
3224 let snapshot = versions.last().unwrap();
3225 let version = snapshot.version;
3226 let initial_snapshot = &snapshot.snapshot;
3227 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3228 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3229 lsp::DidOpenTextDocumentParams {
3230 text_document: lsp::TextDocumentItem::new(
3231 uri,
3232 adapter
3233 .language_ids
3234 .get(language.name().as_ref())
3235 .cloned()
3236 .unwrap_or_default(),
3237 version,
3238 initial_snapshot.text(),
3239 ),
3240 },
3241 )?;
3242
3243 buffer_handle.update(cx, |buffer, cx| {
3244 buffer.set_completion_triggers(
3245 language_server
3246 .capabilities()
3247 .completion_provider
3248 .as_ref()
3249 .and_then(|provider| provider.trigger_characters.clone())
3250 .unwrap_or_default(),
3251 cx,
3252 )
3253 });
3254 }
3255 }
3256
3257 cx.notify();
3258 Ok(())
3259 }
3260
3261    // Returns the root path of the stopped server, along with the IDs of all
3262    // worktrees that no longer have a language server for the given adapter.
3263 fn stop_language_server(
3264 &mut self,
3265 worktree_id: WorktreeId,
3266 adapter_name: LanguageServerName,
3267 cx: &mut ModelContext<Self>,
3268 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
3269 let key = (worktree_id, adapter_name);
3270 if let Some(server_id) = self.language_server_ids.remove(&key) {
3271 log::info!("stopping language server {}", key.1 .0);
3272
3273 // Remove other entries for this language server as well
3274 let mut orphaned_worktrees = vec![worktree_id];
3275 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3276 for other_key in other_keys {
3277 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3278 self.language_server_ids.remove(&other_key);
3279 orphaned_worktrees.push(other_key.0);
3280 }
3281 }
3282
3283 for buffer in self.opened_buffers.values() {
3284 if let Some(buffer) = buffer.upgrade(cx) {
3285 buffer.update(cx, |buffer, cx| {
3286 buffer.update_diagnostics(server_id, Default::default(), cx);
3287 });
3288 }
3289 }
3290 for worktree in &self.worktrees {
3291 if let Some(worktree) = worktree.upgrade(cx) {
3292 worktree.update(cx, |worktree, cx| {
3293 if let Some(worktree) = worktree.as_local_mut() {
3294 worktree.clear_diagnostics_for_language_server(server_id, cx);
3295 }
3296 });
3297 }
3298 }
3299
3300 self.language_server_statuses.remove(&server_id);
3301 cx.notify();
3302
3303 let server_state = self.language_servers.remove(&server_id);
3304 cx.emit(Event::LanguageServerRemoved(server_id));
3305 cx.spawn_weak(|this, mut cx| async move {
3306 let mut root_path = None;
3307
3308 let server = match server_state {
3309 Some(LanguageServerState::Starting(task)) => task.await,
3310 Some(LanguageServerState::Running { server, .. }) => Some(server),
3311 None => None,
3312 };
3313
3314 if let Some(server) = server {
3315 root_path = Some(server.root_path().clone());
3316 if let Some(shutdown) = server.shutdown() {
3317 shutdown.await;
3318 }
3319 }
3320
3321 if let Some(this) = this.upgrade(&cx) {
3322 this.update(&mut cx, |this, cx| {
3323 this.language_server_statuses.remove(&server_id);
3324 cx.notify();
3325 });
3326 }
3327
3328 (root_path, orphaned_worktrees)
3329 })
3330 } else {
3331 Task::ready((None, Vec::new()))
3332 }
3333 }
3334
3335 pub fn restart_language_servers_for_buffers(
3336 &mut self,
3337 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
3338 cx: &mut ModelContext<Self>,
3339 ) -> Option<()> {
3340 let language_server_lookup_info: HashSet<(ModelHandle<Worktree>, Arc<Language>)> = buffers
3341 .into_iter()
3342 .filter_map(|buffer| {
3343 let buffer = buffer.read(cx);
3344 let file = File::from_dyn(buffer.file())?;
3345 let full_path = file.full_path(cx);
3346 let language = self
3347 .languages
3348 .language_for_file(&full_path, Some(buffer.as_rope()))
3349 .now_or_never()?
3350 .ok()?;
3351 Some((file.worktree.clone(), language))
3352 })
3353 .collect();
3354 for (worktree, language) in language_server_lookup_info {
3355 self.restart_language_servers(worktree, language, cx);
3356 }
3357
3358 None
3359 }
3360
3361    // TODO: This will break when the adapter's root paths and worktrees are not equal
3362 fn restart_language_servers(
3363 &mut self,
3364 worktree: ModelHandle<Worktree>,
3365 language: Arc<Language>,
3366 cx: &mut ModelContext<Self>,
3367 ) {
3368 let worktree_id = worktree.read(cx).id();
3369 let fallback_path = worktree.read(cx).abs_path();
3370
3371 let mut stops = Vec::new();
3372 for adapter in language.lsp_adapters() {
3373 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
3374 }
3375
3376 if stops.is_empty() {
3377 return;
3378 }
3379 let mut stops = stops.into_iter();
3380
3381 cx.spawn_weak(|this, mut cx| async move {
3382 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
3383 for stop in stops {
3384 let (_, worktrees) = stop.await;
3385 orphaned_worktrees.extend_from_slice(&worktrees);
3386 }
3387
3388 let this = match this.upgrade(&cx) {
3389 Some(this) => this,
3390 None => return,
3391 };
3392
3393 this.update(&mut cx, |this, cx| {
3394                // Attempt to restart using the original server path. Fall back to the
3395                // passed-in path if the root path could not be retrieved.
3396 let root_path = original_root_path
3397 .map(|path_buf| Arc::from(path_buf.as_path()))
3398 .unwrap_or(fallback_path);
3399
3400 this.start_language_servers(&worktree, root_path, language.clone(), cx);
3401
3402                // Look up the new server ids and set them for each of the orphaned worktrees
3403 for adapter in language.lsp_adapters() {
3404 if let Some(new_server_id) = this
3405 .language_server_ids
3406 .get(&(worktree_id, adapter.name.clone()))
3407 .cloned()
3408 {
3409 for &orphaned_worktree in &orphaned_worktrees {
3410 this.language_server_ids
3411 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
3412 }
3413 }
3414 }
3415 });
3416 })
3417 .detach();
3418 }
3419
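    // Runs the adapter's installation test binary to decide whether the server
    // installation is broken: a missing binary or a non-zero exit triggers a
    // reinstall, while hitting the timeout is treated as success.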
3420 fn check_errored_server(
3421 language: Arc<Language>,
3422 adapter: Arc<CachedLspAdapter>,
3423 server_id: LanguageServerId,
3424 installation_test_binary: Option<LanguageServerBinary>,
3425 cx: &mut ModelContext<Self>,
3426 ) {
3427 if !adapter.can_be_reinstalled() {
3428 log::info!(
3429 "Validation check requested for {:?} but it cannot be reinstalled",
3430 adapter.name.0
3431 );
3432 return;
3433 }
3434
3435 cx.spawn(|this, mut cx| async move {
3436 log::info!("About to spawn test binary");
3437
3438            // A missing test binary counts as a failure.
3439 let process = installation_test_binary.and_then(|binary| {
3440 smol::process::Command::new(&binary.path)
3441 .current_dir(&binary.path)
3442 .args(binary.arguments)
3443 .stdin(Stdio::piped())
3444 .stdout(Stdio::piped())
3445 .stderr(Stdio::inherit())
3446 .kill_on_drop(true)
3447 .spawn()
3448 .ok()
3449 });
3450
3451 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3452 let mut timeout = cx.background().timer(PROCESS_TIMEOUT).fuse();
3453
3454 let mut errored = false;
3455 if let Some(mut process) = process {
3456 futures::select! {
3457 status = process.status().fuse() => match status {
3458 Ok(status) => errored = !status.success(),
3459 Err(_) => errored = true,
3460 },
3461
3462 _ = timeout => {
3463                        log::info!("test binary timed out; this counts as a success");
3464 _ = process.kill();
3465 }
3466 }
3467 } else {
3468 log::warn!("test binary failed to launch");
3469 errored = true;
3470 }
3471
3472 if errored {
3473 log::warn!("test binary check failed");
3474 let task = this.update(&mut cx, move |this, mut cx| {
3475 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3476 });
3477
3478 if let Some(task) = task {
3479 task.await;
3480 }
3481 }
3482 })
3483 .detach();
3484 }
3485
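    // Translates `$/progress` notifications into project state. String tokens that
    // start with the adapter's disk-based diagnostics token toggle the pending
    // diagnostics state; other registered tokens update the server's pending work
    // entries. Both kinds of updates are also forwarded to remote collaborators.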
3486 fn on_lsp_progress(
3487 &mut self,
3488 progress: lsp::ProgressParams,
3489 language_server_id: LanguageServerId,
3490 disk_based_diagnostics_progress_token: Option<String>,
3491 cx: &mut ModelContext<Self>,
3492 ) {
3493 let token = match progress.token {
3494 lsp::NumberOrString::String(token) => token,
3495 lsp::NumberOrString::Number(token) => {
3496 log::info!("skipping numeric progress token {}", token);
3497 return;
3498 }
3499 };
3500 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3501 let language_server_status =
3502 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3503 status
3504 } else {
3505 return;
3506 };
3507
3508 if !language_server_status.progress_tokens.contains(&token) {
3509 return;
3510 }
3511
3512 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3513 .as_ref()
3514 .map_or(false, |disk_based_token| {
3515 token.starts_with(disk_based_token)
3516 });
3517
3518 match progress {
3519 lsp::WorkDoneProgress::Begin(report) => {
3520 if is_disk_based_diagnostics_progress {
3521 language_server_status.has_pending_diagnostic_updates = true;
3522 self.disk_based_diagnostics_started(language_server_id, cx);
3523 self.buffer_ordered_messages_tx
3524 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3525 language_server_id,
3526 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3527 })
3528 .ok();
3529 } else {
3530 self.on_lsp_work_start(
3531 language_server_id,
3532 token.clone(),
3533 LanguageServerProgress {
3534 message: report.message.clone(),
3535 percentage: report.percentage.map(|p| p as usize),
3536 last_update_at: Instant::now(),
3537 },
3538 cx,
3539 );
3540 self.buffer_ordered_messages_tx
3541 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3542 language_server_id,
3543 message: proto::update_language_server::Variant::WorkStart(
3544 proto::LspWorkStart {
3545 token,
3546 message: report.message,
3547 percentage: report.percentage.map(|p| p as u32),
3548 },
3549 ),
3550 })
3551 .ok();
3552 }
3553 }
3554 lsp::WorkDoneProgress::Report(report) => {
3555 if !is_disk_based_diagnostics_progress {
3556 self.on_lsp_work_progress(
3557 language_server_id,
3558 token.clone(),
3559 LanguageServerProgress {
3560 message: report.message.clone(),
3561 percentage: report.percentage.map(|p| p as usize),
3562 last_update_at: Instant::now(),
3563 },
3564 cx,
3565 );
3566 self.buffer_ordered_messages_tx
3567 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3568 language_server_id,
3569 message: proto::update_language_server::Variant::WorkProgress(
3570 proto::LspWorkProgress {
3571 token,
3572 message: report.message,
3573 percentage: report.percentage.map(|p| p as u32),
3574 },
3575 ),
3576 })
3577 .ok();
3578 }
3579 }
3580 lsp::WorkDoneProgress::End(_) => {
3581 language_server_status.progress_tokens.remove(&token);
3582
3583 if is_disk_based_diagnostics_progress {
3584 language_server_status.has_pending_diagnostic_updates = false;
3585 self.disk_based_diagnostics_finished(language_server_id, cx);
3586 self.buffer_ordered_messages_tx
3587 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3588 language_server_id,
3589 message:
3590 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3591 Default::default(),
3592 ),
3593 })
3594 .ok();
3595 } else {
3596 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3597 self.buffer_ordered_messages_tx
3598 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3599 language_server_id,
3600 message: proto::update_language_server::Variant::WorkEnd(
3601 proto::LspWorkEnd { token },
3602 ),
3603 })
3604 .ok();
3605 }
3606 }
3607 }
3608 }
3609
3610 fn on_lsp_work_start(
3611 &mut self,
3612 language_server_id: LanguageServerId,
3613 token: String,
3614 progress: LanguageServerProgress,
3615 cx: &mut ModelContext<Self>,
3616 ) {
3617 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3618 status.pending_work.insert(token, progress);
3619 cx.notify();
3620 }
3621 }
3622
3623 fn on_lsp_work_progress(
3624 &mut self,
3625 language_server_id: LanguageServerId,
3626 token: String,
3627 progress: LanguageServerProgress,
3628 cx: &mut ModelContext<Self>,
3629 ) {
3630 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3631 let entry = status
3632 .pending_work
3633 .entry(token)
3634 .or_insert(LanguageServerProgress {
3635 message: Default::default(),
3636 percentage: Default::default(),
3637 last_update_at: progress.last_update_at,
3638 });
3639 if progress.message.is_some() {
3640 entry.message = progress.message;
3641 }
3642 if progress.percentage.is_some() {
3643 entry.percentage = progress.percentage;
3644 }
3645 entry.last_update_at = progress.last_update_at;
3646 cx.notify();
3647 }
3648 }
3649
3650 fn on_lsp_work_end(
3651 &mut self,
3652 language_server_id: LanguageServerId,
3653 token: String,
3654 cx: &mut ModelContext<Self>,
3655 ) {
3656 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3657 cx.emit(Event::RefreshInlayHints);
3658 status.pending_work.remove(&token);
3659 cx.notify();
3660 }
3661 }
3662
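    // Builds a glob set per worktree from the server's `didChangeWatchedFiles`
    // registration, translating absolute and relative glob patterns into paths
    // relative to each worktree root and expanding the worktree's scanned prefix
    // to cover the literal portion of each pattern.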
3663 fn on_lsp_did_change_watched_files(
3664 &mut self,
3665 language_server_id: LanguageServerId,
3666 params: DidChangeWatchedFilesRegistrationOptions,
3667 cx: &mut ModelContext<Self>,
3668 ) {
3669 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3670 self.language_servers.get_mut(&language_server_id)
3671 {
3672 let mut builders = HashMap::default();
3673 for watcher in params.watchers {
3674 for worktree in &self.worktrees {
3675 if let Some(worktree) = worktree.upgrade(cx) {
3676 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3677 if let Some(abs_path) = tree.abs_path().to_str() {
3678 let relative_glob_pattern = match &watcher.glob_pattern {
3679 lsp::GlobPattern::String(s) => s
3680 .strip_prefix(abs_path)
3681 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3682 lsp::GlobPattern::Relative(rp) => {
3683 let base_uri = match &rp.base_uri {
3684 lsp::OneOf::Left(workspace_folder) => {
3685 &workspace_folder.uri
3686 }
3687 lsp::OneOf::Right(base_uri) => base_uri,
3688 };
3689 base_uri.to_file_path().ok().and_then(|file_path| {
3690 (file_path.to_str() == Some(abs_path))
3691 .then_some(rp.pattern.as_str())
3692 })
3693 }
3694 };
3695 if let Some(relative_glob_pattern) = relative_glob_pattern {
3696 let literal_prefix =
3697 glob_literal_prefix(&relative_glob_pattern);
3698 tree.as_local_mut()
3699 .unwrap()
3700 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3701 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3702 builders
3703 .entry(tree.id())
3704 .or_insert_with(|| GlobSetBuilder::new())
3705 .add(glob);
3706 }
3707 return true;
3708 }
3709 }
3710 false
3711 });
3712 if glob_is_inside_worktree {
3713 break;
3714 }
3715 }
3716 }
3717 }
3718
3719 watched_paths.clear();
3720 for (worktree_id, builder) in builders {
3721 if let Ok(globset) = builder.build() {
3722 watched_paths.insert(worktree_id, globset);
3723 }
3724 }
3725
3726 cx.notify();
3727 }
3728 }
3729
3730 async fn on_lsp_workspace_edit(
3731 this: WeakModelHandle<Self>,
3732 params: lsp::ApplyWorkspaceEditParams,
3733 server_id: LanguageServerId,
3734 adapter: Arc<CachedLspAdapter>,
3735 mut cx: AsyncAppContext,
3736 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3737 let this = this
3738 .upgrade(&cx)
3739            .ok_or_else(|| anyhow!("project closed"))?;
3740 let language_server = this
3741 .read_with(&cx, |this, _| this.language_server_for_id(server_id))
3742 .ok_or_else(|| anyhow!("language server not found"))?;
3743 let transaction = Self::deserialize_workspace_edit(
3744 this.clone(),
3745 params.edit,
3746 true,
3747 adapter.clone(),
3748 language_server.clone(),
3749 &mut cx,
3750 )
3751 .await
3752 .log_err();
3753 this.update(&mut cx, |this, _| {
3754 if let Some(transaction) = transaction {
3755 this.last_workspace_edits_by_language_server
3756 .insert(server_id, transaction);
3757 }
3758 });
3759 Ok(lsp::ApplyWorkspaceEditResponse {
3760 applied: true,
3761 failed_change: None,
3762 failure_reason: None,
3763 })
3764 }
3765
3766 pub fn language_server_statuses(
3767 &self,
3768 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3769 self.language_server_statuses.values()
3770 }
3771
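    // Converts an LSP `publishDiagnostics` payload into grouped diagnostic entries:
    // each primary diagnostic starts a new group, its related-information entries
    // join that group, and diagnostics that merely point back at an existing
    // primary contribute severity and "unnecessary" flags to matching entries.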
3772 pub fn update_diagnostics(
3773 &mut self,
3774 language_server_id: LanguageServerId,
3775 mut params: lsp::PublishDiagnosticsParams,
3776 disk_based_sources: &[String],
3777 cx: &mut ModelContext<Self>,
3778 ) -> Result<()> {
3779 let abs_path = params
3780 .uri
3781 .to_file_path()
3782 .map_err(|_| anyhow!("URI is not a file"))?;
3783 let mut diagnostics = Vec::default();
3784 let mut primary_diagnostic_group_ids = HashMap::default();
3785 let mut sources_by_group_id = HashMap::default();
3786 let mut supporting_diagnostics = HashMap::default();
3787
3788 // Ensure that primary diagnostics are always the most severe
3789 params.diagnostics.sort_by_key(|item| item.severity);
3790
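// Walk the diagnostics in severity order, treating an entry as "supporting" when one of its
// related locations points back at a primary diagnostic that was already recorded.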
3791 for diagnostic in &params.diagnostics {
3792 let source = diagnostic.source.as_ref();
3793 let code = diagnostic.code.as_ref().map(|code| match code {
3794 lsp::NumberOrString::Number(code) => code.to_string(),
3795 lsp::NumberOrString::String(code) => code.clone(),
3796 });
3797 let range = range_from_lsp(diagnostic.range);
3798 let is_supporting = diagnostic
3799 .related_information
3800 .as_ref()
3801 .map_or(false, |infos| {
3802 infos.iter().any(|info| {
3803 primary_diagnostic_group_ids.contains_key(&(
3804 source,
3805 code.clone(),
3806 range_from_lsp(info.location.range),
3807 ))
3808 })
3809 });
3810
3811 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3812 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3813 });
3814
3815 if is_supporting {
3816 supporting_diagnostics.insert(
3817 (source, code.clone(), range),
3818 (diagnostic.severity, is_unnecessary),
3819 );
3820 } else {
3821 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3822 let is_disk_based =
3823 source.map_or(false, |source| disk_based_sources.contains(source));
3824
3825 sources_by_group_id.insert(group_id, source);
3826 primary_diagnostic_group_ids
3827 .insert((source, code.clone(), range.clone()), group_id);
3828
3829 diagnostics.push(DiagnosticEntry {
3830 range,
3831 diagnostic: Diagnostic {
3832 source: diagnostic.source.clone(),
3833 code: code.clone(),
3834 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3835 message: diagnostic.message.clone(),
3836 group_id,
3837 is_primary: true,
3838 is_valid: true,
3839 is_disk_based,
3840 is_unnecessary,
3841 },
3842 });
3843 if let Some(infos) = &diagnostic.related_information {
3844 for info in infos {
3845 if info.location.uri == params.uri && !info.message.is_empty() {
3846 let range = range_from_lsp(info.location.range);
3847 diagnostics.push(DiagnosticEntry {
3848 range,
3849 diagnostic: Diagnostic {
3850 source: diagnostic.source.clone(),
3851 code: code.clone(),
3852 severity: DiagnosticSeverity::INFORMATION,
3853 message: info.message.clone(),
3854 group_id,
3855 is_primary: false,
3856 is_valid: true,
3857 is_disk_based,
3858 is_unnecessary: false,
3859 },
3860 });
3861 }
3862 }
3863 }
3864 }
3865 }
3866
3867 for entry in &mut diagnostics {
3868 let diagnostic = &mut entry.diagnostic;
3869 if !diagnostic.is_primary {
3870 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3871 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3872 source,
3873 diagnostic.code.clone(),
3874 entry.range.clone(),
3875 )) {
3876 if let Some(severity) = severity {
3877 diagnostic.severity = severity;
3878 }
3879 diagnostic.is_unnecessary = is_unnecessary;
3880 }
3881 }
3882 }
3883
3884 self.update_diagnostic_entries(
3885 language_server_id,
3886 abs_path,
3887 params.version,
3888 diagnostics,
3889 cx,
3890 )?;
3891 Ok(())
3892 }
3893
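/// Applies a set of diagnostics to the worktree entry for `abs_path` (and to the open buffer,
/// if there is one), emitting `Event::DiagnosticsUpdated` when the worktree's diagnostics
/// actually changed.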
3894 pub fn update_diagnostic_entries(
3895 &mut self,
3896 server_id: LanguageServerId,
3897 abs_path: PathBuf,
3898 version: Option<i32>,
3899 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3900 cx: &mut ModelContext<Project>,
3901 ) -> Result<(), anyhow::Error> {
3902 let (worktree, relative_path) = self
3903 .find_local_worktree(&abs_path, cx)
3904 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3905
3906 let project_path = ProjectPath {
3907 worktree_id: worktree.read(cx).id(),
3908 path: relative_path.into(),
3909 };
3910
3911 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3912 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3913 }
3914
3915 let updated = worktree.update(cx, |worktree, cx| {
3916 worktree
3917 .as_local_mut()
3918 .ok_or_else(|| anyhow!("not a local worktree"))?
3919 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3920 })?;
3921 if updated {
3922 cx.emit(Event::DiagnosticsUpdated {
3923 language_server_id: server_id,
3924 path: project_path,
3925 });
3926 }
3927 Ok(())
3928 }
3929
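// Installs diagnostics on an open buffer for a specific server: entries are sorted, disk-based
// ranges are remapped through any unsaved edits, all ranges are clipped to valid positions
// (empty ranges are widened by one codepoint), and the resulting `DiagnosticSet` replaces the
// server's previous set on the buffer.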
3930 fn update_buffer_diagnostics(
3931 &mut self,
3932 buffer: &ModelHandle<Buffer>,
3933 server_id: LanguageServerId,
3934 version: Option<i32>,
3935 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3936 cx: &mut ModelContext<Self>,
3937 ) -> Result<()> {
3938 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3939 Ordering::Equal
3940 .then_with(|| b.is_primary.cmp(&a.is_primary))
3941 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3942 .then_with(|| a.severity.cmp(&b.severity))
3943 .then_with(|| a.message.cmp(&b.message))
3944 }
3945
3946 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3947
3948 diagnostics.sort_unstable_by(|a, b| {
3949 Ordering::Equal
3950 .then_with(|| a.range.start.cmp(&b.range.start))
3951 .then_with(|| b.range.end.cmp(&a.range.end))
3952 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3953 });
3954
3955 let mut sanitized_diagnostics = Vec::new();
3956 let edits_since_save = Patch::new(
3957 snapshot
3958 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
3959 .collect(),
3960 );
3961 for entry in diagnostics {
3962 let start;
3963 let end;
3964 if entry.diagnostic.is_disk_based {
3965 // Some diagnostics are based on files on disk instead of buffers'
3966 // current contents. Adjust these diagnostics' ranges to reflect
3967 // any unsaved edits.
3968 start = edits_since_save.old_to_new(entry.range.start);
3969 end = edits_since_save.old_to_new(entry.range.end);
3970 } else {
3971 start = entry.range.start;
3972 end = entry.range.end;
3973 }
3974
3975 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
3976 ..snapshot.clip_point_utf16(end, Bias::Right);
3977
3978 // Expand empty ranges by one codepoint
3979 if range.start == range.end {
3980 // This will snap to the next character boundary when clipped below
3981 range.end.column += 1;
3982 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
3983 if range.start == range.end && range.end.column > 0 {
3984 range.start.column -= 1;
3985 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
3986 }
3987 }
3988
3989 sanitized_diagnostics.push(DiagnosticEntry {
3990 range,
3991 diagnostic: entry.diagnostic,
3992 });
3993 }
3994 drop(edits_since_save);
3995
3996 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
3997 buffer.update(cx, |buffer, cx| {
3998 buffer.update_diagnostics(server_id, set, cx)
3999 });
4000 Ok(())
4001 }
4002
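/// Reloads the given buffers from their files. Only dirty buffers are considered: local ones
/// are reloaded directly from disk, non-local ones via a `ReloadBuffers` request to the host.
/// All resulting transactions are combined into a single `ProjectTransaction`.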
4003 pub fn reload_buffers(
4004 &self,
4005 buffers: HashSet<ModelHandle<Buffer>>,
4006 push_to_history: bool,
4007 cx: &mut ModelContext<Self>,
4008 ) -> Task<Result<ProjectTransaction>> {
4009 let mut local_buffers = Vec::new();
4010 let mut remote_buffers = None;
4011 for buffer_handle in buffers {
4012 let buffer = buffer_handle.read(cx);
4013 if buffer.is_dirty() {
4014 if let Some(file) = File::from_dyn(buffer.file()) {
4015 if file.is_local() {
4016 local_buffers.push(buffer_handle);
4017 } else {
4018 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4019 }
4020 }
4021 }
4022 }
4023
4024 let remote_buffers = self.remote_id().zip(remote_buffers);
4025 let client = self.client.clone();
4026
4027 cx.spawn(|this, mut cx| async move {
4028 let mut project_transaction = ProjectTransaction::default();
4029
4030 if let Some((project_id, remote_buffers)) = remote_buffers {
4031 let response = client
4032 .request(proto::ReloadBuffers {
4033 project_id,
4034 buffer_ids: remote_buffers
4035 .iter()
4036 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
4037 .collect(),
4038 })
4039 .await?
4040 .transaction
4041 .ok_or_else(|| anyhow!("missing transaction"))?;
4042 project_transaction = this
4043 .update(&mut cx, |this, cx| {
4044 this.deserialize_project_transaction(response, push_to_history, cx)
4045 })
4046 .await?;
4047 }
4048
4049 for buffer in local_buffers {
4050 let transaction = buffer
4051 .update(&mut cx, |buffer, cx| buffer.reload(cx))
4052 .await?;
4053 buffer.update(&mut cx, |buffer, cx| {
4054 if let Some(transaction) = transaction {
4055 if !push_to_history {
4056 buffer.forget_transaction(transaction.id);
4057 }
4058 project_transaction.0.insert(cx.handle(), transaction);
4059 }
4060 });
4061 }
4062
4063 Ok(project_transaction)
4064 })
4065 }
4066
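/// Formats the given buffers according to their language settings. Locally this first applies
/// whitespace fix-ups (trailing-whitespace removal, final newline), then language-server,
/// external-command, or prettier formatting as configured, grouping both steps into a single
/// undo entry per buffer. For remote projects the request is forwarded to the host.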
4067 pub fn format(
4068 &mut self,
4069 buffers: HashSet<ModelHandle<Buffer>>,
4070 push_to_history: bool,
4071 trigger: FormatTrigger,
4072 cx: &mut ModelContext<Project>,
4073 ) -> Task<anyhow::Result<ProjectTransaction>> {
4074 if self.is_local() {
4075 let mut buffers_with_paths_and_servers = buffers
4076 .into_iter()
4077 .filter_map(|buffer_handle| {
4078 let buffer = buffer_handle.read(cx);
4079 let file = File::from_dyn(buffer.file())?;
4080 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4081 let server = self
4082 .primary_language_server_for_buffer(buffer, cx)
4083 .map(|s| s.1.clone());
4084 Some((buffer_handle, buffer_abs_path, server))
4085 })
4086 .collect::<Vec<_>>();
4087
4088 cx.spawn(|project, mut cx| async move {
4089 // Do not allow multiple concurrent formatting requests for the
4090 // same buffer.
4091 project.update(&mut cx, |this, cx| {
4092 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4093 this.buffers_being_formatted
4094 .insert(buffer.read(cx).remote_id())
4095 });
4096 });
4097
4098 let _cleanup = defer({
4099 let this = project.clone();
4100 let mut cx = cx.clone();
4101 let buffers = &buffers_with_paths_and_servers;
4102 move || {
4103 this.update(&mut cx, |this, cx| {
4104 for (buffer, _, _) in buffers {
4105 this.buffers_being_formatted
4106 .remove(&buffer.read(cx).remote_id());
4107 }
4108 });
4109 }
4110 });
4111
4112 let mut project_transaction = ProjectTransaction::default();
4113 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4114 let settings = buffer.read_with(&cx, |buffer, cx| {
4115 language_settings(buffer.language(), buffer.file(), cx).clone()
4116 });
4117
4118 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4119 let ensure_final_newline = settings.ensure_final_newline_on_save;
4120 let format_on_save = settings.format_on_save.clone();
4121 let formatter = settings.formatter.clone();
4122 let tab_size = settings.tab_size;
4123
4124 // First, format buffer's whitespace according to the settings.
4125 let trailing_whitespace_diff = if remove_trailing_whitespace {
4126 Some(
4127 buffer
4128 .read_with(&cx, |b, cx| b.remove_trailing_whitespace(cx))
4129 .await,
4130 )
4131 } else {
4132 None
4133 };
4134 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4135 buffer.finalize_last_transaction();
4136 buffer.start_transaction();
4137 if let Some(diff) = trailing_whitespace_diff {
4138 buffer.apply_diff(diff, cx);
4139 }
4140 if ensure_final_newline {
4141 buffer.ensure_final_newline(cx);
4142 }
4143 buffer.end_transaction(cx)
4144 });
4145
4146 // Apply language-specific formatting using either a language server
4147 // or external command.
4148 let mut format_operation = None;
4149 match (formatter, format_on_save) {
4150 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4151
4152 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4153 | (_, FormatOnSave::LanguageServer) => {
4154 if let Some((language_server, buffer_abs_path)) =
4155 language_server.as_ref().zip(buffer_abs_path.as_ref())
4156 {
4157 format_operation = Some(FormatOperation::Lsp(
4158 Self::format_via_lsp(
4159 &project,
4160 &buffer,
4161 buffer_abs_path,
4162 &language_server,
4163 tab_size,
4164 &mut cx,
4165 )
4166 .await
4167 .context("failed to format via language server")?,
4168 ));
4169 }
4170 }
4171
4172 (
4173 Formatter::External { command, arguments },
4174 FormatOnSave::On | FormatOnSave::Off,
4175 )
4176 | (_, FormatOnSave::External { command, arguments }) => {
4177 if let Some(buffer_abs_path) = buffer_abs_path {
4178 format_operation = Self::format_via_external_command(
4179 buffer,
4180 buffer_abs_path,
4181 &command,
4182 &arguments,
4183 &mut cx,
4184 )
4185 .await
4186 .context(format!(
4187 "failed to format via external command {:?}",
4188 command
4189 ))?
4190 .map(FormatOperation::External);
4191 }
4192 }
4193 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4194 if let Some(new_operation) =
4195 format_with_prettier(&project, buffer, &mut cx).await
4196 {
4197 format_operation = Some(new_operation);
4198 } else if let Some((language_server, buffer_abs_path)) =
4199 language_server.as_ref().zip(buffer_abs_path.as_ref())
4200 {
4201 format_operation = Some(FormatOperation::Lsp(
4202 Self::format_via_lsp(
4203 &project,
4204 &buffer,
4205 buffer_abs_path,
4206 &language_server,
4207 tab_size,
4208 &mut cx,
4209 )
4210 .await
4211 .context("failed to format via language server")?,
4212 ));
4213 }
4214 }
4215 (Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
4216 if let Some(new_operation) =
4217 format_with_prettier(&project, buffer, &mut cx).await
4218 {
4219 format_operation = Some(new_operation);
4220 }
4221 }
4222 };
4223
4224 buffer.update(&mut cx, |b, cx| {
4225 // If the buffer had its whitespace formatted and was edited while the language-specific
4226 // formatting was being computed, avoid applying the language-specific formatting, because
4227 // it can't be grouped with the whitespace formatting in the undo history.
4228 if let Some(transaction_id) = whitespace_transaction_id {
4229 if b.peek_undo_stack()
4230 .map_or(true, |e| e.transaction_id() != transaction_id)
4231 {
4232 format_operation.take();
4233 }
4234 }
4235
4236 // Apply any language-specific formatting, and group the two formatting operations
4237 // in the buffer's undo history.
4238 if let Some(operation) = format_operation {
4239 match operation {
4240 FormatOperation::Lsp(edits) => {
4241 b.edit(edits, None, cx);
4242 }
4243 FormatOperation::External(diff) => {
4244 b.apply_diff(diff, cx);
4245 }
4246 FormatOperation::Prettier(diff) => {
4247 b.apply_diff(diff, cx);
4248 }
4249 }
4250
4251 if let Some(transaction_id) = whitespace_transaction_id {
4252 b.group_until_transaction(transaction_id);
4253 }
4254 }
4255
4256 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4257 if !push_to_history {
4258 b.forget_transaction(transaction.id);
4259 }
4260 project_transaction.0.insert(buffer.clone(), transaction);
4261 }
4262 });
4263 }
4264
4265 Ok(project_transaction)
4266 })
4267 } else {
4268 let remote_id = self.remote_id();
4269 let client = self.client.clone();
4270 cx.spawn(|this, mut cx| async move {
4271 let mut project_transaction = ProjectTransaction::default();
4272 if let Some(project_id) = remote_id {
4273 let response = client
4274 .request(proto::FormatBuffers {
4275 project_id,
4276 trigger: trigger as i32,
4277 buffer_ids: buffers
4278 .iter()
4279 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
4280 .collect(),
4281 })
4282 .await?
4283 .transaction
4284 .ok_or_else(|| anyhow!("missing transaction"))?;
4285 project_transaction = this
4286 .update(&mut cx, |this, cx| {
4287 this.deserialize_project_transaction(response, push_to_history, cx)
4288 })
4289 .await?;
4290 }
4291 Ok(project_transaction)
4292 })
4293 }
4294 }
4295
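// Formats a buffer via the language server, preferring `textDocument/formatting` and falling
// back to `textDocument/rangeFormatting` over the whole buffer when only range formatting is
// advertised, then converts the returned LSP edits into buffer edits.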
4296 async fn format_via_lsp(
4297 this: &ModelHandle<Self>,
4298 buffer: &ModelHandle<Buffer>,
4299 abs_path: &Path,
4300 language_server: &Arc<LanguageServer>,
4301 tab_size: NonZeroU32,
4302 cx: &mut AsyncAppContext,
4303 ) -> Result<Vec<(Range<Anchor>, String)>> {
4304 let uri = lsp::Url::from_file_path(abs_path)
4305 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4306 let text_document = lsp::TextDocumentIdentifier::new(uri);
4307 let capabilities = &language_server.capabilities();
4308
4309 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4310 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4311
4312 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4313 language_server
4314 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4315 text_document,
4316 options: lsp_command::lsp_formatting_options(tab_size.get()),
4317 work_done_progress_params: Default::default(),
4318 })
4319 .await?
4320 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4321 let buffer_start = lsp::Position::new(0, 0);
4322 let buffer_end = buffer.read_with(cx, |b, _| point_to_lsp(b.max_point_utf16()));
4323
4324 language_server
4325 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4326 text_document,
4327 range: lsp::Range::new(buffer_start, buffer_end),
4328 options: lsp_command::lsp_formatting_options(tab_size.get()),
4329 work_done_progress_params: Default::default(),
4330 })
4331 .await?
4332 } else {
4333 None
4334 };
4335
4336 if let Some(lsp_edits) = lsp_edits {
4337 this.update(cx, |this, cx| {
4338 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4339 })
4340 .await
4341 } else {
4342 Ok(Vec::new())
4343 }
4344 }
4345
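// Runs the configured external formatter: `{buffer_path}` is substituted into its arguments,
// the buffer contents are piped to the command's stdin, and its stdout is diffed against the
// buffer to produce the edits to apply.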
4346 async fn format_via_external_command(
4347 buffer: &ModelHandle<Buffer>,
4348 buffer_abs_path: &Path,
4349 command: &str,
4350 arguments: &[String],
4351 cx: &mut AsyncAppContext,
4352 ) -> Result<Option<Diff>> {
4353 let working_dir_path = buffer.read_with(cx, |buffer, cx| {
4354 let file = File::from_dyn(buffer.file())?;
4355 let worktree = file.worktree.read(cx).as_local()?;
4356 let mut worktree_path = worktree.abs_path().to_path_buf();
4357 if worktree.root_entry()?.is_file() {
4358 worktree_path.pop();
4359 }
4360 Some(worktree_path)
4361 });
4362
4363 if let Some(working_dir_path) = working_dir_path {
4364 let mut child =
4365 smol::process::Command::new(command)
4366 .args(arguments.iter().map(|arg| {
4367 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4368 }))
4369 .current_dir(&working_dir_path)
4370 .stdin(smol::process::Stdio::piped())
4371 .stdout(smol::process::Stdio::piped())
4372 .stderr(smol::process::Stdio::piped())
4373 .spawn()?;
4374 let stdin = child
4375 .stdin
4376 .as_mut()
4377 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4378 let text = buffer.read_with(cx, |buffer, _| buffer.as_rope().clone());
4379 for chunk in text.chunks() {
4380 stdin.write_all(chunk.as_bytes()).await?;
4381 }
4382 stdin.flush().await?;
4383
4384 let output = child.output().await?;
4385 if !output.status.success() {
4386 return Err(anyhow!(
4387 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4388 output.status.code(),
4389 String::from_utf8_lossy(&output.stdout),
4390 String::from_utf8_lossy(&output.stderr),
4391 ));
4392 }
4393
4394 let stdout = String::from_utf8(output.stdout)?;
4395 Ok(Some(
4396 buffer
4397 .read_with(cx, |buffer, cx| buffer.diff(stdout, cx))
4398 .await,
4399 ))
4400 } else {
4401 Ok(None)
4402 }
4403 }
4404
4405 pub fn definition<T: ToPointUtf16>(
4406 &self,
4407 buffer: &ModelHandle<Buffer>,
4408 position: T,
4409 cx: &mut ModelContext<Self>,
4410 ) -> Task<Result<Vec<LocationLink>>> {
4411 let position = position.to_point_utf16(buffer.read(cx));
4412 self.request_lsp(
4413 buffer.clone(),
4414 LanguageServerToQuery::Primary,
4415 GetDefinition { position },
4416 cx,
4417 )
4418 }
4419
4420 pub fn type_definition<T: ToPointUtf16>(
4421 &self,
4422 buffer: &ModelHandle<Buffer>,
4423 position: T,
4424 cx: &mut ModelContext<Self>,
4425 ) -> Task<Result<Vec<LocationLink>>> {
4426 let position = position.to_point_utf16(buffer.read(cx));
4427 self.request_lsp(
4428 buffer.clone(),
4429 LanguageServerToQuery::Primary,
4430 GetTypeDefinition { position },
4431 cx,
4432 )
4433 }
4434
4435 pub fn references<T: ToPointUtf16>(
4436 &self,
4437 buffer: &ModelHandle<Buffer>,
4438 position: T,
4439 cx: &mut ModelContext<Self>,
4440 ) -> Task<Result<Vec<Location>>> {
4441 let position = position.to_point_utf16(buffer.read(cx));
4442 self.request_lsp(
4443 buffer.clone(),
4444 LanguageServerToQuery::Primary,
4445 GetReferences { position },
4446 cx,
4447 )
4448 }
4449
4450 pub fn document_highlights<T: ToPointUtf16>(
4451 &self,
4452 buffer: &ModelHandle<Buffer>,
4453 position: T,
4454 cx: &mut ModelContext<Self>,
4455 ) -> Task<Result<Vec<DocumentHighlight>>> {
4456 let position = position.to_point_utf16(buffer.read(cx));
4457 self.request_lsp(
4458 buffer.clone(),
4459 LanguageServerToQuery::Primary,
4460 GetDocumentHighlights { position },
4461 cx,
4462 )
4463 }
4464
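/// Queries `workspace/symbol` on every running language server and resolves the results into
/// project symbols, mapping each returned location back onto a worktree-relative path where
/// possible. For remote projects the query is forwarded to the host.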
4465 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4466 if self.is_local() {
4467 let mut requests = Vec::new();
4468 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4469 let worktree_id = *worktree_id;
4470 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4471 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4472 Some(worktree) => worktree,
4473 None => continue,
4474 };
4475 let worktree_abs_path = worktree.abs_path().clone();
4476
4477 let (adapter, language, server) = match self.language_servers.get(server_id) {
4478 Some(LanguageServerState::Running {
4479 adapter,
4480 language,
4481 server,
4482 ..
4483 }) => (adapter.clone(), language.clone(), server),
4484
4485 _ => continue,
4486 };
4487
4488 requests.push(
4489 server
4490 .request::<lsp::request::WorkspaceSymbolRequest>(
4491 lsp::WorkspaceSymbolParams {
4492 query: query.to_string(),
4493 ..Default::default()
4494 },
4495 )
4496 .log_err()
4497 .map(move |response| {
4498 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4499 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4500 flat_responses.into_iter().map(|lsp_symbol| {
4501 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4502 }).collect::<Vec<_>>()
4503 }
4504 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4505 nested_responses.into_iter().filter_map(|lsp_symbol| {
4506 let location = match lsp_symbol.location {
4507 OneOf::Left(location) => location,
4508 OneOf::Right(_) => {
4509 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4510 return None
4511 }
4512 };
4513 Some((lsp_symbol.name, lsp_symbol.kind, location))
4514 }).collect::<Vec<_>>()
4515 }
4516 }).unwrap_or_default();
4517
4518 (
4519 adapter,
4520 language,
4521 worktree_id,
4522 worktree_abs_path,
4523 lsp_symbols,
4524 )
4525 }),
4526 );
4527 }
4528
4529 cx.spawn_weak(|this, cx| async move {
4530 let responses = futures::future::join_all(requests).await;
4531 let this = match this.upgrade(&cx) {
4532 Some(this) => this,
4533 None => return Ok(Vec::new()),
4534 };
4535
4536 let symbols = this.read_with(&cx, |this, cx| {
4537 let mut symbols = Vec::new();
4538 for (
4539 adapter,
4540 adapter_language,
4541 source_worktree_id,
4542 worktree_abs_path,
4543 lsp_symbols,
4544 ) in responses
4545 {
4546 symbols.extend(lsp_symbols.into_iter().filter_map(
4547 |(symbol_name, symbol_kind, symbol_location)| {
4548 let abs_path = symbol_location.uri.to_file_path().ok()?;
4549 let mut worktree_id = source_worktree_id;
4550 let path;
4551 if let Some((worktree, rel_path)) =
4552 this.find_local_worktree(&abs_path, cx)
4553 {
4554 worktree_id = worktree.read(cx).id();
4555 path = rel_path;
4556 } else {
4557 path = relativize_path(&worktree_abs_path, &abs_path);
4558 }
4559
4560 let project_path = ProjectPath {
4561 worktree_id,
4562 path: path.into(),
4563 };
4564 let signature = this.symbol_signature(&project_path);
4565 let adapter_language = adapter_language.clone();
4566 let language = this
4567 .languages
4568 .language_for_file(&project_path.path, None)
4569 .unwrap_or_else(move |_| adapter_language);
4570 let language_server_name = adapter.name.clone();
4571 Some(async move {
4572 let language = language.await;
4573 let label =
4574 language.label_for_symbol(&symbol_name, symbol_kind).await;
4575
4576 Symbol {
4577 language_server_name,
4578 source_worktree_id,
4579 path: project_path,
4580 label: label.unwrap_or_else(|| {
4581 CodeLabel::plain(symbol_name.clone(), None)
4582 }),
4583 kind: symbol_kind,
4584 name: symbol_name,
4585 range: range_from_lsp(symbol_location.range),
4586 signature,
4587 }
4588 })
4589 },
4590 ));
4591 }
4592
4593 symbols
4594 });
4595
4596 Ok(futures::future::join_all(symbols).await)
4597 })
4598 } else if let Some(project_id) = self.remote_id() {
4599 let request = self.client.request(proto::GetProjectSymbols {
4600 project_id,
4601 query: query.to_string(),
4602 });
4603 cx.spawn_weak(|this, cx| async move {
4604 let response = request.await?;
4605 let mut symbols = Vec::new();
4606 if let Some(this) = this.upgrade(&cx) {
4607 let new_symbols = this.read_with(&cx, |this, _| {
4608 response
4609 .symbols
4610 .into_iter()
4611 .map(|symbol| this.deserialize_symbol(symbol))
4612 .collect::<Vec<_>>()
4613 });
4614 symbols = futures::future::join_all(new_symbols)
4615 .await
4616 .into_iter()
4617 .filter_map(|symbol| symbol.log_err())
4618 .collect::<Vec<_>>();
4619 }
4620 Ok(symbols)
4621 })
4622 } else {
4623 Task::ready(Ok(Default::default()))
4624 }
4625 }
4626
4627 pub fn open_buffer_for_symbol(
4628 &mut self,
4629 symbol: &Symbol,
4630 cx: &mut ModelContext<Self>,
4631 ) -> Task<Result<ModelHandle<Buffer>>> {
4632 if self.is_local() {
4633 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4634 symbol.source_worktree_id,
4635 symbol.language_server_name.clone(),
4636 )) {
4637 *id
4638 } else {
4639 return Task::ready(Err(anyhow!(
4640 "language server for worktree and language not found"
4641 )));
4642 };
4643
4644 let worktree_abs_path = if let Some(worktree_abs_path) = self
4645 .worktree_for_id(symbol.path.worktree_id, cx)
4646 .and_then(|worktree| worktree.read(cx).as_local())
4647 .map(|local_worktree| local_worktree.abs_path())
4648 {
4649 worktree_abs_path
4650 } else {
4651 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4652 };
4653 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
4654 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4655 uri
4656 } else {
4657 return Task::ready(Err(anyhow!("invalid symbol path")));
4658 };
4659
4660 self.open_local_buffer_via_lsp(
4661 symbol_uri,
4662 language_server_id,
4663 symbol.language_server_name.clone(),
4664 cx,
4665 )
4666 } else if let Some(project_id) = self.remote_id() {
4667 let request = self.client.request(proto::OpenBufferForSymbol {
4668 project_id,
4669 symbol: Some(serialize_symbol(symbol)),
4670 });
4671 cx.spawn(|this, mut cx| async move {
4672 let response = request.await?;
4673 this.update(&mut cx, |this, cx| {
4674 this.wait_for_remote_buffer(response.buffer_id, cx)
4675 })
4676 .await
4677 })
4678 } else {
4679 Task::ready(Err(anyhow!("project does not have a remote id")))
4680 }
4681 }
4682
4683 pub fn hover<T: ToPointUtf16>(
4684 &self,
4685 buffer: &ModelHandle<Buffer>,
4686 position: T,
4687 cx: &mut ModelContext<Self>,
4688 ) -> Task<Result<Option<Hover>>> {
4689 let position = position.to_point_utf16(buffer.read(cx));
4690 self.request_lsp(
4691 buffer.clone(),
4692 LanguageServerToQuery::Primary,
4693 GetHover { position },
4694 cx,
4695 )
4696 }
4697
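/// Requests completions at the given position from every language server attached to the
/// buffer that advertises a completion provider (and is allowed by the language scope at that
/// position), concatenating the results. Remote projects proxy the request to the host.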
4698 pub fn completions<T: ToOffset + ToPointUtf16>(
4699 &self,
4700 buffer: &ModelHandle<Buffer>,
4701 position: T,
4702 cx: &mut ModelContext<Self>,
4703 ) -> Task<Result<Vec<Completion>>> {
4704 let position = position.to_point_utf16(buffer.read(cx));
4705 if self.is_local() {
4706 let snapshot = buffer.read(cx).snapshot();
4707 let offset = position.to_offset(&snapshot);
4708 let scope = snapshot.language_scope_at(offset);
4709
4710 let server_ids: Vec<_> = self
4711 .language_servers_for_buffer(buffer.read(cx), cx)
4712 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4713 .filter(|(adapter, _)| {
4714 scope
4715 .as_ref()
4716 .map(|scope| scope.language_allowed(&adapter.name))
4717 .unwrap_or(true)
4718 })
4719 .map(|(_, server)| server.server_id())
4720 .collect();
4721
4722 let buffer = buffer.clone();
4723 cx.spawn(|this, mut cx| async move {
4724 let mut tasks = Vec::with_capacity(server_ids.len());
4725 this.update(&mut cx, |this, cx| {
4726 for server_id in server_ids {
4727 tasks.push(this.request_lsp(
4728 buffer.clone(),
4729 LanguageServerToQuery::Other(server_id),
4730 GetCompletions { position },
4731 cx,
4732 ));
4733 }
4734 });
4735
4736 let mut completions = Vec::new();
4737 for task in tasks {
4738 if let Ok(new_completions) = task.await {
4739 completions.extend_from_slice(&new_completions);
4740 }
4741 }
4742
4743 Ok(completions)
4744 })
4745 } else if let Some(project_id) = self.remote_id() {
4746 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4747 } else {
4748 Task::ready(Ok(Default::default()))
4749 }
4750 }
4751
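/// Applies a completion's `additionalTextEdits`, resolving the completion item first when the
/// server supports `completionItem/resolve`. Edits that overlap the primary completion range
/// are skipped, and the resulting transaction is optionally pushed to the undo history.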
4752 pub fn apply_additional_edits_for_completion(
4753 &self,
4754 buffer_handle: ModelHandle<Buffer>,
4755 completion: Completion,
4756 push_to_history: bool,
4757 cx: &mut ModelContext<Self>,
4758 ) -> Task<Result<Option<Transaction>>> {
4759 let buffer = buffer_handle.read(cx);
4760 let buffer_id = buffer.remote_id();
4761
4762 if self.is_local() {
4763 let server_id = completion.server_id;
4764 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4765 Some((_, server)) => server.clone(),
4766 _ => return Task::ready(Ok(Default::default())),
4767 };
4768
4769 cx.spawn(|this, mut cx| async move {
4770 let can_resolve = lang_server
4771 .capabilities()
4772 .completion_provider
4773 .as_ref()
4774 .and_then(|options| options.resolve_provider)
4775 .unwrap_or(false);
4776 let additional_text_edits = if can_resolve {
4777 lang_server
4778 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4779 .await?
4780 .additional_text_edits
4781 } else {
4782 completion.lsp_completion.additional_text_edits
4783 };
4784 if let Some(edits) = additional_text_edits {
4785 let edits = this
4786 .update(&mut cx, |this, cx| {
4787 this.edits_from_lsp(
4788 &buffer_handle,
4789 edits,
4790 lang_server.server_id(),
4791 None,
4792 cx,
4793 )
4794 })
4795 .await?;
4796
4797 buffer_handle.update(&mut cx, |buffer, cx| {
4798 buffer.finalize_last_transaction();
4799 buffer.start_transaction();
4800
4801 for (range, text) in edits {
4802 let primary = &completion.old_range;
4803 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4804 && primary.end.cmp(&range.start, buffer).is_ge();
4805 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4806 && range.end.cmp(&primary.end, buffer).is_ge();
4807
4808 // Skip additional edits which overlap with the primary completion edit
4809 // https://github.com/zed-industries/zed/pull/1871
4810 if !start_within && !end_within {
4811 buffer.edit([(range, text)], None, cx);
4812 }
4813 }
4814
4815 let transaction = if buffer.end_transaction(cx).is_some() {
4816 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4817 if !push_to_history {
4818 buffer.forget_transaction(transaction.id);
4819 }
4820 Some(transaction)
4821 } else {
4822 None
4823 };
4824 Ok(transaction)
4825 })
4826 } else {
4827 Ok(None)
4828 }
4829 })
4830 } else if let Some(project_id) = self.remote_id() {
4831 let client = self.client.clone();
4832 cx.spawn(|_, mut cx| async move {
4833 let response = client
4834 .request(proto::ApplyCompletionAdditionalEdits {
4835 project_id,
4836 buffer_id,
4837 completion: Some(language::proto::serialize_completion(&completion)),
4838 })
4839 .await?;
4840
4841 if let Some(transaction) = response.transaction {
4842 let transaction = language::proto::deserialize_transaction(transaction)?;
4843 buffer_handle
4844 .update(&mut cx, |buffer, _| {
4845 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4846 })
4847 .await?;
4848 if push_to_history {
4849 buffer_handle.update(&mut cx, |buffer, _| {
4850 buffer.push_transaction(transaction.clone(), Instant::now());
4851 });
4852 }
4853 Ok(Some(transaction))
4854 } else {
4855 Ok(None)
4856 }
4857 })
4858 } else {
4859 Task::ready(Err(anyhow!("project does not have a remote id")))
4860 }
4861 }
4862
4863 pub fn code_actions<T: Clone + ToOffset>(
4864 &self,
4865 buffer_handle: &ModelHandle<Buffer>,
4866 range: Range<T>,
4867 cx: &mut ModelContext<Self>,
4868 ) -> Task<Result<Vec<CodeAction>>> {
4869 let buffer = buffer_handle.read(cx);
4870 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4871 self.request_lsp(
4872 buffer_handle.clone(),
4873 LanguageServerToQuery::Primary,
4874 GetCodeActions { range },
4875 cx,
4876 )
4877 }
4878
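/// Applies a code action. The action is resolved first (or re-requested when it cannot be
/// resolved); its workspace edit, if any, is applied, and otherwise its command is executed on
/// the server, returning whatever workspace edits the command produced.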
4879 pub fn apply_code_action(
4880 &self,
4881 buffer_handle: ModelHandle<Buffer>,
4882 mut action: CodeAction,
4883 push_to_history: bool,
4884 cx: &mut ModelContext<Self>,
4885 ) -> Task<Result<ProjectTransaction>> {
4886 if self.is_local() {
4887 let buffer = buffer_handle.read(cx);
4888 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4889 self.language_server_for_buffer(buffer, action.server_id, cx)
4890 {
4891 (adapter.clone(), server.clone())
4892 } else {
4893 return Task::ready(Ok(Default::default()));
4894 };
4895 let range = action.range.to_point_utf16(buffer);
4896
4897 cx.spawn(|this, mut cx| async move {
4898 if let Some(lsp_range) = action
4899 .lsp_action
4900 .data
4901 .as_mut()
4902 .and_then(|d| d.get_mut("codeActionParams"))
4903 .and_then(|d| d.get_mut("range"))
4904 {
4905 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
4906 action.lsp_action = lang_server
4907 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
4908 .await?;
4909 } else {
4910 let actions = this
4911 .update(&mut cx, |this, cx| {
4912 this.code_actions(&buffer_handle, action.range, cx)
4913 })
4914 .await?;
4915 action.lsp_action = actions
4916 .into_iter()
4917 .find(|a| a.lsp_action.title == action.lsp_action.title)
4918 .ok_or_else(|| anyhow!("code action is outdated"))?
4919 .lsp_action;
4920 }
4921
4922 if let Some(edit) = action.lsp_action.edit {
4923 if edit.changes.is_some() || edit.document_changes.is_some() {
4924 return Self::deserialize_workspace_edit(
4925 this,
4926 edit,
4927 push_to_history,
4928 lsp_adapter.clone(),
4929 lang_server.clone(),
4930 &mut cx,
4931 )
4932 .await;
4933 }
4934 }
4935
4936 if let Some(command) = action.lsp_action.command {
4937 this.update(&mut cx, |this, _| {
4938 this.last_workspace_edits_by_language_server
4939 .remove(&lang_server.server_id());
4940 });
4941
4942 let result = lang_server
4943 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
4944 command: command.command,
4945 arguments: command.arguments.unwrap_or_default(),
4946 ..Default::default()
4947 })
4948 .await;
4949
4950 if let Err(err) = result {
4951 // TODO: LSP ERROR
4952 return Err(err);
4953 }
4954
4955 return Ok(this.update(&mut cx, |this, _| {
4956 this.last_workspace_edits_by_language_server
4957 .remove(&lang_server.server_id())
4958 .unwrap_or_default()
4959 }));
4960 }
4961
4962 Ok(ProjectTransaction::default())
4963 })
4964 } else if let Some(project_id) = self.remote_id() {
4965 let client = self.client.clone();
4966 let request = proto::ApplyCodeAction {
4967 project_id,
4968 buffer_id: buffer_handle.read(cx).remote_id(),
4969 action: Some(language::proto::serialize_code_action(&action)),
4970 };
4971 cx.spawn(|this, mut cx| async move {
4972 let response = client
4973 .request(request)
4974 .await?
4975 .transaction
4976 .ok_or_else(|| anyhow!("missing transaction"))?;
4977 this.update(&mut cx, |this, cx| {
4978 this.deserialize_project_transaction(response, push_to_history, cx)
4979 })
4980 .await
4981 })
4982 } else {
4983 Task::ready(Err(anyhow!("project does not have a remote id")))
4984 }
4985 }
4986
4987 fn apply_on_type_formatting(
4988 &self,
4989 buffer: ModelHandle<Buffer>,
4990 position: Anchor,
4991 trigger: String,
4992 cx: &mut ModelContext<Self>,
4993 ) -> Task<Result<Option<Transaction>>> {
4994 if self.is_local() {
4995 cx.spawn(|this, mut cx| async move {
4996 // Do not allow multiple concurrent formatting requests for the
4997 // same buffer.
4998 this.update(&mut cx, |this, cx| {
4999 this.buffers_being_formatted
5000 .insert(buffer.read(cx).remote_id())
5001 });
5002
5003 let _cleanup = defer({
5004 let this = this.clone();
5005 let mut cx = cx.clone();
5006 let closure_buffer = buffer.clone();
5007 move || {
5008 this.update(&mut cx, |this, cx| {
5009 this.buffers_being_formatted
5010 .remove(&closure_buffer.read(cx).remote_id());
5011 });
5012 }
5013 });
5014
5015 buffer
5016 .update(&mut cx, |buffer, _| {
5017 buffer.wait_for_edits(Some(position.timestamp))
5018 })
5019 .await?;
5020 this.update(&mut cx, |this, cx| {
5021 let position = position.to_point_utf16(buffer.read(cx));
5022 this.on_type_format(buffer, position, trigger, false, cx)
5023 })
5024 .await
5025 })
5026 } else if let Some(project_id) = self.remote_id() {
5027 let client = self.client.clone();
5028 let request = proto::OnTypeFormatting {
5029 project_id,
5030 buffer_id: buffer.read(cx).remote_id(),
5031 position: Some(serialize_anchor(&position)),
5032 trigger,
5033 version: serialize_version(&buffer.read(cx).version()),
5034 };
5035 cx.spawn(|_, _| async move {
5036 client
5037 .request(request)
5038 .await?
5039 .transaction
5040 .map(language::proto::deserialize_transaction)
5041 .transpose()
5042 })
5043 } else {
5044 Task::ready(Err(anyhow!("project does not have a remote id")))
5045 }
5046 }
5047
5048 async fn deserialize_edits(
5049 this: ModelHandle<Self>,
5050 buffer_to_edit: ModelHandle<Buffer>,
5051 edits: Vec<lsp::TextEdit>,
5052 push_to_history: bool,
5053 _: Arc<CachedLspAdapter>,
5054 language_server: Arc<LanguageServer>,
5055 cx: &mut AsyncAppContext,
5056 ) -> Result<Option<Transaction>> {
5057 let edits = this
5058 .update(cx, |this, cx| {
5059 this.edits_from_lsp(
5060 &buffer_to_edit,
5061 edits,
5062 language_server.server_id(),
5063 None,
5064 cx,
5065 )
5066 })
5067 .await?;
5068
5069 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5070 buffer.finalize_last_transaction();
5071 buffer.start_transaction();
5072 for (range, text) in edits {
5073 buffer.edit([(range, text)], None, cx);
5074 }
5075
5076 if buffer.end_transaction(cx).is_some() {
5077 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5078 if !push_to_history {
5079 buffer.forget_transaction(transaction.id);
5080 }
5081 Some(transaction)
5082 } else {
5083 None
5084 }
5085 });
5086
5087 Ok(transaction)
5088 }
5089
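// Applies an `lsp::WorkspaceEdit` to the project: resource operations (create/rename/delete)
// go through the project's `Fs`, and document edits are applied to buffers opened via the
// server's URIs, collecting one transaction per edited buffer.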
5090 async fn deserialize_workspace_edit(
5091 this: ModelHandle<Self>,
5092 edit: lsp::WorkspaceEdit,
5093 push_to_history: bool,
5094 lsp_adapter: Arc<CachedLspAdapter>,
5095 language_server: Arc<LanguageServer>,
5096 cx: &mut AsyncAppContext,
5097 ) -> Result<ProjectTransaction> {
5098 let fs = this.read_with(cx, |this, _| this.fs.clone());
5099 let mut operations = Vec::new();
5100 if let Some(document_changes) = edit.document_changes {
5101 match document_changes {
5102 lsp::DocumentChanges::Edits(edits) => {
5103 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5104 }
5105 lsp::DocumentChanges::Operations(ops) => operations = ops,
5106 }
5107 } else if let Some(changes) = edit.changes {
5108 operations.extend(changes.into_iter().map(|(uri, edits)| {
5109 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5110 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5111 uri,
5112 version: None,
5113 },
5114 edits: edits.into_iter().map(OneOf::Left).collect(),
5115 })
5116 }));
5117 }
5118
5119 let mut project_transaction = ProjectTransaction::default();
5120 for operation in operations {
5121 match operation {
5122 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5123 let abs_path = op
5124 .uri
5125 .to_file_path()
5126 .map_err(|_| anyhow!("can't convert URI to path"))?;
5127
5128 if let Some(parent_path) = abs_path.parent() {
5129 fs.create_dir(parent_path).await?;
5130 }
5131 if abs_path.ends_with("/") {
5132 fs.create_dir(&abs_path).await?;
5133 } else {
5134 fs.create_file(
5135 &abs_path,
5136 op.options
5137 .map(|options| fs::CreateOptions {
5138 overwrite: options.overwrite.unwrap_or(false),
5139 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5140 })
5141 .unwrap_or_default(),
5142 )
5143 .await?;
5144 }
5145 }
5146
5147 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5148 let source_abs_path = op
5149 .old_uri
5150 .to_file_path()
5151 .map_err(|_| anyhow!("can't convert URI to path"))?;
5152 let target_abs_path = op
5153 .new_uri
5154 .to_file_path()
5155 .map_err(|_| anyhow!("can't convert URI to path"))?;
5156 fs.rename(
5157 &source_abs_path,
5158 &target_abs_path,
5159 op.options
5160 .map(|options| fs::RenameOptions {
5161 overwrite: options.overwrite.unwrap_or(false),
5162 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5163 })
5164 .unwrap_or_default(),
5165 )
5166 .await?;
5167 }
5168
5169 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5170 let abs_path = op
5171 .uri
5172 .to_file_path()
5173 .map_err(|_| anyhow!("can't convert URI to path"))?;
5174 let options = op
5175 .options
5176 .map(|options| fs::RemoveOptions {
5177 recursive: options.recursive.unwrap_or(false),
5178 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5179 })
5180 .unwrap_or_default();
5181 if abs_path.ends_with("/") {
5182 fs.remove_dir(&abs_path, options).await?;
5183 } else {
5184 fs.remove_file(&abs_path, options).await?;
5185 }
5186 }
5187
5188 lsp::DocumentChangeOperation::Edit(op) => {
5189 let buffer_to_edit = this
5190 .update(cx, |this, cx| {
5191 this.open_local_buffer_via_lsp(
5192 op.text_document.uri,
5193 language_server.server_id(),
5194 lsp_adapter.name.clone(),
5195 cx,
5196 )
5197 })
5198 .await?;
5199
5200 let edits = this
5201 .update(cx, |this, cx| {
5202 let edits = op.edits.into_iter().map(|edit| match edit {
5203 OneOf::Left(edit) => edit,
5204 OneOf::Right(edit) => edit.text_edit,
5205 });
5206 this.edits_from_lsp(
5207 &buffer_to_edit,
5208 edits,
5209 language_server.server_id(),
5210 op.text_document.version,
5211 cx,
5212 )
5213 })
5214 .await?;
5215
5216 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5217 buffer.finalize_last_transaction();
5218 buffer.start_transaction();
5219 for (range, text) in edits {
5220 buffer.edit([(range, text)], None, cx);
5221 }
5222 let transaction = if buffer.end_transaction(cx).is_some() {
5223 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5224 if !push_to_history {
5225 buffer.forget_transaction(transaction.id);
5226 }
5227 Some(transaction)
5228 } else {
5229 None
5230 };
5231
5232 transaction
5233 });
5234 if let Some(transaction) = transaction {
5235 project_transaction.0.insert(buffer_to_edit, transaction);
5236 }
5237 }
5238 }
5239 }
5240
5241 Ok(project_transaction)
5242 }
5243
5244 pub fn prepare_rename<T: ToPointUtf16>(
5245 &self,
5246 buffer: ModelHandle<Buffer>,
5247 position: T,
5248 cx: &mut ModelContext<Self>,
5249 ) -> Task<Result<Option<Range<Anchor>>>> {
5250 let position = position.to_point_utf16(buffer.read(cx));
5251 self.request_lsp(
5252 buffer,
5253 LanguageServerToQuery::Primary,
5254 PrepareRename { position },
5255 cx,
5256 )
5257 }
5258
5259 pub fn perform_rename<T: ToPointUtf16>(
5260 &self,
5261 buffer: ModelHandle<Buffer>,
5262 position: T,
5263 new_name: String,
5264 push_to_history: bool,
5265 cx: &mut ModelContext<Self>,
5266 ) -> Task<Result<ProjectTransaction>> {
5267 let position = position.to_point_utf16(buffer.read(cx));
5268 self.request_lsp(
5269 buffer,
5270 LanguageServerToQuery::Primary,
5271 PerformRename {
5272 position,
5273 new_name,
5274 push_to_history,
5275 },
5276 cx,
5277 )
5278 }
5279
5280 pub fn on_type_format<T: ToPointUtf16>(
5281 &self,
5282 buffer: ModelHandle<Buffer>,
5283 position: T,
5284 trigger: String,
5285 push_to_history: bool,
5286 cx: &mut ModelContext<Self>,
5287 ) -> Task<Result<Option<Transaction>>> {
5288 let (position, tab_size) = buffer.read_with(cx, |buffer, cx| {
5289 let position = position.to_point_utf16(buffer);
5290 (
5291 position,
5292 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5293 .tab_size,
5294 )
5295 });
5296 self.request_lsp(
5297 buffer.clone(),
5298 LanguageServerToQuery::Primary,
5299 OnTypeFormatting {
5300 position,
5301 trigger,
5302 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5303 push_to_history,
5304 },
5305 cx,
5306 )
5307 }
5308
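/// Requests inlay hints for the given range from the buffer's primary language server (or from
/// the host for remote projects), waiting for the edits that produced the range's anchors to be
/// applied before issuing the request.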
5309 pub fn inlay_hints<T: ToOffset>(
5310 &self,
5311 buffer_handle: ModelHandle<Buffer>,
5312 range: Range<T>,
5313 cx: &mut ModelContext<Self>,
5314 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5315 let buffer = buffer_handle.read(cx);
5316 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5317 let range_start = range.start;
5318 let range_end = range.end;
5319 let buffer_id = buffer.remote_id();
5320 let buffer_version = buffer.version().clone();
5321 let lsp_request = InlayHints { range };
5322
5323 if self.is_local() {
5324 let lsp_request_task = self.request_lsp(
5325 buffer_handle.clone(),
5326 LanguageServerToQuery::Primary,
5327 lsp_request,
5328 cx,
5329 );
5330 cx.spawn(|_, mut cx| async move {
5331 buffer_handle
5332 .update(&mut cx, |buffer, _| {
5333 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5334 })
5335 .await
5336 .context("waiting for inlay hint request range edits")?;
5337 lsp_request_task.await.context("inlay hints LSP request")
5338 })
5339 } else if let Some(project_id) = self.remote_id() {
5340 let client = self.client.clone();
5341 let request = proto::InlayHints {
5342 project_id,
5343 buffer_id,
5344 start: Some(serialize_anchor(&range_start)),
5345 end: Some(serialize_anchor(&range_end)),
5346 version: serialize_version(&buffer_version),
5347 };
5348 cx.spawn(|project, cx| async move {
5349 let response = client
5350 .request(request)
5351 .await
5352 .context("inlay hints proto request")?;
5353 let hints_request_result = LspCommand::response_from_proto(
5354 lsp_request,
5355 response,
5356 project,
5357 buffer_handle.clone(),
5358 cx,
5359 )
5360 .await;
5361
5362 hints_request_result.context("inlay hints proto response conversion")
5363 })
5364 } else {
5365 Task::ready(Err(anyhow!("project does not have a remote id")))
5366 }
5367 }
5368
5369 pub fn resolve_inlay_hint(
5370 &self,
5371 hint: InlayHint,
5372 buffer_handle: ModelHandle<Buffer>,
5373 server_id: LanguageServerId,
5374 cx: &mut ModelContext<Self>,
5375 ) -> Task<anyhow::Result<InlayHint>> {
5376 if self.is_local() {
5377 let buffer = buffer_handle.read(cx);
5378 let (_, lang_server) = if let Some((adapter, server)) =
5379 self.language_server_for_buffer(buffer, server_id, cx)
5380 {
5381 (adapter.clone(), server.clone())
5382 } else {
5383 return Task::ready(Ok(hint));
5384 };
5385 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5386 return Task::ready(Ok(hint));
5387 }
5388
5389 let buffer_snapshot = buffer.snapshot();
5390 cx.spawn(|_, mut cx| async move {
5391 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5392 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5393 );
5394 let resolved_hint = resolve_task
5395 .await
5396 .context("inlay hint resolve LSP request")?;
5397 let resolved_hint = InlayHints::lsp_to_project_hint(
5398 resolved_hint,
5399 &buffer_handle,
5400 server_id,
5401 ResolveState::Resolved,
5402 false,
5403 &mut cx,
5404 )
5405 .await?;
5406 Ok(resolved_hint)
5407 })
5408 } else if let Some(project_id) = self.remote_id() {
5409 let client = self.client.clone();
5410 let request = proto::ResolveInlayHint {
5411 project_id,
5412 buffer_id: buffer_handle.read(cx).remote_id(),
5413 language_server_id: server_id.0 as u64,
5414 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5415 };
5416 cx.spawn(|_, _| async move {
5417 let response = client
5418 .request(request)
5419 .await
5420 .context("inlay hints proto request")?;
5421 match response.hint {
5422 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5423 .context("inlay hints proto resolve response conversion"),
5424 None => Ok(hint),
5425 }
5426 })
5427 } else {
5428 Task::ready(Err(anyhow!("project does not have a remote id")))
5429 }
5430 }
5431
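/// Searches the project for the given query. Local projects are searched directly via
/// `search_local`; remote projects issue a search request to the host and stream the matching
/// buffers and ranges back over the returned channel.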
5432 #[allow(clippy::type_complexity)]
5433 pub fn search(
5434 &self,
5435 query: SearchQuery,
5436 cx: &mut ModelContext<Self>,
5437 ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
5438 if self.is_local() {
5439 self.search_local(query, cx)
5440 } else if let Some(project_id) = self.remote_id() {
5441 let (tx, rx) = smol::channel::unbounded();
5442 let request = self.client.request(query.to_proto(project_id));
5443 cx.spawn(|this, mut cx| async move {
5444 let response = request.await?;
5445 let mut result = HashMap::default();
5446 for location in response.locations {
5447 let target_buffer = this
5448 .update(&mut cx, |this, cx| {
5449 this.wait_for_remote_buffer(location.buffer_id, cx)
5450 })
5451 .await?;
5452 let start = location
5453 .start
5454 .and_then(deserialize_anchor)
5455 .ok_or_else(|| anyhow!("missing target start"))?;
5456 let end = location
5457 .end
5458 .and_then(deserialize_anchor)
5459 .ok_or_else(|| anyhow!("missing target end"))?;
5460 result
5461 .entry(target_buffer)
5462 .or_insert(Vec::new())
5463 .push(start..end)
5464 }
5465 for (buffer, ranges) in result {
5466 let _ = tx.send((buffer, ranges)).await;
5467 }
5468 Result::<(), anyhow::Error>::Ok(())
5469 })
5470 .detach_and_log_err(cx);
5471 rx
5472 } else {
5473 unimplemented!();
5474 }
5475 }
5476
5477 pub fn search_local(
5478 &self,
5479 query: SearchQuery,
5480 cx: &mut ModelContext<Self>,
5481 ) -> Receiver<(ModelHandle<Buffer>, Vec<Range<Anchor>>)> {
5482 // Local search is split into several phases.
5483 // TL;DR is that we do 2 passes; an initial pass to pick files which contain at least one match,
5484 // and a second pass that finds the positions of all the matches in those candidate files.
5485 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5486 //
5487 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5488 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5489 //
5490 // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
5491 // Then, we go through the worktrees and check for files that match the query. If a file has an opened version, we skip scanning
5492 // the FS version of that file altogether - after all, what we have in memory is more up-to-date than what's on the FS.
5493 // 2. At this point, we have a list of all potentially matching buffers/files.
5494 // We sort that list by buffer path - this list is retained for later use.
5495 // We ensure that all buffers are now opened and available in project.
5496 // 3. We run a scan over all the candidate buffers on multiple background threads.
5497 // We cannot assume that there will even be a match - while at least one match
5498 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
5499 // There is also an auxiliary background thread responsible for result gathering.
5500 // This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
5501 // it records it. It reports matches in sorted order, even though it may receive them out of order.
5502 // As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
5503 // entry - which might already be available thanks to out-of-order processing.
5504 //
5505 // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
5506 // This however would mean that project search (that is the main user of this function) would have to do the sorting itself, on the go.
5507 // This isn't as straightforward as running an insertion sort sadly, and would also mean that it would have to care about maintaining match index
5508 // in face of constantly updating list of sorted matches.
5509 // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
5510 let snapshots = self
5511 .visible_worktrees(cx)
5512 .filter_map(|tree| {
5513 let tree = tree.read(cx).as_local()?;
5514 Some(tree.snapshot())
5515 })
5516 .collect::<Vec<_>>();
5517
5518 let background = cx.background().clone();
5519 let path_count: usize = snapshots
5520 .iter()
5521 .map(|s| {
5522 if query.include_ignored() {
5523 s.file_count()
5524 } else {
5525 s.visible_file_count()
5526 }
5527 })
5528 .sum();
5529 if path_count == 0 {
5530 let (_, rx) = smol::channel::bounded(1024);
5531 return rx;
5532 }
5533 let workers = background.num_cpus().min(path_count);
5534 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5535 let mut unnamed_files = vec![];
5536 let opened_buffers = self
5537 .opened_buffers
5538 .iter()
5539 .filter_map(|(_, b)| {
5540 let buffer = b.upgrade(cx)?;
5541 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5542 let is_ignored = buffer
5543 .project_path(cx)
5544 .and_then(|path| self.entry_for_path(&path, cx))
5545 .map_or(false, |entry| entry.is_ignored);
5546 (is_ignored, buffer.snapshot())
5547 });
5548 if is_ignored && !query.include_ignored() {
5549 return None;
5550 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5551 Some((path.clone(), (buffer, snapshot)))
5552 } else {
5553 unnamed_files.push(buffer);
5554 None
5555 }
5556 })
5557 .collect();
5558 cx.background()
5559 .spawn(Self::background_search(
5560 unnamed_files,
5561 opened_buffers,
5562 cx.background().clone(),
5563 self.fs.clone(),
5564 workers,
5565 query.clone(),
5566 path_count,
5567 snapshots,
5568 matching_paths_tx,
5569 ))
5570 .detach();
5571
5572 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5573 let background = cx.background().clone();
5574 let (result_tx, result_rx) = smol::channel::bounded(1024);
5575 cx.background()
5576 .spawn(async move {
5577 let Ok(buffers) = buffers.await else {
5578 return;
5579 };
5580
5581 let buffers_len = buffers.len();
5582 if buffers_len == 0 {
5583 return;
5584 }
5585 let query = &query;
5586 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5587 background
5588 .scoped(|scope| {
5589 #[derive(Clone)]
5590 struct FinishedStatus {
5591 entry: Option<(ModelHandle<Buffer>, Vec<Range<Anchor>>)>,
5592 buffer_index: SearchMatchCandidateIndex,
5593 }
5594
5595 for _ in 0..workers {
5596 let finished_tx = finished_tx.clone();
5597 let mut buffers_rx = buffers_rx.clone();
5598 scope.spawn(async move {
5599 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5600 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5601 {
5602 if query.file_matches(
5603 snapshot.file().map(|file| file.path().as_ref()),
5604 ) {
5605 query
5606 .search(&snapshot, None)
5607 .await
5608 .iter()
5609 .map(|range| {
5610 snapshot.anchor_before(range.start)
5611 ..snapshot.anchor_after(range.end)
5612 })
5613 .collect()
5614 } else {
5615 Vec::new()
5616 }
5617 } else {
5618 Vec::new()
5619 };
5620
5621 let status = if !buffer_matches.is_empty() {
5622 let entry = if let Some((buffer, _)) = entry.as_ref() {
5623 Some((buffer.clone(), buffer_matches))
5624 } else {
5625 None
5626 };
5627 FinishedStatus {
5628 entry,
5629 buffer_index,
5630 }
5631 } else {
5632 FinishedStatus {
5633 entry: None,
5634 buffer_index,
5635 }
5636 };
5637 if finished_tx.send(status).await.is_err() {
5638 break;
5639 }
5640 }
5641 });
5642 }
5643 // Report sorted matches
5644 scope.spawn(async move {
5645 let mut current_index = 0;
5646 let mut scratch = vec![None; buffers_len];
5647 while let Some(status) = finished_rx.next().await {
5648 debug_assert!(
5649 scratch[status.buffer_index].is_none(),
5650 "Got match status of position {} twice",
5651 status.buffer_index
5652 );
5653 let index = status.buffer_index;
5654 scratch[index] = Some(status);
5655 while current_index < buffers_len {
5656 let Some(current_entry) = scratch[current_index].take() else {
5657 // We intentionally **do not** increment `current_index` here. When the next element
5658 // arrives from `finished_rx`, we will inspect the same position again, hoping it will be
5659 // Some(_) this time.
5660 break;
5661 };
5662 if let Some(entry) = current_entry.entry {
5663 result_tx.send(entry).await.log_err();
5664 }
5665 current_index += 1;
5666 }
5667 if current_index == buffers_len {
5668 break;
5669 }
5670 }
5671 });
5672 })
5673 .await;
5674 })
5675 .detach();
5676 result_rx
5677 }
5678 /// Pick paths that might contain a match for the given search query.
5679 async fn background_search(
5680 unnamed_buffers: Vec<ModelHandle<Buffer>>,
5681 opened_buffers: HashMap<Arc<Path>, (ModelHandle<Buffer>, BufferSnapshot)>,
5682 background: Arc<Background>,
5683 fs: Arc<dyn Fs>,
5684 workers: usize,
5685 query: SearchQuery,
5686 path_count: usize,
5687 snapshots: Vec<LocalSnapshot>,
5688 matching_paths_tx: Sender<SearchMatchCandidate>,
5689 ) {
5690 let fs = &fs;
5691 let query = &query;
5692 let matching_paths_tx = &matching_paths_tx;
5693 let snapshots = &snapshots;
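// Split the candidate paths evenly across the workers, rounding up so every path is covered.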
5694 let paths_per_worker = (path_count + workers - 1) / workers;
5695 for buffer in unnamed_buffers {
5696 matching_paths_tx
5697 .send(SearchMatchCandidate::OpenBuffer {
5698 buffer: buffer.clone(),
5699 path: None,
5700 })
5701 .await
5702 .log_err();
5703 }
5704 for (path, (buffer, _)) in opened_buffers.iter() {
5705 matching_paths_tx
5706 .send(SearchMatchCandidate::OpenBuffer {
5707 buffer: buffer.clone(),
5708 path: Some(path.clone()),
5709 })
5710 .await
5711 .log_err();
5712 }
5713
5714 background
5715 .scoped(|scope| {
5716 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5717
5718 for worker_ix in 0..workers {
5719 let worker_start_ix = worker_ix * paths_per_worker;
5720 let worker_end_ix = worker_start_ix + paths_per_worker;
5721 let opened_buffers = opened_buffers.clone();
5722 let limiter = Arc::clone(&max_concurrent_workers);
5723 scope.spawn(async move {
5724 let _guard = limiter.acquire().await;
5725 let mut snapshot_start_ix = 0;
5726 let mut abs_path = PathBuf::new();
5727 for snapshot in snapshots {
5728 let snapshot_end_ix = snapshot_start_ix
5729 + if query.include_ignored() {
5730 snapshot.file_count()
5731 } else {
5732 snapshot.visible_file_count()
5733 };
5734 if worker_end_ix <= snapshot_start_ix {
5735 break;
5736 } else if worker_start_ix > snapshot_end_ix {
5737 snapshot_start_ix = snapshot_end_ix;
5738 continue;
5739 } else {
5740 let start_in_snapshot =
5741 worker_start_ix.saturating_sub(snapshot_start_ix);
5742 let end_in_snapshot =
5743 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5744
5745 for entry in snapshot
5746 .files(query.include_ignored(), start_in_snapshot)
5747 .take(end_in_snapshot - start_in_snapshot)
5748 {
5749 if matching_paths_tx.is_closed() {
5750 break;
5751 }
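// Skip entries that are already covered by an open buffer; those candidates were sent above.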
5752 if opened_buffers.contains_key(&entry.path) {
5753 continue;
5754 }
5755 let matches = if query.file_matches(Some(&entry.path)) {
5756 abs_path.clear();
5757 abs_path.push(&snapshot.abs_path());
5758 abs_path.push(&entry.path);
5759 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5760 {
5761 query.detect(file).unwrap_or(false)
5762 } else {
5763 false
5764 }
5765 } else {
5766 false
5767 };
5768
5769 if matches {
5770 let project_path = SearchMatchCandidate::Path {
5771 worktree_id: snapshot.id(),
5772 path: entry.path.clone(),
5773 is_ignored: entry.is_ignored,
5774 };
5775 if matching_paths_tx.send(project_path).await.is_err() {
5776 break;
5777 }
5778 }
5779 }
5780
5781 snapshot_start_ix = snapshot_end_ix;
5782 }
5783 }
5784 });
5785 }
5786
5787 if query.include_ignored() {
5788 for snapshot in snapshots {
5789 for ignored_entry in snapshot
5790 .entries(query.include_ignored())
5791 .filter(|e| e.is_ignored)
5792 {
5793 let limiter = Arc::clone(&max_concurrent_workers);
5794 scope.spawn(async move {
5795 let _guard = limiter.acquire().await;
5796 let mut ignored_paths_to_process =
5797 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
5798 while let Some(ignored_abs_path) =
5799 ignored_paths_to_process.pop_front()
5800 {
5801 if !query.file_matches(Some(&ignored_abs_path))
5802 || snapshot.is_path_excluded(&ignored_abs_path)
5803 {
5804 continue;
5805 }
5806 if let Some(fs_metadata) = fs
5807 .metadata(&ignored_abs_path)
5808 .await
5809 .with_context(|| {
5810 format!("fetching fs metadata for {ignored_abs_path:?}")
5811 })
5812 .log_err()
5813 .flatten()
5814 {
5815 if fs_metadata.is_dir {
5816 if let Some(mut subfiles) = fs
5817 .read_dir(&ignored_abs_path)
5818 .await
5819 .with_context(|| {
5820 format!(
5821 "listing ignored path {ignored_abs_path:?}"
5822 )
5823 })
5824 .log_err()
5825 {
5826 while let Some(subfile) = subfiles.next().await {
5827 if let Some(subfile) = subfile.log_err() {
5828 ignored_paths_to_process.push_back(subfile);
5829 }
5830 }
5831 }
5832 } else if !fs_metadata.is_symlink {
5833 let matches = if let Some(file) = fs
5834 .open_sync(&ignored_abs_path)
5835 .await
5836 .with_context(|| {
5837 format!(
5838 "Opening ignored path {ignored_abs_path:?}"
5839 )
5840 })
5841 .log_err()
5842 {
5843 query.detect(file).unwrap_or(false)
5844 } else {
5845 false
5846 };
5847 if matches {
5848 let project_path = SearchMatchCandidate::Path {
5849 worktree_id: snapshot.id(),
5850 path: Arc::from(
5851 ignored_abs_path
5852 .strip_prefix(snapshot.abs_path())
5853 .expect(
5854 "scanning worktree-related files",
5855 ),
5856 ),
5857 is_ignored: true,
5858 };
5859 if matching_paths_tx
5860 .send(project_path)
5861 .await
5862 .is_err()
5863 {
5864 return;
5865 }
5866 }
5867 }
5868 }
5869 }
5870 });
5871 }
5872 }
5873 }
5874 })
5875 .await;
5876 }
5877
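/// Sends an LSP request for the given buffer, either to a locally running language server
/// or, for remote projects, to the host over RPC. Falls back to a default response when no
/// suitable server is available or the server lacks the required capability.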
5878 fn request_lsp<R: LspCommand>(
5879 &self,
5880 buffer_handle: ModelHandle<Buffer>,
5881 server: LanguageServerToQuery,
5882 request: R,
5883 cx: &mut ModelContext<Self>,
5884 ) -> Task<Result<R::Response>>
5885 where
5886 <R::LspRequest as lsp::request::Request>::Result: Send,
5887 {
5888 let buffer = buffer_handle.read(cx);
5889 if self.is_local() {
5890 let language_server = match server {
5891 LanguageServerToQuery::Primary => {
5892 match self.primary_language_server_for_buffer(buffer, cx) {
5893 Some((_, server)) => Some(Arc::clone(server)),
5894 None => return Task::ready(Ok(Default::default())),
5895 }
5896 }
5897 LanguageServerToQuery::Other(id) => self
5898 .language_server_for_buffer(buffer, id, cx)
5899 .map(|(_, server)| Arc::clone(server)),
5900 };
5901 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
5902 if let (Some(file), Some(language_server)) = (file, language_server) {
5903 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
5904 return cx.spawn(|this, cx| async move {
5905 if !request.check_capabilities(language_server.capabilities()) {
5906 return Ok(Default::default());
5907 }
5908
5909 let result = language_server.request::<R::LspRequest>(lsp_params).await;
5910 let response = match result {
5911 Ok(response) => response,
5912
5913 Err(err) => {
5914 log::warn!(
5915 "Generic lsp request to {} failed: {}",
5916 language_server.name(),
5917 err
5918 );
5919 return Err(err);
5920 }
5921 };
5922
5923 request
5924 .response_from_lsp(
5925 response,
5926 this,
5927 buffer_handle,
5928 language_server.server_id(),
5929 cx,
5930 )
5931 .await
5932 });
5933 }
5934 } else if let Some(project_id) = self.remote_id() {
5935 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
5936 }
5937
5938 Task::ready(Ok(Default::default()))
5939 }
5940
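/// Forwards an LSP command to the host of a shared project by converting it to its proto
/// representation, then converts the host's reply back into the command's response type.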
5941 fn send_lsp_proto_request<R: LspCommand>(
5942 &self,
5943 buffer: ModelHandle<Buffer>,
5944 project_id: u64,
5945 request: R,
5946 cx: &mut ModelContext<'_, Project>,
5947 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
5948 let rpc = self.client.clone();
5949 let message = request.to_proto(project_id, buffer.read(cx));
5950 cx.spawn_weak(|this, cx| async move {
5951 // Ensure the project is still alive by the time the task
5952 // is scheduled.
5953 this.upgrade(&cx)
5954 .ok_or_else(|| anyhow!("project dropped"))?;
5955 let response = rpc.request(message).await?;
5956 let this = this
5957 .upgrade(&cx)
5958 .ok_or_else(|| anyhow!("project dropped"))?;
5959 if this.read_with(&cx, |this, _| this.is_read_only()) {
5960 Err(anyhow!("disconnected before completing request"))
5961 } else {
5962 request
5963 .response_from_proto(response, this, buffer, cx)
5964 .await
5965 }
5966 })
5967 }
5968
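/// Drains the matching-path channel, sorts the candidates by path (ignored entries last),
/// and opens a buffer for each candidate, streaming each buffer and its snapshot together
/// with its index in the sorted order. The full sorted candidate list is also delivered on
/// the returned oneshot channel.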
5969 fn sort_candidates_and_open_buffers(
5970 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
5971 cx: &mut ModelContext<Self>,
5972 ) -> (
5973 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
5974 Receiver<(
5975 Option<(ModelHandle<Buffer>, BufferSnapshot)>,
5976 SearchMatchCandidateIndex,
5977 )>,
5978 ) {
5979 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
5980 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
5981 cx.spawn(|this, cx| async move {
5982 let mut buffers = Vec::new();
5983 let mut ignored_buffers = Vec::new();
5984 while let Some(entry) = matching_paths_rx.next().await {
5985 if matches!(
5986 entry,
5987 SearchMatchCandidate::Path {
5988 is_ignored: true,
5989 ..
5990 }
5991 ) {
5992 ignored_buffers.push(entry);
5993 } else {
5994 buffers.push(entry);
5995 }
5996 }
5997 buffers.sort_by_key(|candidate| candidate.path());
5998 ignored_buffers.sort_by_key(|candidate| candidate.path());
5999 buffers.extend(ignored_buffers);
6000 let matching_paths = buffers.clone();
6001 let _ = sorted_buffers_tx.send(buffers);
6002 for (index, candidate) in matching_paths.into_iter().enumerate() {
6003 if buffers_tx.is_closed() {
6004 break;
6005 }
6006 let this = this.clone();
6007 let buffers_tx = buffers_tx.clone();
6008 cx.spawn(|mut cx| async move {
6009 let buffer = match candidate {
6010 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6011 SearchMatchCandidate::Path {
6012 worktree_id, path, ..
6013 } => this
6014 .update(&mut cx, |this, cx| {
6015 this.open_buffer((worktree_id, path), cx)
6016 })
6017 .await
6018 .log_err(),
6019 };
6020 if let Some(buffer) = buffer {
6021 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
6022 buffers_tx
6023 .send((Some((buffer, snapshot)), index))
6024 .await
6025 .log_err();
6026 } else {
6027 buffers_tx.send((None, index)).await.log_err();
6028 }
6029
6030 Ok::<_, anyhow::Error>(())
6031 })
6032 .detach();
6033 }
6034 })
6035 .detach();
6036 (sorted_buffers_rx, buffers_rx)
6037 }
6038
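/// Returns the worktree containing `abs_path` together with the path relative to its root,
/// creating a new local worktree rooted at `abs_path` when no existing worktree contains it.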
6039 pub fn find_or_create_local_worktree(
6040 &mut self,
6041 abs_path: impl AsRef<Path>,
6042 visible: bool,
6043 cx: &mut ModelContext<Self>,
6044 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
6045 let abs_path = abs_path.as_ref();
6046 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6047 Task::ready(Ok((tree, relative_path)))
6048 } else {
6049 let worktree = self.create_local_worktree(abs_path, visible, cx);
6050 cx.foreground()
6051 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6052 }
6053 }
6054
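/// Finds the local worktree whose root contains `abs_path`, returning it along with the
/// remainder of the path relative to that root.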
6055 pub fn find_local_worktree(
6056 &self,
6057 abs_path: &Path,
6058 cx: &AppContext,
6059 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
6060 for tree in &self.worktrees {
6061 if let Some(tree) = tree.upgrade(cx) {
6062 if let Some(relative_path) = tree
6063 .read(cx)
6064 .as_local()
6065 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6066 {
6067 return Some((tree.clone(), relative_path.into()));
6068 }
6069 }
6070 }
6071 None
6072 }
6073
6074 pub fn is_shared(&self) -> bool {
6075 match &self.client_state {
6076 Some(ProjectClientState::Local { .. }) => true,
6077 _ => false,
6078 }
6079 }
6080
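/// Starts loading a local worktree rooted at `abs_path`, deduplicating concurrent requests
/// for the same path via `loading_local_worktrees` so that only one load is in flight.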
6081 fn create_local_worktree(
6082 &mut self,
6083 abs_path: impl AsRef<Path>,
6084 visible: bool,
6085 cx: &mut ModelContext<Self>,
6086 ) -> Task<Result<ModelHandle<Worktree>>> {
6087 let fs = self.fs.clone();
6088 let client = self.client.clone();
6089 let next_entry_id = self.next_entry_id.clone();
6090 let path: Arc<Path> = abs_path.as_ref().into();
6091 let task = self
6092 .loading_local_worktrees
6093 .entry(path.clone())
6094 .or_insert_with(|| {
6095 cx.spawn(|project, mut cx| {
6096 async move {
6097 let worktree = Worktree::local(
6098 client.clone(),
6099 path.clone(),
6100 visible,
6101 fs,
6102 next_entry_id,
6103 &mut cx,
6104 )
6105 .await;
6106
6107 project.update(&mut cx, |project, _| {
6108 project.loading_local_worktrees.remove(&path);
6109 });
6110
6111 let worktree = worktree?;
6112 project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
6113 Ok(worktree)
6114 }
6115 .map_err(Arc::new)
6116 })
6117 .shared()
6118 })
6119 .clone();
6120 cx.foreground().spawn(async move {
6121 match task.await {
6122 Ok(worktree) => Ok(worktree),
6123 Err(err) => Err(anyhow!("{}", err)),
6124 }
6125 })
6126 }
6127
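/// Removes the worktree with the given id (along with any dropped worktree handles),
/// emitting `Event::WorktreeRemoved` and recording the metadata change.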
6128 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6129 self.worktrees.retain(|worktree| {
6130 if let Some(worktree) = worktree.upgrade(cx) {
6131 let id = worktree.read(cx).id();
6132 if id == id_to_remove {
6133 cx.emit(Event::WorktreeRemoved(id));
6134 false
6135 } else {
6136 true
6137 }
6138 } else {
6139 false
6140 }
6141 });
6142 self.metadata_changed(cx);
6143 }
6144
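/// Registers a worktree with the project: subscribes to its entry and git updates when it
/// is local, and retains either a strong or a weak handle depending on whether the project
/// is shared and whether the worktree is visible or remote.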
6145 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
6146 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6147 if worktree.read(cx).is_local() {
6148 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6149 worktree::Event::UpdatedEntries(changes) => {
6150 this.update_local_worktree_buffers(&worktree, changes, cx);
6151 this.update_local_worktree_language_servers(&worktree, changes, cx);
6152 this.update_local_worktree_settings(&worktree, changes, cx);
6153 this.update_prettier_settings(&worktree, changes, cx);
6154 cx.emit(Event::WorktreeUpdatedEntries(
6155 worktree.read(cx).id(),
6156 changes.clone(),
6157 ));
6158 }
6159 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6160 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6161 }
6162 })
6163 .detach();
6164 }
6165
6166 let push_strong_handle = {
6167 let worktree = worktree.read(cx);
6168 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6169 };
6170 if push_strong_handle {
6171 self.worktrees
6172 .push(WorktreeHandle::Strong(worktree.clone()));
6173 } else {
6174 self.worktrees
6175 .push(WorktreeHandle::Weak(worktree.downgrade()));
6176 }
6177
6178 let handle_id = worktree.id();
6179 cx.observe_release(worktree, move |this, worktree, cx| {
6180 let _ = this.remove_worktree(worktree.id(), cx);
6181 cx.update_global::<SettingsStore, _, _>(|store, cx| {
6182 store.clear_local_settings(handle_id, cx).log_err()
6183 });
6184 })
6185 .detach();
6186
6187 cx.emit(Event::WorktreeAdded);
6188 self.metadata_changed(cx);
6189 }
6190
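/// Reconciles open buffers with entry changes in a local worktree, updating each affected
/// buffer's `File` (handling renames and deletions), notifying collaborators, and
/// re-registering renamed buffers with the language servers.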
6191 fn update_local_worktree_buffers(
6192 &mut self,
6193 worktree_handle: &ModelHandle<Worktree>,
6194 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6195 cx: &mut ModelContext<Self>,
6196 ) {
6197 let snapshot = worktree_handle.read(cx).snapshot();
6198
6199 let mut renamed_buffers = Vec::new();
6200 for (path, entry_id, _) in changes {
6201 let worktree_id = worktree_handle.read(cx).id();
6202 let project_path = ProjectPath {
6203 worktree_id,
6204 path: path.clone(),
6205 };
6206
6207 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6208 Some(&buffer_id) => buffer_id,
6209 None => match self.local_buffer_ids_by_path.get(&project_path) {
6210 Some(&buffer_id) => buffer_id,
6211 None => {
6212 continue;
6213 }
6214 },
6215 };
6216
6217 let open_buffer = self.opened_buffers.get(&buffer_id);
6218 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade(cx)) {
6219 buffer
6220 } else {
6221 self.opened_buffers.remove(&buffer_id);
6222 self.local_buffer_ids_by_path.remove(&project_path);
6223 self.local_buffer_ids_by_entry_id.remove(entry_id);
6224 continue;
6225 };
6226
6227 buffer.update(cx, |buffer, cx| {
6228 if let Some(old_file) = File::from_dyn(buffer.file()) {
6229 if old_file.worktree != *worktree_handle {
6230 return;
6231 }
6232
6233 let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
6234 File {
6235 is_local: true,
6236 entry_id: entry.id,
6237 mtime: entry.mtime,
6238 path: entry.path.clone(),
6239 worktree: worktree_handle.clone(),
6240 is_deleted: false,
6241 }
6242 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6243 File {
6244 is_local: true,
6245 entry_id: entry.id,
6246 mtime: entry.mtime,
6247 path: entry.path.clone(),
6248 worktree: worktree_handle.clone(),
6249 is_deleted: false,
6250 }
6251 } else {
6252 File {
6253 is_local: true,
6254 entry_id: old_file.entry_id,
6255 path: old_file.path().clone(),
6256 mtime: old_file.mtime(),
6257 worktree: worktree_handle.clone(),
6258 is_deleted: true,
6259 }
6260 };
6261
6262 let old_path = old_file.abs_path(cx);
6263 if new_file.abs_path(cx) != old_path {
6264 renamed_buffers.push((cx.handle(), old_file.clone()));
6265 self.local_buffer_ids_by_path.remove(&project_path);
6266 self.local_buffer_ids_by_path.insert(
6267 ProjectPath {
6268 worktree_id,
6269 path: path.clone(),
6270 },
6271 buffer_id,
6272 );
6273 }
6274
6275 if new_file.entry_id != *entry_id {
6276 self.local_buffer_ids_by_entry_id.remove(entry_id);
6277 self.local_buffer_ids_by_entry_id
6278 .insert(new_file.entry_id, buffer_id);
6279 }
6280
6281 if new_file != *old_file {
6282 if let Some(project_id) = self.remote_id() {
6283 self.client
6284 .send(proto::UpdateBufferFile {
6285 project_id,
6286 buffer_id: buffer_id as u64,
6287 file: Some(new_file.to_proto()),
6288 })
6289 .log_err();
6290 }
6291
6292 buffer.file_updated(Arc::new(new_file), cx);
6293 }
6294 }
6295 });
6296 }
6297
6298 for (buffer, old_file) in renamed_buffers {
6299 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6300 self.detect_language_for_buffer(&buffer, cx);
6301 self.register_buffer_with_language_servers(&buffer, cx);
6302 }
6303 }
6304
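/// Converts worktree entry changes into `DidChangeWatchedFiles` notifications for every
/// running language server that registered watchers matching the changed paths.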
6305 fn update_local_worktree_language_servers(
6306 &mut self,
6307 worktree_handle: &ModelHandle<Worktree>,
6308 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6309 cx: &mut ModelContext<Self>,
6310 ) {
6311 if changes.is_empty() {
6312 return;
6313 }
6314
6315 let worktree_id = worktree_handle.read(cx).id();
6316 let mut language_server_ids = self
6317 .language_server_ids
6318 .iter()
6319 .filter_map(|((server_worktree_id, _), server_id)| {
6320 (*server_worktree_id == worktree_id).then_some(*server_id)
6321 })
6322 .collect::<Vec<_>>();
6323 language_server_ids.sort();
6324 language_server_ids.dedup();
6325
6326 let abs_path = worktree_handle.read(cx).abs_path();
6327 for server_id in &language_server_ids {
6328 if let Some(LanguageServerState::Running {
6329 server,
6330 watched_paths,
6331 ..
6332 }) = self.language_servers.get(server_id)
6333 {
6334 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
6335 let params = lsp::DidChangeWatchedFilesParams {
6336 changes: changes
6337 .iter()
6338 .filter_map(|(path, _, change)| {
6339 if !watched_paths.is_match(&path) {
6340 return None;
6341 }
6342 let typ = match change {
6343 PathChange::Loaded => return None,
6344 PathChange::Added => lsp::FileChangeType::CREATED,
6345 PathChange::Removed => lsp::FileChangeType::DELETED,
6346 PathChange::Updated => lsp::FileChangeType::CHANGED,
6347 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6348 };
6349 Some(lsp::FileEvent {
6350 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6351 typ,
6352 })
6353 })
6354 .collect(),
6355 };
6356
6357 if !params.changes.is_empty() {
6358 server
6359 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6360 .log_err();
6361 }
6362 }
6363 }
6364 }
6365 }
6366
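/// Recomputes the git diff base for every open or currently loading buffer whose containing
/// repository changed, then pushes the updated diff bases to remote collaborators.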
6367 fn update_local_worktree_buffers_git_repos(
6368 &mut self,
6369 worktree_handle: ModelHandle<Worktree>,
6370 changed_repos: &UpdatedGitRepositoriesSet,
6371 cx: &mut ModelContext<Self>,
6372 ) {
6373 debug_assert!(worktree_handle.read(cx).is_local());
6374
6375 // Identify the loading buffers whose containing repository has changed.
6376 let future_buffers = self
6377 .loading_buffers_by_path
6378 .iter()
6379 .filter_map(|(project_path, receiver)| {
6380 if project_path.worktree_id != worktree_handle.read(cx).id() {
6381 return None;
6382 }
6383 let path = &project_path.path;
6384 changed_repos
6385 .iter()
6386 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6387 let receiver = receiver.clone();
6388 let path = path.clone();
6389 Some(async move {
6390 wait_for_loading_buffer(receiver)
6391 .await
6392 .ok()
6393 .map(|buffer| (buffer, path))
6394 })
6395 })
6396 .collect::<FuturesUnordered<_>>();
6397
6398 // Identify the current buffers whose containing repository has changed.
6399 let current_buffers = self
6400 .opened_buffers
6401 .values()
6402 .filter_map(|buffer| {
6403 let buffer = buffer.upgrade(cx)?;
6404 let file = File::from_dyn(buffer.read(cx).file())?;
6405 if file.worktree != worktree_handle {
6406 return None;
6407 }
6408 let path = file.path();
6409 changed_repos
6410 .iter()
6411 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6412 Some((buffer, path.clone()))
6413 })
6414 .collect::<Vec<_>>();
6415
6416 if future_buffers.len() + current_buffers.len() == 0 {
6417 return;
6418 }
6419
6420 let remote_id = self.remote_id();
6421 let client = self.client.clone();
6422 cx.spawn_weak(move |_, mut cx| async move {
6423 // Wait for all of the buffers to load.
6424 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6425
6426 // Reload the diff base for every buffer whose containing git repository has changed.
6427 let snapshot =
6428 worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
6429 let diff_bases_by_buffer = cx
6430 .background()
6431 .spawn(async move {
6432 future_buffers
6433 .into_iter()
6434 .flatten()
6435 .chain(current_buffers)
6436 .filter_map(|(buffer, path)| {
6437 let (work_directory, repo) =
6438 snapshot.repository_and_work_directory_for_path(&path)?;
6439 let repo = snapshot.get_local_repo(&repo)?;
6440 let relative_path = path.strip_prefix(&work_directory).ok()?;
6441 let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
6442 Some((buffer, base_text))
6443 })
6444 .collect::<Vec<_>>()
6445 })
6446 .await;
6447
6448 // Assign the new diff bases to all of the buffers.
6449 for (buffer, diff_base) in diff_bases_by_buffer {
6450 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6451 buffer.set_diff_base(diff_base.clone(), cx);
6452 buffer.remote_id()
6453 });
6454 if let Some(project_id) = remote_id {
6455 client
6456 .send(proto::UpdateDiffBase {
6457 project_id,
6458 buffer_id,
6459 diff_base,
6460 })
6461 .log_err();
6462 }
6463 }
6464 })
6465 .detach();
6466 }
6467
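/// Reloads local settings files (paths ending in `LOCAL_SETTINGS_RELATIVE_PATH`) that were
/// added, changed, or removed in the worktree, updating the global `SettingsStore` and, when
/// the project is shared, mirroring the new contents to collaborators.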
6468 fn update_local_worktree_settings(
6469 &mut self,
6470 worktree: &ModelHandle<Worktree>,
6471 changes: &UpdatedEntriesSet,
6472 cx: &mut ModelContext<Self>,
6473 ) {
6474 let project_id = self.remote_id();
6475 let worktree_id = worktree.id();
6476 let worktree = worktree.read(cx).as_local().unwrap();
6477 let remote_worktree_id = worktree.id();
6478
6479 let mut settings_contents = Vec::new();
6480 for (path, _, change) in changes.iter() {
6481 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6482 let settings_dir = Arc::from(
6483 path.ancestors()
6484 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6485 .unwrap(),
6486 );
6487 let fs = self.fs.clone();
6488 let removed = *change == PathChange::Removed;
6489 let abs_path = worktree.absolutize(path);
6490 settings_contents.push(async move {
6491 (settings_dir, (!removed).then_some(fs.load(&abs_path).await))
6492 });
6493 }
6494 }
6495
6496 if settings_contents.is_empty() {
6497 return;
6498 }
6499
6500 let client = self.client.clone();
6501 cx.spawn_weak(move |_, mut cx| async move {
6502 let settings_contents: Vec<(Arc<Path>, _)> =
6503 futures::future::join_all(settings_contents).await;
6504 cx.update(|cx| {
6505 cx.update_global::<SettingsStore, _, _>(|store, cx| {
6506 for (directory, file_content) in settings_contents {
6507 let file_content = file_content.and_then(|content| content.log_err());
6508 store
6509 .set_local_settings(
6510 worktree_id,
6511 directory.clone(),
6512 file_content.as_ref().map(String::as_str),
6513 cx,
6514 )
6515 .log_err();
6516 if let Some(remote_id) = project_id {
6517 client
6518 .send(proto::UpdateWorktreeSettings {
6519 project_id: remote_id,
6520 worktree_id: remote_worktree_id.to_proto(),
6521 path: directory.to_string_lossy().into_owned(),
6522 content: file_content,
6523 })
6524 .log_err();
6525 }
6526 }
6527 });
6528 });
6529 })
6530 .detach();
6531 }
6532
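/// Detects changes to prettier config files (ignoring anything under `node_modules`) and
/// clears the cache of every prettier instance associated with the affected worktree,
/// including the default prettier instance.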
6533 fn update_prettier_settings(
6534 &self,
6535 worktree: &ModelHandle<Worktree>,
6536 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6537 cx: &mut ModelContext<'_, Project>,
6538 ) {
6539 let prettier_config_files = Prettier::CONFIG_FILE_NAMES
6540 .iter()
6541 .map(Path::new)
6542 .collect::<HashSet<_>>();
6543
6544 let prettier_config_file_changed = changes
6545 .iter()
6546 .filter(|(_, _, change)| !matches!(change, PathChange::Loaded))
6547 .filter(|(path, _, _)| {
6548 !path
6549 .components()
6550 .any(|component| component.as_os_str().to_string_lossy() == "node_modules")
6551 })
6552 .find(|(path, _, _)| prettier_config_files.contains(path.as_ref()));
6553 let current_worktree_id = worktree.read(cx).id();
6554 if let Some((config_path, _, _)) = prettier_config_file_changed {
6555 log::info!(
6556 "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
6557 );
6558 let prettiers_to_reload =
6559 self.prettiers_per_worktree
6560 .get(&current_worktree_id)
6561 .iter()
6562 .flat_map(|prettier_paths| prettier_paths.iter())
6563 .flatten()
6564 .filter_map(|prettier_path| {
6565 Some((
6566 current_worktree_id,
6567 Some(prettier_path.clone()),
6568 self.prettier_instances.get(prettier_path)?.clone(),
6569 ))
6570 })
6571 .chain(self.default_prettier.instance().map(|default_prettier| {
6572 (current_worktree_id, None, default_prettier.clone())
6573 }))
6574 .collect::<Vec<_>>();
6575
6576 cx.background()
6577 .spawn(async move {
6578 let _: Vec<()> = future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
6579 async move {
6580 if let PrettierProcess::Running(prettier) = prettier_task.await {
6581 if let Err(e) = prettier
6582 .clear_cache()
6583 .await {
6584 match prettier_path {
6585 Some(prettier_path) => log::error!(
6586 "Failed to clear prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
6587 ),
6588 None => log::error!(
6589 "Failed to clear default prettier cache for worktree {worktree_id:?} on prettier settings update: {e:#}"
6590 ),
6591 }
6592 }
6593 }
6594 }
6595 }))
6596 .await;
6597 })
6598 .detach();
6599 }
6600 }
6601
6602 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6603 let new_active_entry = entry.and_then(|project_path| {
6604 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6605 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6606 Some(entry.id)
6607 });
6608 if new_active_entry != self.active_entry {
6609 self.active_entry = new_active_entry;
6610 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6611 }
6612 }
6613
6614 pub fn language_servers_running_disk_based_diagnostics(
6615 &self,
6616 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6617 self.language_server_statuses
6618 .iter()
6619 .filter_map(|(id, status)| {
6620 if status.has_pending_diagnostic_updates {
6621 Some(*id)
6622 } else {
6623 None
6624 }
6625 })
6626 }
6627
6628 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
6629 let mut summary = DiagnosticSummary::default();
6630 for (_, _, path_summary) in self.diagnostic_summaries(cx) {
6631 summary.error_count += path_summary.error_count;
6632 summary.warning_count += path_summary.warning_count;
6633 }
6634 summary
6635 }
6636
6637 pub fn diagnostic_summaries<'a>(
6638 &'a self,
6639 cx: &'a AppContext,
6640 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6641 self.visible_worktrees(cx).flat_map(move |worktree| {
6642 let worktree = worktree.read(cx);
6643 let worktree_id = worktree.id();
6644 worktree
6645 .diagnostic_summaries()
6646 .map(move |(path, server_id, summary)| {
6647 (ProjectPath { worktree_id, path }, server_id, summary)
6648 })
6649 })
6650 }
6651
6652 pub fn disk_based_diagnostics_started(
6653 &mut self,
6654 language_server_id: LanguageServerId,
6655 cx: &mut ModelContext<Self>,
6656 ) {
6657 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6658 }
6659
6660 pub fn disk_based_diagnostics_finished(
6661 &mut self,
6662 language_server_id: LanguageServerId,
6663 cx: &mut ModelContext<Self>,
6664 ) {
6665 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6666 }
6667
6668 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6669 self.active_entry
6670 }
6671
6672 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6673 self.worktree_for_id(path.worktree_id, cx)?
6674 .read(cx)
6675 .entry_for_path(&path.path)
6676 .cloned()
6677 }
6678
6679 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6680 let worktree = self.worktree_for_entry(entry_id, cx)?;
6681 let worktree = worktree.read(cx);
6682 let worktree_id = worktree.id();
6683 let path = worktree.entry_for_id(entry_id)?.path.clone();
6684 Some(ProjectPath { worktree_id, path })
6685 }
6686
6687 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6688 let workspace_root = self
6689 .worktree_for_id(project_path.worktree_id, cx)?
6690 .read(cx)
6691 .abs_path();
6692 let project_path = project_path.path.as_ref();
6693
6694 Some(if project_path == Path::new("") {
6695 workspace_root.to_path_buf()
6696 } else {
6697 workspace_root.join(project_path)
6698 })
6699 }
6700
6701 // RPC message handlers
6702
6703 async fn handle_unshare_project(
6704 this: ModelHandle<Self>,
6705 _: TypedEnvelope<proto::UnshareProject>,
6706 _: Arc<Client>,
6707 mut cx: AsyncAppContext,
6708 ) -> Result<()> {
6709 this.update(&mut cx, |this, cx| {
6710 if this.is_local() {
6711 this.unshare(cx)?;
6712 } else {
6713 this.disconnected_from_host(cx);
6714 }
6715 Ok(())
6716 })
6717 }
6718
6719 async fn handle_add_collaborator(
6720 this: ModelHandle<Self>,
6721 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6722 _: Arc<Client>,
6723 mut cx: AsyncAppContext,
6724 ) -> Result<()> {
6725 let collaborator = envelope
6726 .payload
6727 .collaborator
6728 .take()
6729 .ok_or_else(|| anyhow!("empty collaborator"))?;
6730
6731 let collaborator = Collaborator::from_proto(collaborator)?;
6732 this.update(&mut cx, |this, cx| {
6733 this.shared_buffers.remove(&collaborator.peer_id);
6734 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6735 this.collaborators
6736 .insert(collaborator.peer_id, collaborator);
6737 cx.notify();
6738 });
6739
6740 Ok(())
6741 }
6742
6743 async fn handle_update_project_collaborator(
6744 this: ModelHandle<Self>,
6745 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6746 _: Arc<Client>,
6747 mut cx: AsyncAppContext,
6748 ) -> Result<()> {
6749 let old_peer_id = envelope
6750 .payload
6751 .old_peer_id
6752 .ok_or_else(|| anyhow!("missing old peer id"))?;
6753 let new_peer_id = envelope
6754 .payload
6755 .new_peer_id
6756 .ok_or_else(|| anyhow!("missing new peer id"))?;
6757 this.update(&mut cx, |this, cx| {
6758 let collaborator = this
6759 .collaborators
6760 .remove(&old_peer_id)
6761 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6762 let is_host = collaborator.replica_id == 0;
6763 this.collaborators.insert(new_peer_id, collaborator);
6764
6765 let buffers = this.shared_buffers.remove(&old_peer_id);
6766 log::info!(
6767 "peer {} became {}. moving buffers {:?}",
6768 old_peer_id,
6769 new_peer_id,
6770 &buffers
6771 );
6772 if let Some(buffers) = buffers {
6773 this.shared_buffers.insert(new_peer_id, buffers);
6774 }
6775
6776 if is_host {
6777 this.opened_buffers
6778 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
6779 this.buffer_ordered_messages_tx
6780 .unbounded_send(BufferOrderedMessage::Resync)
6781 .unwrap();
6782 }
6783
6784 cx.emit(Event::CollaboratorUpdated {
6785 old_peer_id,
6786 new_peer_id,
6787 });
6788 cx.notify();
6789 Ok(())
6790 })
6791 }
6792
6793 async fn handle_remove_collaborator(
6794 this: ModelHandle<Self>,
6795 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
6796 _: Arc<Client>,
6797 mut cx: AsyncAppContext,
6798 ) -> Result<()> {
6799 this.update(&mut cx, |this, cx| {
6800 let peer_id = envelope
6801 .payload
6802 .peer_id
6803 .ok_or_else(|| anyhow!("invalid peer id"))?;
6804 let replica_id = this
6805 .collaborators
6806 .remove(&peer_id)
6807 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
6808 .replica_id;
6809 for buffer in this.opened_buffers.values() {
6810 if let Some(buffer) = buffer.upgrade(cx) {
6811 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
6812 }
6813 }
6814 this.shared_buffers.remove(&peer_id);
6815
6816 cx.emit(Event::CollaboratorLeft(peer_id));
6817 cx.notify();
6818 Ok(())
6819 })
6820 }
6821
6822 async fn handle_update_project(
6823 this: ModelHandle<Self>,
6824 envelope: TypedEnvelope<proto::UpdateProject>,
6825 _: Arc<Client>,
6826 mut cx: AsyncAppContext,
6827 ) -> Result<()> {
6828 this.update(&mut cx, |this, cx| {
6829 // Don't handle messages that were sent before the response to our request to join the project.
6830 if envelope.message_id > this.join_project_response_message_id {
6831 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6832 }
6833 Ok(())
6834 })
6835 }
6836
6837 async fn handle_update_worktree(
6838 this: ModelHandle<Self>,
6839 envelope: TypedEnvelope<proto::UpdateWorktree>,
6840 _: Arc<Client>,
6841 mut cx: AsyncAppContext,
6842 ) -> Result<()> {
6843 this.update(&mut cx, |this, cx| {
6844 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6845 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6846 worktree.update(cx, |worktree, _| {
6847 let worktree = worktree.as_remote_mut().unwrap();
6848 worktree.update_from_remote(envelope.payload);
6849 });
6850 }
6851 Ok(())
6852 })
6853 }
6854
6855 async fn handle_update_worktree_settings(
6856 this: ModelHandle<Self>,
6857 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
6858 _: Arc<Client>,
6859 mut cx: AsyncAppContext,
6860 ) -> Result<()> {
6861 this.update(&mut cx, |this, cx| {
6862 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6863 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6864 cx.update_global::<SettingsStore, _, _>(|store, cx| {
6865 store
6866 .set_local_settings(
6867 worktree.id(),
6868 PathBuf::from(&envelope.payload.path).into(),
6869 envelope.payload.content.as_ref().map(String::as_str),
6870 cx,
6871 )
6872 .log_err();
6873 });
6874 }
6875 Ok(())
6876 })
6877 }
6878
6879 async fn handle_create_project_entry(
6880 this: ModelHandle<Self>,
6881 envelope: TypedEnvelope<proto::CreateProjectEntry>,
6882 _: Arc<Client>,
6883 mut cx: AsyncAppContext,
6884 ) -> Result<proto::ProjectEntryResponse> {
6885 let worktree = this.update(&mut cx, |this, cx| {
6886 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6887 this.worktree_for_id(worktree_id, cx)
6888 .ok_or_else(|| anyhow!("worktree not found"))
6889 })?;
6890 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6891 let entry = worktree
6892 .update(&mut cx, |worktree, cx| {
6893 let worktree = worktree.as_local_mut().unwrap();
6894 let path = PathBuf::from(envelope.payload.path);
6895 worktree.create_entry(path, envelope.payload.is_directory, cx)
6896 })
6897 .await?;
6898 Ok(proto::ProjectEntryResponse {
6899 entry: Some((&entry).into()),
6900 worktree_scan_id: worktree_scan_id as u64,
6901 })
6902 }
6903
6904 async fn handle_rename_project_entry(
6905 this: ModelHandle<Self>,
6906 envelope: TypedEnvelope<proto::RenameProjectEntry>,
6907 _: Arc<Client>,
6908 mut cx: AsyncAppContext,
6909 ) -> Result<proto::ProjectEntryResponse> {
6910 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6911 let worktree = this.read_with(&cx, |this, cx| {
6912 this.worktree_for_entry(entry_id, cx)
6913 .ok_or_else(|| anyhow!("worktree not found"))
6914 })?;
6915 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6916 let entry = worktree
6917 .update(&mut cx, |worktree, cx| {
6918 let new_path = PathBuf::from(envelope.payload.new_path);
6919 worktree
6920 .as_local_mut()
6921 .unwrap()
6922 .rename_entry(entry_id, new_path, cx)
6923 .ok_or_else(|| anyhow!("invalid entry"))
6924 })?
6925 .await?;
6926 Ok(proto::ProjectEntryResponse {
6927 entry: Some((&entry).into()),
6928 worktree_scan_id: worktree_scan_id as u64,
6929 })
6930 }
6931
6932 async fn handle_copy_project_entry(
6933 this: ModelHandle<Self>,
6934 envelope: TypedEnvelope<proto::CopyProjectEntry>,
6935 _: Arc<Client>,
6936 mut cx: AsyncAppContext,
6937 ) -> Result<proto::ProjectEntryResponse> {
6938 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6939 let worktree = this.read_with(&cx, |this, cx| {
6940 this.worktree_for_entry(entry_id, cx)
6941 .ok_or_else(|| anyhow!("worktree not found"))
6942 })?;
6943 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6944 let entry = worktree
6945 .update(&mut cx, |worktree, cx| {
6946 let new_path = PathBuf::from(envelope.payload.new_path);
6947 worktree
6948 .as_local_mut()
6949 .unwrap()
6950 .copy_entry(entry_id, new_path, cx)
6951 .ok_or_else(|| anyhow!("invalid entry"))
6952 })?
6953 .await?;
6954 Ok(proto::ProjectEntryResponse {
6955 entry: Some((&entry).into()),
6956 worktree_scan_id: worktree_scan_id as u64,
6957 })
6958 }
6959
6960 async fn handle_delete_project_entry(
6961 this: ModelHandle<Self>,
6962 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
6963 _: Arc<Client>,
6964 mut cx: AsyncAppContext,
6965 ) -> Result<proto::ProjectEntryResponse> {
6966 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6967
6968 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));
6969
6970 let worktree = this.read_with(&cx, |this, cx| {
6971 this.worktree_for_entry(entry_id, cx)
6972 .ok_or_else(|| anyhow!("worktree not found"))
6973 })?;
6974 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
6975 worktree
6976 .update(&mut cx, |worktree, cx| {
6977 worktree
6978 .as_local_mut()
6979 .unwrap()
6980 .delete_entry(entry_id, cx)
6981 .ok_or_else(|| anyhow!("invalid entry"))
6982 })?
6983 .await?;
6984 Ok(proto::ProjectEntryResponse {
6985 entry: None,
6986 worktree_scan_id: worktree_scan_id as u64,
6987 })
6988 }
6989
6990 async fn handle_expand_project_entry(
6991 this: ModelHandle<Self>,
6992 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
6993 _: Arc<Client>,
6994 mut cx: AsyncAppContext,
6995 ) -> Result<proto::ExpandProjectEntryResponse> {
6996 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6997 let worktree = this
6998 .read_with(&cx, |this, cx| this.worktree_for_entry(entry_id, cx))
6999 .ok_or_else(|| anyhow!("invalid request"))?;
7000 worktree
7001 .update(&mut cx, |worktree, cx| {
7002 worktree
7003 .as_local_mut()
7004 .unwrap()
7005 .expand_entry(entry_id, cx)
7006 .ok_or_else(|| anyhow!("invalid entry"))
7007 })?
7008 .await?;
7009 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id()) as u64;
7010 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7011 }
7012
7013 async fn handle_update_diagnostic_summary(
7014 this: ModelHandle<Self>,
7015 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7016 _: Arc<Client>,
7017 mut cx: AsyncAppContext,
7018 ) -> Result<()> {
7019 this.update(&mut cx, |this, cx| {
7020 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7021 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7022 if let Some(summary) = envelope.payload.summary {
7023 let project_path = ProjectPath {
7024 worktree_id,
7025 path: Path::new(&summary.path).into(),
7026 };
7027 worktree.update(cx, |worktree, _| {
7028 worktree
7029 .as_remote_mut()
7030 .unwrap()
7031 .update_diagnostic_summary(project_path.path.clone(), &summary);
7032 });
7033 cx.emit(Event::DiagnosticsUpdated {
7034 language_server_id: LanguageServerId(summary.language_server_id as usize),
7035 path: project_path,
7036 });
7037 }
7038 }
7039 Ok(())
7040 })
7041 }
7042
7043 async fn handle_start_language_server(
7044 this: ModelHandle<Self>,
7045 envelope: TypedEnvelope<proto::StartLanguageServer>,
7046 _: Arc<Client>,
7047 mut cx: AsyncAppContext,
7048 ) -> Result<()> {
7049 let server = envelope
7050 .payload
7051 .server
7052 .ok_or_else(|| anyhow!("invalid server"))?;
7053 this.update(&mut cx, |this, cx| {
7054 this.language_server_statuses.insert(
7055 LanguageServerId(server.id as usize),
7056 LanguageServerStatus {
7057 name: server.name,
7058 pending_work: Default::default(),
7059 has_pending_diagnostic_updates: false,
7060 progress_tokens: Default::default(),
7061 },
7062 );
7063 cx.notify();
7064 });
7065 Ok(())
7066 }
7067
7068 async fn handle_update_language_server(
7069 this: ModelHandle<Self>,
7070 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7071 _: Arc<Client>,
7072 mut cx: AsyncAppContext,
7073 ) -> Result<()> {
7074 this.update(&mut cx, |this, cx| {
7075 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7076
7077 match envelope
7078 .payload
7079 .variant
7080 .ok_or_else(|| anyhow!("invalid variant"))?
7081 {
7082 proto::update_language_server::Variant::WorkStart(payload) => {
7083 this.on_lsp_work_start(
7084 language_server_id,
7085 payload.token,
7086 LanguageServerProgress {
7087 message: payload.message,
7088 percentage: payload.percentage.map(|p| p as usize),
7089 last_update_at: Instant::now(),
7090 },
7091 cx,
7092 );
7093 }
7094
7095 proto::update_language_server::Variant::WorkProgress(payload) => {
7096 this.on_lsp_work_progress(
7097 language_server_id,
7098 payload.token,
7099 LanguageServerProgress {
7100 message: payload.message,
7101 percentage: payload.percentage.map(|p| p as usize),
7102 last_update_at: Instant::now(),
7103 },
7104 cx,
7105 );
7106 }
7107
7108 proto::update_language_server::Variant::WorkEnd(payload) => {
7109 this.on_lsp_work_end(language_server_id, payload.token, cx);
7110 }
7111
7112 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7113 this.disk_based_diagnostics_started(language_server_id, cx);
7114 }
7115
7116 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7117 this.disk_based_diagnostics_finished(language_server_id, cx)
7118 }
7119 }
7120
7121 Ok(())
7122 })
7123 }
7124
7125 async fn handle_update_buffer(
7126 this: ModelHandle<Self>,
7127 envelope: TypedEnvelope<proto::UpdateBuffer>,
7128 _: Arc<Client>,
7129 mut cx: AsyncAppContext,
7130 ) -> Result<proto::Ack> {
7131 this.update(&mut cx, |this, cx| {
7132 let payload = envelope.payload.clone();
7133 let buffer_id = payload.buffer_id;
7134 let ops = payload
7135 .operations
7136 .into_iter()
7137 .map(language::proto::deserialize_operation)
7138 .collect::<Result<Vec<_>, _>>()?;
7139 let is_remote = this.is_remote();
7140 match this.opened_buffers.entry(buffer_id) {
7141 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7142 OpenBuffer::Strong(buffer) => {
7143 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7144 }
7145 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7146 OpenBuffer::Weak(_) => {}
7147 },
7148 hash_map::Entry::Vacant(e) => {
7149 assert!(
7150 is_remote,
7151 "received buffer update from {:?}",
7152 envelope.original_sender_id
7153 );
7154 e.insert(OpenBuffer::Operations(ops));
7155 }
7156 }
7157 Ok(proto::Ack {})
7158 })
7159 }
7160
7161 async fn handle_create_buffer_for_peer(
7162 this: ModelHandle<Self>,
7163 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7164 _: Arc<Client>,
7165 mut cx: AsyncAppContext,
7166 ) -> Result<()> {
7167 this.update(&mut cx, |this, cx| {
7168 match envelope
7169 .payload
7170 .variant
7171 .ok_or_else(|| anyhow!("missing variant"))?
7172 {
7173 proto::create_buffer_for_peer::Variant::State(mut state) => {
7174 let mut buffer_file = None;
7175 if let Some(file) = state.file.take() {
7176 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7177 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7178 anyhow!("no worktree found for id {}", file.worktree_id)
7179 })?;
7180 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7181 as Arc<dyn language::File>);
7182 }
7183
7184 let buffer_id = state.id;
7185 let buffer = cx.add_model(|_| {
7186 Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
7187 });
7188 this.incomplete_remote_buffers
7189 .insert(buffer_id, Some(buffer));
7190 }
7191 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7192 let buffer = this
7193 .incomplete_remote_buffers
7194 .get(&chunk.buffer_id)
7195 .cloned()
7196 .flatten()
7197 .ok_or_else(|| {
7198 anyhow!(
7199 "received chunk for buffer {} without initial state",
7200 chunk.buffer_id
7201 )
7202 })?;
7203 let operations = chunk
7204 .operations
7205 .into_iter()
7206 .map(language::proto::deserialize_operation)
7207 .collect::<Result<Vec<_>>>()?;
7208 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7209
7210 if chunk.is_last {
7211 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7212 this.register_buffer(&buffer, cx)?;
7213 }
7214 }
7215 }
7216
7217 Ok(())
7218 })
7219 }
7220
7221 async fn handle_update_diff_base(
7222 this: ModelHandle<Self>,
7223 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7224 _: Arc<Client>,
7225 mut cx: AsyncAppContext,
7226 ) -> Result<()> {
7227 this.update(&mut cx, |this, cx| {
7228 let buffer_id = envelope.payload.buffer_id;
7229 let diff_base = envelope.payload.diff_base;
7230 if let Some(buffer) = this
7231 .opened_buffers
7232 .get_mut(&buffer_id)
7233 .and_then(|b| b.upgrade(cx))
7234 .or_else(|| {
7235 this.incomplete_remote_buffers
7236 .get(&buffer_id)
7237 .cloned()
7238 .flatten()
7239 })
7240 {
7241 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7242 }
7243 Ok(())
7244 })
7245 }
7246
7247 async fn handle_update_buffer_file(
7248 this: ModelHandle<Self>,
7249 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7250 _: Arc<Client>,
7251 mut cx: AsyncAppContext,
7252 ) -> Result<()> {
7253 let buffer_id = envelope.payload.buffer_id;
7254
7255 this.update(&mut cx, |this, cx| {
7256 let payload = envelope.payload.clone();
7257 if let Some(buffer) = this
7258 .opened_buffers
7259 .get(&buffer_id)
7260 .and_then(|b| b.upgrade(cx))
7261 .or_else(|| {
7262 this.incomplete_remote_buffers
7263 .get(&buffer_id)
7264 .cloned()
7265 .flatten()
7266 })
7267 {
7268 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7269 let worktree = this
7270 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7271 .ok_or_else(|| anyhow!("no such worktree"))?;
7272 let file = File::from_proto(file, worktree, cx)?;
7273 buffer.update(cx, |buffer, cx| {
7274 buffer.file_updated(Arc::new(file), cx);
7275 });
7276 this.detect_language_for_buffer(&buffer, cx);
7277 }
7278 Ok(())
7279 })
7280 }
7281
7282 async fn handle_save_buffer(
7283 this: ModelHandle<Self>,
7284 envelope: TypedEnvelope<proto::SaveBuffer>,
7285 _: Arc<Client>,
7286 mut cx: AsyncAppContext,
7287 ) -> Result<proto::BufferSaved> {
7288 let buffer_id = envelope.payload.buffer_id;
7289 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
7290 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7291 let buffer = this
7292 .opened_buffers
7293 .get(&buffer_id)
7294 .and_then(|buffer| buffer.upgrade(cx))
7295 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7296 anyhow::Ok((project_id, buffer))
7297 })?;
7298 buffer
7299 .update(&mut cx, |buffer, _| {
7300 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7301 })
7302 .await?;
7303 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
7304
7305 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))
7306 .await?;
7307 Ok(buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
7308 project_id,
7309 buffer_id,
7310 version: serialize_version(buffer.saved_version()),
7311 mtime: Some(buffer.saved_mtime().into()),
7312 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7313 }))
7314 }
7315
7316 async fn handle_reload_buffers(
7317 this: ModelHandle<Self>,
7318 envelope: TypedEnvelope<proto::ReloadBuffers>,
7319 _: Arc<Client>,
7320 mut cx: AsyncAppContext,
7321 ) -> Result<proto::ReloadBuffersResponse> {
7322 let sender_id = envelope.original_sender_id()?;
7323 let reload = this.update(&mut cx, |this, cx| {
7324 let mut buffers = HashSet::default();
7325 for buffer_id in &envelope.payload.buffer_ids {
7326 buffers.insert(
7327 this.opened_buffers
7328 .get(buffer_id)
7329 .and_then(|buffer| buffer.upgrade(cx))
7330 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7331 );
7332 }
7333 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7334 })?;
7335
7336 let project_transaction = reload.await?;
7337 let project_transaction = this.update(&mut cx, |this, cx| {
7338 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7339 });
7340 Ok(proto::ReloadBuffersResponse {
7341 transaction: Some(project_transaction),
7342 })
7343 }
7344
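/// Handles a guest's request to synchronize buffer state (typically after a reconnect):
/// records which buffers the guest has open, reports the host's buffer versions, and streams
/// any missing operations, file metadata, and diff bases back to that guest.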
7345 async fn handle_synchronize_buffers(
7346 this: ModelHandle<Self>,
7347 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7348 _: Arc<Client>,
7349 mut cx: AsyncAppContext,
7350 ) -> Result<proto::SynchronizeBuffersResponse> {
7351 let project_id = envelope.payload.project_id;
7352 let mut response = proto::SynchronizeBuffersResponse {
7353 buffers: Default::default(),
7354 };
7355
7356 this.update(&mut cx, |this, cx| {
7357 let Some(guest_id) = envelope.original_sender_id else {
7358 error!("missing original_sender_id on SynchronizeBuffers request");
7359 return;
7360 };
7361
7362 this.shared_buffers.entry(guest_id).or_default().clear();
7363 for buffer in envelope.payload.buffers {
7364 let buffer_id = buffer.id;
7365 let remote_version = language::proto::deserialize_version(&buffer.version);
7366 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
7367 this.shared_buffers
7368 .entry(guest_id)
7369 .or_default()
7370 .insert(buffer_id);
7371
7372 let buffer = buffer.read(cx);
7373 response.buffers.push(proto::BufferVersion {
7374 id: buffer_id,
7375 version: language::proto::serialize_version(&buffer.version),
7376 });
7377
7378 let operations = buffer.serialize_ops(Some(remote_version), cx);
7379 let client = this.client.clone();
7380 if let Some(file) = buffer.file() {
7381 client
7382 .send(proto::UpdateBufferFile {
7383 project_id,
7384 buffer_id: buffer_id as u64,
7385 file: Some(file.to_proto()),
7386 })
7387 .log_err();
7388 }
7389
7390 client
7391 .send(proto::UpdateDiffBase {
7392 project_id,
7393 buffer_id: buffer_id as u64,
7394 diff_base: buffer.diff_base().map(Into::into),
7395 })
7396 .log_err();
7397
7398 client
7399 .send(proto::BufferReloaded {
7400 project_id,
7401 buffer_id,
7402 version: language::proto::serialize_version(buffer.saved_version()),
7403 mtime: Some(buffer.saved_mtime().into()),
7404 fingerprint: language::proto::serialize_fingerprint(
7405 buffer.saved_version_fingerprint(),
7406 ),
7407 line_ending: language::proto::serialize_line_ending(
7408 buffer.line_ending(),
7409 ) as i32,
7410 })
7411 .log_err();
7412
7413 cx.background()
7414 .spawn(
7415 async move {
7416 let operations = operations.await;
7417 for chunk in split_operations(operations) {
7418 client
7419 .request(proto::UpdateBuffer {
7420 project_id,
7421 buffer_id,
7422 operations: chunk,
7423 })
7424 .await?;
7425 }
7426 anyhow::Ok(())
7427 }
7428 .log_err(),
7429 )
7430 .detach();
7431 }
7432 }
7433 });
7434
7435 Ok(response)
7436 }
7437
7438 async fn handle_format_buffers(
7439 this: ModelHandle<Self>,
7440 envelope: TypedEnvelope<proto::FormatBuffers>,
7441 _: Arc<Client>,
7442 mut cx: AsyncAppContext,
7443 ) -> Result<proto::FormatBuffersResponse> {
7444 let sender_id = envelope.original_sender_id()?;
7445 let format = this.update(&mut cx, |this, cx| {
7446 let mut buffers = HashSet::default();
7447 for buffer_id in &envelope.payload.buffer_ids {
7448 buffers.insert(
7449 this.opened_buffers
7450 .get(buffer_id)
7451 .and_then(|buffer| buffer.upgrade(cx))
7452 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7453 );
7454 }
7455 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7456 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7457 })?;
7458
7459 let project_transaction = format.await?;
7460 let project_transaction = this.update(&mut cx, |this, cx| {
7461 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7462 });
7463 Ok(proto::FormatBuffersResponse {
7464 transaction: Some(project_transaction),
7465 })
7466 }
7467
7468 async fn handle_apply_additional_edits_for_completion(
7469 this: ModelHandle<Self>,
7470 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7471 _: Arc<Client>,
7472 mut cx: AsyncAppContext,
7473 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7474 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7475 let buffer = this
7476 .opened_buffers
7477 .get(&envelope.payload.buffer_id)
7478 .and_then(|buffer| buffer.upgrade(cx))
7479 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7480 let language = buffer.read(cx).language();
7481 let completion = language::proto::deserialize_completion(
7482 envelope
7483 .payload
7484 .completion
7485 .ok_or_else(|| anyhow!("invalid completion"))?,
7486 language.cloned(),
7487 );
7488 Ok::<_, anyhow::Error>((buffer, completion))
7489 })?;
7490
7491 let completion = completion.await?;
7492
7493 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7494 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7495 });
7496
7497 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7498 transaction: apply_additional_edits
7499 .await?
7500 .as_ref()
7501 .map(language::proto::serialize_transaction),
7502 })
7503 }
7504
7505 async fn handle_resolve_completion_documentation(
7506 this: ModelHandle<Self>,
7507 envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
7508 _: Arc<Client>,
7509 mut cx: AsyncAppContext,
7510 ) -> Result<proto::ResolveCompletionDocumentationResponse> {
7511 let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
7512
7513 let completion = this
7514 .read_with(&mut cx, |this, _| {
7515 let id = LanguageServerId(envelope.payload.language_server_id as usize);
7516 let Some(server) = this.language_server_for_id(id) else {
7517 return Err(anyhow!("No language server {id}"));
7518 };
7519
7520 Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
7521 })?
7522 .await?;
7523
7524 let mut is_markdown = false;
7525 let text = match completion.documentation {
7526 Some(lsp::Documentation::String(text)) => text,
7527
7528 Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
7529 is_markdown = kind == lsp::MarkupKind::Markdown;
7530 value
7531 }
7532
7533 _ => String::new(),
7534 };
7535
7536 Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
7537 }
7538
7539 async fn handle_apply_code_action(
7540 this: ModelHandle<Self>,
7541 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7542 _: Arc<Client>,
7543 mut cx: AsyncAppContext,
7544 ) -> Result<proto::ApplyCodeActionResponse> {
7545 let sender_id = envelope.original_sender_id()?;
7546 let action = language::proto::deserialize_code_action(
7547 envelope
7548 .payload
7549 .action
7550 .ok_or_else(|| anyhow!("invalid action"))?,
7551 )?;
7552 let apply_code_action = this.update(&mut cx, |this, cx| {
7553 let buffer = this
7554 .opened_buffers
7555 .get(&envelope.payload.buffer_id)
7556 .and_then(|buffer| buffer.upgrade(cx))
7557 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7558 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7559 })?;
7560
7561 let project_transaction = apply_code_action.await?;
7562 let project_transaction = this.update(&mut cx, |this, cx| {
7563 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7564 });
7565 Ok(proto::ApplyCodeActionResponse {
7566 transaction: Some(project_transaction),
7567 })
7568 }
7569
7570 async fn handle_on_type_formatting(
7571 this: ModelHandle<Self>,
7572 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7573 _: Arc<Client>,
7574 mut cx: AsyncAppContext,
7575 ) -> Result<proto::OnTypeFormattingResponse> {
7576 let on_type_formatting = this.update(&mut cx, |this, cx| {
7577 let buffer = this
7578 .opened_buffers
7579 .get(&envelope.payload.buffer_id)
7580 .and_then(|buffer| buffer.upgrade(cx))
7581 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7582 let position = envelope
7583 .payload
7584 .position
7585 .and_then(deserialize_anchor)
7586 .ok_or_else(|| anyhow!("invalid position"))?;
7587 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7588 buffer,
7589 position,
7590 envelope.payload.trigger.clone(),
7591 cx,
7592 ))
7593 })?;
7594
7595 let transaction = on_type_formatting
7596 .await?
7597 .as_ref()
7598 .map(language::proto::serialize_transaction);
7599 Ok(proto::OnTypeFormattingResponse { transaction })
7600 }
7601
7602 async fn handle_inlay_hints(
7603 this: ModelHandle<Self>,
7604 envelope: TypedEnvelope<proto::InlayHints>,
7605 _: Arc<Client>,
7606 mut cx: AsyncAppContext,
7607 ) -> Result<proto::InlayHintsResponse> {
7608 let sender_id = envelope.original_sender_id()?;
7609 let buffer = this.update(&mut cx, |this, cx| {
7610 this.opened_buffers
7611 .get(&envelope.payload.buffer_id)
7612 .and_then(|buffer| buffer.upgrade(cx))
7613 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7614 })?;
7615 let buffer_version = deserialize_version(&envelope.payload.version);
7616
7617 buffer
7618 .update(&mut cx, |buffer, _| {
7619 buffer.wait_for_version(buffer_version.clone())
7620 })
7621 .await
7622 .with_context(|| {
7623 format!(
7624 "waiting for version {:?} for buffer {}",
7625 buffer_version,
7626 buffer.id()
7627 )
7628 })?;
7629
7630 let start = envelope
7631 .payload
7632 .start
7633 .and_then(deserialize_anchor)
7634 .context("missing range start")?;
7635 let end = envelope
7636 .payload
7637 .end
7638 .and_then(deserialize_anchor)
7639 .context("missing range end")?;
7640 let buffer_hints = this
7641 .update(&mut cx, |project, cx| {
7642 project.inlay_hints(buffer, start..end, cx)
7643 })
7644 .await
7645 .context("inlay hints fetch")?;
7646
7647 Ok(this.update(&mut cx, |project, cx| {
7648 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7649 }))
7650 }
7651
7652 async fn handle_resolve_inlay_hint(
7653 this: ModelHandle<Self>,
7654 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7655 _: Arc<Client>,
7656 mut cx: AsyncAppContext,
7657 ) -> Result<proto::ResolveInlayHintResponse> {
        let proto_hint = envelope
            .payload
            .hint
            .context("incorrect protobuf resolve inlay hint message: missing the inlay hint")?;
7662 let hint = InlayHints::proto_to_project_hint(proto_hint)
7663 .context("resolved proto inlay hint conversion")?;
7664 let buffer = this.update(&mut cx, |this, cx| {
7665 this.opened_buffers
7666 .get(&envelope.payload.buffer_id)
7667 .and_then(|buffer| buffer.upgrade(cx))
7668 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7669 })?;
7670 let response_hint = this
7671 .update(&mut cx, |project, cx| {
7672 project.resolve_inlay_hint(
7673 hint,
7674 buffer,
7675 LanguageServerId(envelope.payload.language_server_id as usize),
7676 cx,
7677 )
7678 })
7679 .await
7680 .context("inlay hints fetch")?;
7681 Ok(proto::ResolveInlayHintResponse {
7682 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7683 })
7684 }
7685
7686 async fn handle_refresh_inlay_hints(
7687 this: ModelHandle<Self>,
7688 _: TypedEnvelope<proto::RefreshInlayHints>,
7689 _: Arc<Client>,
7690 mut cx: AsyncAppContext,
7691 ) -> Result<proto::Ack> {
7692 this.update(&mut cx, |_, cx| {
7693 cx.emit(Event::RefreshInlayHints);
7694 });
7695 Ok(proto::Ack {})
7696 }
7697
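    // Generic handler for proto requests that wrap an LSP request: it looks up the
    // target buffer, reconstructs the typed request, forwards it to the buffer's
    // primary language server, and serializes the response for the requesting peer.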
7698 async fn handle_lsp_command<T: LspCommand>(
7699 this: ModelHandle<Self>,
7700 envelope: TypedEnvelope<T::ProtoRequest>,
7701 _: Arc<Client>,
7702 mut cx: AsyncAppContext,
7703 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7704 where
7705 <T::LspRequest as lsp::request::Request>::Result: Send,
7706 {
7707 let sender_id = envelope.original_sender_id()?;
7708 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7709 let buffer_handle = this.read_with(&cx, |this, _| {
7710 this.opened_buffers
7711 .get(&buffer_id)
7712 .and_then(|buffer| buffer.upgrade(&cx))
7713 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7714 })?;
7715 let request = T::from_proto(
7716 envelope.payload,
7717 this.clone(),
7718 buffer_handle.clone(),
7719 cx.clone(),
7720 )
7721 .await?;
7722 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
7723 let response = this
7724 .update(&mut cx, |this, cx| {
7725 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7726 })
7727 .await?;
7728 this.update(&mut cx, |this, cx| {
7729 Ok(T::response_to_proto(
7730 response,
7731 this,
7732 sender_id,
7733 &buffer_version,
7734 cx,
7735 ))
7736 })
7737 }
7738
7739 async fn handle_get_project_symbols(
7740 this: ModelHandle<Self>,
7741 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7742 _: Arc<Client>,
7743 mut cx: AsyncAppContext,
7744 ) -> Result<proto::GetProjectSymbolsResponse> {
7745 let symbols = this
7746 .update(&mut cx, |this, cx| {
7747 this.symbols(&envelope.payload.query, cx)
7748 })
7749 .await?;
7750
7751 Ok(proto::GetProjectSymbolsResponse {
7752 symbols: symbols.iter().map(serialize_symbol).collect(),
7753 })
7754 }
7755
7756 async fn handle_search_project(
7757 this: ModelHandle<Self>,
7758 envelope: TypedEnvelope<proto::SearchProject>,
7759 _: Arc<Client>,
7760 mut cx: AsyncAppContext,
7761 ) -> Result<proto::SearchProjectResponse> {
7762 let peer_id = envelope.original_sender_id()?;
7763 let query = SearchQuery::from_proto(envelope.payload)?;
7764 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx));
7765
7766 cx.spawn(|mut cx| async move {
7767 let mut locations = Vec::new();
7768 while let Some((buffer, ranges)) = result.next().await {
7769 for range in ranges {
7770 let start = serialize_anchor(&range.start);
7771 let end = serialize_anchor(&range.end);
7772 let buffer_id = this.update(&mut cx, |this, cx| {
7773 this.create_buffer_for_peer(&buffer, peer_id, cx)
7774 });
7775 locations.push(proto::Location {
7776 buffer_id,
7777 start: Some(start),
7778 end: Some(end),
7779 });
7780 }
7781 }
7782 Ok(proto::SearchProjectResponse { locations })
7783 })
7784 .await
7785 }
7786
7787 async fn handle_open_buffer_for_symbol(
7788 this: ModelHandle<Self>,
7789 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7790 _: Arc<Client>,
7791 mut cx: AsyncAppContext,
7792 ) -> Result<proto::OpenBufferForSymbolResponse> {
7793 let peer_id = envelope.original_sender_id()?;
7794 let symbol = envelope
7795 .payload
7796 .symbol
7797 .ok_or_else(|| anyhow!("invalid symbol"))?;
7798 let symbol = this
7799 .read_with(&cx, |this, _| this.deserialize_symbol(symbol))
7800 .await?;
7801 let symbol = this.read_with(&cx, |this, _| {
7802 let signature = this.symbol_signature(&symbol.path);
7803 if signature == symbol.signature {
7804 Ok(symbol)
7805 } else {
7806 Err(anyhow!("invalid symbol signature"))
7807 }
7808 })?;
7809 let buffer = this
7810 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
7811 .await?;
7812
7813 Ok(proto::OpenBufferForSymbolResponse {
7814 buffer_id: this.update(&mut cx, |this, cx| {
7815 this.create_buffer_for_peer(&buffer, peer_id, cx)
7816 }),
7817 })
7818 }
7819
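    // Computes a stable signature for a project path by hashing the worktree id, the
    // path, and this project's private nonce. Peers must echo this signature back
    // (see `handle_open_buffer_for_symbol`), so only symbols that this project
    // actually produced can be used to open buffers.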
7820 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
7821 let mut hasher = Sha256::new();
7822 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
7823 hasher.update(project_path.path.to_string_lossy().as_bytes());
7824 hasher.update(self.nonce.to_be_bytes());
7825 hasher.finalize().as_slice().try_into().unwrap()
7826 }
7827
7828 async fn handle_open_buffer_by_id(
7829 this: ModelHandle<Self>,
7830 envelope: TypedEnvelope<proto::OpenBufferById>,
7831 _: Arc<Client>,
7832 mut cx: AsyncAppContext,
7833 ) -> Result<proto::OpenBufferResponse> {
7834 let peer_id = envelope.original_sender_id()?;
7835 let buffer = this
7836 .update(&mut cx, |this, cx| {
7837 this.open_buffer_by_id(envelope.payload.id, cx)
7838 })
7839 .await?;
7840 this.update(&mut cx, |this, cx| {
7841 Ok(proto::OpenBufferResponse {
7842 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7843 })
7844 })
7845 }
7846
7847 async fn handle_open_buffer_by_path(
7848 this: ModelHandle<Self>,
7849 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7850 _: Arc<Client>,
7851 mut cx: AsyncAppContext,
7852 ) -> Result<proto::OpenBufferResponse> {
7853 let peer_id = envelope.original_sender_id()?;
7854 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7855 let open_buffer = this.update(&mut cx, |this, cx| {
7856 this.open_buffer(
7857 ProjectPath {
7858 worktree_id,
7859 path: PathBuf::from(envelope.payload.path).into(),
7860 },
7861 cx,
7862 )
7863 });
7864
7865 let buffer = open_buffer.await?;
7866 this.update(&mut cx, |this, cx| {
7867 Ok(proto::OpenBufferResponse {
7868 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7869 })
7870 })
7871 }
7872
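    // Converts a `ProjectTransaction` into its protobuf form, registering each
    // affected buffer with the peer via `create_buffer_for_peer` so the remote side
    // can associate the serialized transactions with concrete buffer ids.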
7873 fn serialize_project_transaction_for_peer(
7874 &mut self,
7875 project_transaction: ProjectTransaction,
7876 peer_id: proto::PeerId,
7877 cx: &mut AppContext,
7878 ) -> proto::ProjectTransaction {
7879 let mut serialized_transaction = proto::ProjectTransaction {
7880 buffer_ids: Default::default(),
7881 transactions: Default::default(),
7882 };
7883 for (buffer, transaction) in project_transaction.0 {
7884 serialized_transaction
7885 .buffer_ids
7886 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7887 serialized_transaction
7888 .transactions
7889 .push(language::proto::serialize_transaction(&transaction));
7890 }
7891 serialized_transaction
7892 }
7893
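    // Reverses `serialize_project_transaction_for_peer`: waits for every referenced
    // remote buffer to arrive, deserializes each transaction, and waits for the
    // corresponding edits before optionally pushing the transactions onto the
    // buffers' undo histories.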
7894 fn deserialize_project_transaction(
7895 &mut self,
7896 message: proto::ProjectTransaction,
7897 push_to_history: bool,
7898 cx: &mut ModelContext<Self>,
7899 ) -> Task<Result<ProjectTransaction>> {
7900 cx.spawn(|this, mut cx| async move {
7901 let mut project_transaction = ProjectTransaction::default();
7902 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
7903 {
7904 let buffer = this
7905 .update(&mut cx, |this, cx| {
7906 this.wait_for_remote_buffer(buffer_id, cx)
7907 })
7908 .await?;
7909 let transaction = language::proto::deserialize_transaction(transaction)?;
7910 project_transaction.0.insert(buffer, transaction);
7911 }
7912
7913 for (buffer, transaction) in &project_transaction.0 {
7914 buffer
7915 .update(&mut cx, |buffer, _| {
7916 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
7917 })
7918 .await?;
7919
7920 if push_to_history {
7921 buffer.update(&mut cx, |buffer, _| {
7922 buffer.push_transaction(transaction.clone(), Instant::now());
7923 });
7924 }
7925 }
7926
7927 Ok(project_transaction)
7928 })
7929 }
7930
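    // Returns the buffer's remote id and, when this project is shared as the host,
    // enqueues a `CreateBufferForPeer` update so the buffer's state is replicated to
    // the given peer.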
7931 fn create_buffer_for_peer(
7932 &mut self,
7933 buffer: &ModelHandle<Buffer>,
7934 peer_id: proto::PeerId,
7935 cx: &mut AppContext,
7936 ) -> u64 {
7937 let buffer_id = buffer.read(cx).remote_id();
7938 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
7939 updates_tx
7940 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7941 .ok();
7942 }
7943 buffer_id
7944 }
7945
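    // Resolves once the buffer with the given remote id has been received from the
    // host. The id is recorded in `incomplete_remote_buffers` so that a later
    // `synchronize_remote_buffers` call can ask the host to re-send it if necessary.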
7946 fn wait_for_remote_buffer(
7947 &mut self,
7948 id: u64,
7949 cx: &mut ModelContext<Self>,
7950 ) -> Task<Result<ModelHandle<Buffer>>> {
7951 let mut opened_buffer_rx = self.opened_buffer.1.clone();
7952
7953 cx.spawn_weak(|this, mut cx| async move {
7954 let buffer = loop {
7955 let Some(this) = this.upgrade(&cx) else {
7956 return Err(anyhow!("project dropped"));
7957 };
7958
7959 let buffer = this.read_with(&cx, |this, cx| {
7960 this.opened_buffers
7961 .get(&id)
7962 .and_then(|buffer| buffer.upgrade(cx))
7963 });
7964
7965 if let Some(buffer) = buffer {
7966 break buffer;
7967 } else if this.read_with(&cx, |this, _| this.is_read_only()) {
7968 return Err(anyhow!("disconnected before buffer {} could be opened", id));
7969 }
7970
7971 this.update(&mut cx, |this, _| {
7972 this.incomplete_remote_buffers.entry(id).or_default();
7973 });
7974 drop(this);
7975
7976 opened_buffer_rx
7977 .next()
7978 .await
7979 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
7980 };
7981
7982 Ok(buffer)
7983 })
7984 }
7985
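    // After reconnecting to the host, reports the version of every open buffer and
    // replays any operations the host is missing. Buffers that never finished
    // loading are re-requested by id so their pending futures can complete.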
7986 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
7987 let project_id = match self.client_state.as_ref() {
7988 Some(ProjectClientState::Remote {
7989 sharing_has_stopped,
7990 remote_id,
7991 ..
7992 }) => {
7993 if *sharing_has_stopped {
7994 return Task::ready(Err(anyhow!(
7995 "can't synchronize remote buffers on a readonly project"
7996 )));
7997 } else {
7998 *remote_id
7999 }
8000 }
8001 Some(ProjectClientState::Local { .. }) | None => {
8002 return Task::ready(Err(anyhow!(
8003 "can't synchronize remote buffers on a local project"
8004 )))
8005 }
8006 };
8007
8008 let client = self.client.clone();
8009 cx.spawn(|this, cx| async move {
8010 let (buffers, incomplete_buffer_ids) = this.read_with(&cx, |this, cx| {
8011 let buffers = this
8012 .opened_buffers
8013 .iter()
8014 .filter_map(|(id, buffer)| {
8015 let buffer = buffer.upgrade(cx)?;
8016 Some(proto::BufferVersion {
8017 id: *id,
8018 version: language::proto::serialize_version(&buffer.read(cx).version),
8019 })
8020 })
8021 .collect();
8022 let incomplete_buffer_ids = this
8023 .incomplete_remote_buffers
8024 .keys()
8025 .copied()
8026 .collect::<Vec<_>>();
8027
8028 (buffers, incomplete_buffer_ids)
8029 });
8030 let response = client
8031 .request(proto::SynchronizeBuffers {
8032 project_id,
8033 buffers,
8034 })
8035 .await?;
8036
8037 let send_updates_for_buffers = response.buffers.into_iter().map(|buffer| {
8038 let client = client.clone();
8039 let buffer_id = buffer.id;
8040 let remote_version = language::proto::deserialize_version(&buffer.version);
8041 this.read_with(&cx, |this, cx| {
8042 if let Some(buffer) = this.buffer_for_id(buffer_id, cx) {
8043 let operations = buffer.read(cx).serialize_ops(Some(remote_version), cx);
8044 cx.background().spawn(async move {
8045 let operations = operations.await;
8046 for chunk in split_operations(operations) {
8047 client
8048 .request(proto::UpdateBuffer {
8049 project_id,
8050 buffer_id,
8051 operations: chunk,
8052 })
8053 .await?;
8054 }
8055 anyhow::Ok(())
8056 })
8057 } else {
8058 Task::ready(Ok(()))
8059 }
8060 })
8061 });
8062
            // Any incomplete buffers have open requests waiting. Request that the host
            // re-creates these buffers for us to unblock any waiting futures.
8065 for id in incomplete_buffer_ids {
8066 cx.background()
8067 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8068 .detach();
8069 }
8070
8071 futures::future::join_all(send_updates_for_buffers)
8072 .await
8073 .into_iter()
8074 .collect()
8075 })
8076 }
8077
8078 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8079 self.worktrees(cx)
8080 .map(|worktree| {
8081 let worktree = worktree.read(cx);
8082 proto::WorktreeMetadata {
8083 id: worktree.id().to_proto(),
8084 root_name: worktree.root_name().into(),
8085 visible: worktree.is_visible(),
8086 abs_path: worktree.abs_path().to_string_lossy().into(),
8087 }
8088 })
8089 .collect()
8090 }
8091
8092 fn set_worktrees_from_proto(
8093 &mut self,
8094 worktrees: Vec<proto::WorktreeMetadata>,
8095 cx: &mut ModelContext<Project>,
8096 ) -> Result<()> {
8097 let replica_id = self.replica_id();
8098 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8099
8100 let mut old_worktrees_by_id = self
8101 .worktrees
8102 .drain(..)
8103 .filter_map(|worktree| {
8104 let worktree = worktree.upgrade(cx)?;
8105 Some((worktree.read(cx).id(), worktree))
8106 })
8107 .collect::<HashMap<_, _>>();
8108
8109 for worktree in worktrees {
8110 if let Some(old_worktree) =
8111 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8112 {
8113 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8114 } else {
8115 let worktree =
8116 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8117 let _ = self.add_worktree(&worktree, cx);
8118 }
8119 }
8120
8121 self.metadata_changed(cx);
8122 for id in old_worktrees_by_id.keys() {
8123 cx.emit(Event::WorktreeRemoved(*id));
8124 }
8125
8126 Ok(())
8127 }
8128
8129 fn set_collaborators_from_proto(
8130 &mut self,
8131 messages: Vec<proto::Collaborator>,
8132 cx: &mut ModelContext<Self>,
8133 ) -> Result<()> {
8134 let mut collaborators = HashMap::default();
8135 for message in messages {
8136 let collaborator = Collaborator::from_proto(message)?;
8137 collaborators.insert(collaborator.peer_id, collaborator);
8138 }
8139 for old_peer_id in self.collaborators.keys() {
8140 if !collaborators.contains_key(old_peer_id) {
8141 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8142 }
8143 }
8144 self.collaborators = collaborators;
8145 Ok(())
8146 }
8147
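    // Rebuilds a `Symbol` from its protobuf form, resolving the file's language so a
    // syntax-aware label can be produced when possible, and validating that the
    // embedded signature has the expected length.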
8148 fn deserialize_symbol(
8149 &self,
8150 serialized_symbol: proto::Symbol,
8151 ) -> impl Future<Output = Result<Symbol>> {
8152 let languages = self.languages.clone();
8153 async move {
8154 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8155 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8156 let start = serialized_symbol
8157 .start
8158 .ok_or_else(|| anyhow!("invalid start"))?;
8159 let end = serialized_symbol
8160 .end
8161 .ok_or_else(|| anyhow!("invalid end"))?;
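            // This assumes `proto::Symbol::kind` was produced by the matching
            // transmute in `serialize_symbol`, so the raw value corresponds to a
            // valid `lsp::SymbolKind`.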
8162 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8163 let path = ProjectPath {
8164 worktree_id,
8165 path: PathBuf::from(serialized_symbol.path).into(),
8166 };
8167 let language = languages
8168 .language_for_file(&path.path, None)
8169 .await
8170 .log_err();
8171 Ok(Symbol {
8172 language_server_name: LanguageServerName(
8173 serialized_symbol.language_server_name.into(),
8174 ),
8175 source_worktree_id,
8176 path,
8177 label: {
8178 match language {
8179 Some(language) => {
8180 language
8181 .label_for_symbol(&serialized_symbol.name, kind)
8182 .await
8183 }
8184 None => None,
8185 }
8186 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8187 },
8188
8189 name: serialized_symbol.name,
8190 range: Unclipped(PointUtf16::new(start.row, start.column))
8191 ..Unclipped(PointUtf16::new(end.row, end.column)),
8192 kind,
8193 signature: serialized_symbol
8194 .signature
8195 .try_into()
8196 .map_err(|_| anyhow!("invalid signature"))?,
8197 })
8198 }
8199 }
8200
8201 async fn handle_buffer_saved(
8202 this: ModelHandle<Self>,
8203 envelope: TypedEnvelope<proto::BufferSaved>,
8204 _: Arc<Client>,
8205 mut cx: AsyncAppContext,
8206 ) -> Result<()> {
8207 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8208 let version = deserialize_version(&envelope.payload.version);
8209 let mtime = envelope
8210 .payload
8211 .mtime
8212 .ok_or_else(|| anyhow!("missing mtime"))?
8213 .into();
8214
8215 this.update(&mut cx, |this, cx| {
8216 let buffer = this
8217 .opened_buffers
8218 .get(&envelope.payload.buffer_id)
8219 .and_then(|buffer| buffer.upgrade(cx))
8220 .or_else(|| {
8221 this.incomplete_remote_buffers
8222 .get(&envelope.payload.buffer_id)
8223 .and_then(|b| b.clone())
8224 });
8225 if let Some(buffer) = buffer {
8226 buffer.update(cx, |buffer, cx| {
8227 buffer.did_save(version, fingerprint, mtime, cx);
8228 });
8229 }
8230 Ok(())
8231 })
8232 }
8233
8234 async fn handle_buffer_reloaded(
8235 this: ModelHandle<Self>,
8236 envelope: TypedEnvelope<proto::BufferReloaded>,
8237 _: Arc<Client>,
8238 mut cx: AsyncAppContext,
8239 ) -> Result<()> {
8240 let payload = envelope.payload;
8241 let version = deserialize_version(&payload.version);
8242 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8243 let line_ending = deserialize_line_ending(
8244 proto::LineEnding::from_i32(payload.line_ending)
8245 .ok_or_else(|| anyhow!("missing line ending"))?,
8246 );
8247 let mtime = payload
8248 .mtime
8249 .ok_or_else(|| anyhow!("missing mtime"))?
8250 .into();
8251 this.update(&mut cx, |this, cx| {
8252 let buffer = this
8253 .opened_buffers
8254 .get(&payload.buffer_id)
8255 .and_then(|buffer| buffer.upgrade(cx))
8256 .or_else(|| {
8257 this.incomplete_remote_buffers
8258 .get(&payload.buffer_id)
8259 .cloned()
8260 .flatten()
8261 });
8262 if let Some(buffer) = buffer {
8263 buffer.update(cx, |buffer, cx| {
8264 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8265 });
8266 }
8267 Ok(())
8268 })
8269 }
8270
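    // Translates a set of LSP `TextEdit`s into anchored buffer edits. Ranges are
    // clipped to the buffer, adjacent or newline-separated edits are merged, and
    // multi-line replacements are diffed against the existing text so anchors in
    // unchanged regions keep their positions.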
8271 #[allow(clippy::type_complexity)]
8272 fn edits_from_lsp(
8273 &mut self,
8274 buffer: &ModelHandle<Buffer>,
8275 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8276 server_id: LanguageServerId,
8277 version: Option<i32>,
8278 cx: &mut ModelContext<Self>,
8279 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8280 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8281 cx.background().spawn(async move {
8282 let snapshot = snapshot?;
8283 let mut lsp_edits = lsp_edits
8284 .into_iter()
8285 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8286 .collect::<Vec<_>>();
8287 lsp_edits.sort_by_key(|(range, _)| range.start);
8288
8289 let mut lsp_edits = lsp_edits.into_iter().peekable();
8290 let mut edits = Vec::new();
8291 while let Some((range, mut new_text)) = lsp_edits.next() {
8292 // Clip invalid ranges provided by the language server.
8293 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8294 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8295
8296 // Combine any LSP edits that are adjacent.
8297 //
8298 // Also, combine LSP edits that are separated from each other by only
8299 // a newline. This is important because for some code actions,
8300 // Rust-analyzer rewrites the entire buffer via a series of edits that
8301 // are separated by unchanged newline characters.
8302 //
8303 // In order for the diffing logic below to work properly, any edits that
8304 // cancel each other out must be combined into one.
8305 while let Some((next_range, next_text)) = lsp_edits.peek() {
8306 if next_range.start.0 > range.end {
8307 if next_range.start.0.row > range.end.row + 1
8308 || next_range.start.0.column > 0
8309 || snapshot.clip_point_utf16(
8310 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8311 Bias::Left,
8312 ) > range.end
8313 {
8314 break;
8315 }
8316 new_text.push('\n');
8317 }
8318 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8319 new_text.push_str(next_text);
8320 lsp_edits.next();
8321 }
8322
8323 // For multiline edits, perform a diff of the old and new text so that
8324 // we can identify the changes more precisely, preserving the locations
8325 // of any anchors positioned in the unchanged regions.
8326 if range.end.row > range.start.row {
8327 let mut offset = range.start.to_offset(&snapshot);
8328 let old_text = snapshot.text_for_range(range).collect::<String>();
8329
8330 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8331 let mut moved_since_edit = true;
8332 for change in diff.iter_all_changes() {
8333 let tag = change.tag();
8334 let value = change.value();
8335 match tag {
8336 ChangeTag::Equal => {
8337 offset += value.len();
8338 moved_since_edit = true;
8339 }
8340 ChangeTag::Delete => {
8341 let start = snapshot.anchor_after(offset);
8342 let end = snapshot.anchor_before(offset + value.len());
8343 if moved_since_edit {
8344 edits.push((start..end, String::new()));
8345 } else {
8346 edits.last_mut().unwrap().0.end = end;
8347 }
8348 offset += value.len();
8349 moved_since_edit = false;
8350 }
8351 ChangeTag::Insert => {
8352 if moved_since_edit {
8353 let anchor = snapshot.anchor_after(offset);
8354 edits.push((anchor..anchor, value.to_string()));
8355 } else {
8356 edits.last_mut().unwrap().1.push_str(value);
8357 }
8358 moved_since_edit = false;
8359 }
8360 }
8361 }
8362 } else if range.end == range.start {
8363 let anchor = snapshot.anchor_after(range.start);
8364 edits.push((anchor..anchor, new_text));
8365 } else {
8366 let edit_start = snapshot.anchor_after(range.start);
8367 let edit_end = snapshot.anchor_before(range.end);
8368 edits.push((edit_start..edit_end, new_text));
8369 }
8370 }
8371
8372 Ok(edits)
8373 })
8374 }
8375
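    // Returns the buffer snapshot that was recorded for the given document version
    // and language server, falling back to the current text when no version is
    // provided. Snapshots more than `OLD_VERSIONS_TO_RETAIN` versions behind the
    // requested one are pruned.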
8376 fn buffer_snapshot_for_lsp_version(
8377 &mut self,
8378 buffer: &ModelHandle<Buffer>,
8379 server_id: LanguageServerId,
8380 version: Option<i32>,
8381 cx: &AppContext,
8382 ) -> Result<TextBufferSnapshot> {
8383 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8384
8385 if let Some(version) = version {
8386 let buffer_id = buffer.read(cx).remote_id();
8387 let snapshots = self
8388 .buffer_snapshots
8389 .get_mut(&buffer_id)
8390 .and_then(|m| m.get_mut(&server_id))
8391 .ok_or_else(|| {
8392 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8393 })?;
8394
8395 let found_snapshot = snapshots
8396 .binary_search_by_key(&version, |e| e.version)
8397 .map(|ix| snapshots[ix].snapshot.clone())
8398 .map_err(|_| {
8399 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8400 })?;
8401
8402 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8403 Ok(found_snapshot)
8404 } else {
8405 Ok((buffer.read(cx)).text_snapshot())
8406 }
8407 }
8408
8409 pub fn language_servers(
8410 &self,
8411 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8412 self.language_server_ids
8413 .iter()
8414 .map(|((worktree_id, server_name), server_id)| {
8415 (*server_id, server_name.clone(), *worktree_id)
8416 })
8417 }
8418
8419 pub fn supplementary_language_servers(
8420 &self,
8421 ) -> impl '_
8422 + Iterator<
8423 Item = (
8424 &LanguageServerId,
8425 &(LanguageServerName, Arc<LanguageServer>),
8426 ),
8427 > {
8428 self.supplementary_language_servers.iter()
8429 }
8430
8431 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8432 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8433 Some(server.clone())
8434 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8435 Some(Arc::clone(server))
8436 } else {
8437 None
8438 }
8439 }
8440
8441 pub fn language_servers_for_buffer(
8442 &self,
8443 buffer: &Buffer,
8444 cx: &AppContext,
8445 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8446 self.language_server_ids_for_buffer(buffer, cx)
8447 .into_iter()
8448 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8449 LanguageServerState::Running {
8450 adapter, server, ..
8451 } => Some((adapter, server)),
8452 _ => None,
8453 })
8454 }
8455
8456 fn primary_language_server_for_buffer(
8457 &self,
8458 buffer: &Buffer,
8459 cx: &AppContext,
8460 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8461 self.language_servers_for_buffer(buffer, cx).next()
8462 }
8463
8464 pub fn language_server_for_buffer(
8465 &self,
8466 buffer: &Buffer,
8467 server_id: LanguageServerId,
8468 cx: &AppContext,
8469 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8470 self.language_servers_for_buffer(buffer, cx)
8471 .find(|(_, s)| s.server_id() == server_id)
8472 }
8473
8474 fn language_server_ids_for_buffer(
8475 &self,
8476 buffer: &Buffer,
8477 cx: &AppContext,
8478 ) -> Vec<LanguageServerId> {
8479 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8480 let worktree_id = file.worktree_id(cx);
8481 language
8482 .lsp_adapters()
8483 .iter()
8484 .flat_map(|adapter| {
8485 let key = (worktree_id, adapter.name.clone());
8486 self.language_server_ids.get(&key).copied()
8487 })
8488 .collect()
8489 } else {
8490 Vec::new()
8491 }
8492 }
8493
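    // Locates (or lazily starts) the prettier instance responsible for the given
    // buffer: a project-local installation discovered near the buffer's path when one
    // exists, otherwise the shared default prettier.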
8494 fn prettier_instance_for_buffer(
8495 &mut self,
8496 buffer: &ModelHandle<Buffer>,
8497 cx: &mut ModelContext<Self>,
8498 ) -> Task<Option<(Option<PathBuf>, PrettierInstance)>> {
8499 let buffer = buffer.read(cx);
8500 let buffer_file = buffer.file();
8501 let Some(buffer_language) = buffer.language() else {
8502 return Task::ready(None);
8503 };
8504 if buffer_language.prettier_parser_name().is_none() {
8505 return Task::ready(None);
8506 }
8507
8508 if self.is_local() {
8509 let Some(node) = self.node.as_ref().map(Arc::clone) else {
8510 return Task::ready(None);
8511 };
8512 match File::from_dyn(buffer_file).map(|file| (file.worktree_id(cx), file.abs_path(cx)))
8513 {
8514 Some((worktree_id, buffer_path)) => {
8515 let fs = Arc::clone(&self.fs);
8516 let installed_prettiers = self.prettier_instances.keys().cloned().collect();
8517 return cx.spawn(|project, mut cx| async move {
8518 match cx
8519 .background()
8520 .spawn(async move {
8521 Prettier::locate_prettier_installation(
8522 fs.as_ref(),
8523 &installed_prettiers,
8524 &buffer_path,
8525 )
8526 .await
8527 })
8528 .await
8529 {
8530 Ok(ControlFlow::Break(())) => {
8531 return None;
8532 }
8533 Ok(ControlFlow::Continue(None)) => {
8534 let started_default_prettier =
8535 project.update(&mut cx, |project, _| {
8536 project
8537 .prettiers_per_worktree
8538 .entry(worktree_id)
8539 .or_default()
8540 .insert(None);
8541 project.default_prettier.instance().cloned()
8542 });
8543 match started_default_prettier {
8544 Some(old_task) => {
                                        log::debug!("Found an already started default prettier instance");
8546 return Some((None, old_task));
8547 }
8548 None => {
                                        log::debug!("Starting a new default prettier instance");
8550 let new_default_prettier = project
8551 .update(&mut cx, |_, cx| {
8552 start_default_prettier(node, Some(worktree_id), cx)
8553 })
8554 .log_err()
8555 .await;
                                        log::debug!("Started a default prettier instance");
8557 return Some((None, new_default_prettier?));
8558 }
8559 }
8560 }
8561 Ok(ControlFlow::Continue(Some(prettier_dir))) => {
8562 project.update(&mut cx, |project, _| {
8563 project
8564 .prettiers_per_worktree
8565 .entry(worktree_id)
8566 .or_default()
8567 .insert(Some(prettier_dir.clone()))
8568 });
8569 if let Some(existing_prettier) =
8570 project.update(&mut cx, |project, _| {
8571 project.prettier_instances.get(&prettier_dir).cloned()
8572 })
8573 {
8574 log::debug!(
8575 "Found already started prettier in {prettier_dir:?}"
8576 );
8577 return Some((Some(prettier_dir), existing_prettier));
8578 }
8579
8580 log::info!("Found prettier in {prettier_dir:?}, starting.");
8581 let new_prettier_task = project.update(&mut cx, |project, cx| {
8582 let new_prettier_task = start_prettier(
8583 node,
8584 prettier_dir.clone(),
8585 Some(worktree_id),
8586 cx,
8587 );
8588 project
8589 .prettier_instances
8590 .insert(prettier_dir.clone(), new_prettier_task.clone());
8591 new_prettier_task
8592 });
8593 Some((Some(prettier_dir), new_prettier_task))
8594 }
8595 Err(e) => {
8596 log::error!("Failed to determine prettier path for buffer: {e:#}");
8597 return None;
8598 }
8599 }
8600 });
8601 }
8602 None => match self.default_prettier.instance().cloned() {
8603 Some(old_task) => return Task::ready(Some((None, old_task))),
8604 None => {
8605 let new_task = start_default_prettier(node, None, cx).log_err();
8606 return cx.spawn(|_, _| async move { Some((None, new_task.await?)) });
8607 }
8608 },
8609 }
8610 } else {
8611 return Task::ready(None);
8612 }
8613 }
8614
8615 // TODO kb uncomment
8616 // #[cfg(any(test, feature = "test-support"))]
8617 // fn install_default_formatters(
8618 // &mut self,
8619 // _worktree: Option<WorktreeId>,
8620 // _new_language: &Language,
8621 // _language_settings: &LanguageSettings,
8622 // _cx: &mut ModelContext<Self>,
8623 // ) {
8624 // }
8625
8626 // #[cfg(not(any(test, feature = "test-support")))]
8627 fn install_default_formatters(
8628 &mut self,
8629 worktree: Option<WorktreeId>,
8630 new_language: &Language,
8631 language_settings: &LanguageSettings,
8632 cx: &mut ModelContext<Self>,
8633 ) {
8634 match &language_settings.formatter {
8635 Formatter::Prettier { .. } | Formatter::Auto => {}
8636 Formatter::LanguageServer | Formatter::External { .. } => return,
8637 };
8638 let Some(node) = self.node.as_ref().cloned() else {
8639 return;
8640 };
8641
8642 let mut prettier_plugins = None;
8643 if new_language.prettier_parser_name().is_some() {
8644 prettier_plugins
8645 .get_or_insert_with(|| HashSet::<&'static str>::default())
8646 .extend(
8647 new_language
8648 .lsp_adapters()
8649 .iter()
8650 .flat_map(|adapter| adapter.prettier_plugins()),
8651 )
8652 }
8653 let Some(prettier_plugins) = prettier_plugins else {
8654 return;
8655 };
8656
8657 let fs = Arc::clone(&self.fs);
8658 let locate_prettier_installation = match worktree.and_then(|worktree_id| {
8659 self.worktree_for_id(worktree_id, cx)
8660 .map(|worktree| worktree.read(cx).abs_path())
8661 }) {
8662 Some(locate_from) => {
8663 let installed_prettiers = self.prettier_instances.keys().cloned().collect();
8664 cx.background().spawn(async move {
8665 Prettier::locate_prettier_installation(
8666 fs.as_ref(),
8667 &installed_prettiers,
8668 locate_from.as_ref(),
8669 )
8670 .await
8671 })
8672 }
8673 None => Task::ready(Ok(ControlFlow::Break(()))),
8674 };
8675 let mut plugins_to_install = prettier_plugins;
8676 plugins_to_install
8677 .retain(|plugin| !self.default_prettier.installed_plugins.contains(plugin));
8678 let mut installation_attempts = 0;
8679 let previous_installation_process = match &self.default_prettier.prettier {
8680 PrettierInstallation::NotInstalled {
8681 installation_process,
8682 attempts,
8683 } => {
8684 installation_attempts = *attempts;
8685 installation_process.clone()
8686 }
8687 PrettierInstallation::Installed { .. } => {
8688 if plugins_to_install.is_empty() {
8689 return;
8690 }
8691 None
8692 }
8693 };
8694
8695 if installation_attempts > prettier::LAUNCH_THRESHOLD {
8696 log::warn!(
8697 "Default prettier installation has failed {installation_attempts} times, not attempting again",
8698 );
8699 return;
8700 }
8701
8702 let fs = Arc::clone(&self.fs);
8703 self.default_prettier.prettier = PrettierInstallation::NotInstalled {
8704 attempts: installation_attempts + 1,
8705 installation_process: Some(
8706 cx.spawn(|this, mut cx| async move {
8707 match locate_prettier_installation
8708 .await
8709 .context("locate prettier installation")
8710 .map_err(Arc::new)?
8711 {
8712 ControlFlow::Break(()) => return Ok(()),
8713 ControlFlow::Continue(Some(_non_default_prettier)) => return Ok(()),
8714 ControlFlow::Continue(None) => {
8715 let mut needs_install = match previous_installation_process {
8716 Some(previous_installation_process) => {
8717 previous_installation_process.await.is_err()
8718 }
8719 None => true,
8720 };
8721 this.update(&mut cx, |this, _| {
8722 plugins_to_install.retain(|plugin| {
8723 !this.default_prettier.installed_plugins.contains(plugin)
8724 });
8725 needs_install |= !plugins_to_install.is_empty();
8726 });
8727 if needs_install {
8728 let installed_plugins = plugins_to_install.clone();
8729 cx.background()
8730 .spawn(async move {
8731 install_default_prettier(plugins_to_install, node, fs).await
8732 })
8733 .await
8734 .context("prettier & plugins install")
8735 .map_err(Arc::new)?;
8736 this.update(&mut cx, |this, cx| {
8737 this.default_prettier.prettier =
8738 PrettierInstallation::Installed(
8739 cx.spawn(|_, _| async move {
8740 PrettierProcess::Stopped { start_attempts: 0 }
8741 })
8742 .shared(),
8743 );
8744 this.default_prettier
8745 .installed_plugins
8746 .extend(installed_plugins);
8747 });
8748 }
8749 }
8750 }
8751 Ok(())
8752 })
8753 .shared(),
8754 ),
8755 };
8756 }
8757}
8758
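// Resolves the shared default prettier instance, waiting for any in-flight
// installation to finish before starting a prettier server from
// `DEFAULT_PRETTIER_DIR`. Returns the existing instance if one is already running.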
8759fn start_default_prettier(
8760 node: Arc<dyn NodeRuntime>,
8761 worktree_id: Option<WorktreeId>,
8762 cx: &mut ModelContext<'_, Project>,
8763) -> Task<anyhow::Result<PrettierInstance>> {
8764 cx.spawn(|project, mut cx| async move {
8765 loop {
8766 let installation_process = project.update(&mut cx, |project, _| {
8767 match &project.default_prettier.prettier {
8768 PrettierInstallation::NotInstalled {
8769 installation_process,
8770 ..
8771 } => ControlFlow::Continue(installation_process.clone()),
8772 PrettierInstallation::Installed(default_prettier) => {
8773 ControlFlow::Break(default_prettier.clone())
8774 }
8775 }
8776 });
8777
8778 match installation_process {
8779 ControlFlow::Continue(installation_process) => {
8780 if let Some(installation_process) = installation_process.clone() {
8781 if let Err(e) = installation_process.await {
8782 anyhow::bail!("Cannot start default prettier due to its installation failure: {e:#}");
8783 }
8784 }
8785 let new_default_prettier = project.update(&mut cx, |project, cx| {
8786 let new_default_prettier =
8787 start_prettier(node, DEFAULT_PRETTIER_DIR.clone(), worktree_id, cx);
8788 project.default_prettier.prettier =
8789 PrettierInstallation::Installed(new_default_prettier.clone());
8790 new_default_prettier
8791 });
8792 return Ok(new_default_prettier);
8793 }
8794 ControlFlow::Break(prettier) => return Ok(prettier),
8795 }
8796 }
8797 })
8798}
8799
8800fn start_prettier(
8801 node: Arc<dyn NodeRuntime>,
8802 prettier_dir: PathBuf,
8803 worktree_id: Option<WorktreeId>,
8804 cx: &mut ModelContext<'_, Project>,
8805) -> PrettierInstance {
8806 cx.spawn(|project, mut cx| async move {
8807 let new_server_id = project.update(&mut cx, |project, _| {
8808 project.languages.next_language_server_id()
8809 });
8810
8811 match Prettier::start(new_server_id, prettier_dir.clone(), node, cx.clone()).await {
8812 Ok(new_prettier) => {
8813 register_new_prettier(&project, &new_prettier, worktree_id, new_server_id, &mut cx);
8814 PrettierProcess::Running(Arc::new(new_prettier))
8815 }
8816 Err(e) => {
8817 log::error!("Failed to start prettier in dir {prettier_dir:?}: {e:#}");
8818 // TODO kb increment
8819 PrettierProcess::Stopped { start_attempts: 1 }
8820 }
8821 }
8822 })
8823 .shared()
8824}
8825
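// Registers a freshly started prettier as a supplementary language server so it can
// be surfaced alongside regular language servers, labelling it with the directory it
// was found in (or "prettier (default)" for the bundled instance).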
8826fn register_new_prettier(
8827 project: &ModelHandle<Project>,
8828 prettier: &Prettier,
8829 worktree_id: Option<WorktreeId>,
8830 new_server_id: LanguageServerId,
8831 cx: &mut AsyncAppContext,
8832) {
8833 let prettier_dir = prettier.prettier_dir();
8834 let is_default = prettier.is_default();
8835 if is_default {
8836 log::info!("Started default prettier in {prettier_dir:?}");
8837 } else {
8838 log::info!("Started prettier in {prettier_dir:?}");
8839 }
8840 if let Some(prettier_server) = prettier.server() {
8841 project.update(cx, |project, cx| {
8842 let name = if is_default {
8843 LanguageServerName(Arc::from("prettier (default)"))
8844 } else {
8845 let worktree_path = worktree_id
8846 .and_then(|id| project.worktree_for_id(id, cx))
8847 .map(|worktree| worktree.update(cx, |worktree, _| worktree.abs_path()));
8848 let name = match worktree_path {
8849 Some(worktree_path) => {
8850 if prettier_dir == worktree_path.as_ref() {
8851 let name = prettier_dir
8852 .file_name()
8853 .and_then(|name| name.to_str())
8854 .unwrap_or_default();
8855 format!("prettier ({name})")
8856 } else {
8857 let dir_to_display = prettier_dir
8858 .strip_prefix(worktree_path.as_ref())
8859 .ok()
8860 .unwrap_or(prettier_dir);
8861 format!("prettier ({})", dir_to_display.display())
8862 }
8863 }
8864 None => format!("prettier ({})", prettier_dir.display()),
8865 };
8866 LanguageServerName(Arc::from(name))
8867 };
8868 project
8869 .supplementary_language_servers
8870 .insert(new_server_id, (name, Arc::clone(prettier_server)));
8871 cx.emit(Event::LanguageServerAdded(new_server_id));
8872 });
8873 }
8874}
8875
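// Writes the bundled prettier server wrapper into `DEFAULT_PRETTIER_DIR`, resolves
// the latest npm versions of prettier and the requested plugins, and installs them
// into that directory.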
8876async fn install_default_prettier(
8877 plugins_to_install: HashSet<&'static str>,
8878 node: Arc<dyn NodeRuntime>,
8879 fs: Arc<dyn Fs>,
8880) -> anyhow::Result<()> {
8881 let prettier_wrapper_path = DEFAULT_PRETTIER_DIR.join(prettier::PRETTIER_SERVER_FILE);
    // `Fs::save` creates the parent directory if it doesn't already exist.
8883 fs.save(
8884 &prettier_wrapper_path,
8885 &text::Rope::from(prettier::PRETTIER_SERVER_JS),
8886 text::LineEnding::Unix,
8887 )
8888 .await
8889 .with_context(|| {
8890 format!(
8891 "writing {} file at {prettier_wrapper_path:?}",
8892 prettier::PRETTIER_SERVER_FILE
8893 )
8894 })?;
8895
8896 let packages_to_versions =
8897 future::try_join_all(plugins_to_install.iter().chain(Some(&"prettier")).map(
8898 |package_name| async {
8899 let returned_package_name = package_name.to_string();
8900 let latest_version = node
8901 .npm_package_latest_version(package_name)
8902 .await
8903 .with_context(|| {
8904 format!("fetching latest npm version for package {returned_package_name}")
8905 })?;
8906 anyhow::Ok((returned_package_name, latest_version))
8907 },
8908 ))
8909 .await
8910 .context("fetching latest npm versions")?;
8911
8912 log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
8913 let borrowed_packages = packages_to_versions
8914 .iter()
8915 .map(|(package, version)| (package.as_str(), version.as_str()))
8916 .collect::<Vec<_>>();
8917 node.npm_install_packages(DEFAULT_PRETTIER_DIR.as_path(), &borrowed_packages)
8918 .await
8919 .context("fetching formatter packages")?;
8920 anyhow::Ok(())
8921}
8922
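// Mirrors Copilot's language server into this project's supplementary servers and
// re-emits its log messages as `LanguageServerLog` events.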
8923fn subscribe_for_copilot_events(
8924 copilot: &ModelHandle<Copilot>,
8925 cx: &mut ModelContext<'_, Project>,
8926) -> gpui::Subscription {
8927 cx.subscribe(
8928 copilot,
8929 |project, copilot, copilot_event, cx| match copilot_event {
8930 copilot::Event::CopilotLanguageServerStarted => {
8931 match copilot.read(cx).language_server() {
8932 Some((name, copilot_server)) => {
                        // Another event may attempt to re-add a server that was already added and subscribed to; avoid doing that again.
8934 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8935 let new_server_id = copilot_server.server_id();
8936 let weak_project = cx.weak_handle();
8937 let copilot_log_subscription = copilot_server
8938 .on_notification::<copilot::request::LogMessage, _>(
8939 move |params, mut cx| {
8940 if let Some(project) = weak_project.upgrade(&mut cx) {
8941 project.update(&mut cx, |_, cx| {
8942 cx.emit(Event::LanguageServerLog(
8943 new_server_id,
8944 params.message,
8945 ));
8946 })
8947 }
8948 },
8949 );
8950 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8951 project.copilot_log_subscription = Some(copilot_log_subscription);
8952 cx.emit(Event::LanguageServerAdded(new_server_id));
8953 }
8954 }
8955 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8956 }
8957 }
8958 },
8959 )
8960}
8961
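// Returns the longest leading run of path components that contain no glob
// metacharacters, e.g. with `/` as the separator, "node_modules/**/*.js" yields
// "node_modules".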
8962fn glob_literal_prefix<'a>(glob: &'a str) -> &'a str {
8963 let mut literal_end = 0;
8964 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8965 if part.contains(&['*', '?', '{', '}']) {
8966 break;
8967 } else {
8968 if i > 0 {
                // Account for the separator preceding this part.
8970 literal_end += path::MAIN_SEPARATOR.len_utf8();
8971 }
8972 literal_end += part.len();
8973 }
8974 }
8975 &glob[..literal_end]
8976}
8977
8978impl WorktreeHandle {
8979 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
8980 match self {
8981 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8982 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
8983 }
8984 }
8985
8986 pub fn handle_id(&self) -> usize {
8987 match self {
8988 WorktreeHandle::Strong(handle) => handle.id(),
8989 WorktreeHandle::Weak(handle) => handle.id(),
8990 }
8991 }
8992}
8993
8994impl OpenBuffer {
8995 pub fn upgrade(&self, cx: &impl BorrowAppContext) -> Option<ModelHandle<Buffer>> {
8996 match self {
8997 OpenBuffer::Strong(handle) => Some(handle.clone()),
8998 OpenBuffer::Weak(handle) => handle.upgrade(cx),
8999 OpenBuffer::Operations(_) => None,
9000 }
9001 }
9002}
9003
9004pub struct PathMatchCandidateSet {
9005 pub snapshot: Snapshot,
9006 pub include_ignored: bool,
9007 pub include_root_name: bool,
9008}
9009
9010impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
9011 type Candidates = PathMatchCandidateSetIter<'a>;
9012
9013 fn id(&self) -> usize {
9014 self.snapshot.id().to_usize()
9015 }
9016
9017 fn len(&self) -> usize {
9018 if self.include_ignored {
9019 self.snapshot.file_count()
9020 } else {
9021 self.snapshot.visible_file_count()
9022 }
9023 }
9024
9025 fn prefix(&self) -> Arc<str> {
9026 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
9027 self.snapshot.root_name().into()
9028 } else if self.include_root_name {
9029 format!("{}/", self.snapshot.root_name()).into()
9030 } else {
9031 "".into()
9032 }
9033 }
9034
9035 fn candidates(&'a self, start: usize) -> Self::Candidates {
9036 PathMatchCandidateSetIter {
9037 traversal: self.snapshot.files(self.include_ignored, start),
9038 }
9039 }
9040}
9041
9042pub struct PathMatchCandidateSetIter<'a> {
9043 traversal: Traversal<'a>,
9044}
9045
9046impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
9047 type Item = fuzzy::PathMatchCandidate<'a>;
9048
9049 fn next(&mut self) -> Option<Self::Item> {
9050 self.traversal.next().map(|entry| {
9051 if let EntryKind::File(char_bag) = entry.kind {
9052 fuzzy::PathMatchCandidate {
9053 path: &entry.path,
9054 char_bag,
9055 }
9056 } else {
9057 unreachable!()
9058 }
9059 })
9060 }
9061}
9062
9063impl Entity for Project {
9064 type Event = Event;
9065
9066 fn release(&mut self, cx: &mut gpui::AppContext) {
9067 match &self.client_state {
9068 Some(ProjectClientState::Local { .. }) => {
9069 let _ = self.unshare_internal(cx);
9070 }
9071 Some(ProjectClientState::Remote { remote_id, .. }) => {
9072 let _ = self.client.send(proto::LeaveProject {
9073 project_id: *remote_id,
9074 });
9075 self.disconnected_from_host_internal(cx);
9076 }
9077 _ => {}
9078 }
9079 }
9080
9081 fn app_will_quit(
9082 &mut self,
9083 _: &mut AppContext,
9084 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
9085 let shutdown_futures = self
9086 .language_servers
9087 .drain()
9088 .map(|(_, server_state)| async {
9089 use LanguageServerState::*;
9090 match server_state {
9091 Running { server, .. } => server.shutdown()?.await,
9092 Starting(task) => task.await?.shutdown()?.await,
9093 }
9094 })
9095 .collect::<Vec<_>>();
9096
9097 Some(
9098 async move {
9099 futures::future::join_all(shutdown_futures).await;
9100 }
9101 .boxed(),
9102 )
9103 }
9104}
9105
9106impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
9107 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
9108 Self {
9109 worktree_id,
9110 path: path.as_ref().into(),
9111 }
9112 }
9113}
9114
9115impl ProjectLspAdapterDelegate {
9116 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
9117 Arc::new(Self {
9118 project: cx.handle(),
9119 http_client: project.client.http_client(),
9120 })
9121 }
9122}
9123
9124impl LspAdapterDelegate for ProjectLspAdapterDelegate {
9125 fn show_notification(&self, message: &str, cx: &mut AppContext) {
9126 self.project
9127 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
9128 }
9129
9130 fn http_client(&self) -> Arc<dyn HttpClient> {
9131 self.http_client.clone()
9132 }
9133}
9134
9135fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
9136 proto::Symbol {
9137 language_server_name: symbol.language_server_name.0.to_string(),
9138 source_worktree_id: symbol.source_worktree_id.to_proto(),
9139 worktree_id: symbol.path.worktree_id.to_proto(),
9140 path: symbol.path.path.to_string_lossy().to_string(),
9141 name: symbol.name.clone(),
9142 kind: unsafe { mem::transmute(symbol.kind) },
9143 start: Some(proto::PointUtf16 {
9144 row: symbol.range.start.0.row,
9145 column: symbol.range.start.0.column,
9146 }),
9147 end: Some(proto::PointUtf16 {
9148 row: symbol.range.end.0.row,
9149 column: symbol.range.end.0.column,
9150 }),
9151 signature: symbol.signature.to_vec(),
9152 }
9153}
9154
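// Produces `path` expressed relative to `base`, inserting `..` components where the
// two diverge, e.g. relativize_path("/a/b", "/a/c/d") yields "../c/d".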
9155fn relativize_path(base: &Path, path: &Path) -> PathBuf {
9156 let mut path_components = path.components();
9157 let mut base_components = base.components();
9158 let mut components: Vec<Component> = Vec::new();
9159 loop {
9160 match (path_components.next(), base_components.next()) {
9161 (None, None) => break,
9162 (Some(a), None) => {
9163 components.push(a);
9164 components.extend(path_components.by_ref());
9165 break;
9166 }
9167 (None, _) => components.push(Component::ParentDir),
9168 (Some(a), Some(b)) if components.is_empty() && a == b => (),
9169 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
9170 (Some(a), Some(_)) => {
9171 components.push(Component::ParentDir);
9172 for _ in base_components {
9173 components.push(Component::ParentDir);
9174 }
9175 components.push(a);
9176 components.extend(path_components.by_ref());
9177 break;
9178 }
9179 }
9180 }
9181 components.iter().map(|c| c.as_os_str()).collect()
9182}
9183
9184impl Item for Buffer {
9185 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
9186 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
9187 }
9188
9189 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
9190 File::from_dyn(self.file()).map(|file| ProjectPath {
9191 worktree_id: file.worktree_id(cx),
9192 path: file.path().clone(),
9193 })
9194 }
9195}
9196
9197async fn wait_for_loading_buffer(
9198 mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
9199) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
9200 loop {
9201 if let Some(result) = receiver.borrow().as_ref() {
9202 match result {
9203 Ok(buffer) => return Ok(buffer.to_owned()),
9204 Err(e) => return Err(e.to_owned()),
9205 }
9206 }
9207 receiver.next().await;
9208 }
9209}
9210
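// Inspects the server's `textDocument/didSave` capabilities to decide whether the
// full buffer text should be included in `didSave` notifications.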
9211fn include_text(server: &lsp::LanguageServer) -> bool {
9212 server
9213 .capabilities()
9214 .text_document_sync
9215 .as_ref()
9216 .and_then(|sync| match sync {
9217 lsp::TextDocumentSyncCapability::Kind(_) => None,
9218 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
9219 })
9220 .and_then(|save_options| match save_options {
9221 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
9222 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
9223 })
9224 .unwrap_or(false)
9225}
9226
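// Attempts to format `buffer` with the prettier instance associated with it, if any.
// Returns `None` when prettier is unavailable or fails, so callers can fall back to
// other formatters.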
9227async fn format_with_prettier(
9228 project: &ModelHandle<Project>,
9229 buffer: &ModelHandle<Buffer>,
9230 cx: &mut AsyncAppContext,
9231) -> Option<FormatOperation> {
9232 if let Some((prettier_path, prettier_task)) = project
9233 .update(cx, |project, cx| {
9234 project.prettier_instance_for_buffer(buffer, cx)
9235 })
9236 .await
9237 {
9238 // TODO kb re-insert incremented value here?
9239 if let PrettierProcess::Running(prettier) = prettier_task.await {
9240 let buffer_path = buffer.update(cx, |buffer, cx| {
9241 File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
9242 });
9243 match prettier.format(buffer, buffer_path, cx).await {
9244 Ok(new_diff) => return Some(FormatOperation::Prettier(new_diff)),
9245 Err(e) => {
9246 log::error!(
9247 "Prettier instance from {prettier_path:?} failed to format a buffer: {e:#}"
9248 );
9249 }
9250 }
9251 }
9252 }
9253
9254 None
9255}