1mod ignore;
2mod lsp_command;
3pub mod project_settings;
4pub mod search;
5pub mod terminals;
6pub mod worktree;
7
8#[cfg(test)]
9mod project_tests;
10#[cfg(test)]
11mod worktree_tests;
12
13use anyhow::{anyhow, Context as _, Result};
14use client2::{proto, Client, Collaborator, TypedEnvelope, UserStore};
15use clock::ReplicaId;
16use collections::{hash_map, BTreeMap, HashMap, HashSet};
17use copilot2::Copilot;
18use futures::{
19 channel::{
20 mpsc::{self, UnboundedReceiver},
21 oneshot,
22 },
23 future::{self, try_join_all, Shared},
24 stream::FuturesUnordered,
25 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
26};
27use globset::{Glob, GlobSet, GlobSetBuilder};
28use gpui2::{
29 AnyHandle, AppContext, AsyncAppContext, Context, EventEmitter, Executor, Handle, ModelContext,
30 Task, WeakHandle,
31};
32use itertools::Itertools;
33use language2::{
34 language_settings::{
35 language_settings, FormatOnSave, Formatter, InlayHintKind, LanguageSettings,
36 },
37 point_to_lsp,
38 proto::{
39 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
40 serialize_anchor, serialize_version, split_operations,
41 },
42 range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, BundledFormatter, CachedLspAdapter,
43 CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
44 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
45 LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16,
46 TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
47};
48use log::error;
49use lsp2::{
50 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
51 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
52};
53use lsp_command::*;
54use node_runtime::NodeRuntime;
55use postage::watch;
56use prettier2::{LocateStart, Prettier, PRETTIER_SERVER_FILE, PRETTIER_SERVER_JS};
57use project_settings::{LspSettings, ProjectSettings};
58use rand::prelude::*;
59use search::SearchQuery;
60use serde::Serialize;
61use settings2::SettingsStore;
62use sha2::{Digest, Sha256};
63use similar::{ChangeTag, TextDiff};
64use smol::channel::{Receiver, Sender};
65use std::{
66 cmp::{self, Ordering},
67 convert::TryInto,
68 hash::Hash,
69 mem,
70 num::NonZeroU32,
71 ops::Range,
72 path::{self, Component, Path, PathBuf},
73 process::Stdio,
74 str,
75 sync::{
76 atomic::{AtomicUsize, Ordering::SeqCst},
77 Arc,
78 },
79 time::{Duration, Instant},
80};
81use terminals::Terminals;
82use text::{Anchor, LineEnding, Rope};
83use util::{
84 debug_panic, defer,
85 http::HttpClient,
86 merge_json_value_into,
87 paths::{DEFAULT_PRETTIER_DIR, LOCAL_SETTINGS_RELATIVE_PATH},
88 post_inc, ResultExt, TryFutureExt as _,
89};
90
91pub use fs::*;
92pub use worktree::*;
93
/// An item that may be associated with an entry in a project's worktree,
/// such as a buffer opened from a file on disk.
pub trait Item {
    /// The id of the worktree entry backing this item, if any.
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
    /// The worktree-qualified path of this item, if it has one.
    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
}
98
99// Language server state is stored across 3 collections:
100// language_servers =>
101// a mapping from unique server id to LanguageServerState which can either be a task for a
102// server in the process of starting, or a running server with adapter and language server arcs
103// language_server_ids => a mapping from worktreeId and server name to the unique server id
104// language_server_statuses => a mapping from unique server id to the current server status
105//
106// Multiple worktrees can map to the same language server for example when you jump to the definition
107// of a file in the standard library. So language_server_ids is used to look up which server is active
108// for a given worktree and language server name
109//
110// When starting a language server, first the id map is checked to make sure a server isn't already available
// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and
112// the Starting variant of LanguageServerState is stored in the language_servers map.
/// A collection of worktrees plus all the state needed to edit and
/// collaborate on them: open buffers, language servers, diagnostics,
/// prettier instances, and the client connection used for sharing.
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    /// The project entry currently focused in the UI, if any.
    active_entry: Option<ProjectEntryId>,
    /// Channel used to serialize buffer operations and language-server
    /// updates so peers receive them in a consistent order.
    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
    languages: Arc<LanguageRegistry>,
    /// Extra language servers (e.g. attached via LSP extensions) that are not
    /// the primary server for any language.
    supplementary_language_servers:
        HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
    // See the comment above this struct for how these three collections relate.
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
    language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
    last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
    client: Arc<client2::Client>,
    /// Counter used to allocate `ProjectEntryId`s for local worktree entries.
    next_entry_id: Arc<AtomicUsize>,
    join_project_response_message_id: u32,
    next_diagnostic_group_id: usize,
    user_store: Handle<UserStore>,
    fs: Arc<dyn Fs>,
    /// `None` until the project is shared (local) or joined (remote).
    client_state: Option<ProjectClientState>,
    collaborators: HashMap<proto::PeerId, Collaborator>,
    client_subscriptions: Vec<client2::Subscription>,
    _subscriptions: Vec<gpui2::Subscription>,
    next_buffer_id: u64,
    /// Watch channel that fires whenever a buffer is opened.
    opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
    /// For each peer, the ids of buffers that have been sent to them.
    shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
    #[allow(clippy::type_complexity)]
    loading_buffers_by_path: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<Handle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    #[allow(clippy::type_complexity)]
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<Handle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect
    incomplete_remote_buffers: HashMap<u64, Option<Handle<Buffer>>>,
    buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
    buffers_being_formatted: HashSet<u64>,
    /// Buffers whose git diff needs recalculating, flushed via `git_diff_debouncer`.
    buffers_needing_diff: HashSet<WeakHandle<Buffer>>,
    git_diff_debouncer: DelayedDebounced,
    /// Random value identifying this project instance, used in buffer syncing.
    nonce: u128,
    _maintain_buffer_languages: Task<()>,
    _maintain_workspace_config: Task<Result<()>>,
    terminals: Terminals,
    copilot_lsp_subscription: Option<gpui2::Subscription>,
    copilot_log_subscription: Option<lsp2::Subscription>,
    /// LSP settings as of the last settings change, used to detect which
    /// servers need restarting when settings change.
    current_lsp_settings: HashMap<Arc<str>, LspSettings>,
    /// `None` for remote projects, which never spawn node-based services.
    node: Option<Arc<dyn NodeRuntime>>,
    /// Prettier instances keyed by (worktree, prettier dir); shared so
    /// concurrent format requests reuse the same instance.
    prettier_instances: HashMap<
        (Option<WorktreeId>, PathBuf),
        Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>,
    >,
}
168
/// Debounces a unit of work: each call to [`DelayedDebounced::fire_new`]
/// cancels the previously scheduled run and schedules a new one after a delay.
struct DelayedDebounced {
    /// The currently scheduled (or running) task, if any.
    task: Option<Task<()>>,
    /// Dropping/firing this cancels the pending run of `task`.
    cancel_channel: Option<oneshot::Sender<()>>,
}
173
/// Identifies which language server an LSP request should be routed to.
enum LanguageServerToQuery {
    /// The primary language server for the buffer's language.
    Primary,
    /// A specific language server, addressed by its id.
    Other(LanguageServerId),
}
178
impl DelayedDebounced {
    /// Creates an idle debouncer with no scheduled work.
    fn new() -> DelayedDebounced {
        DelayedDebounced {
            task: None,
            cancel_channel: None,
        }
    }

    /// Schedules `func` to run after `delay`, cancelling any previously
    /// scheduled run. The previous task is still awaited first so that an
    /// in-flight run completes before the new one starts.
    fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
    where
        F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
    {
        // Cancel the pending run, if any. Send failure just means the
        // receiver is already gone, which is fine.
        if let Some(channel) = self.cancel_channel.take() {
            _ = channel.send(());
        }

        let (sender, mut receiver) = oneshot::channel::<()>();
        self.cancel_channel = Some(sender);

        let previous_task = self.task.take();
        self.task = Some(cx.spawn(move |project, mut cx| async move {
            let mut timer = cx.executor().timer(delay).fuse();
            // Wait for an in-flight run to finish before starting the timer race.
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }

            // Biased: a cancellation that arrives together with timer expiry
            // wins, so a newer `fire_new` call always supersedes this one.
            futures::select_biased! {
                _ = receiver => return,
                _ = timer => {}
            }

            // `update` fails only if the project was released; skip silently.
            if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
                task.await;
            }
        }));
    }
}
216
/// A snapshot of a buffer's text paired with the LSP document version it was
/// sent at, so later server responses can be resolved against the exact text
/// the server saw.
struct LspBufferSnapshot {
    version: i32,
    snapshot: TextBufferSnapshot,
}
221
/// Message ordered with respect to buffer operations
enum BufferOrderedMessage {
    /// A buffer operation to forward to collaborators.
    Operation {
        buffer_id: u64,
        operation: proto::Operation,
    },
    /// A language-server status/progress update to forward.
    LanguageServerUpdate {
        language_server_id: LanguageServerId,
        message: proto::update_language_server::Variant,
    },
    /// Request to resynchronize buffer state with collaborators.
    Resync,
}
234
/// Updates that a shared local project must propagate to remote collaborators.
enum LocalProjectUpdate {
    /// The set of worktrees changed (added/removed).
    WorktreesChanged,
    /// A specific peer needs to be sent the state of a buffer.
    CreateBufferForPeer {
        peer_id: proto::PeerId,
        buffer_id: u64,
    },
}
242
/// The project's bookkeeping entry for a buffer it knows about.
enum OpenBuffer {
    /// The project keeps the buffer alive.
    Strong(Handle<Buffer>),
    /// The buffer is kept alive only by other references; it may have dropped.
    Weak(WeakHandle<Buffer>),
    /// Operations received for a buffer that isn't loaded here —
    /// presumably replayed once the buffer arrives; confirm at use sites.
    Operations(Vec<Operation>),
}
248
/// A strong or weak reference to a worktree owned by the project.
#[derive(Clone)]
enum WorktreeHandle {
    /// The project keeps the worktree alive.
    Strong(Handle<Worktree>),
    /// The worktree lives only as long as outside references hold it.
    Weak(WeakHandle<Worktree>),
}
254
/// Collaboration state: either a local project that has been shared, or a
/// remote project that we've joined.
enum ProjectClientState {
    /// A local project that is currently shared with collaborators.
    Local {
        remote_id: u64,
        /// Queue of updates to push to collaborators, drained by `_send_updates`.
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<Result<()>>,
    },
    /// A project hosted by another peer that we've joined.
    Remote {
        /// True once the host stops sharing; the project becomes read-only/disconnected.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
267
/// Events emitted by [`Project`] for the UI and other observers.
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    LanguageServerAdded(LanguageServerId),
    LanguageServerRemoved(LanguageServerId),
    /// A log line from the given language server.
    LanguageServerLog(LanguageServerId, String),
    /// A user-visible notification message.
    Notification(String),
    ActiveEntryChanged(Option<ProjectEntryId>),
    ActivateProjectPanel,
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    DiskBasedDiagnosticsStarted {
        language_server_id: LanguageServerId,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: LanguageServerId,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: LanguageServerId,
    },
    RemoteIdChanged(Option<u64>),
    DisconnectedFromHost,
    Closed,
    DeletedEntry(ProjectEntryId),
    /// A collaborator rejoined with a new peer id (e.g. after reconnecting).
    CollaboratorUpdated {
        old_peer_id: proto::PeerId,
        new_peer_id: proto::PeerId,
    },
    CollaboratorJoined(proto::PeerId),
    CollaboratorLeft(proto::PeerId),
    RefreshInlayHints,
}
301
/// The lifecycle state of a language server owned by the project.
pub enum LanguageServerState {
    /// The server is still being spawned/initialized; the task resolves to
    /// the running server, or `None` if startup failed.
    Starting(Task<Option<Arc<LanguageServer>>>),

    /// The server is running and ready for requests.
    Running {
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server: Arc<LanguageServer>,
        /// Per-worktree glob sets for file-watch registrations
        /// (presumably from `workspace/didChangeWatchedFiles`; confirm at use sites).
        watched_paths: HashMap<WorktreeId, GlobSet>,
        simulate_disk_based_diagnostics_completion: Option<Task<()>>,
    },
}
313
/// User-visible status of a language server, including in-flight progress work.
#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    /// In-progress work keyed by the server's progress token.
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    /// Progress tokens this server has registered.
    progress_tokens: HashSet<String>,
}
321
/// A single progress report from a language server.
#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    /// Completion percentage, when the server reports one.
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}
329
/// A path to a file or directory, qualified by the worktree it belongs to.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    /// Path relative to the worktree root.
    pub path: Arc<Path>,
}
335
/// Counts of primary error and warning diagnostics for a path.
#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}
341
/// An anchored range within a specific buffer.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Location {
    pub buffer: Handle<Buffer>,
    pub range: Range<language2::Anchor>,
}
347
/// An inlay hint anchored in a buffer, mirroring the LSP inlay hint shape.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub position: language2::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    /// Whether to render whitespace before the hint.
    pub padding_left: bool,
    /// Whether to render whitespace after the hint.
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    /// Whether the hint still needs (or is undergoing) an `inlayHint/resolve` round-trip.
    pub resolve_state: ResolveState,
}
358
/// Resolution state of an inlay hint's lazily-populated fields.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    /// Can be resolved by the given server, using the stored LSP resolve data.
    CanResolve(LanguageServerId, Option<lsp2::LSPAny>),
    Resolving,
}
365
366impl InlayHint {
367 pub fn text(&self) -> String {
368 match &self.label {
369 InlayHintLabel::String(s) => s.to_owned(),
370 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
371 }
372 }
373}
374
/// An inlay hint's label: either a plain string or a list of rich parts.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
    String(String),
    LabelParts(Vec<InlayHintLabelPart>),
}
380
/// One segment of a multi-part inlay hint label, optionally carrying a
/// tooltip and a navigable source location.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
    pub value: String,
    pub tooltip: Option<InlayHintLabelPartTooltip>,
    /// Location this part links to, along with the server that provided it.
    pub location: Option<(LanguageServerId, lsp2::Location)>,
}
387
/// Tooltip for a whole inlay hint: plain text or markup.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
    String(String),
    MarkupContent(MarkupContent),
}
393
/// Tooltip for a single inlay hint label part: plain text or markup.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
    String(String),
    MarkupContent(MarkupContent),
}
399
/// Markup text with an associated kind (plain text, markdown, or code).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
    pub kind: HoverBlockKind,
    pub value: String,
}
405
/// A navigation target, optionally paired with the origin range it was
/// triggered from (mirrors LSP's `LocationLink`).
#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}
411
/// A highlighted range in a document, e.g. other occurrences of a symbol.
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language2::Anchor>,
    pub kind: DocumentHighlightKind,
}
417
/// A workspace symbol reported by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    pub language_server_name: LanguageServerName,
    /// The worktree whose language server produced this symbol.
    pub source_worktree_id: WorktreeId,
    pub path: ProjectPath,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp2::SymbolKind,
    pub range: Range<Unclipped<PointUtf16>>,
    /// 32-byte signature — presumably a digest authenticating the symbol's
    /// origin when passed between peers; confirm where it is computed/verified.
    pub signature: [u8; 32],
}
429
/// One block of hover content, with text and how it should be rendered.
#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub kind: HoverBlockKind,
}
435
/// How a hover block's text should be interpreted when rendering.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
    PlainText,
    Markdown,
    /// A fenced code block in the named language.
    Code { language: String },
}
442
/// The result of a hover request: content blocks, the range they apply to,
/// and the buffer's language (used for rendering).
#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language2::Anchor>>,
    pub language: Option<Arc<Language>>,
}
449
450impl Hover {
451 pub fn is_empty(&self) -> bool {
452 self.contents.iter().all(|block| block.text.is_empty())
453 }
454}
455
/// A set of edit transactions, one per affected buffer, produced by an
/// operation that may touch multiple buffers (e.g. a workspace edit).
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<Handle<Buffer>, language2::Transaction>);
458
459impl DiagnosticSummary {
460 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
461 let mut this = Self {
462 error_count: 0,
463 warning_count: 0,
464 };
465
466 for entry in diagnostics {
467 if entry.diagnostic.is_primary {
468 match entry.diagnostic.severity {
469 DiagnosticSeverity::ERROR => this.error_count += 1,
470 DiagnosticSeverity::WARNING => this.warning_count += 1,
471 _ => {}
472 }
473 }
474 }
475
476 this
477 }
478
479 pub fn is_empty(&self) -> bool {
480 self.error_count == 0 && self.warning_count == 0
481 }
482
483 pub fn to_proto(
484 &self,
485 language_server_id: LanguageServerId,
486 path: &Path,
487 ) -> proto::DiagnosticSummary {
488 proto::DiagnosticSummary {
489 path: path.to_string_lossy().to_string(),
490 language_server_id: language_server_id.0 as u64,
491 error_count: self.error_count as u32,
492 warning_count: self.warning_count as u32,
493 }
494 }
495}
496
/// A unique identifier for an entry (file or directory) within a project.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    /// The largest representable entry id.
    pub const MAX: Self = Self(usize::MAX);

    /// Allocates the next id by atomically incrementing a shared counter.
    pub fn new(counter: &AtomicUsize) -> Self {
        let id = counter.fetch_add(1, SeqCst);
        ProjectEntryId(id)
    }

    /// Reconstructs an id from its wire representation.
    pub fn from_proto(id: u64) -> Self {
        ProjectEntryId(id as usize)
    }

    /// Converts this id into its wire representation.
    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    /// Returns the raw integer value of this id.
    pub fn to_usize(&self) -> usize {
        self.0
    }
}
519
/// What initiated a formatting request.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatTrigger {
    /// Formatting was triggered by saving the buffer.
    Save,
    /// Formatting was explicitly requested by the user.
    Manual,
}
525
/// Delegate handed to LSP adapters, giving them access to the project and an
/// HTTP client (e.g. for downloading language server binaries).
struct ProjectLspAdapterDelegate {
    project: Handle<Project>,
    http_client: Arc<dyn HttpClient>,
}
530
531impl FormatTrigger {
532 fn from_proto(value: i32) -> FormatTrigger {
533 match value {
534 0 => FormatTrigger::Save,
535 1 => FormatTrigger::Manual,
536 _ => FormatTrigger::Save,
537 }
538 }
539}
/// A file or open buffer that may contain matches for a project-wide search.
#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
    /// A buffer that is already open in memory.
    OpenBuffer {
        buffer: Handle<Buffer>,
        // This might be an unnamed file without representation on filesystem
        path: Option<Arc<Path>>,
    },
    /// A file on disk that has not been opened yet.
    Path {
        worktree_id: WorktreeId,
        path: Arc<Path>,
    },
}
552
553type SearchMatchCandidateIndex = usize;
554impl SearchMatchCandidate {
555 fn path(&self) -> Option<Arc<Path>> {
556 match self {
557 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
558 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
559 }
560 }
561}
562
563impl Project {
    /// Registers this crate's settings types with the global settings store.
    pub fn init_settings(cx: &mut AppContext) {
        settings2::register::<ProjectSettings>(cx);
    }
567
    /// Registers settings and all of the RPC message/request handlers through
    /// which remote peers drive this project.
    pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
        Self::init_settings(cx);

        // Collaborator lifecycle and buffer synchronization messages.
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_update_project_collaborator);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_create_buffer_for_peer);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_request_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_message_handler(Self::handle_update_worktree_settings);
        // Worktree entry CRUD requests from guests.
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        // Language-feature requests proxied to this host's language servers.
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_on_type_formatting);
        client.add_model_request_handler(Self::handle_inlay_hints);
        client.add_model_request_handler(Self::handle_resolve_inlay_hint);
        client.add_model_request_handler(Self::handle_refresh_inlay_hints);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_synchronize_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
        client.add_model_message_handler(Self::handle_update_diff_base);
    }
617
    /// Creates a new, empty local project backed by the given filesystem.
    /// Worktrees are added later (e.g. via `find_or_create_local_worktree`).
    pub fn local(
        client: Arc<Client>,
        node: Arc<dyn NodeRuntime>,
        user_store: Handle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AppContext,
    ) -> Handle<Self> {
        cx.entity(|cx: &mut ModelContext<Self>| {
            // Background task that forwards buffer operations and LSP updates
            // to collaborators in a consistent order.
            let (tx, rx) = mpsc::unbounded();
            cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            // Copilot may not be initialized yet; `on_settings_changed`
            // subscribes later if it appears.
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            Self {
                worktrees: Default::default(),
                buffer_ordered_messages_tx: tx,
                collaborators: Default::default(),
                next_buffer_id: 0,
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_buffers_by_path: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                buffer_snapshots: Default::default(),
                join_project_response_message_id: 0,
                // Not shared yet.
                client_state: None,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![
                    cx.observe_global::<SettingsStore>(Self::on_settings_changed),
                    cx.on_release(Self::release),
                    cx.on_app_quit(Self::shutdown_language_servers),
                ],
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                // Random per-instance identity used when syncing buffers.
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp.clone(),
                node: Some(node),
                prettier_instances: HashMap::default(),
            }
        })
    }
683
    /// Joins a remote project hosted by another peer: connects, requests the
    /// project's state, builds replicated worktrees, and loads collaborators.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: Handle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<Handle<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        // Subscribe before joining so no entity messages are missed.
        let subscription = client.subscribe_to_entity(remote_id)?;
        let response = client
            .request_envelope(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.entity(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            // Build replicated worktrees from the host's snapshot.
            let mut worktrees = Vec::new();
            for worktree in response.payload.worktrees {
                let worktree =
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                worktrees.push(worktree);
            }

            // Background task forwarding buffer operations in order.
            let (tx, rx) = mpsc::unbounded();
            cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            let mut this = Self {
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                loading_buffers_by_path: Default::default(),
                next_buffer_id: 0,
                opened_buffer: watch::channel(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                join_project_response_message_id: response.message_id,
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: Default::default(),
                _subscriptions: vec![
                    cx.on_release(Self::release),
                    cx.on_app_quit(Self::shutdown_language_servers),
                ],
                client: client.clone(),
                client_state: Some(ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                }),
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                // Seed server statuses from the host's snapshot; progress is
                // filled in by subsequent update messages.
                language_server_statuses: response
                    .payload
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            LanguageServerId(server.id as usize),
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                opened_buffers: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: settings2::get::<ProjectSettings>(cx).lsp.clone(),
                // Remote projects never spawn node-based services locally.
                node: None,
                prettier_instances: HashMap::default(),
            };
            for worktree in worktrees {
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        })?;
        let subscription = subscription.set_model(&this, &mut cx);

        // Fetch collaborator user records before wiring them into the project.
        let user_ids = response
            .payload
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
            .await?;

        this.update(&mut cx, |this, cx| {
            this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
            this.client_subscriptions.push(subscription);
            anyhow::Ok(())
        })?;

        Ok(this)
    }
807
    /// Called when the project entity is released: unshares a shared local
    /// project, or leaves a remote project and drops host state.
    fn release(&mut self, cx: &mut AppContext) {
        match &self.client_state {
            Some(ProjectClientState::Local { .. }) => {
                let _ = self.unshare_internal(cx);
            }
            Some(ProjectClientState::Remote { remote_id, .. }) => {
                // Best-effort; the server will clean up if the send fails.
                let _ = self.client.send(proto::LeaveProject {
                    project_id: *remote_id,
                });
                self.disconnected_from_host_internal(cx);
            }
            _ => {}
        }
    }
822
    /// Returns a future that gracefully shuts down every language server,
    /// awaiting still-starting servers first. Run on app quit.
    fn shutdown_language_servers(
        &mut self,
        _cx: &mut ModelContext<Self>,
    ) -> impl Future<Output = ()> {
        // Drain synchronously so `self` isn't borrowed by the returned future.
        let shutdown_futures = self
            .language_servers
            .drain()
            .map(|(_, server_state)| async {
                use LanguageServerState::*;
                match server_state {
                    Running { server, .. } => server.shutdown()?.await,
                    // Wait for startup to finish, then shut the server down.
                    Starting(task) => task.await?.shutdown()?.await,
                }
            })
            .collect::<Vec<_>>();

        async move {
            futures::future::join_all(shutdown_futures).await;
        }
    }
843
844 // #[cfg(any(test, feature = "test-support"))]
845 // pub async fn test(
846 // fs: Arc<dyn Fs>,
847 // root_paths: impl IntoIterator<Item = &Path>,
848 // cx: &mut gpui::TestAppContext,
849 // ) -> Handle<Project> {
850 // let mut languages = LanguageRegistry::test();
851 // languages.set_executor(cx.background());
852 // let http_client = util::http::FakeHttpClient::with_404_response();
853 // let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
854 // let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
855 // let project = cx.update(|cx| {
856 // Project::local(
857 // client,
858 // node_runtime::FakeNodeRuntime::new(),
859 // user_store,
860 // Arc::new(languages),
861 // fs,
862 // cx,
863 // )
864 // });
865 // for path in root_paths {
866 // let (tree, _) = project
867 // .update(cx, |project, cx| {
868 // project.find_or_create_local_worktree(path, true, cx)
869 // })
870 // .await
871 // .unwrap();
872 // tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
873 // .await;
874 // }
875 // project
876 // }
877
    /// Enables a prettier mock that avoids interacting with node runtime, prettier LSP wrapper, or any real file changes.
    /// Instead, it appends a suffix to every input; that suffix is returned by this method.
    #[cfg(any(test, feature = "test-support"))]
    pub fn enable_test_prettier(&mut self, plugins: &[&'static str]) -> &'static str {
        self.node = Some(node_runtime::FakeNodeRuntime::with_prettier_support(
            plugins,
        ));
        Prettier::FORMAT_SUFFIX
    }
887
    /// Reacts to a settings change: starts/stops/restarts language servers
    /// whose enablement or LSP settings changed, re-checks default formatters,
    /// and hooks up Copilot if it became available.
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let mut language_servers_to_start = Vec::new();
        let mut language_formatters_to_check = Vec::new();
        // Collect, per open buffer, the servers that should now be running
        // and the formatter settings that need re-validation.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade() {
                let buffer = buffer.read(cx);
                let buffer_file = File::from_dyn(buffer.file());
                let buffer_language = buffer.language();
                let settings = language_settings(buffer_language, buffer.file(), cx);
                if let Some(language) = buffer_language {
                    if settings.enable_language_server {
                        if let Some(file) = buffer_file {
                            language_servers_to_start
                                .push((file.worktree.clone(), Arc::clone(language)));
                        }
                    }
                    language_formatters_to_check.push((
                        buffer_file.map(|f| f.worktree_id(cx)),
                        Arc::clone(language),
                        settings.clone(),
                    ));
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

        // Diff old vs. new LSP settings for every currently-running server.
        let new_lsp_settings = settings2::get::<ProjectSettings>(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            // Find the language and adapter this running server belongs to.
            let language = languages.iter().find_map(|l| {
                let adapter = l
                    .lsp_adapters()
                    .iter()
                    .find(|adapter| &adapter.name == started_lsp_name)?;
                Some((l, adapter))
            });
            if let Some((language, adapter)) = language {
                let worktree = self.worktree_for_id(*worktree_id, cx);
                let file = worktree.as_ref().and_then(|tree| {
                    tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
                });
                if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
                    // Server was disabled via settings.
                    language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                } else if let Some(worktree) = worktree {
                    // Restart when this server's settings entry was added,
                    // removed, or modified.
                    let server_name = &adapter.name.0;
                    match (
                        current_lsp_settings.get(server_name),
                        new_lsp_settings.get(server_name),
                    ) {
                        (None, None) => {}
                        (Some(_), None) | (None, Some(_)) => {
                            language_servers_to_restart.push((worktree, Arc::clone(language)));
                        }
                        (Some(current_lsp_settings), Some(new_lsp_settings)) => {
                            if current_lsp_settings != new_lsp_settings {
                                language_servers_to_restart.push((worktree, Arc::clone(language)));
                            }
                        }
                    }
                }
            }
        }
        self.current_lsp_settings = new_lsp_settings;

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        for (worktree, language, settings) in language_formatters_to_check {
            self.install_default_formatters(worktree, &language, &settings, cx)
                .detach_and_log_err(cx);
        }

        // Start all the newly-enabled language servers.
        for (worktree, language) in language_servers_to_start {
            let worktree_path = worktree.read(cx).abs_path();
            self.start_language_servers(&worktree, worktree_path, language, cx);
        }

        // Restart all language servers with changed initialization options.
        for (worktree, language) in language_servers_to_restart {
            self.restart_language_servers(worktree, language, cx);
        }

        // If Copilot appeared since startup, register existing buffers with it.
        if self.copilot_lsp_subscription.is_none() {
            if let Some(copilot) = Copilot::global(cx) {
                for buffer in self.opened_buffers.values() {
                    if let Some(buffer) = buffer.upgrade() {
                        self.register_buffer_with_copilot(&buffer, cx);
                    }
                }
                self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
            }
        }

        cx.notify();
    }
990
991 pub fn buffer_for_id(&self, remote_id: u64) -> Option<Handle<Buffer>> {
992 self.opened_buffers
993 .get(&remote_id)
994 .and_then(|buffer| buffer.upgrade())
995 }
996
    /// Returns the language registry shared by this project.
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
1000
1001 pub fn client(&self) -> Arc<Client> {
1002 self.client.clone()
1003 }
1004
1005 pub fn user_store(&self) -> Handle<UserStore> {
1006 self.user_store.clone()
1007 }
1008
1009 pub fn opened_buffers(&self) -> Vec<Handle<Buffer>> {
1010 self.opened_buffers
1011 .values()
1012 .filter_map(|b| b.upgrade())
1013 .collect()
1014 }
1015
1016 #[cfg(any(test, feature = "test-support"))]
1017 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1018 let path = path.into();
1019 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1020 self.opened_buffers.iter().any(|(_, buffer)| {
1021 if let Some(buffer) = buffer.upgrade() {
1022 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1023 if file.worktree == worktree && file.path() == &path.path {
1024 return true;
1025 }
1026 }
1027 }
1028 false
1029 })
1030 } else {
1031 false
1032 }
1033 }
1034
    /// Returns the filesystem implementation used by this project.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
1038
1039 pub fn remote_id(&self) -> Option<u64> {
1040 match self.client_state.as_ref()? {
1041 ProjectClientState::Local { remote_id, .. }
1042 | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
1043 }
1044 }
1045
1046 pub fn replica_id(&self) -> ReplicaId {
1047 match &self.client_state {
1048 Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
1049 _ => 0,
1050 }
1051 }
1052
1053 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1054 if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
1055 updates_tx
1056 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1057 .ok();
1058 }
1059 cx.notify();
1060 }
1061
    /// All collaborators currently connected to this project, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
        &self.collaborators
    }
1065
1066 pub fn host(&self) -> Option<&Collaborator> {
1067 self.collaborators.values().find(|c| c.replica_id == 0)
1068 }
1069
1070 /// Collect all worktrees, including ones that don't appear in the project panel
1071 pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Handle<Worktree>> {
1072 self.worktrees
1073 .iter()
1074 .filter_map(move |worktree| worktree.upgrade())
1075 }
1076
1077 /// Collect all user-visible worktrees, the ones that appear in the project panel
1078 pub fn visible_worktrees<'a>(
1079 &'a self,
1080 cx: &'a AppContext,
1081 ) -> impl 'a + DoubleEndedIterator<Item = Handle<Worktree>> {
1082 self.worktrees.iter().filter_map(|worktree| {
1083 worktree.upgrade().and_then(|worktree| {
1084 if worktree.read(cx).is_visible() {
1085 Some(worktree)
1086 } else {
1087 None
1088 }
1089 })
1090 })
1091 }
1092
    /// Returns the root directory names of all visible worktrees, e.g. for
    /// display in window titles.
    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }
1097
1098 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Handle<Worktree>> {
1099 self.worktrees()
1100 .find(|worktree| worktree.read(cx).id() == id)
1101 }
1102
1103 pub fn worktree_for_entry(
1104 &self,
1105 entry_id: ProjectEntryId,
1106 cx: &AppContext,
1107 ) -> Option<Handle<Worktree>> {
1108 self.worktrees()
1109 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1110 }
1111
1112 pub fn worktree_id_for_entry(
1113 &self,
1114 entry_id: ProjectEntryId,
1115 cx: &AppContext,
1116 ) -> Option<WorktreeId> {
1117 self.worktree_for_entry(entry_id, cx)
1118 .map(|worktree| worktree.read(cx).id())
1119 }
1120
1121 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1122 paths.iter().all(|path| self.contains_path(path, cx))
1123 }
1124
1125 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1126 for worktree in self.worktrees() {
1127 let worktree = worktree.read(cx).as_local();
1128 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1129 return true;
1130 }
1131 }
1132 false
1133 }
1134
    /// Creates a file or directory entry at `project_path`.
    ///
    /// Returns `None` if the path's worktree is no longer part of this
    /// project. On a local project the worktree performs the filesystem
    /// operation directly; on a guest project the request is forwarded to the
    /// host over RPC and the resulting entry is inserted into the remote
    /// worktree snapshot.
    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            // NOTE(review): assumes a non-local project always has a remote
            // id; `unwrap` would panic otherwise.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.to_string_lossy().into(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the entry once the local snapshot catches up to the
                // scan in which the host observed the new entry.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })?
                    .await
            }))
        }
    }
1177
    /// Copies the entry `entry_id` to `new_path` within its worktree —
    /// locally, or via the host for guest projects.
    ///
    /// Returns `None` if the entry's worktree is gone, or if the local
    /// worktree declines the copy.
    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // NOTE(review): non-local implies a remote id exists.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the entry once the local snapshot catches up to the
                // host's scan of the copy.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })?
                    .await
            }))
        }
    }
1220
    /// Renames (moves) the entry `entry_id` to `new_path` within its
    /// worktree — locally, or via the host for guest projects.
    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            // NOTE(review): non-local implies a remote id exists.
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                // Insert the renamed entry once the local snapshot catches up
                // to the host's scan.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote_mut().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })?
                    .await
            }))
        }
    }
1263
    /// Deletes the entry `entry_id` from its worktree.
    ///
    /// Emits `Event::DeletedEntry` before the deletion completes so the UI
    /// can update optimistically. Returns `None` if the entry's worktree is
    /// gone.
    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;

        cx.emit(Event::DeletedEntry(entry_id));

        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            // NOTE(review): non-local implies a remote id exists.
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                // Apply the deletion once the local snapshot catches up to
                // the host's scan.
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })?
                    .await
            }))
        }
    }
1299
    /// Expands a directory entry so its children get loaded — locally, or by
    /// asking the host and then waiting for the corresponding worktree
    /// snapshot to arrive.
    pub fn expand_entry(
        &mut self,
        worktree_id: WorktreeId,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_id(worktree_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
            })
        } else {
            // Hold the worktree weakly across the await so a dropped worktree
            // simply short-circuits to success.
            let worktree = worktree.downgrade();
            let request = self.client.request(proto::ExpandProjectEntry {
                project_id: self.remote_id().unwrap(),
                entry_id: entry_id.to_proto(),
            });
            Some(cx.spawn(move |_, mut cx| async move {
                let response = request.await?;
                if let Some(worktree) = worktree.upgrade() {
                    worktree
                        .update(&mut cx, |worktree, _| {
                            worktree
                                .as_remote_mut()
                                .unwrap()
                                .wait_for_snapshot(response.worktree_scan_id as usize)
                        })?
                        .await?;
                }
                Ok(())
            }))
        }
    }
1333
    /// Makes this local project available to collaborators under
    /// `project_id`.
    ///
    /// Upgrades weak buffer/worktree handles to strong ones so they stay
    /// alive while shared, replays current language-server statuses and
    /// local settings to the server, and spawns the long-running task that
    /// streams worktree and buffer updates to guests.
    pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
        if self.client_state.is_some() {
            return Err(anyhow!("project was already shared"));
        }
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
                .set_model(&cx.handle(), &mut cx.to_async()),
        );

        // Keep all open buffers alive for the duration of the share.
        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade() {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                // Operations-only entries exist only on guest projects.
                OpenBuffer::Operations(_) => unreachable!(),
            }
        }

        // Likewise keep all worktrees alive.
        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        // Replay the already-running language servers so guests see servers
        // started before sharing began.
        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: server_id.0 as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        // Forward any per-worktree local settings files to the server.
        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees() {
            let worktree_id = worktree.read(cx).id().to_proto();
            for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
                        worktree_id,
                        path: path.to_string_lossy().into(),
                        content: Some(content),
                    })
                    .log_err();
            }
        }

        let (updates_tx, mut updates_rx) = mpsc::unbounded();
        let client = self.client.clone();
        self.client_state = Some(ProjectClientState::Local {
            remote_id: project_id,
            updates_tx,
            // Long-running task that serializes project updates to guests in
            // the order they were produced.
            _send_updates: cx.spawn(move |this, mut cx| async move {
                while let Some(update) = updates_rx.next().await {
                    match update {
                        LocalProjectUpdate::WorktreesChanged => {
                            let worktrees = this.update(&mut cx, |this, _cx| {
                                this.worktrees().collect::<Vec<_>>()
                            })?;
                            let update_project = this
                                .update(&mut cx, |this, cx| {
                                    this.client.request(proto::UpdateProject {
                                        project_id,
                                        worktrees: this.worktree_metadata_protos(cx),
                                    })
                                })?
                                .await;
                            // Only start sharing worktree contents once the
                            // server has acknowledged the metadata update.
                            if update_project.is_ok() {
                                for worktree in worktrees {
                                    worktree.update(&mut cx, |worktree, cx| {
                                        let worktree = worktree.as_local_mut().unwrap();
                                        worktree.share(project_id, cx).detach_and_log_err(cx)
                                    })?;
                                }
                            }
                        }
                        LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
                            let buffer = this.update(&mut cx, |this, _| {
                                let buffer = this.opened_buffers.get(&buffer_id).unwrap();
                                let shared_buffers =
                                    this.shared_buffers.entry(peer_id).or_default();
                                // Skip buffers this peer already received.
                                if shared_buffers.insert(buffer_id) {
                                    if let OpenBuffer::Strong(buffer) = buffer {
                                        Some(buffer.clone())
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            })?;

                            let Some(buffer) = buffer else { continue };
                            let operations =
                                buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
                            let operations = operations.await;
                            let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;

                            // Send the base state first, then stream the
                            // operation history in bounded chunks.
                            let initial_state = proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
                            };
                            if client.send(initial_state).log_err().is_some() {
                                let client = client.clone();
                                cx.executor()
                                    .spawn(async move {
                                        let mut chunks = split_operations(operations).peekable();
                                        while let Some(chunk) = chunks.next() {
                                            let is_last = chunks.peek().is_none();
                                            client.send(proto::CreateBufferForPeer {
                                                project_id,
                                                peer_id: Some(peer_id),
                                                variant: Some(
                                                    proto::create_buffer_for_peer::Variant::Chunk(
                                                        proto::BufferChunk {
                                                            buffer_id,
                                                            operations: chunk,
                                                            is_last,
                                                        },
                                                    ),
                                                ),
                                            })?;
                                        }
                                        anyhow::Ok(())
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                    }
                }
                Ok(())
            }),
        });

        self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();
        Ok(())
    }
1488
    /// Handles the project being re-shared after a reconnect: forgets which
    /// buffers each peer already has (they must be re-sent) and refreshes
    /// collaborator state from the server's message.
    pub fn reshared(
        &mut self,
        message: proto::ResharedProject,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.shared_buffers.clear();
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.metadata_changed(cx);
        Ok(())
    }
1499
    /// Re-synchronizes this guest project after rejoining a call.
    ///
    /// Clears stale per-worktree settings, replaces worktrees, collaborators
    /// and language-server statuses from the host's message, then requests a
    /// resync of buffered operations.
    pub fn rejoined(
        &mut self,
        message: proto::RejoinedProject,
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
                    .log_err();
            }
        });

        self.join_project_response_message_id = message_id;
        self.set_worktrees_from_proto(message.worktrees, cx)?;
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        // Rebuild server statuses from scratch; pending work and diagnostics
        // will be reported again by the host.
        self.language_server_statuses = message
            .language_servers
            .into_iter()
            .map(|server| {
                (
                    LanguageServerId(server.id as usize),
                    LanguageServerStatus {
                        name: server.name,
                        pending_work: Default::default(),
                        has_pending_diagnostic_updates: false,
                        progress_tokens: Default::default(),
                    },
                )
            })
            .collect();
        self.buffer_ordered_messages_tx
            .unbounded_send(BufferOrderedMessage::Resync)
            .unwrap();
        cx.notify();
        Ok(())
    }
1538
    /// Stops sharing this local project and notifies observers. Fails if the
    /// project is remote or was not shared.
    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
        self.unshare_internal(cx)?;
        self.metadata_changed(cx);
        cx.notify();
        Ok(())
    }
1545
    /// Shared implementation of [`Self::unshare`] that only needs a plain
    /// `AppContext`: tears down collaborator state, releases strong handles
    /// held for guests, and tells the server the project is no longer shared.
    fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
        if self.is_remote() {
            return Err(anyhow!("attempted to unshare a remote project"));
        }

        if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
            self.collaborators.clear();
            self.shared_buffers.clear();
            self.client_subscriptions.clear();

            // Invisible worktrees were held strongly only for collaborators;
            // downgrade them so they can drop naturally.
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade() {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                // Buffers no longer need to be kept alive on behalf of guests.
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            self.client.send(proto::UnshareProject {
                project_id: remote_id,
            })?;

            Ok(())
        } else {
            Err(anyhow!("attempted to unshare an unshared project"))
        }
    }
1588
    /// Marks this guest project as disconnected from its host and notifies
    /// observers via `Event::DisconnectedFromHost`.
    pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
        self.disconnected_from_host_internal(cx);
        cx.emit(Event::DisconnectedFromHost);
        cx.notify();
    }
1594
    /// Puts a remote project into its read-only, disconnected state:
    /// marks sharing as stopped, clears collaborators, and releases buffer
    /// handles that were held for the host's sake. No-op for local projects.
    fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
        if let Some(ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        }) = &mut self.client_state
        {
            // `is_read_only` keys off this flag.
            *sharing_has_stopped = true;

            self.collaborators.clear();

            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade() {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade() {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                // Stop retaining buffers on behalf of the (now gone) host.
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            *self.opened_buffer.0.borrow_mut() = ();
        }
    }
1631
    /// Emits `Event::Closed` so observers can tear down state tied to this
    /// project.
    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }
1635
1636 pub fn is_read_only(&self) -> bool {
1637 match &self.client_state {
1638 Some(ProjectClientState::Remote {
1639 sharing_has_stopped,
1640 ..
1641 }) => *sharing_has_stopped,
1642 _ => false,
1643 }
1644 }
1645
1646 pub fn is_local(&self) -> bool {
1647 match &self.client_state {
1648 Some(ProjectClientState::Remote { .. }) => false,
1649 _ => true,
1650 }
1651 }
1652
    /// Whether this project is a guest replica of a project hosted elsewhere.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
1656
    /// Creates a new in-memory buffer with the given text and optional
    /// language (defaults to plain text), and registers it with the project.
    /// Only supported on local projects.
    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Handle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }
        let id = post_inc(&mut self.next_buffer_id);
        let buffer = cx.entity(|cx| {
            Buffer::new(self.replica_id(), id, text).with_language(
                language.unwrap_or_else(|| language2::PLAIN_TEXT.clone()),
                cx,
            )
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }
1676
    /// Opens the buffer for `path` and returns it as an untyped handle
    /// together with its project entry id. Fails if the opened buffer has no
    /// project entry.
    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn(move |_, mut cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .update(&mut cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })?
                .ok_or_else(|| anyhow!("no project entry"))?;

            // Erase the buffer's concrete type for the caller.
            let buffer: &AnyHandle = &buffer;
            Ok((project_entry_id, buffer.clone()))
        })
    }
1695
1696 pub fn open_local_buffer(
1697 &mut self,
1698 abs_path: impl AsRef<Path>,
1699 cx: &mut ModelContext<Self>,
1700 ) -> Task<Result<Handle<Buffer>>> {
1701 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1702 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1703 } else {
1704 Task::ready(Err(anyhow!("no such path")))
1705 }
1706 }
1707
    /// Opens (or returns the already-open) buffer for `path`, deduplicating
    /// concurrent requests for the same path through a shared watch channel.
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Handle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees read from disk; remote ones ask the host.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers_by_path.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    })?);
                    anyhow::Ok(())
                })
                .detach();
                rx
            }
        };

        cx.executor().spawn(async move {
            wait_for_loading_buffer(loading_watch)
                .await
                .map_err(|error| anyhow!("{}", error))
        })
    }
1763
    /// Loads a buffer for `path` from disk via the local `worktree`, then
    /// registers it with this project.
    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &Handle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Handle<Buffer>>> {
        let buffer_id = post_inc(&mut self.next_buffer_id);
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(buffer_id, path, cx)
        });
        cx.spawn(move |this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
            Ok(buffer)
        })
    }
1781
    /// Asks the host to open the buffer at `path` and waits for the buffer's
    /// state to be replicated to this guest.
    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &Handle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Handle<Buffer>>> {
        let rpc = self.client.clone();
        // NOTE(review): assumes a remote project always has a remote id.
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(move |this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(response.buffer_id, cx)
            })?
            .await
        })
    }
1807
1808 /// LanguageServerName is owned, because it is inserted into a map
1809 pub fn open_local_buffer_via_lsp(
1810 &mut self,
1811 abs_path: lsp2::Url,
1812 language_server_id: LanguageServerId,
1813 language_server_name: LanguageServerName,
1814 cx: &mut ModelContext<Self>,
1815 ) -> Task<Result<Handle<Buffer>>> {
1816 cx.spawn(move |this, mut cx| async move {
1817 let abs_path = abs_path
1818 .to_file_path()
1819 .map_err(|_| anyhow!("can't convert URI to path"))?;
1820 let (worktree, relative_path) = if let Some(result) =
1821 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1822 {
1823 result
1824 } else {
1825 let worktree = this
1826 .update(&mut cx, |this, cx| {
1827 this.create_local_worktree(&abs_path, false, cx)
1828 })?
1829 .await?;
1830 this.update(&mut cx, |this, cx| {
1831 this.language_server_ids.insert(
1832 (worktree.read(cx).id(), language_server_name),
1833 language_server_id,
1834 );
1835 });
1836 (worktree, PathBuf::new())
1837 };
1838
1839 let project_path = ProjectPath {
1840 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1841 path: relative_path.into(),
1842 };
1843 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1844 .await
1845 })
1846 }
1847
    /// Returns the buffer with the given remote id, requesting it from the
    /// host when this is a connected guest project and the buffer isn't
    /// already open. On a local project an unknown id is an error.
    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Handle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(move |this, mut cx| async move {
                let buffer_id = request.await?.buffer_id;
                this.update(&mut cx, |this, cx| {
                    this.wait_for_remote_buffer(buffer_id, cx)
                })?
                .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }
1872
    /// Saves all the given buffers concurrently, failing on the first save
    /// error.
    pub fn save_buffers(
        &self,
        buffers: HashSet<Handle<Buffer>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        cx.spawn(move |this, mut cx| async move {
            // `update` fails only if the project was released; such buffers
            // are skipped rather than failing the whole batch.
            let save_tasks = buffers.into_iter().filter_map(|buffer| {
                this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
                    .ok()
            });
            try_join_all(save_tasks).await?;
            Ok(())
        })
    }
1887
    /// Saves `buffer` back to the file it was loaded from, dispatching to its
    /// local or remote worktree. Fails immediately if the buffer isn't backed
    /// by a file.
    pub fn save_buffer(
        &self,
        buffer: Handle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<()>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
        };
        let worktree = file.worktree.clone();
        let path = file.path.clone();
        worktree.update(cx, |worktree, cx| match worktree {
            Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
            Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
        })
    }
1903
1904 pub fn save_buffer_as(
1905 &mut self,
1906 buffer: Handle<Buffer>,
1907 abs_path: PathBuf,
1908 cx: &mut ModelContext<Self>,
1909 ) -> Task<Result<()>> {
1910 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1911 let old_file = File::from_dyn(buffer.read(cx).file())
1912 .filter(|f| f.is_local())
1913 .cloned();
1914 cx.spawn(move |this, mut cx| async move {
1915 if let Some(old_file) = &old_file {
1916 this.update(&mut cx, |this, cx| {
1917 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1918 });
1919 }
1920 let (worktree, path) = worktree_task.await?;
1921 worktree
1922 .update(&mut cx, |worktree, cx| match worktree {
1923 Worktree::Local(worktree) => {
1924 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1925 }
1926 Worktree::Remote(_) => panic!("cannot remote buffers as new files"),
1927 })?
1928 .await?;
1929
1930 this.update(&mut cx, |this, cx| {
1931 this.detect_language_for_buffer(&buffer, cx);
1932 this.register_buffer_with_language_servers(&buffer, cx);
1933 });
1934 Ok(())
1935 })
1936 }
1937
1938 pub fn get_open_buffer(
1939 &mut self,
1940 path: &ProjectPath,
1941 cx: &mut ModelContext<Self>,
1942 ) -> Option<Handle<Buffer>> {
1943 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1944 self.opened_buffers.values().find_map(|buffer| {
1945 let buffer = buffer.upgrade()?;
1946 let file = File::from_dyn(buffer.read(cx).file())?;
1947 if file.worktree == worktree && file.path() == &path.path {
1948 Some(buffer)
1949 } else {
1950 None
1951 }
1952 })
1953 }
1954
    /// Registers a newly-opened buffer with the project: tracks it in
    /// `opened_buffers`, replays any operations that arrived before the
    /// buffer itself, wires up event/release handlers, and introduces it to
    /// language servers and Copilot.
    fn register_buffer(
        &mut self,
        buffer: &Handle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.request_buffer_diff_recalculation(buffer, cx);
        buffer.update(cx, |buffer, _| {
            buffer.set_language_registry(self.languages.clone())
        });

        let remote_id = buffer.read(cx).remote_id();
        let is_remote = self.is_remote();
        // Hold buffers strongly while remote or shared so collaborators can
        // rely on them staying alive.
        let open_buffer = if is_remote || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.entry(remote_id) {
            hash_map::Entry::Vacant(entry) => {
                entry.insert(open_buffer);
            }
            hash_map::Entry::Occupied(mut entry) => {
                // Operations may have been buffered under this id before the
                // buffer was opened; apply them now.
                if let OpenBuffer::Operations(operations) = entry.get_mut() {
                    buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
                } else if entry.get().upgrade().is_some() {
                    if is_remote {
                        return Ok(());
                    } else {
                        // Double registration on the host is a logic error.
                        debug_panic!("buffer {} was already registered", remote_id);
                        Err(anyhow!("buffer {} was already registered", remote_id))?;
                    }
                }
                entry.insert(open_buffer);
            }
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        // Index local file-backed buffers by path and entry id for quick
        // lookup when the filesystem changes.
        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
            if file.is_local {
                self.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: file.worktree_id(cx),
                        path: file.path.clone(),
                    },
                    remote_id,
                );

                self.local_buffer_ids_by_entry_id
                    .insert(file.entry_id, remote_id);
            }
        }

        self.detect_language_for_buffer(buffer, cx);
        self.register_buffer_with_language_servers(buffer, cx);
        self.register_buffer_with_copilot(buffer, cx);
        // When the buffer is released, tell its language servers the document
        // was closed.
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp2::Url::from_file_path(file.abs_path(cx)).unwrap();
                    for server in this.language_servers_for_buffer(buffer, cx) {
                        server
                            .1
                            .notify::<lsp2::notification::DidCloseTextDocument>(
                                lsp2::DidCloseTextDocumentParams {
                                    text_document: lsp2::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        // Signal any futures waiting for a buffer to be opened.
        *self.opened_buffer.0.borrow_mut() = ();
        Ok(())
    }
2036
    /// Introduces a local, file-backed buffer to every running language
    /// server configured for its language: sends `didOpen`, replays stored
    /// diagnostics, sets completion triggers, and records the snapshot each
    /// server was initialized with.
    fn register_buffer_with_language_servers(
        &mut self,
        buffer_handle: &Handle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if let Some(file) = File::from_dyn(buffer.file()) {
            // Language servers only ever see local files.
            if !file.is_local() {
                return;
            }

            let abs_path = file.abs_path(cx);
            let uri = lsp2::Url::from_file_path(&abs_path)
                .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
            let initial_snapshot = buffer.text_snapshot();
            let language = buffer.language().cloned();
            let worktree_id = file.worktree_id(cx);

            // Replay diagnostics that servers reported for this path before
            // the buffer was opened.
            if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
                    self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
                        .log_err();
                }
            }

            if let Some(language) = language {
                for adapter in language.lsp_adapters() {
                    let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
                    // Find the running server instance for this worktree and
                    // adapter, if any.
                    let server = self
                        .language_server_ids
                        .get(&(worktree_id, adapter.name.clone()))
                        .and_then(|id| self.language_servers.get(id))
                        .and_then(|server_state| {
                            if let LanguageServerState::Running { server, .. } = server_state {
                                Some(server.clone())
                            } else {
                                None
                            }
                        });
                    let server = match server {
                        Some(server) => server,
                        None => continue,
                    };

                    server
                        .notify::<lsp2::notification::DidOpenTextDocument>(
                            lsp2::DidOpenTextDocumentParams {
                                text_document: lsp2::TextDocumentItem::new(
                                    uri.clone(),
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            },
                        )
                        .log_err();

                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or_default(),
                            cx,
                        );
                    });

                    // Version 0 corresponds to the snapshot sent in `didOpen`;
                    // subsequent edits are diffed against entries here.
                    let snapshot = LspBufferSnapshot {
                        version: 0,
                        snapshot: initial_snapshot.clone(),
                    };
                    self.buffer_snapshots
                        .entry(buffer_id)
                        .or_default()
                        .insert(server.server_id(), vec![snapshot]);
                }
            }
        }
    }
2120
    /// Detaches a buffer from the language servers that were tracking it under
    /// `old_file` (e.g. when the file is renamed, deleted, or its language is
    /// reset).
    ///
    /// Clears the buffer's per-server diagnostics, drops its stored LSP
    /// snapshots, and sends `textDocument/didClose` for the old path to every
    /// server currently associated with the buffer. Does nothing when the old
    /// file was not local.
    fn unregister_buffer_from_language_servers(
        &mut self,
        buffer: &Handle<Buffer>,
        old_file: &File,
        cx: &mut ModelContext<Self>,
    ) {
        let old_path = match old_file.as_local() {
            Some(local) => local.abs_path(cx),
            None => return,
        };

        buffer.update(cx, |buffer, cx| {
            let worktree_id = old_file.worktree_id(cx);
            let ids = &self.language_server_ids;

            // Clear diagnostics for every server registered for this buffer's
            // language in the old worktree (empty set replaces existing ones).
            let language = buffer.language().cloned();
            let adapters = language.iter().flat_map(|language| language.lsp_adapters());
            for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
                buffer.update_diagnostics(server_id, Default::default(), cx);
            }

            // Drop stored snapshots; the document is no longer open from the
            // servers' point of view.
            self.buffer_snapshots.remove(&buffer.remote_id());
            let file_url = lsp2::Url::from_file_path(old_path).unwrap();
            for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp2::notification::DidCloseTextDocument>(
                        lsp2::DidCloseTextDocumentParams {
                            text_document: lsp2::TextDocumentIdentifier::new(file_url.clone()),
                        },
                    )
                    .log_err();
            }
        });
    }
2155
2156 fn register_buffer_with_copilot(
2157 &self,
2158 buffer_handle: &Handle<Buffer>,
2159 cx: &mut ModelContext<Self>,
2160 ) {
2161 if let Some(copilot) = Copilot::global(cx) {
2162 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2163 }
2164 }
2165
2166 async fn send_buffer_ordered_messages(
2167 this: WeakHandle<Self>,
2168 rx: UnboundedReceiver<BufferOrderedMessage>,
2169 mut cx: AsyncAppContext,
2170 ) -> Result<()> {
2171 const MAX_BATCH_SIZE: usize = 128;
2172
2173 let mut operations_by_buffer_id = HashMap::default();
2174 async fn flush_operations(
2175 this: &WeakHandle<Project>,
2176 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2177 needs_resync_with_host: &mut bool,
2178 is_local: bool,
2179 cx: &mut AsyncAppContext,
2180 ) -> Result<()> {
2181 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2182 let request = this.update(cx, |this, _| {
2183 let project_id = this.remote_id()?;
2184 Some(this.client.request(proto::UpdateBuffer {
2185 buffer_id,
2186 project_id,
2187 operations,
2188 }))
2189 })?;
2190 if let Some(request) = request {
2191 if request.await.is_err() && !is_local {
2192 *needs_resync_with_host = true;
2193 break;
2194 }
2195 }
2196 }
2197 Ok(())
2198 }
2199
2200 let mut needs_resync_with_host = false;
2201 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2202
2203 while let Some(changes) = changes.next().await {
2204 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2205
2206 for change in changes {
2207 match change {
2208 BufferOrderedMessage::Operation {
2209 buffer_id,
2210 operation,
2211 } => {
2212 if needs_resync_with_host {
2213 continue;
2214 }
2215
2216 operations_by_buffer_id
2217 .entry(buffer_id)
2218 .or_insert(Vec::new())
2219 .push(operation);
2220 }
2221
2222 BufferOrderedMessage::Resync => {
2223 operations_by_buffer_id.clear();
2224 if this
2225 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2226 .await
2227 .is_ok()
2228 {
2229 needs_resync_with_host = false;
2230 }
2231 }
2232
2233 BufferOrderedMessage::LanguageServerUpdate {
2234 language_server_id,
2235 message,
2236 } => {
2237 flush_operations(
2238 &this,
2239 &mut operations_by_buffer_id,
2240 &mut needs_resync_with_host,
2241 is_local,
2242 &mut cx,
2243 )
2244 .await;
2245
2246 this.update(&mut cx, |this, _| {
2247 if let Some(project_id) = this.remote_id() {
2248 this.client
2249 .send(proto::UpdateLanguageServer {
2250 project_id,
2251 language_server_id: language_server_id.0 as u64,
2252 variant: Some(message),
2253 })
2254 .log_err();
2255 }
2256 });
2257 }
2258 }
2259 }
2260
2261 flush_operations(
2262 &this,
2263 &mut operations_by_buffer_id,
2264 &mut needs_resync_with_host,
2265 is_local,
2266 &mut cx,
2267 )
2268 .await;
2269 }
2270
2271 Ok(())
2272 }
2273
    /// Central handler for events emitted by open buffers.
    ///
    /// - Edits, reloads, and diff-base changes schedule a git diff
    ///   recalculation.
    /// - `Operation` events are forwarded (in order) toward remote
    ///   collaborators via the buffer-ordered message channel.
    /// - `Edited` events are translated into LSP `didChange` notifications,
    ///   incremental or full depending on each server's sync capability.
    /// - `Saved` events send `didSave` and, for servers lacking a disk-based
    ///   diagnostics progress token, start a debounce timer that simulates
    ///   diagnostics completion.
    /// - `FileHandleChanged` keeps the local buffer-id indices up to date.
    ///
    /// Returns `None` both on early exit and on normal completion; the
    /// `Option` return exists only to allow `?` on the lookups inside.
    fn on_buffer_event(
        &mut self,
        buffer: Handle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        if matches!(
            event,
            BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
        ) {
            self.request_buffer_diff_recalculation(&buffer, cx);
        }

        match event {
            BufferEvent::Operation(operation) => {
                self.buffer_ordered_messages_tx
                    .unbounded_send(BufferOrderedMessage::Operation {
                        buffer_id: buffer.read(cx).remote_id(),
                        operation: language2::proto::serialize_operation(operation),
                    })
                    .ok();
            }

            BufferEvent::Edited { .. } => {
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp2::Url::from_file_path(abs_path).unwrap();
                let next_snapshot = buffer.text_snapshot();

                // Collect the server handles first so we can mutably borrow
                // `self.buffer_snapshots` inside the loop.
                let language_servers: Vec<_> = self
                    .language_servers_for_buffer(buffer, cx)
                    .map(|i| i.1.clone())
                    .collect();

                for language_server in language_servers {
                    let language_server = language_server.clone();

                    // NOTE(review): the `?` here aborts handling for any
                    // remaining servers when one server has no stored
                    // snapshot — presumably that state is unreachable once a
                    // buffer is registered; confirm before relying on it.
                    let buffer_snapshots = self
                        .buffer_snapshots
                        .get_mut(&buffer.remote_id())
                        .and_then(|m| m.get_mut(&language_server.server_id()))?;
                    let previous_snapshot = buffer_snapshots.last()?;

                    // Builds per-edit change events by diffing against the
                    // last snapshot this server was sent.
                    let build_incremental_change = || {
                        buffer
                            .edits_since::<(PointUtf16, usize)>(
                                previous_snapshot.snapshot.version(),
                            )
                            .map(|edit| {
                                // LSP ranges refer to the *old* document, so
                                // the end position is reconstructed from the
                                // old edit's length.
                                let edit_start = edit.new.start.0;
                                let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                                let new_text = next_snapshot
                                    .text_for_range(edit.new.start.1..edit.new.end.1)
                                    .collect();
                                lsp2::TextDocumentContentChangeEvent {
                                    range: Some(lsp2::Range::new(
                                        point_to_lsp(edit_start),
                                        point_to_lsp(edit_end),
                                    )),
                                    range_length: None,
                                    text: new_text,
                                }
                            })
                            .collect()
                    };

                    let document_sync_kind = language_server
                        .capabilities()
                        .text_document_sync
                        .as_ref()
                        .and_then(|sync| match sync {
                            lsp2::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
                            lsp2::TextDocumentSyncCapability::Options(options) => options.change,
                        });

                    let content_changes: Vec<_> = match document_sync_kind {
                        Some(lsp2::TextDocumentSyncKind::FULL) => {
                            // Full sync: resend the entire document text.
                            vec![lsp2::TextDocumentContentChangeEvent {
                                range: None,
                                range_length: None,
                                text: next_snapshot.text(),
                            }]
                        }
                        Some(lsp2::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
                        _ => {
                            // In tests, fake servers may not advertise a sync
                            // kind; assume incremental there. In production,
                            // skip servers with unknown sync behavior.
                            #[cfg(any(test, feature = "test-support"))]
                            {
                                build_incremental_change()
                            }

                            #[cfg(not(any(test, feature = "test-support")))]
                            {
                                continue;
                            }
                        }
                    };

                    let next_version = previous_snapshot.version + 1;

                    buffer_snapshots.push(LspBufferSnapshot {
                        version: next_version,
                        snapshot: next_snapshot.clone(),
                    });

                    language_server
                        .notify::<lsp2::notification::DidChangeTextDocument>(
                            lsp2::DidChangeTextDocumentParams {
                                text_document: lsp2::VersionedTextDocumentIdentifier::new(
                                    uri.clone(),
                                    next_version,
                                ),
                                content_changes,
                            },
                        )
                        .log_err();
                }
            }

            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp2::TextDocumentIdentifier {
                    uri: lsp2::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
                    // Only include the full text when the server asked for it.
                    let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());

                    server
                        .notify::<lsp2::notification::DidSaveTextDocument>(
                            lsp2::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text,
                            },
                        )
                        .log_err();
                }

                let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
                for language_server_id in language_server_ids {
                    if let Some(LanguageServerState::Running {
                        adapter,
                        simulate_disk_based_diagnostics_completion,
                        ..
                    }) = self.language_servers.get_mut(&language_server_id)
                    {
                        // After saving a buffer using a language server that doesn't provide
                        // a disk-based progress token, kick off a timer that will reset every
                        // time the buffer is saved. If the timer eventually fires, simulate
                        // disk-based diagnostics being finished so that other pieces of UI
                        // (e.g., project diagnostics view, diagnostic status bar) can update.
                        // We don't emit an event right away because the language server might take
                        // some time to publish diagnostics.
                        if adapter.disk_based_diagnostics_progress_token.is_none() {
                            const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
                                Duration::from_secs(1);

                            let task = cx.spawn(move |this, mut cx| async move {
                                cx.executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
                                if let Some(this) = this.upgrade() {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(
                                            language_server_id,
                                            cx,
                                        );
                                        this.buffer_ordered_messages_tx
                                            .unbounded_send(
                                                BufferOrderedMessage::LanguageServerUpdate {
                                                    language_server_id,
                                                    message:proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
                                                },
                                            )
                                            .ok();
                                    });
                                }
                            });
                            // Overwriting the previous task cancels it, so the
                            // debounce restarts on every save.
                            *simulate_disk_based_diagnostics_completion = Some(task);
                        }
                    }
                }
            }
            BufferEvent::FileHandleChanged => {
                let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
                    return None;
                };

                match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
                    Some(_) => {
                        // Entry already indexed; nothing to update.
                        return None;
                    }
                    None => {
                        let remote_id = buffer.read(cx).remote_id();
                        self.local_buffer_ids_by_entry_id
                            .insert(file.entry_id, remote_id);

                        self.local_buffer_ids_by_path.insert(
                            ProjectPath {
                                worktree_id: file.worktree_id(cx),
                                path: file.path.clone(),
                            },
                            remote_id,
                        );
                    }
                }
            }
            _ => {}
        }

        None
    }
2486
2487 fn request_buffer_diff_recalculation(
2488 &mut self,
2489 buffer: &Handle<Buffer>,
2490 cx: &mut ModelContext<Self>,
2491 ) {
2492 self.buffers_needing_diff.insert(buffer.downgrade());
2493 let first_insertion = self.buffers_needing_diff.len() == 1;
2494
2495 let settings = settings2::get::<ProjectSettings>(cx);
2496 let delay = if let Some(delay) = settings.git.gutter_debounce {
2497 delay
2498 } else {
2499 if first_insertion {
2500 let this = cx.weak_handle();
2501 cx.defer(move |cx| {
2502 if let Some(this) = this.upgrade() {
2503 this.update(cx, |this, cx| {
2504 this.recalculate_buffer_diffs(cx).detach();
2505 });
2506 }
2507 });
2508 }
2509 return;
2510 };
2511
2512 const MIN_DELAY: u64 = 50;
2513 let delay = delay.max(MIN_DELAY);
2514 let duration = Duration::from_millis(delay);
2515
2516 self.git_diff_debouncer
2517 .fire_new(duration, cx, move |this, cx| {
2518 this.recalculate_buffer_diffs(cx)
2519 });
2520 }
2521
2522 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2523 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2524 cx.spawn(move |this, mut cx| async move {
2525 let tasks: Vec<_> = buffers
2526 .iter()
2527 .filter_map(|buffer| {
2528 let buffer = buffer.upgrade()?;
2529 buffer
2530 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2531 .ok()
2532 .flatten()
2533 })
2534 .collect();
2535
2536 futures::future::join_all(tasks).await;
2537
2538 this.update(&mut cx, |this, cx| {
2539 if !this.buffers_needing_diff.is_empty() {
2540 this.recalculate_buffer_diffs(cx).detach();
2541 } else {
2542 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2543 for buffer in buffers {
2544 if let Some(buffer) = buffer.upgrade() {
2545 buffer.update(cx, |_, cx| cx.notify());
2546 }
2547 }
2548 }
2549 })
2550 .ok();
2551 })
2552 }
2553
2554 fn language_servers_for_worktree(
2555 &self,
2556 worktree_id: WorktreeId,
2557 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2558 self.language_server_ids
2559 .iter()
2560 .filter_map(move |((language_server_worktree_id, _), id)| {
2561 if *language_server_worktree_id == worktree_id {
2562 if let Some(LanguageServerState::Running {
2563 adapter,
2564 language,
2565 server,
2566 ..
2567 }) = self.language_servers.get(id)
2568 {
2569 return Some((adapter, language, server));
2570 }
2571 }
2572 None
2573 })
2574 }
2575
    /// Spawns a background task that keeps open buffers' languages in sync
    /// with the language registry.
    ///
    /// On every registry change notification: if the registry was reloaded,
    /// all open buffers are detached from their language servers and have
    /// their language cleared; then any plain-text (or language-less) buffers
    /// get language re-detection, and buffers containing unknown injections
    /// are re-parsed. The task ends when the registry subscription closes or
    /// the project is dropped.
    fn maintain_buffer_languages(
        languages: Arc<LanguageRegistry>,
        cx: &mut ModelContext<Project>,
    ) -> Task<()> {
        let mut subscription = languages.subscribe();
        let mut prev_reload_count = languages.reload_count();
        cx.spawn(move |project, mut cx| async move {
            while let Some(()) = subscription.next().await {
                if let Some(project) = project.upgrade() {
                    // If the language registry has been reloaded, then remove and
                    // re-assign the languages on all open buffers.
                    let reload_count = languages.reload_count();
                    if reload_count > prev_reload_count {
                        prev_reload_count = reload_count;
                        project.update(&mut cx, |this, cx| {
                            // Collect first: unregistering mutates `this`
                            // while iterating would otherwise borrow it twice.
                            let buffers = this
                                .opened_buffers
                                .values()
                                .filter_map(|b| b.upgrade())
                                .collect::<Vec<_>>();
                            for buffer in buffers {
                                if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned() {
                                    this.unregister_buffer_from_language_servers(&buffer, &f, cx);
                                    buffer.update(cx, |buffer, cx| buffer.set_language(None, cx));
                                }
                            }
                        });
                    }

                    project.update(&mut cx, |project, cx| {
                        // Partition open buffers into those needing language
                        // detection and those needing a re-parse.
                        let mut plain_text_buffers = Vec::new();
                        let mut buffers_with_unknown_injections = Vec::new();
                        for buffer in project.opened_buffers.values() {
                            if let Some(handle) = buffer.upgrade() {
                                let buffer = &handle.read(cx);
                                if buffer.language().is_none()
                                    || buffer.language() == Some(&*language2::PLAIN_TEXT)
                                {
                                    plain_text_buffers.push(handle);
                                } else if buffer.contains_unknown_injections() {
                                    buffers_with_unknown_injections.push(handle);
                                }
                            }
                        }

                        for buffer in plain_text_buffers {
                            project.detect_language_for_buffer(&buffer, cx);
                            project.register_buffer_with_language_servers(&buffer, cx);
                        }

                        for buffer in buffers_with_unknown_injections {
                            buffer.update(cx, |buffer, cx| buffer.reparse(cx));
                        }
                    });
                }
            }
        })
    }
2634
    /// Spawns a task that pushes workspace configuration to every running
    /// language server whenever the global settings change.
    ///
    /// A watch channel bridges the synchronous settings observer to the async
    /// task; each tick re-reads every adapter's workspace configuration and
    /// sends `workspace/didChangeConfiguration` to its server.
    fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
        let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
        // Drain the initial value so the loop only reacts to real changes.
        let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);

        let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
            *settings_changed_tx.borrow_mut() = ();
        });

        cx.spawn(move |this, mut cx| async move {
            while let Some(_) = settings_changed_rx.next().await {
                // Snapshot the running servers; starting ones will receive
                // their configuration during initialization instead.
                let servers: Vec<_> = this.update(&mut cx, |this, _| {
                    this.language_servers
                        .values()
                        .filter_map(|state| match state {
                            LanguageServerState::Starting(_) => None,
                            LanguageServerState::Running {
                                adapter, server, ..
                            } => Some((adapter.clone(), server.clone())),
                        })
                        .collect()
                })?;

                for (adapter, server) in servers {
                    let workspace_config =
                        cx.update(|cx| adapter.workspace_configuration(cx))?.await;
                    server
                        .notify::<lsp2::notification::DidChangeConfiguration>(
                            lsp2::DidChangeConfigurationParams {
                                settings: workspace_config.clone(),
                            },
                        )
                        .ok();
                }
            }

            // Keep the observer alive for the task's whole lifetime; dropping
            // it earlier would stop settings notifications.
            drop(settings_observation);
            anyhow::Ok(())
        })
    }
2674
2675 fn detect_language_for_buffer(
2676 &mut self,
2677 buffer_handle: &Handle<Buffer>,
2678 cx: &mut ModelContext<Self>,
2679 ) -> Option<()> {
2680 // If the buffer has a language, set it and start the language server if we haven't already.
2681 let buffer = buffer_handle.read(cx);
2682 let full_path = buffer.file()?.full_path(cx);
2683 let content = buffer.as_rope();
2684 let new_language = self
2685 .languages
2686 .language_for_file(&full_path, Some(content))
2687 .now_or_never()?
2688 .ok()?;
2689 self.set_language_for_buffer(buffer_handle, new_language, cx);
2690 None
2691 }
2692
    /// Assigns `new_language` to `buffer` (if different from its current
    /// language), installs default formatters (prettier) for it in the
    /// background, and starts the language servers for the buffer's local
    /// worktree.
    pub fn set_language_for_buffer(
        &mut self,
        buffer: &Handle<Buffer>,
        new_language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            // Pointer comparison: languages are interned behind `Arc`s, so
            // identity is what "same language" means here.
            if buffer.language().map_or(true, |old_language| {
                !Arc::ptr_eq(old_language, &new_language)
            }) {
                buffer.set_language(Some(new_language.clone()), cx);
            }
        });

        let buffer_file = buffer.read(cx).file().cloned();
        let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
        let buffer_file = File::from_dyn(buffer_file.as_ref());
        let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));

        // Kick off formatter installation, then (once installed) eagerly
        // create a prettier instance for this buffer in the background.
        let task_buffer = buffer.clone();
        let prettier_installation_task =
            self.install_default_formatters(worktree, &new_language, &settings, cx);
        cx.spawn(move |project, mut cx| async move {
            prettier_installation_task.await?;
            let _ = project
                .update(&mut cx, |project, cx| {
                    project.prettier_instance_for_buffer(&task_buffer, cx)
                })?
                .await;
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        if let Some(file) = buffer_file {
            let worktree = file.worktree.clone();
            // Language servers only run against local worktrees.
            if let Some(tree) = worktree.read(cx).as_local() {
                self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
            }
        }
    }
2733
2734 fn start_language_servers(
2735 &mut self,
2736 worktree: &Handle<Worktree>,
2737 worktree_path: Arc<Path>,
2738 language: Arc<Language>,
2739 cx: &mut ModelContext<Self>,
2740 ) {
2741 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2742 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2743 if !settings.enable_language_server {
2744 return;
2745 }
2746
2747 let worktree_id = worktree.read(cx).id();
2748 for adapter in language.lsp_adapters() {
2749 self.start_language_server(
2750 worktree_id,
2751 worktree_path.clone(),
2752 adapter.clone(),
2753 language.clone(),
2754 cx,
2755 );
2756 }
2757 }
2758
2759 fn start_language_server(
2760 &mut self,
2761 worktree_id: WorktreeId,
2762 worktree_path: Arc<Path>,
2763 adapter: Arc<CachedLspAdapter>,
2764 language: Arc<Language>,
2765 cx: &mut ModelContext<Self>,
2766 ) {
2767 let key = (worktree_id, adapter.name.clone());
2768 if self.language_server_ids.contains_key(&key) {
2769 return;
2770 }
2771
2772 let pending_server = match self.languages.create_pending_language_server(
2773 language.clone(),
2774 adapter.clone(),
2775 worktree_path,
2776 ProjectLspAdapterDelegate::new(self, cx),
2777 cx,
2778 ) {
2779 Some(pending_server) => pending_server,
2780 None => return,
2781 };
2782
2783 let project_settings = settings2::get::<ProjectSettings>(cx);
2784 let lsp = project_settings.lsp.get(&adapter.name.0);
2785 let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
2786
2787 let mut initialization_options = adapter.initialization_options.clone();
2788 match (&mut initialization_options, override_options) {
2789 (Some(initialization_options), Some(override_options)) => {
2790 merge_json_value_into(override_options, initialization_options);
2791 }
2792 (None, override_options) => initialization_options = override_options,
2793 _ => {}
2794 }
2795
2796 let server_id = pending_server.server_id;
2797 let container_dir = pending_server.container_dir.clone();
2798 let state = LanguageServerState::Starting({
2799 let adapter = adapter.clone();
2800 let server_name = adapter.name.0.clone();
2801 let language = language.clone();
2802 let key = key.clone();
2803
2804 cx.spawn(move |this, mut cx| async move {
2805 let result = Self::setup_and_insert_language_server(
2806 this.clone(),
2807 initialization_options,
2808 pending_server,
2809 adapter.clone(),
2810 language.clone(),
2811 server_id,
2812 key,
2813 &mut cx,
2814 )
2815 .await;
2816
2817 match result {
2818 Ok(server) => server,
2819
2820 Err(err) => {
2821 log::error!("failed to start language server {:?}: {}", server_name, err);
2822
2823 if let Some(this) = this.upgrade() {
2824 if let Some(container_dir) = container_dir {
2825 let installation_test_binary = adapter
2826 .installation_test_binary(container_dir.to_path_buf())
2827 .await;
2828
2829 this.update(&mut cx, |_, cx| {
2830 Self::check_errored_server(
2831 language,
2832 adapter,
2833 server_id,
2834 installation_test_binary,
2835 cx,
2836 )
2837 });
2838 }
2839 }
2840
2841 None
2842 }
2843 }
2844 })
2845 });
2846
2847 self.language_servers.insert(server_id, state);
2848 self.language_server_ids.insert(key, server_id);
2849 }
2850
    /// Tears down and reinstalls a language server: shuts down the running
    /// instance (if any), deletes its server container, and restarts it for
    /// every live worktree in this project.
    ///
    /// Returns `Some(task)` driving the reinstall; the current implementation
    /// never returns `None`.
    fn reinstall_language_server(
        &mut self,
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<()>> {
        log::info!("beginning to reinstall server");

        // Removing the state drops our reference; `Running` servers are kept
        // so they can be shut down gracefully below.
        let existing_server = match self.language_servers.remove(&server_id) {
            Some(LanguageServerState::Running { server, .. }) => Some(server),
            _ => None,
        };

        // Forget the server id for every worktree so `start_language_server`
        // won't treat it as already running.
        for worktree in &self.worktrees {
            if let Some(worktree) = worktree.upgrade() {
                let key = (worktree.read(cx).id(), adapter.name.clone());
                self.language_server_ids.remove(&key);
            }
        }

        Some(cx.spawn(move |this, mut cx| async move {
            if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
                log::info!("shutting down existing server");
                task.await;
            }

            // TODO: This is race-safe with regards to preventing new instances from
            // starting while deleting, but existing instances in other projects are going
            // to be very confused and messed up
            let Some(task) = this
                .update(&mut cx, |this, cx| {
                    this.languages.delete_server_container(adapter.clone(), cx)
                })
                .log_err()
            else {
                return;
            };
            task.await;

            this.update(&mut cx, |this, mut cx| {
                let worktrees = this.worktrees.clone();
                for worktree in worktrees {
                    let worktree = match worktree.upgrade() {
                        Some(worktree) => worktree.read(cx),
                        None => continue,
                    };
                    let worktree_id = worktree.id();
                    let root_path = worktree.abs_path();

                    this.start_language_server(
                        worktree_id,
                        root_path,
                        adapter.clone(),
                        language.clone(),
                        &mut cx,
                    );
                }
            })
            .ok();
        }))
    }
2913
2914 async fn setup_and_insert_language_server(
2915 this: WeakHandle<Self>,
2916 initialization_options: Option<serde_json::Value>,
2917 pending_server: PendingLanguageServer,
2918 adapter: Arc<CachedLspAdapter>,
2919 language: Arc<Language>,
2920 server_id: LanguageServerId,
2921 key: (WorktreeId, LanguageServerName),
2922 cx: &mut AsyncAppContext,
2923 ) -> Result<Option<Arc<LanguageServer>>> {
2924 let setup = Self::setup_pending_language_server(
2925 this.clone(),
2926 initialization_options,
2927 pending_server,
2928 adapter.clone(),
2929 server_id,
2930 cx,
2931 );
2932
2933 let language_server = match setup.await? {
2934 Some(language_server) => language_server,
2935 None => return Ok(None),
2936 };
2937 let this = match this.upgrade() {
2938 Some(this) => this,
2939 None => return Err(anyhow!("failed to upgrade project handle")),
2940 };
2941
2942 this.update(cx, |this, cx| {
2943 this.insert_newly_running_language_server(
2944 language,
2945 adapter,
2946 language_server.clone(),
2947 server_id,
2948 key,
2949 cx,
2950 )
2951 })?;
2952
2953 Ok(Some(language_server))
2954 }
2955
    /// Awaits a pending language server's startup, wires up all of its
    /// notification/request handlers, runs the LSP `initialize` handshake,
    /// and pushes the initial workspace configuration.
    ///
    /// Returns `Ok(None)` when the pending task yielded no server. The
    /// handler registration must happen *before* `initialize` so nothing the
    /// server sends during startup is missed.
    async fn setup_pending_language_server(
        this: WeakHandle<Self>,
        initialization_options: Option<serde_json::Value>,
        pending_server: PendingLanguageServer,
        adapter: Arc<CachedLspAdapter>,
        server_id: LanguageServerId,
        cx: &mut AsyncAppContext,
    ) -> Result<Option<Arc<LanguageServer>>> {
        let workspace_config = cx.update(|cx| adapter.workspace_configuration(cx))?.await;
        let language_server = match pending_server.task.await? {
            Some(server) => server,
            None => return Ok(None),
        };

        // Route published diagnostics into the project, letting the adapter
        // preprocess them first.
        language_server
            .on_notification::<lsp2::notification::PublishDiagnostics, _>({
                let adapter = adapter.clone();
                let this = this.clone();
                move |mut params, mut cx| {
                    let adapter = adapter.clone();
                    adapter.process_diagnostics(&mut params);
                    if let Some(this) = this.upgrade() {
                        this.update(&mut cx, |this, cx| {
                            this.update_diagnostics(
                                server_id,
                                params,
                                &adapter.disk_based_diagnostic_sources,
                                cx,
                            )
                            .log_err();
                        });
                    }
                }
            })
            .detach();

        // Answer `workspace/configuration` requests from the adapter's
        // current workspace configuration, honoring per-item sections.
        language_server
            .on_request::<lsp2::request::WorkspaceConfiguration, _, _>({
                let adapter = adapter.clone();
                move |params, cx| {
                    let adapter = adapter.clone();
                    async move {
                        let workspace_config =
                            cx.update(|cx| adapter.workspace_configuration(cx))?.await;
                        Ok(params
                            .items
                            .into_iter()
                            .map(|item| {
                                if let Some(section) = &item.section {
                                    workspace_config
                                        .get(section)
                                        .cloned()
                                        .unwrap_or(serde_json::Value::Null)
                                } else {
                                    workspace_config.clone()
                                }
                            })
                            .collect())
                    }
                }
            })
            .detach();

        // Even though we don't have handling for these requests, respond to them to
        // avoid stalling any language server like `gopls` which waits for a response
        // to these requests when initializing.
        language_server
            .on_request::<lsp2::request::WorkDoneProgressCreate, _, _>({
                let this = this.clone();
                move |params, mut cx| {
                    let this = this.clone();
                    async move {
                        this.update(&mut cx, |this, _| {
                            // Remember string tokens so progress notifications
                            // can be attributed to this server later.
                            if let Some(status) = this.language_server_statuses.get_mut(&server_id)
                            {
                                if let lsp2::NumberOrString::String(token) = params.token {
                                    status.progress_tokens.insert(token);
                                }
                            }
                        })?;

                        Ok(())
                    }
                }
            })
            .detach();
        language_server
            .on_request::<lsp2::request::RegisterCapability, _, _>({
                let this = this.clone();
                move |params, mut cx| {
                    let this = this.clone();
                    async move {
                        // Only dynamic file-watch registration is supported;
                        // other capabilities are acknowledged but ignored.
                        for reg in params.registrations {
                            if reg.method == "workspace/didChangeWatchedFiles" {
                                if let Some(options) = reg.register_options {
                                    let options = serde_json::from_value(options)?;
                                    this.update(&mut cx, |this, cx| {
                                        this.on_lsp_did_change_watched_files(
                                            server_id, options, cx,
                                        );
                                    })?;
                                }
                            }
                        }
                        Ok(())
                    }
                }
            })
            .detach();

        language_server
            .on_request::<lsp2::request::ApplyWorkspaceEdit, _, _>({
                let adapter = adapter.clone();
                let this = this.clone();
                move |params, cx| {
                    Self::on_lsp_workspace_edit(
                        this.clone(),
                        params,
                        server_id,
                        adapter.clone(),
                        cx,
                    )
                }
            })
            .detach();

        // Refresh inlay hints locally and forward the request to remote
        // collaborators when the project is shared.
        language_server
            .on_request::<lsp2::request::InlayHintRefreshRequest, _, _>({
                let this = this.clone();
                move |(), mut cx| {
                    let this = this.clone();
                    async move {
                        this.update(&mut cx, |project, cx| {
                            cx.emit(Event::RefreshInlayHints);
                            project.remote_id().map(|project_id| {
                                project.client.send(proto::RefreshInlayHints { project_id })
                            })
                        })?
                        .transpose()?;
                        Ok(())
                    }
                }
            })
            .detach();

        let disk_based_diagnostics_progress_token =
            adapter.disk_based_diagnostics_progress_token.clone();

        language_server
            .on_notification::<lsp2::notification::Progress, _>(move |params, mut cx| {
                if let Some(this) = this.upgrade() {
                    this.update(&mut cx, |this, cx| {
                        this.on_lsp_progress(
                            params,
                            server_id,
                            disk_based_diagnostics_progress_token.clone(),
                            cx,
                        );
                    });
                }
            })
            .detach();

        // Handshake only after all handlers above are attached.
        let language_server = language_server.initialize(initialization_options).await?;

        language_server
            .notify::<lsp2::notification::DidChangeConfiguration>(
                lsp2::DidChangeConfigurationParams {
                    settings: workspace_config,
                },
            )
            .ok();

        Ok(Some(language_server))
    }
3131
    /// Records a freshly initialized language server as `Running`, announces
    /// it to collaborators, and opens every already-open matching buffer with
    /// it (`textDocument/didOpen`).
    ///
    /// If the id registered under `key` no longer matches `server_id` (the
    /// server was stopped or replaced while starting), the server is dropped,
    /// which kills its process.
    fn insert_newly_running_language_server(
        &mut self,
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        language_server: Arc<LanguageServer>,
        server_id: LanguageServerId,
        key: (WorktreeId, LanguageServerName),
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // If the language server for this key doesn't match the server id, don't store the
        // server. Which will cause it to be dropped, killing the process
        if self
            .language_server_ids
            .get(&key)
            .map(|id| id != &server_id)
            .unwrap_or(false)
        {
            return Ok(());
        }

        // Update language_servers collection with Running variant of LanguageServerState
        // indicating that the server is up and running and ready
        self.language_servers.insert(
            server_id,
            LanguageServerState::Running {
                adapter: adapter.clone(),
                language: language.clone(),
                watched_paths: Default::default(),
                server: language_server.clone(),
                simulate_disk_based_diagnostics_completion: None,
            },
        );

        self.language_server_statuses.insert(
            server_id,
            LanguageServerStatus {
                name: language_server.name().to_string(),
                pending_work: Default::default(),
                has_pending_diagnostic_updates: false,
                progress_tokens: Default::default(),
            },
        );

        cx.emit(Event::LanguageServerAdded(server_id));

        // Let remote collaborators know this server exists.
        if let Some(project_id) = self.remote_id() {
            self.client.send(proto::StartLanguageServer {
                project_id,
                server: Some(proto::LanguageServer {
                    id: server_id.0 as u64,
                    name: language_server.name().to_string(),
                }),
            })?;
        }

        // Tell the language server about every open buffer in the worktree that matches the language.
        for buffer in self.opened_buffers.values() {
            if let Some(buffer_handle) = buffer.upgrade() {
                let buffer = buffer_handle.read(cx);
                let file = match File::from_dyn(buffer.file()) {
                    Some(file) => file,
                    None => continue,
                };
                let language = match buffer.language() {
                    Some(language) => language,
                    None => continue,
                };

                // Skip buffers outside this server's worktree or language.
                if file.worktree.read(cx).id() != key.0
                    || !language.lsp_adapters().iter().any(|a| a.name == key.1)
                {
                    continue;
                }

                let file = match file.as_local() {
                    Some(file) => file,
                    None => continue,
                };

                // Reuse an existing snapshot history for this buffer/server
                // pair, or start one at version 0 with the current text.
                let versions = self
                    .buffer_snapshots
                    .entry(buffer.remote_id())
                    .or_default()
                    .entry(server_id)
                    .or_insert_with(|| {
                        vec![LspBufferSnapshot {
                            version: 0,
                            snapshot: buffer.text_snapshot(),
                        }]
                    });

                let snapshot = versions.last().unwrap();
                let version = snapshot.version;
                let initial_snapshot = &snapshot.snapshot;
                let uri = lsp2::Url::from_file_path(file.abs_path(cx)).unwrap();
                language_server.notify::<lsp2::notification::DidOpenTextDocument>(
                    lsp2::DidOpenTextDocumentParams {
                        text_document: lsp2::TextDocumentItem::new(
                            uri,
                            adapter
                                .language_ids
                                .get(language.name().as_ref())
                                .cloned()
                                .unwrap_or_default(),
                            version,
                            initial_snapshot.text(),
                        ),
                    },
                )?;

                buffer_handle.update(cx, |buffer, cx| {
                    buffer.set_completion_triggers(
                        language_server
                            .capabilities()
                            .completion_provider
                            .as_ref()
                            .and_then(|provider| provider.trigger_characters.clone())
                            .unwrap_or_default(),
                        cx,
                    )
                });
            }
        }

        cx.notify();
        Ok(())
    }
3259
    // Stops the language server registered for the given worktree and adapter.
    //
    // Returns a task yielding the root path of the stopped server (if one was
    // actually running) and the ids of all worktrees which no longer have a
    // language server as a result of the stop.
    fn stop_language_server(
        &mut self,
        worktree_id: WorktreeId,
        adapter_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
        let key = (worktree_id, adapter_name);
        if let Some(server_id) = self.language_server_ids.remove(&key) {
            log::info!("stopping language server {}", key.1 .0);

            // Remove other entries for this language server as well, since a
            // single server instance can be shared across multiple worktrees.
            let mut orphaned_worktrees = vec![worktree_id];
            let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
            for other_key in other_keys {
                if self.language_server_ids.get(&other_key) == Some(&server_id) {
                    self.language_server_ids.remove(&other_key);
                    orphaned_worktrees.push(other_key.0);
                }
            }

            // Clear this server's diagnostics from every open buffer...
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade() {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(server_id, Default::default(), cx);
                    });
                }
            }
            // ...and from every local worktree's diagnostic summaries.
            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade() {
                    worktree.update(cx, |worktree, cx| {
                        if let Some(worktree) = worktree.as_local_mut() {
                            worktree.clear_diagnostics_for_language_server(server_id, cx);
                        }
                    });
                }
            }

            self.language_server_statuses.remove(&server_id);
            cx.notify();

            let server_state = self.language_servers.remove(&server_id);
            cx.emit(Event::LanguageServerRemoved(server_id));
            cx.spawn(move |this, mut cx| async move {
                let mut root_path = None;

                // A server that is still starting must finish starting before
                // it can be shut down gracefully.
                let server = match server_state {
                    Some(LanguageServerState::Starting(task)) => task.await,
                    Some(LanguageServerState::Running { server, .. }) => Some(server),
                    None => None,
                };

                if let Some(server) = server {
                    root_path = Some(server.root_path().clone());
                    if let Some(shutdown) = server.shutdown() {
                        shutdown.await;
                    }
                }

                // NOTE(review): the status entry was already removed above,
                // synchronously; this second removal looks redundant — confirm
                // whether it guards against a re-insertion race during shutdown.
                if let Some(this) = this.upgrade() {
                    this.update(&mut cx, |this, cx| {
                        this.language_server_statuses.remove(&server_id);
                        cx.notify();
                    });
                }

                (root_path, orphaned_worktrees)
            })
        } else {
            Task::ready((None, Vec::new()))
        }
    }
3333
3334 pub fn restart_language_servers_for_buffers(
3335 &mut self,
3336 buffers: impl IntoIterator<Item = Handle<Buffer>>,
3337 cx: &mut ModelContext<Self>,
3338 ) -> Option<()> {
3339 let language_server_lookup_info: HashSet<(Handle<Worktree>, Arc<Language>)> = buffers
3340 .into_iter()
3341 .filter_map(|buffer| {
3342 let buffer = buffer.read(cx);
3343 let file = File::from_dyn(buffer.file())?;
3344 let full_path = file.full_path(cx);
3345 let language = self
3346 .languages
3347 .language_for_file(&full_path, Some(buffer.as_rope()))
3348 .now_or_never()?
3349 .ok()?;
3350 Some((file.worktree.clone(), language))
3351 })
3352 .collect();
3353 for (worktree, language) in language_server_lookup_info {
3354 self.restart_language_servers(worktree, language, cx);
3355 }
3356
3357 None
3358 }
3359
    // TODO This will break in the case where the adapter's root paths and worktrees are not equal
    //
    // Stops every language server for the given worktree/language pair, then
    // restarts them once all of the stops have completed, re-associating any
    // worktrees that were orphaned by the stop with the new server ids.
    fn restart_language_servers(
        &mut self,
        worktree: Handle<Worktree>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) {
        let worktree_id = worktree.read(cx).id();
        // Used if the stopped server's original root path can't be recovered.
        let fallback_path = worktree.read(cx).abs_path();

        let mut stops = Vec::new();
        for adapter in language.lsp_adapters() {
            stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
        }

        if stops.is_empty() {
            return;
        }
        let mut stops = stops.into_iter();

        cx.spawn(move |this, mut cx| async move {
            // The first stop supplies the root path; the rest only contribute
            // orphaned worktrees.
            let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
            for stop in stops {
                let (_, worktrees) = stop.await;
                orphaned_worktrees.extend_from_slice(&worktrees);
            }

            let this = match this.upgrade() {
                Some(this) => this,
                None => return,
            };

            this.update(&mut cx, |this, cx| {
                // Attempt to restart using original server path. Fallback to passed in
                // path if we could not retrieve the root path
                let root_path = original_root_path
                    .map(|path_buf| Arc::from(path_buf.as_path()))
                    .unwrap_or(fallback_path);

                this.start_language_servers(&worktree, root_path, language.clone(), cx);

                // Lookup new server ids and set them for each of the orphaned worktrees
                for adapter in language.lsp_adapters() {
                    if let Some(new_server_id) = this
                        .language_server_ids
                        .get(&(worktree_id, adapter.name.clone()))
                        .cloned()
                    {
                        for &orphaned_worktree in &orphaned_worktrees {
                            this.language_server_ids
                                .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
                        }
                    }
                }
            });
        })
        .detach();
    }
3418
3419 fn check_errored_server(
3420 language: Arc<Language>,
3421 adapter: Arc<CachedLspAdapter>,
3422 server_id: LanguageServerId,
3423 installation_test_binary: Option<LanguageServerBinary>,
3424 cx: &mut ModelContext<Self>,
3425 ) {
3426 if !adapter.can_be_reinstalled() {
3427 log::info!(
3428 "Validation check requested for {:?} but it cannot be reinstalled",
3429 adapter.name.0
3430 );
3431 return;
3432 }
3433
3434 cx.spawn(move |this, mut cx| async move {
3435 log::info!("About to spawn test binary");
3436
3437 // A lack of test binary counts as a failure
3438 let process = installation_test_binary.and_then(|binary| {
3439 smol::process::Command::new(&binary.path)
3440 .current_dir(&binary.path)
3441 .args(binary.arguments)
3442 .stdin(Stdio::piped())
3443 .stdout(Stdio::piped())
3444 .stderr(Stdio::inherit())
3445 .kill_on_drop(true)
3446 .spawn()
3447 .ok()
3448 });
3449
3450 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3451 let mut timeout = cx.executor().timer(PROCESS_TIMEOUT).fuse();
3452
3453 let mut errored = false;
3454 if let Some(mut process) = process {
3455 futures::select! {
3456 status = process.status().fuse() => match status {
3457 Ok(status) => errored = !status.success(),
3458 Err(_) => errored = true,
3459 },
3460
3461 _ = timeout => {
3462 log::info!("test binary time-ed out, this counts as a success");
3463 _ = process.kill();
3464 }
3465 }
3466 } else {
3467 log::warn!("test binary failed to launch");
3468 errored = true;
3469 }
3470
3471 if errored {
3472 log::warn!("test binary check failed");
3473 let task = this
3474 .update(&mut cx, move |this, mut cx| {
3475 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3476 })
3477 .ok()
3478 .flatten();
3479
3480 if let Some(task) = task {
3481 task.await;
3482 }
3483 }
3484 })
3485 .detach();
3486 }
3487
    /// Handles a `$/progress` notification from a language server.
    ///
    /// Work-done progress is forwarded in two directions: it updates this
    /// project's local status for the server, and it is relayed to remote
    /// collaborators as `UpdateLanguageServer` messages. Progress whose token
    /// matches `disk_based_diagnostics_progress_token` is treated specially,
    /// driving the disk-based-diagnostics started/finished lifecycle instead
    /// of ordinary work-start/progress/end bookkeeping.
    fn on_lsp_progress(
        &mut self,
        progress: lsp2::ProgressParams,
        language_server_id: LanguageServerId,
        disk_based_diagnostics_progress_token: Option<String>,
        cx: &mut ModelContext<Self>,
    ) {
        // Only string tokens are supported; numeric tokens are ignored.
        let token = match progress.token {
            lsp2::NumberOrString::String(token) => token,
            lsp2::NumberOrString::Number(token) => {
                log::info!("skipping numeric progress token {}", token);
                return;
            }
        };
        // `ProgressParamsValue` currently has only the `WorkDone` variant, so
        // this destructuring is irrefutable.
        let lsp2::ProgressParamsValue::WorkDone(progress) = progress.value;
        let language_server_status =
            if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
                status
            } else {
                return;
            };

        // Ignore progress for tokens the server never registered via
        // `window/workDoneProgress/create`.
        if !language_server_status.progress_tokens.contains(&token) {
            return;
        }

        // Disk-based diagnostics tokens are matched by prefix.
        let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
            .as_ref()
            .map_or(false, |disk_based_token| {
                token.starts_with(disk_based_token)
            });

        match progress {
            lsp2::WorkDoneProgress::Begin(report) => {
                if is_disk_based_diagnostics_progress {
                    language_server_status.has_pending_diagnostic_updates = true;
                    self.disk_based_diagnostics_started(language_server_id, cx);
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
                        })
                        .ok();
                } else {
                    self.on_lsp_work_start(
                        language_server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::WorkStart(
                                proto::LspWorkStart {
                                    token,
                                    message: report.message,
                                    percentage: report.percentage.map(|p| p as u32),
                                },
                            ),
                        })
                        .ok();
                }
            }
            lsp2::WorkDoneProgress::Report(report) => {
                // Intermediate reports for disk-based diagnostics are not
                // relayed; only begin/end matter for that lifecycle.
                if !is_disk_based_diagnostics_progress {
                    self.on_lsp_work_progress(
                        language_server_id,
                        token.clone(),
                        LanguageServerProgress {
                            message: report.message.clone(),
                            percentage: report.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::WorkProgress(
                                proto::LspWorkProgress {
                                    token,
                                    message: report.message,
                                    percentage: report.percentage.map(|p| p as u32),
                                },
                            ),
                        })
                        .ok();
                }
            }
            lsp2::WorkDoneProgress::End(_) => {
                // The token's registration is consumed once the work ends.
                language_server_status.progress_tokens.remove(&token);

                if is_disk_based_diagnostics_progress {
                    language_server_status.has_pending_diagnostic_updates = false;
                    self.disk_based_diagnostics_finished(language_server_id, cx);
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message:
                                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                    Default::default(),
                                ),
                        })
                        .ok();
                } else {
                    self.on_lsp_work_end(language_server_id, token.clone(), cx);
                    self.buffer_ordered_messages_tx
                        .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
                            language_server_id,
                            message: proto::update_language_server::Variant::WorkEnd(
                                proto::LspWorkEnd { token },
                            ),
                        })
                        .ok();
                }
            }
        }
    }
3611
3612 fn on_lsp_work_start(
3613 &mut self,
3614 language_server_id: LanguageServerId,
3615 token: String,
3616 progress: LanguageServerProgress,
3617 cx: &mut ModelContext<Self>,
3618 ) {
3619 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3620 status.pending_work.insert(token, progress);
3621 cx.notify();
3622 }
3623 }
3624
3625 fn on_lsp_work_progress(
3626 &mut self,
3627 language_server_id: LanguageServerId,
3628 token: String,
3629 progress: LanguageServerProgress,
3630 cx: &mut ModelContext<Self>,
3631 ) {
3632 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3633 let entry = status
3634 .pending_work
3635 .entry(token)
3636 .or_insert(LanguageServerProgress {
3637 message: Default::default(),
3638 percentage: Default::default(),
3639 last_update_at: progress.last_update_at,
3640 });
3641 if progress.message.is_some() {
3642 entry.message = progress.message;
3643 }
3644 if progress.percentage.is_some() {
3645 entry.percentage = progress.percentage;
3646 }
3647 entry.last_update_at = progress.last_update_at;
3648 cx.notify();
3649 }
3650 }
3651
3652 fn on_lsp_work_end(
3653 &mut self,
3654 language_server_id: LanguageServerId,
3655 token: String,
3656 cx: &mut ModelContext<Self>,
3657 ) {
3658 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3659 cx.emit(Event::RefreshInlayHints);
3660 status.pending_work.remove(&token);
3661 cx.notify();
3662 }
3663 }
3664
    /// Handles a `workspace/didChangeWatchedFiles` registration from a
    /// language server.
    ///
    /// Each watcher's glob pattern is resolved against every worktree; the
    /// first worktree that contains the pattern claims it. Matching globs are
    /// compiled into per-worktree `GlobSet`s which replace the server's
    /// previous watched-paths configuration. Servers that aren't in the
    /// `Running` state are ignored.
    fn on_lsp_did_change_watched_files(
        &mut self,
        language_server_id: LanguageServerId,
        params: DidChangeWatchedFilesRegistrationOptions,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(LanguageServerState::Running { watched_paths, .. }) =
            self.language_servers.get_mut(&language_server_id)
        {
            let mut builders = HashMap::default();
            for watcher in params.watchers {
                for worktree in &self.worktrees {
                    if let Some(worktree) = worktree.upgrade() {
                        let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
                            if let Some(abs_path) = tree.abs_path().to_str() {
                                // Convert the watcher's glob into a pattern
                                // relative to this worktree's root, if it
                                // lives inside the worktree at all.
                                let relative_glob_pattern = match &watcher.glob_pattern {
                                    lsp2::GlobPattern::String(s) => s
                                        .strip_prefix(abs_path)
                                        .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
                                    lsp2::GlobPattern::Relative(rp) => {
                                        let base_uri = match &rp.base_uri {
                                            lsp2::OneOf::Left(workspace_folder) => {
                                                &workspace_folder.uri
                                            }
                                            lsp2::OneOf::Right(base_uri) => base_uri,
                                        };
                                        // A relative pattern only applies when
                                        // its base URI is exactly this
                                        // worktree's root.
                                        base_uri.to_file_path().ok().and_then(|file_path| {
                                            (file_path.to_str() == Some(abs_path))
                                                .then_some(rp.pattern.as_str())
                                        })
                                    }
                                };
                                if let Some(relative_glob_pattern) = relative_glob_pattern {
                                    // Make sure the literal (non-wildcard)
                                    // prefix of the glob is actually scanned,
                                    // so changes there are observed.
                                    let literal_prefix =
                                        glob_literal_prefix(&relative_glob_pattern);
                                    tree.as_local_mut()
                                        .unwrap()
                                        .add_path_prefix_to_scan(Path::new(literal_prefix).into());
                                    if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
                                        builders
                                            .entry(tree.id())
                                            .or_insert_with(|| GlobSetBuilder::new())
                                            .add(glob);
                                    }
                                    return true;
                                }
                            }
                            false
                        });
                        // Each watcher is assigned to at most one worktree.
                        if glob_is_inside_worktree {
                            break;
                        }
                    }
                }
            }

            // Replace the previous registration wholesale.
            watched_paths.clear();
            for (worktree_id, builder) in builders {
                if let Ok(globset) = builder.build() {
                    watched_paths.insert(worktree_id, globset);
                }
            }

            cx.notify();
        }
    }
3731
3732 async fn on_lsp_workspace_edit(
3733 this: WeakHandle<Self>,
3734 params: lsp2::ApplyWorkspaceEditParams,
3735 server_id: LanguageServerId,
3736 adapter: Arc<CachedLspAdapter>,
3737 mut cx: AsyncAppContext,
3738 ) -> Result<lsp2::ApplyWorkspaceEditResponse> {
3739 let this = this
3740 .upgrade()
3741 .ok_or_else(|| anyhow!("project project closed"))?;
3742 let language_server = this
3743 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3744 .ok_or_else(|| anyhow!("language server not found"))?;
3745 let transaction = Self::deserialize_workspace_edit(
3746 this.clone(),
3747 params.edit,
3748 true,
3749 adapter.clone(),
3750 language_server.clone(),
3751 &mut cx,
3752 )
3753 .await
3754 .log_err();
3755 this.update(&mut cx, |this, _| {
3756 if let Some(transaction) = transaction {
3757 this.last_workspace_edits_by_language_server
3758 .insert(server_id, transaction);
3759 }
3760 });
3761 Ok(lsp2::ApplyWorkspaceEditResponse {
3762 applied: true,
3763 failed_change: None,
3764 failure_reason: None,
3765 })
3766 }
3767
3768 pub fn language_server_statuses(
3769 &self,
3770 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3771 self.language_server_statuses.values()
3772 }
3773
    /// Ingests a `textDocument/publishDiagnostics` notification from a
    /// language server.
    ///
    /// Converts LSP diagnostics into this project's `DiagnosticEntry` form,
    /// grouping each primary diagnostic with its related-information entries
    /// under a shared group id, then stores them for the file's worktree via
    /// `update_diagnostic_entries`. Diagnostics whose source is listed in
    /// `disk_based_sources` are flagged as disk-based so their ranges can be
    /// adjusted for unsaved edits.
    ///
    /// # Errors
    /// Fails if the notification's URI is not a file path, or if storing the
    /// entries fails.
    pub fn update_diagnostics(
        &mut self,
        language_server_id: LanguageServerId,
        mut params: lsp2::PublishDiagnosticsParams,
        disk_based_sources: &[String],
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut diagnostics = Vec::default();
        // Maps (source, code, range) of each primary diagnostic to its group,
        // so later diagnostics that reference it can be detected as
        // "supporting" entries rather than new primaries.
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        // Severity/unnecessary overrides, applied to related-info entries in
        // the fix-up pass below.
        let mut supporting_diagnostics = HashMap::default();

        // Ensure that primary diagnostics are always the most severe
        params.diagnostics.sort_by_key(|item| item.severity);

        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp2::NumberOrString::Number(code) => code.to_string(),
                lsp2::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            // A diagnostic is "supporting" when any of its related locations
            // points at an already-seen primary diagnostic.
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
                tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
            });

            if is_supporting {
                supporting_diagnostics.insert(
                    (source, code.clone(), range),
                    (diagnostic.severity, is_unnecessary),
                );
            } else {
                // A new primary diagnostic starts a new group.
                let group_id = post_inc(&mut self.next_diagnostic_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        source: diagnostic.source.clone(),
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                        is_unnecessary,
                    },
                });
                // Related information located in the same file becomes
                // non-primary entries of the same group.
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    source: diagnostic.source.clone(),
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                    is_unnecessary: false,
                                },
                            });
                        }
                    }
                }
            }
        }

        // Second pass: if a supporting diagnostic matched a non-primary
        // entry, carry over its severity and unnecessary flag.
        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    if let Some(severity) = severity {
                        diagnostic.severity = severity;
                    }
                    diagnostic.is_unnecessary = is_unnecessary;
                }
            }
        }

        self.update_diagnostic_entries(
            language_server_id,
            abs_path,
            params.version,
            diagnostics,
            cx,
        )?;
        Ok(())
    }
3895
    /// Stores a new set of diagnostics for the file at `abs_path`.
    ///
    /// Updates the open buffer for the path (if any) and the owning local
    /// worktree's diagnostic summaries, emitting `DiagnosticsUpdated` when
    /// the worktree's summary actually changed.
    ///
    /// # Errors
    /// Fails if no local worktree contains `abs_path`, if the worktree isn't
    /// local, or if updating the buffer's diagnostics fails.
    pub fn update_diagnostic_entries(
        &mut self,
        server_id: LanguageServerId,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        // If the file is open, update the buffer's in-memory diagnostics too.
        if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
            self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
        }

        let updated = worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
        })?;
        if updated {
            cx.emit(Event::DiagnosticsUpdated {
                language_server_id: server_id,
                path: project_path,
            });
        }
        Ok(())
    }
3931
    /// Applies a set of diagnostics to an open buffer, sanitizing their ranges
    /// against the buffer's current contents.
    ///
    /// Disk-based diagnostics (which refer to the saved file on disk) have
    /// their ranges translated through any unsaved edits; all ranges are then
    /// clipped to valid buffer positions and empty ranges are widened by one
    /// codepoint so they remain visible.
    ///
    /// # Errors
    /// Fails if no buffer snapshot matches the given LSP `version`.
    fn update_buffer_diagnostics(
        &mut self,
        buffer: &Handle<Buffer>,
        server_id: LanguageServerId,
        version: Option<i32>,
        mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Tie-break ordering for diagnostics that share the same range:
        // primaries first, then non-disk-based, then by severity and message.
        fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
            Ordering::Equal
                .then_with(|| b.is_primary.cmp(&a.is_primary))
                .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
                .then_with(|| a.severity.cmp(&b.severity))
                .then_with(|| a.message.cmp(&b.message))
        }

        // Resolve the buffer snapshot the server's `version` refers to.
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;

        // Sort by start ascending, end descending, so enclosing ranges come
        // before the ranges they contain.
        diagnostics.sort_unstable_by(|a, b| {
            Ordering::Equal
                .then_with(|| a.range.start.cmp(&b.range.start))
                .then_with(|| b.range.end.cmp(&a.range.end))
                .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
        });

        let mut sanitized_diagnostics = Vec::new();
        let edits_since_save = Patch::new(
            snapshot
                .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
                .collect(),
        );
        for entry in diagnostics {
            let start;
            let end;
            if entry.diagnostic.is_disk_based {
                // Some diagnostics are based on files on disk instead of buffers'
                // current contents. Adjust these diagnostics' ranges to reflect
                // any unsaved edits.
                start = edits_since_save.old_to_new(entry.range.start);
                end = edits_since_save.old_to_new(entry.range.end);
            } else {
                start = entry.range.start;
                end = entry.range.end;
            }

            let mut range = snapshot.clip_point_utf16(start, Bias::Left)
                ..snapshot.clip_point_utf16(end, Bias::Right);

            // Expand empty ranges by one codepoint
            if range.start == range.end {
                // This will be go to the next boundary when being clipped
                range.end.column += 1;
                range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
                // At end of line/buffer, expand backwards instead.
                if range.start == range.end && range.end.column > 0 {
                    range.start.column -= 1;
                    range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
                }
            }

            sanitized_diagnostics.push(DiagnosticEntry {
                range,
                diagnostic: entry.diagnostic,
            });
        }
        drop(edits_since_save);

        let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(server_id, set, cx)
        });
        Ok(())
    }
4004
    /// Reloads the given buffers from disk, discarding unsaved changes.
    ///
    /// Only dirty buffers are considered. Local buffers are reloaded
    /// directly; remote buffers (when this project is shared) are reloaded
    /// via a `ReloadBuffers` RPC to the host. Returns a task resolving to the
    /// combined transaction; when `push_to_history` is false, the reload
    /// transactions are removed from each buffer's undo history.
    pub fn reload_buffers(
        &self,
        buffers: HashSet<Handle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            // Clean buffers already match the disk contents; skip them.
            if buffer.is_dirty() {
                if let Some(file) = File::from_dyn(buffer.file()) {
                    if file.is_local() {
                        local_buffers.push(buffer_handle);
                    } else {
                        remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                    }
                }
            }
        }

        // Remote reloads only make sense when the project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(move |this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::ReloadBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .filter_map(|buffer| {
                                buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
                            })
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })?
                    .await?;
            }

            for buffer in local_buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))?
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.handle(), transaction);
                    }
                });
            }

            Ok(project_transaction)
        })
    }
4070
4071 pub fn format(
4072 &self,
4073 buffers: HashSet<Handle<Buffer>>,
4074 push_to_history: bool,
4075 trigger: FormatTrigger,
4076 cx: &mut ModelContext<Project>,
4077 ) -> Task<anyhow::Result<ProjectTransaction>> {
4078 if self.is_local() {
4079 let mut buffers_with_paths_and_servers = buffers
4080 .into_iter()
4081 .filter_map(|buffer_handle| {
4082 let buffer = buffer_handle.read(cx);
4083 let file = File::from_dyn(buffer.file())?;
4084 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4085 let server = self
4086 .primary_language_server_for_buffer(buffer, cx)
4087 .map(|s| s.1.clone());
4088 Some((buffer_handle, buffer_abs_path, server))
4089 })
4090 .collect::<Vec<_>>();
4091
4092 cx.spawn(move |this, mut cx| async move {
4093 // Do not allow multiple concurrent formatting requests for the
4094 // same buffer.
4095 this.update(&mut cx, |this, cx| {
4096 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4097 this.buffers_being_formatted
4098 .insert(buffer.read(cx).remote_id())
4099 });
4100 });
4101
4102 let _cleanup = defer({
4103 let this = this.clone();
4104 let mut cx = cx.clone();
4105 let buffers = &buffers_with_paths_and_servers;
4106 move || {
4107 this.update(&mut cx, |this, cx| {
4108 for (buffer, _, _) in buffers {
4109 this.buffers_being_formatted
4110 .remove(&buffer.read(cx).remote_id());
4111 }
4112 });
4113 }
4114 });
4115
4116 let mut project_transaction = ProjectTransaction::default();
4117 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4118 let settings = buffer.update(&mut cx, |buffer, cx| {
4119 language_settings(buffer.language(), buffer.file(), cx).clone()
4120 })?;
4121
4122 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4123 let ensure_final_newline = settings.ensure_final_newline_on_save;
4124 let format_on_save = settings.format_on_save.clone();
4125 let formatter = settings.formatter.clone();
4126 let tab_size = settings.tab_size;
4127
4128 // First, format buffer's whitespace according to the settings.
4129 let trailing_whitespace_diff = if remove_trailing_whitespace {
4130 Some(
4131 buffer
4132 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4133 .await,
4134 )
4135 } else {
4136 None
4137 };
4138 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4139 buffer.finalize_last_transaction();
4140 buffer.start_transaction();
4141 if let Some(diff) = trailing_whitespace_diff {
4142 buffer.apply_diff(diff, cx);
4143 }
4144 if ensure_final_newline {
4145 buffer.ensure_final_newline(cx);
4146 }
4147 buffer.end_transaction(cx)
4148 })?;
4149
4150 // Currently, formatting operations are represented differently depending on
4151 // whether they come from a language server or an external command.
4152 enum FormatOperation {
4153 Lsp(Vec<(Range<Anchor>, String)>),
4154 External(Diff),
4155 Prettier(Diff),
4156 }
4157
4158 // Apply language-specific formatting using either a language server
4159 // or external command.
4160 let mut format_operation = None;
4161 match (formatter, format_on_save) {
4162 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4163
4164 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4165 | (_, FormatOnSave::LanguageServer) => {
4166 if let Some((language_server, buffer_abs_path)) =
4167 language_server.as_ref().zip(buffer_abs_path.as_ref())
4168 {
4169 format_operation = Some(FormatOperation::Lsp(
4170 Self::format_via_lsp(
4171 &this,
4172 &buffer,
4173 buffer_abs_path,
4174 &language_server,
4175 tab_size,
4176 &mut cx,
4177 )
4178 .await
4179 .context("failed to format via language server")?,
4180 ));
4181 }
4182 }
4183
4184 (
4185 Formatter::External { command, arguments },
4186 FormatOnSave::On | FormatOnSave::Off,
4187 )
4188 | (_, FormatOnSave::External { command, arguments }) => {
4189 if let Some(buffer_abs_path) = buffer_abs_path {
4190 format_operation = Self::format_via_external_command(
4191 buffer,
4192 buffer_abs_path,
4193 &command,
4194 &arguments,
4195 &mut cx,
4196 )
4197 .await
4198 .context(format!(
4199 "failed to format via external command {:?}",
4200 command
4201 ))?
4202 .map(FormatOperation::External);
4203 }
4204 }
4205 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4206 if let Some(prettier_task) = this
4207 .update(&mut cx, |project, cx| {
4208 project.prettier_instance_for_buffer(buffer, cx)
4209 })?.await {
4210 match prettier_task.await
4211 {
4212 Ok(prettier) => {
4213 let buffer_path = buffer.update(&mut cx, |buffer, cx| {
4214 File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
4215 })?;
4216 format_operation = Some(FormatOperation::Prettier(
4217 prettier
4218 .format(buffer, buffer_path, &mut cx)
4219 .await
4220 .context("formatting via prettier")?,
4221 ));
4222 }
4223 Err(e) => anyhow::bail!(
4224 "Failed to create prettier instance for buffer during autoformatting: {e:#}"
4225 ),
4226 }
4227 } else if let Some((language_server, buffer_abs_path)) =
4228 language_server.as_ref().zip(buffer_abs_path.as_ref())
4229 {
4230 format_operation = Some(FormatOperation::Lsp(
4231 Self::format_via_lsp(
4232 &this,
4233 &buffer,
4234 buffer_abs_path,
4235 &language_server,
4236 tab_size,
4237 &mut cx,
4238 )
4239 .await
4240 .context("failed to format via language server")?,
4241 ));
4242 }
4243 }
4244 (Formatter::Prettier { .. }, FormatOnSave::On | FormatOnSave::Off) => {
4245 if let Some(prettier_task) = this
4246 .update(&mut cx, |project, cx| {
4247 project.prettier_instance_for_buffer(buffer, cx)
4248 })?.await {
4249 match prettier_task.await
4250 {
4251 Ok(prettier) => {
4252 let buffer_path = buffer.update(&mut cx, |buffer, cx| {
4253 File::from_dyn(buffer.file()).map(|file| file.abs_path(cx))
4254 })?;
4255 format_operation = Some(FormatOperation::Prettier(
4256 prettier
4257 .format(buffer, buffer_path, &mut cx)
4258 .await
4259 .context("formatting via prettier")?,
4260 ));
4261 }
4262 Err(e) => anyhow::bail!(
4263 "Failed to create prettier instance for buffer during formatting: {e:#}"
4264 ),
4265 }
4266 }
4267 }
4268 };
4269
4270 buffer.update(&mut cx, |b, cx| {
4271 // If the buffer had its whitespace formatted and was edited while the language-specific
4272 // formatting was being computed, avoid applying the language-specific formatting, because
4273 // it can't be grouped with the whitespace formatting in the undo history.
4274 if let Some(transaction_id) = whitespace_transaction_id {
4275 if b.peek_undo_stack()
4276 .map_or(true, |e| e.transaction_id() != transaction_id)
4277 {
4278 format_operation.take();
4279 }
4280 }
4281
4282 // Apply any language-specific formatting, and group the two formatting operations
4283 // in the buffer's undo history.
4284 if let Some(operation) = format_operation {
4285 match operation {
4286 FormatOperation::Lsp(edits) => {
4287 b.edit(edits, None, cx);
4288 }
4289 FormatOperation::External(diff) => {
4290 b.apply_diff(diff, cx);
4291 }
4292 FormatOperation::Prettier(diff) => {
4293 b.apply_diff(diff, cx);
4294 }
4295 }
4296
4297 if let Some(transaction_id) = whitespace_transaction_id {
4298 b.group_until_transaction(transaction_id);
4299 }
4300 }
4301
4302 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4303 if !push_to_history {
4304 b.forget_transaction(transaction.id);
4305 }
4306 project_transaction.0.insert(buffer.clone(), transaction);
4307 }
4308 });
4309 }
4310
4311 Ok(project_transaction)
4312 })
4313 } else {
4314 let remote_id = self.remote_id();
4315 let client = self.client.clone();
4316 cx.spawn(move |this, mut cx| async move {
4317 let mut project_transaction = ProjectTransaction::default();
4318 if let Some(project_id) = remote_id {
4319 let response = client
4320 .request(proto::FormatBuffers {
4321 project_id,
4322 trigger: trigger as i32,
4323 buffer_ids: buffers
4324 .iter()
4325 .map(|buffer| {
4326 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4327 })
4328 .collect::<Result<_>>()?,
4329 })
4330 .await?
4331 .transaction
4332 .ok_or_else(|| anyhow!("missing transaction"))?;
4333 project_transaction = this
4334 .update(&mut cx, |this, cx| {
4335 this.deserialize_project_transaction(response, push_to_history, cx)
4336 })?
4337 .await?;
4338 }
4339 Ok(project_transaction)
4340 })
4341 }
4342 }
4343
4344 async fn format_via_lsp(
4345 this: &WeakHandle<Self>,
4346 buffer: &Handle<Buffer>,
4347 abs_path: &Path,
4348 language_server: &Arc<LanguageServer>,
4349 tab_size: NonZeroU32,
4350 cx: &mut AsyncAppContext,
4351 ) -> Result<Vec<(Range<Anchor>, String)>> {
4352 let uri = lsp2::Url::from_file_path(abs_path)
4353 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4354 let text_document = lsp2::TextDocumentIdentifier::new(uri);
4355 let capabilities = &language_server.capabilities();
4356
4357 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4358 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4359
4360 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4361 language_server
4362 .request::<lsp2::request::Formatting>(lsp2::DocumentFormattingParams {
4363 text_document,
4364 options: lsp_command::lsp_formatting_options(tab_size.get()),
4365 work_done_progress_params: Default::default(),
4366 })
4367 .await?
4368 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4369 let buffer_start = lsp2::Position::new(0, 0);
4370 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4371
4372 language_server
4373 .request::<lsp2::request::RangeFormatting>(lsp2::DocumentRangeFormattingParams {
4374 text_document,
4375 range: lsp2::Range::new(buffer_start, buffer_end),
4376 options: lsp_command::lsp_formatting_options(tab_size.get()),
4377 work_done_progress_params: Default::default(),
4378 })
4379 .await?
4380 } else {
4381 None
4382 };
4383
4384 if let Some(lsp_edits) = lsp_edits {
4385 this.update(cx, |this, cx| {
4386 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4387 })?
4388 .await
4389 } else {
4390 Ok(Vec::new())
4391 }
4392 }
4393
    /// Formats `buffer` by piping its contents through an external command's
    /// stdin and diffing the command's stdout against the buffer.
    ///
    /// Returns `Ok(None)` when the buffer has no local worktree to use as a
    /// working directory; otherwise the diff produced from the command's
    /// output. Fails if the command exits with a non-zero status.
    async fn format_via_external_command(
        buffer: &Handle<Buffer>,
        buffer_abs_path: &Path,
        command: &str,
        arguments: &[String],
        cx: &mut AsyncAppContext,
    ) -> Result<Option<Diff>> {
        // Use the buffer's worktree root as the working directory. If the
        // worktree root is itself a file (single-file worktree), use its
        // containing directory instead.
        let working_dir_path = buffer.update(cx, |buffer, cx| {
            let file = File::from_dyn(buffer.file())?;
            let worktree = file.worktree.read(cx).as_local()?;
            let mut worktree_path = worktree.abs_path().to_path_buf();
            if worktree.root_entry()?.is_file() {
                worktree_path.pop();
            }
            Some(worktree_path)
        })?;

        if let Some(working_dir_path) = working_dir_path {
            // The literal token `{buffer_path}` in any argument is replaced
            // with the buffer's absolute path.
            let mut child =
                smol::process::Command::new(command)
                    .args(arguments.iter().map(|arg| {
                        arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
                    }))
                    .current_dir(&working_dir_path)
                    .stdin(smol::process::Stdio::piped())
                    .stdout(smol::process::Stdio::piped())
                    .stderr(smol::process::Stdio::piped())
                    .spawn()?;
            let stdin = child
                .stdin
                .as_mut()
                .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
            // NOTE(review): the entire buffer is written to stdin before any
            // stdout is read below; a formatter that streams output while
            // consuming input could fill the stdout pipe and deadlock —
            // confirm the formatters used here buffer their input, or consider
            // driving the stdin write concurrently with output collection.
            let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
            for chunk in text.chunks() {
                stdin.write_all(chunk.as_bytes()).await?;
            }
            stdin.flush().await?;

            let output = child.output().await?;
            if !output.status.success() {
                return Err(anyhow!(
                    "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
                    output.status.code(),
                    String::from_utf8_lossy(&output.stdout),
                    String::from_utf8_lossy(&output.stderr),
                ));
            }

            // The formatted document is whatever the command printed; diff it
            // against the current buffer contents.
            let stdout = String::from_utf8(output.stdout)?;
            Ok(Some(
                buffer
                    .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
                    .await,
            ))
        } else {
            Ok(None)
        }
    }
4452
4453 pub fn definition<T: ToPointUtf16>(
4454 &self,
4455 buffer: &Handle<Buffer>,
4456 position: T,
4457 cx: &mut ModelContext<Self>,
4458 ) -> Task<Result<Vec<LocationLink>>> {
4459 let position = position.to_point_utf16(buffer.read(cx));
4460 self.request_lsp(
4461 buffer.clone(),
4462 LanguageServerToQuery::Primary,
4463 GetDefinition { position },
4464 cx,
4465 )
4466 }
4467
4468 pub fn type_definition<T: ToPointUtf16>(
4469 &self,
4470 buffer: &Handle<Buffer>,
4471 position: T,
4472 cx: &mut ModelContext<Self>,
4473 ) -> Task<Result<Vec<LocationLink>>> {
4474 let position = position.to_point_utf16(buffer.read(cx));
4475 self.request_lsp(
4476 buffer.clone(),
4477 LanguageServerToQuery::Primary,
4478 GetTypeDefinition { position },
4479 cx,
4480 )
4481 }
4482
4483 pub fn references<T: ToPointUtf16>(
4484 &self,
4485 buffer: &Handle<Buffer>,
4486 position: T,
4487 cx: &mut ModelContext<Self>,
4488 ) -> Task<Result<Vec<Location>>> {
4489 let position = position.to_point_utf16(buffer.read(cx));
4490 self.request_lsp(
4491 buffer.clone(),
4492 LanguageServerToQuery::Primary,
4493 GetReferences { position },
4494 cx,
4495 )
4496 }
4497
4498 pub fn document_highlights<T: ToPointUtf16>(
4499 &self,
4500 buffer: &Handle<Buffer>,
4501 position: T,
4502 cx: &mut ModelContext<Self>,
4503 ) -> Task<Result<Vec<DocumentHighlight>>> {
4504 let position = position.to_point_utf16(buffer.read(cx));
4505 self.request_lsp(
4506 buffer.clone(),
4507 LanguageServerToQuery::Primary,
4508 GetDocumentHighlights { position },
4509 cx,
4510 )
4511 }
4512
    /// Searches workspace symbols matching `query` across every running
    /// language server (local projects), or asks the host over RPC (remote
    /// projects). Returns an empty list when the project is neither.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Issue one workspace/symbol request per (worktree, server) pair.
            let mut requests = Vec::new();
            for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
                let worktree_id = *worktree_id;
                let worktree_handle = self.worktree_for_id(worktree_id, cx);
                let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
                    Some(worktree) => worktree,
                    None => continue,
                };
                let worktree_abs_path = worktree.abs_path().clone();

                // Only query servers that are currently running.
                let (adapter, language, server) = match self.language_servers.get(server_id) {
                    Some(LanguageServerState::Running {
                        adapter,
                        language,
                        server,
                        ..
                    }) => (adapter.clone(), language.clone(), server),

                    _ => continue,
                };

                requests.push(
                    server
                        .request::<lsp2::request::WorkspaceSymbolRequest>(
                            lsp2::WorkspaceSymbolParams {
                                query: query.to_string(),
                                ..Default::default()
                            },
                        )
                        .log_err()
                        .map(move |response| {
                            // Normalize both response shapes (flat and nested)
                            // into (name, kind, location) triples.
                            let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
                                lsp2::WorkspaceSymbolResponse::Flat(flat_responses) => {
                                    flat_responses.into_iter().map(|lsp_symbol| {
                                        (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
                                    }).collect::<Vec<_>>()
                                }
                                lsp2::WorkspaceSymbolResponse::Nested(nested_responses) => {
                                    nested_responses.into_iter().filter_map(|lsp_symbol| {
                                        let location = match lsp_symbol.location {
                                            OneOf::Left(location) => location,
                                            OneOf::Right(_) => {
                                                error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
                                                return None
                                            }
                                        };
                                        Some((lsp_symbol.name, lsp_symbol.kind, location))
                                    }).collect::<Vec<_>>()
                                }
                            }).unwrap_or_default();

                            (
                                adapter,
                                language,
                                worktree_id,
                                worktree_abs_path,
                                lsp_symbols,
                            )
                        }),
                );
            }

            cx.spawn(move |this, mut cx| async move {
                // Await every server's response concurrently.
                let responses = futures::future::join_all(requests).await;
                let this = match this.upgrade() {
                    Some(this) => this,
                    None => return Ok(Vec::new()),
                };

                let symbols = this.update(&mut cx, |this, cx| {
                    let mut symbols = Vec::new();
                    for (
                        adapter,
                        adapter_language,
                        source_worktree_id,
                        worktree_abs_path,
                        lsp_symbols,
                    ) in responses
                    {
                        symbols.extend(lsp_symbols.into_iter().filter_map(
                            |(symbol_name, symbol_kind, symbol_location)| {
                                let abs_path = symbol_location.uri.to_file_path().ok()?;
                                // Attribute the symbol to the worktree that
                                // actually contains it; for paths outside all
                                // worktrees, fall back to a path relative to
                                // the requesting worktree.
                                let mut worktree_id = source_worktree_id;
                                let path;
                                if let Some((worktree, rel_path)) =
                                    this.find_local_worktree(&abs_path, cx)
                                {
                                    worktree_id = worktree.read(cx).id();
                                    path = rel_path;
                                } else {
                                    path = relativize_path(&worktree_abs_path, &abs_path);
                                }

                                let project_path = ProjectPath {
                                    worktree_id,
                                    path: path.into(),
                                };
                                let signature = this.symbol_signature(&project_path);
                                let adapter_language = adapter_language.clone();
                                // Prefer the language detected from the file
                                // itself; fall back to the server's language.
                                let language = this
                                    .languages
                                    .language_for_file(&project_path.path, None)
                                    .unwrap_or_else(move |_| adapter_language);
                                let language_server_name = adapter.name.clone();
                                // Resolve the display label asynchronously,
                                // falling back to a plain label.
                                Some(async move {
                                    let language = language.await;
                                    let label =
                                        language.label_for_symbol(&symbol_name, symbol_kind).await;

                                    Symbol {
                                        language_server_name,
                                        source_worktree_id,
                                        path: project_path,
                                        label: label.unwrap_or_else(|| {
                                            CodeLabel::plain(symbol_name.clone(), None)
                                        }),
                                        kind: symbol_kind,
                                        name: symbol_name,
                                        range: range_from_lsp(symbol_location.range),
                                        signature,
                                    }
                                })
                            },
                        ));
                    }

                    symbols
                })?;

                Ok(futures::future::join_all(symbols).await)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: fetch serialized symbols from the host and
            // deserialize them, dropping (and logging) any that fail.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn(move |this, mut cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade() {
                    let new_symbols = this.update(&mut cx, |this, _| {
                        response
                            .symbols
                            .into_iter()
                            .map(|symbol| this.deserialize_symbol(symbol))
                            .collect::<Vec<_>>()
                    })?;
                    symbols = futures::future::join_all(new_symbols)
                        .await
                        .into_iter()
                        .filter_map(|symbol| symbol.log_err())
                        .collect::<Vec<_>>();
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
4674
4675 pub fn open_buffer_for_symbol(
4676 &mut self,
4677 symbol: &Symbol,
4678 cx: &mut ModelContext<Self>,
4679 ) -> Task<Result<Handle<Buffer>>> {
4680 if self.is_local() {
4681 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4682 symbol.source_worktree_id,
4683 symbol.language_server_name.clone(),
4684 )) {
4685 *id
4686 } else {
4687 return Task::ready(Err(anyhow!(
4688 "language server for worktree and language not found"
4689 )));
4690 };
4691
4692 let worktree_abs_path = if let Some(worktree_abs_path) = self
4693 .worktree_for_id(symbol.path.worktree_id, cx)
4694 .and_then(|worktree| worktree.read(cx).as_local())
4695 .map(|local_worktree| local_worktree.abs_path())
4696 {
4697 worktree_abs_path
4698 } else {
4699 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4700 };
4701 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
4702 let symbol_uri = if let Ok(uri) = lsp2::Url::from_file_path(symbol_abs_path) {
4703 uri
4704 } else {
4705 return Task::ready(Err(anyhow!("invalid symbol path")));
4706 };
4707
4708 self.open_local_buffer_via_lsp(
4709 symbol_uri,
4710 language_server_id,
4711 symbol.language_server_name.clone(),
4712 cx,
4713 )
4714 } else if let Some(project_id) = self.remote_id() {
4715 let request = self.client.request(proto::OpenBufferForSymbol {
4716 project_id,
4717 symbol: Some(serialize_symbol(symbol)),
4718 });
4719 cx.spawn(move |this, mut cx| async move {
4720 let response = request.await?;
4721 this.update(&mut cx, |this, cx| {
4722 this.wait_for_remote_buffer(response.buffer_id, cx)
4723 })?
4724 .await
4725 })
4726 } else {
4727 Task::ready(Err(anyhow!("project does not have a remote id")))
4728 }
4729 }
4730
4731 pub fn hover<T: ToPointUtf16>(
4732 &self,
4733 buffer: &Handle<Buffer>,
4734 position: T,
4735 cx: &mut ModelContext<Self>,
4736 ) -> Task<Result<Option<Hover>>> {
4737 let position = position.to_point_utf16(buffer.read(cx));
4738 self.request_lsp(
4739 buffer.clone(),
4740 LanguageServerToQuery::Primary,
4741 GetHover { position },
4742 cx,
4743 )
4744 }
4745
4746 pub fn completions<T: ToOffset + ToPointUtf16>(
4747 &self,
4748 buffer: &Handle<Buffer>,
4749 position: T,
4750 cx: &mut ModelContext<Self>,
4751 ) -> Task<Result<Vec<Completion>>> {
4752 let position = position.to_point_utf16(buffer.read(cx));
4753 if self.is_local() {
4754 let snapshot = buffer.read(cx).snapshot();
4755 let offset = position.to_offset(&snapshot);
4756 let scope = snapshot.language_scope_at(offset);
4757
4758 let server_ids: Vec<_> = self
4759 .language_servers_for_buffer(buffer.read(cx), cx)
4760 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4761 .filter(|(adapter, _)| {
4762 scope
4763 .as_ref()
4764 .map(|scope| scope.language_allowed(&adapter.name))
4765 .unwrap_or(true)
4766 })
4767 .map(|(_, server)| server.server_id())
4768 .collect();
4769
4770 let buffer = buffer.clone();
4771 cx.spawn(move |this, mut cx| async move {
4772 let mut tasks = Vec::with_capacity(server_ids.len());
4773 this.update(&mut cx, |this, cx| {
4774 for server_id in server_ids {
4775 tasks.push(this.request_lsp(
4776 buffer.clone(),
4777 LanguageServerToQuery::Other(server_id),
4778 GetCompletions { position },
4779 cx,
4780 ));
4781 }
4782 });
4783
4784 let mut completions = Vec::new();
4785 for task in tasks {
4786 if let Ok(new_completions) = task.await {
4787 completions.extend_from_slice(&new_completions);
4788 }
4789 }
4790
4791 Ok(completions)
4792 })
4793 } else if let Some(project_id) = self.remote_id() {
4794 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4795 } else {
4796 Task::ready(Ok(Default::default()))
4797 }
4798 }
4799
4800 pub fn apply_additional_edits_for_completion(
4801 &self,
4802 buffer_handle: Handle<Buffer>,
4803 completion: Completion,
4804 push_to_history: bool,
4805 cx: &mut ModelContext<Self>,
4806 ) -> Task<Result<Option<Transaction>>> {
4807 let buffer = buffer_handle.read(cx);
4808 let buffer_id = buffer.remote_id();
4809
4810 if self.is_local() {
4811 let server_id = completion.server_id;
4812 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4813 Some((_, server)) => server.clone(),
4814 _ => return Task::ready(Ok(Default::default())),
4815 };
4816
4817 cx.spawn(move |this, mut cx| async move {
4818 let can_resolve = lang_server
4819 .capabilities()
4820 .completion_provider
4821 .as_ref()
4822 .and_then(|options| options.resolve_provider)
4823 .unwrap_or(false);
4824 let additional_text_edits = if can_resolve {
4825 lang_server
4826 .request::<lsp2::request::ResolveCompletionItem>(completion.lsp_completion)
4827 .await?
4828 .additional_text_edits
4829 } else {
4830 completion.lsp_completion.additional_text_edits
4831 };
4832 if let Some(edits) = additional_text_edits {
4833 let edits = this
4834 .update(&mut cx, |this, cx| {
4835 this.edits_from_lsp(
4836 &buffer_handle,
4837 edits,
4838 lang_server.server_id(),
4839 None,
4840 cx,
4841 )
4842 })?
4843 .await?;
4844
4845 buffer_handle.update(&mut cx, |buffer, cx| {
4846 buffer.finalize_last_transaction();
4847 buffer.start_transaction();
4848
4849 for (range, text) in edits {
4850 let primary = &completion.old_range;
4851 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4852 && primary.end.cmp(&range.start, buffer).is_ge();
4853 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4854 && range.end.cmp(&primary.end, buffer).is_ge();
4855
4856 //Skip additional edits which overlap with the primary completion edit
4857 //https://github.com/zed-industries/zed/pull/1871
4858 if !start_within && !end_within {
4859 buffer.edit([(range, text)], None, cx);
4860 }
4861 }
4862
4863 let transaction = if buffer.end_transaction(cx).is_some() {
4864 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4865 if !push_to_history {
4866 buffer.forget_transaction(transaction.id);
4867 }
4868 Some(transaction)
4869 } else {
4870 None
4871 };
4872 Ok(transaction)
4873 })?
4874 } else {
4875 Ok(None)
4876 }
4877 })
4878 } else if let Some(project_id) = self.remote_id() {
4879 let client = self.client.clone();
4880 cx.spawn(move |_, mut cx| async move {
4881 let response = client
4882 .request(proto::ApplyCompletionAdditionalEdits {
4883 project_id,
4884 buffer_id,
4885 completion: Some(language2::proto::serialize_completion(&completion)),
4886 })
4887 .await?;
4888
4889 if let Some(transaction) = response.transaction {
4890 let transaction = language2::proto::deserialize_transaction(transaction)?;
4891 buffer_handle
4892 .update(&mut cx, |buffer, _| {
4893 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4894 })?
4895 .await?;
4896 if push_to_history {
4897 buffer_handle.update(&mut cx, |buffer, _| {
4898 buffer.push_transaction(transaction.clone(), Instant::now());
4899 });
4900 }
4901 Ok(Some(transaction))
4902 } else {
4903 Ok(None)
4904 }
4905 })
4906 } else {
4907 Task::ready(Err(anyhow!("project does not have a remote id")))
4908 }
4909 }
4910
4911 pub fn code_actions<T: Clone + ToOffset>(
4912 &self,
4913 buffer_handle: &Handle<Buffer>,
4914 range: Range<T>,
4915 cx: &mut ModelContext<Self>,
4916 ) -> Task<Result<Vec<CodeAction>>> {
4917 let buffer = buffer_handle.read(cx);
4918 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4919 self.request_lsp(
4920 buffer_handle.clone(),
4921 LanguageServerToQuery::Primary,
4922 GetCodeActions { range },
4923 cx,
4924 )
4925 }
4926
4927 pub fn apply_code_action(
4928 &self,
4929 buffer_handle: Handle<Buffer>,
4930 mut action: CodeAction,
4931 push_to_history: bool,
4932 cx: &mut ModelContext<Self>,
4933 ) -> Task<Result<ProjectTransaction>> {
4934 if self.is_local() {
4935 let buffer = buffer_handle.read(cx);
4936 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4937 self.language_server_for_buffer(buffer, action.server_id, cx)
4938 {
4939 (adapter.clone(), server.clone())
4940 } else {
4941 return Task::ready(Ok(Default::default()));
4942 };
4943 let range = action.range.to_point_utf16(buffer);
4944
4945 cx.spawn(move |this, mut cx| async move {
4946 if let Some(lsp_range) = action
4947 .lsp_action
4948 .data
4949 .as_mut()
4950 .and_then(|d| d.get_mut("codeActionParams"))
4951 .and_then(|d| d.get_mut("range"))
4952 {
4953 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
4954 action.lsp_action = lang_server
4955 .request::<lsp2::request::CodeActionResolveRequest>(action.lsp_action)
4956 .await?;
4957 } else {
4958 let actions = this
4959 .update(&mut cx, |this, cx| {
4960 this.code_actions(&buffer_handle, action.range, cx)
4961 })?
4962 .await?;
4963 action.lsp_action = actions
4964 .into_iter()
4965 .find(|a| a.lsp_action.title == action.lsp_action.title)
4966 .ok_or_else(|| anyhow!("code action is outdated"))?
4967 .lsp_action;
4968 }
4969
4970 if let Some(edit) = action.lsp_action.edit {
4971 if edit.changes.is_some() || edit.document_changes.is_some() {
4972 return Self::deserialize_workspace_edit(
4973 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
4974 edit,
4975 push_to_history,
4976 lsp_adapter.clone(),
4977 lang_server.clone(),
4978 &mut cx,
4979 )
4980 .await;
4981 }
4982 }
4983
4984 if let Some(command) = action.lsp_action.command {
4985 this.update(&mut cx, |this, _| {
4986 this.last_workspace_edits_by_language_server
4987 .remove(&lang_server.server_id());
4988 });
4989
4990 let result = lang_server
4991 .request::<lsp2::request::ExecuteCommand>(lsp2::ExecuteCommandParams {
4992 command: command.command,
4993 arguments: command.arguments.unwrap_or_default(),
4994 ..Default::default()
4995 })
4996 .await;
4997
4998 if let Err(err) = result {
4999 // TODO: LSP ERROR
5000 return Err(err);
5001 }
5002
5003 return Ok(this.update(&mut cx, |this, _| {
5004 this.last_workspace_edits_by_language_server
5005 .remove(&lang_server.server_id())
5006 .unwrap_or_default()
5007 })?);
5008 }
5009
5010 Ok(ProjectTransaction::default())
5011 })
5012 } else if let Some(project_id) = self.remote_id() {
5013 let client = self.client.clone();
5014 let request = proto::ApplyCodeAction {
5015 project_id,
5016 buffer_id: buffer_handle.read(cx).remote_id(),
5017 action: Some(language2::proto::serialize_code_action(&action)),
5018 };
5019 cx.spawn(move |this, mut cx| async move {
5020 let response = client
5021 .request(request)
5022 .await?
5023 .transaction
5024 .ok_or_else(|| anyhow!("missing transaction"))?;
5025 this.update(&mut cx, |this, cx| {
5026 this.deserialize_project_transaction(response, push_to_history, cx)
5027 })?
5028 .await
5029 })
5030 } else {
5031 Task::ready(Err(anyhow!("project does not have a remote id")))
5032 }
5033 }
5034
5035 fn apply_on_type_formatting(
5036 &self,
5037 buffer: Handle<Buffer>,
5038 position: Anchor,
5039 trigger: String,
5040 cx: &mut ModelContext<Self>,
5041 ) -> Task<Result<Option<Transaction>>> {
5042 if self.is_local() {
5043 cx.spawn(move |this, mut cx| async move {
5044 // Do not allow multiple concurrent formatting requests for the
5045 // same buffer.
5046 this.update(&mut cx, |this, cx| {
5047 this.buffers_being_formatted
5048 .insert(buffer.read(cx).remote_id())
5049 });
5050
5051 let _cleanup = defer({
5052 let this = this.clone();
5053 let mut cx = cx.clone();
5054 let closure_buffer = buffer.clone();
5055 move || {
5056 this.update(&mut cx, |this, cx| {
5057 this.buffers_being_formatted
5058 .remove(&closure_buffer.read(cx).remote_id());
5059 });
5060 }
5061 });
5062
5063 buffer
5064 .update(&mut cx, |buffer, _| {
5065 buffer.wait_for_edits(Some(position.timestamp))
5066 })?
5067 .await?;
5068 this.update(&mut cx, |this, cx| {
5069 let position = position.to_point_utf16(buffer.read(cx));
5070 this.on_type_format(buffer, position, trigger, false, cx)
5071 })?
5072 .await
5073 })
5074 } else if let Some(project_id) = self.remote_id() {
5075 let client = self.client.clone();
5076 let request = proto::OnTypeFormatting {
5077 project_id,
5078 buffer_id: buffer.read(cx).remote_id(),
5079 position: Some(serialize_anchor(&position)),
5080 trigger,
5081 version: serialize_version(&buffer.read(cx).version()),
5082 };
5083 cx.spawn(move |_, _| async move {
5084 client
5085 .request(request)
5086 .await?
5087 .transaction
5088 .map(language2::proto::deserialize_transaction)
5089 .transpose()
5090 })
5091 } else {
5092 Task::ready(Err(anyhow!("project does not have a remote id")))
5093 }
5094 }
5095
5096 async fn deserialize_edits(
5097 this: Handle<Self>,
5098 buffer_to_edit: Handle<Buffer>,
5099 edits: Vec<lsp2::TextEdit>,
5100 push_to_history: bool,
5101 _: Arc<CachedLspAdapter>,
5102 language_server: Arc<LanguageServer>,
5103 cx: &mut AsyncAppContext,
5104 ) -> Result<Option<Transaction>> {
5105 let edits = this
5106 .update(cx, |this, cx| {
5107 this.edits_from_lsp(
5108 &buffer_to_edit,
5109 edits,
5110 language_server.server_id(),
5111 None,
5112 cx,
5113 )
5114 })?
5115 .await?;
5116
5117 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5118 buffer.finalize_last_transaction();
5119 buffer.start_transaction();
5120 for (range, text) in edits {
5121 buffer.edit([(range, text)], None, cx);
5122 }
5123
5124 if buffer.end_transaction(cx).is_some() {
5125 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5126 if !push_to_history {
5127 buffer.forget_transaction(transaction.id);
5128 }
5129 Some(transaction)
5130 } else {
5131 None
5132 }
5133 })?;
5134
5135 Ok(transaction)
5136 }
5137
    /// Applies an LSP `WorkspaceEdit`: file create/rename/delete operations
    /// plus per-document text edits, collecting every resulting buffer
    /// transaction into a `ProjectTransaction`.
    async fn deserialize_workspace_edit(
        this: Handle<Self>,
        edit: lsp2::WorkspaceEdit,
        push_to_history: bool,
        lsp_adapter: Arc<CachedLspAdapter>,
        language_server: Arc<LanguageServer>,
        cx: &mut AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let fs = this.update(cx, |this, _| this.fs.clone())?;
        // `document_changes` takes precedence over the plain `changes` map
        // when both are present; the latter is lifted into the richer
        // operation form so both paths share one loop below.
        let mut operations = Vec::new();
        if let Some(document_changes) = edit.document_changes {
            match document_changes {
                lsp2::DocumentChanges::Edits(edits) => {
                    operations.extend(edits.into_iter().map(lsp2::DocumentChangeOperation::Edit))
                }
                lsp2::DocumentChanges::Operations(ops) => operations = ops,
            }
        } else if let Some(changes) = edit.changes {
            operations.extend(changes.into_iter().map(|(uri, edits)| {
                lsp2::DocumentChangeOperation::Edit(lsp2::TextDocumentEdit {
                    text_document: lsp2::OptionalVersionedTextDocumentIdentifier {
                        uri,
                        version: None,
                    },
                    edits: edits.into_iter().map(OneOf::Left).collect(),
                })
            }));
        }

        let mut project_transaction = ProjectTransaction::default();
        for operation in operations {
            match operation {
                lsp2::DocumentChangeOperation::Op(lsp2::ResourceOp::Create(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    // Ensure the parent directory exists before creating.
                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    // NOTE(review): `Path::ends_with` compares whole path
                    // components, not string suffixes, and `to_file_path`
                    // drops trailing slashes — so this condition appears to be
                    // effectively never true and directories would go through
                    // the file branch; confirm the intended trailing-slash
                    // detection (likely needs to inspect the URI string).
                    if abs_path.ends_with("/") {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(
                            &abs_path,
                            op.options
                                .map(|options| fs::CreateOptions {
                                    overwrite: options.overwrite.unwrap_or(false),
                                    ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
                                })
                                .unwrap_or_default(),
                        )
                        .await?;
                    }
                }

                lsp2::DocumentChangeOperation::Op(lsp2::ResourceOp::Rename(op)) => {
                    let source_abs_path = op
                        .old_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let target_abs_path = op
                        .new_uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    fs.rename(
                        &source_abs_path,
                        &target_abs_path,
                        op.options
                            .map(|options| fs::RenameOptions {
                                overwrite: options.overwrite.unwrap_or(false),
                                ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
                            })
                            .unwrap_or_default(),
                    )
                    .await?;
                }

                lsp2::DocumentChangeOperation::Op(lsp2::ResourceOp::Delete(op)) => {
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op
                        .options
                        .map(|options| fs::RemoveOptions {
                            recursive: options.recursive.unwrap_or(false),
                            ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
                        })
                        .unwrap_or_default();
                    // NOTE(review): same component-wise `ends_with("/")`
                    // caveat as the create branch above.
                    if abs_path.ends_with("/") {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
                }

                lsp2::DocumentChangeOperation::Edit(op) => {
                    // Open (or connect to) the target buffer through the
                    // language server that produced the edit.
                    let buffer_to_edit = this
                        .update(cx, |this, cx| {
                            this.open_local_buffer_via_lsp(
                                op.text_document.uri,
                                language_server.server_id(),
                                lsp_adapter.name.clone(),
                                cx,
                            )
                        })?
                        .await?;

                    let edits = this
                        .update(cx, |this, cx| {
                            // Annotated edits carry the same text edit plus
                            // metadata that isn't needed here.
                            let edits = op.edits.into_iter().map(|edit| match edit {
                                OneOf::Left(edit) => edit,
                                OneOf::Right(edit) => edit.text_edit,
                            });
                            this.edits_from_lsp(
                                &buffer_to_edit,
                                edits,
                                language_server.server_id(),
                                op.text_document.version,
                                cx,
                            )
                        })?
                        .await?;

                    // Group this operation's edits into one transaction, and
                    // drop it from the undo history unless requested.
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([(range, text)], None, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        };

                        transaction
                    })?;
                    if let Some(transaction) = transaction {
                        project_transaction.0.insert(buffer_to_edit, transaction);
                    }
                }
            }
        }

        Ok(project_transaction)
    }
5291
5292 pub fn prepare_rename<T: ToPointUtf16>(
5293 &self,
5294 buffer: Handle<Buffer>,
5295 position: T,
5296 cx: &mut ModelContext<Self>,
5297 ) -> Task<Result<Option<Range<Anchor>>>> {
5298 let position = position.to_point_utf16(buffer.read(cx));
5299 self.request_lsp(
5300 buffer,
5301 LanguageServerToQuery::Primary,
5302 PrepareRename { position },
5303 cx,
5304 )
5305 }
5306
5307 pub fn perform_rename<T: ToPointUtf16>(
5308 &self,
5309 buffer: Handle<Buffer>,
5310 position: T,
5311 new_name: String,
5312 push_to_history: bool,
5313 cx: &mut ModelContext<Self>,
5314 ) -> Task<Result<ProjectTransaction>> {
5315 let position = position.to_point_utf16(buffer.read(cx));
5316 self.request_lsp(
5317 buffer,
5318 LanguageServerToQuery::Primary,
5319 PerformRename {
5320 position,
5321 new_name,
5322 push_to_history,
5323 },
5324 cx,
5325 )
5326 }
5327
5328 pub fn on_type_format<T: ToPointUtf16>(
5329 &self,
5330 buffer: Handle<Buffer>,
5331 position: T,
5332 trigger: String,
5333 push_to_history: bool,
5334 cx: &mut ModelContext<Self>,
5335 ) -> Task<Result<Option<Transaction>>> {
5336 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5337 let position = position.to_point_utf16(buffer);
5338 (
5339 position,
5340 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5341 .tab_size,
5342 )
5343 });
5344 self.request_lsp(
5345 buffer.clone(),
5346 LanguageServerToQuery::Primary,
5347 OnTypeFormatting {
5348 position,
5349 trigger,
5350 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5351 push_to_history,
5352 },
5353 cx,
5354 )
5355 }
5356
    /// Fetches inlay hints for `range` in `buffer_handle`, either from the
    /// primary language server (local projects) or from the host over RPC
    /// (remote projects).
    pub fn inlay_hints<T: ToOffset>(
        &self,
        buffer_handle: Handle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<anyhow::Result<Vec<InlayHint>>> {
        // Anchor the range and capture the buffer id/version up front, before
        // spawning, so the remote request describes the buffer as it was when
        // the hints were requested.
        let buffer = buffer_handle.read(cx);
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
        let range_start = range.start;
        let range_end = range.end;
        let buffer_id = buffer.remote_id();
        let buffer_version = buffer.version().clone();
        let lsp_request = InlayHints { range };

        if self.is_local() {
            let lsp_request_task = self.request_lsp(
                buffer_handle.clone(),
                LanguageServerToQuery::Primary,
                lsp_request,
                cx,
            );
            cx.spawn(move |_, mut cx| async move {
                // Make sure the edits that created the range's anchors have
                // landed in the buffer before resolving the request.
                buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
                    })?
                    .await
                    .context("waiting for inlay hint request range edits")?;
                lsp_request_task.await.context("inlay hints LSP request")
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::InlayHints {
                project_id,
                buffer_id,
                start: Some(serialize_anchor(&range_start)),
                end: Some(serialize_anchor(&range_end)),
                version: serialize_version(&buffer_version),
            };
            cx.spawn(move |project, cx| async move {
                let response = client
                    .request(request)
                    .await
                    .context("inlay hints proto request")?;
                // Convert the proto response back into `InlayHint`s via the
                // same `LspCommand` implementation used on the local path.
                let hints_request_result = LspCommand::response_from_proto(
                    lsp_request,
                    response,
                    project.upgrade().ok_or_else(|| anyhow!("No project"))?,
                    buffer_handle.clone(),
                    cx,
                )
                .await;

                hints_request_result.context("inlay hints proto response conversion")
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
5416
    /// Resolves a single inlay hint via the language server that produced it.
    ///
    /// Returns the hint unchanged when the server is gone or does not
    /// advertise inlay-hint resolution. On a remote project the resolution is
    /// forwarded to the host over RPC.
    pub fn resolve_inlay_hint(
        &self,
        hint: InlayHint,
        buffer_handle: Handle<Buffer>,
        server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) -> Task<anyhow::Result<InlayHint>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let (_, lang_server) = if let Some((adapter, server)) =
                self.language_server_for_buffer(buffer, server_id, cx)
            {
                (adapter.clone(), server.clone())
            } else {
                // The server is no longer running; hand the hint back as-is.
                return Task::ready(Ok(hint));
            };
            if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
                return Task::ready(Ok(hint));
            }

            let buffer_snapshot = buffer.snapshot();
            cx.spawn(move |_, mut cx| async move {
                let resolve_task = lang_server.request::<lsp2::request::InlayHintResolveRequest>(
                    InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
                );
                let resolved_hint = resolve_task
                    .await
                    .context("inlay hint resolve LSP request")?;
                let resolved_hint = InlayHints::lsp_to_project_hint(
                    resolved_hint,
                    &buffer_handle,
                    server_id,
                    ResolveState::Resolved,
                    false,
                    &mut cx,
                )
                .await?;
                Ok(resolved_hint)
            })
        } else if let Some(project_id) = self.remote_id() {
            let client = self.client.clone();
            let request = proto::ResolveInlayHint {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                language_server_id: server_id.0 as u64,
                hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
            };
            cx.spawn(move |_, _| async move {
                let response = client
                    .request(request)
                    .await
                    .context("inlay hints proto request")?;
                match response.hint {
                    Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
                        .context("inlay hints proto resolve response conversion"),
                    // The host omitted a hint; fall back to the original.
                    None => Ok(hint),
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
5479
    /// Runs `query` across the project, yielding each matching buffer along
    /// with the anchor ranges of its matches.
    ///
    /// Local projects search worktrees and open buffers directly via
    /// `search_local`; remote projects delegate to the host and stream back
    /// the deserialized results. Panics (`unimplemented!`) when the project
    /// is neither local nor connected to a host.
    #[allow(clippy::type_complexity)]
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Receiver<(Handle<Buffer>, Vec<Range<Anchor>>)> {
        if self.is_local() {
            self.search_local(query, cx)
        } else if let Some(project_id) = self.remote_id() {
            let (tx, rx) = smol::channel::unbounded();
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(move |this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    // Make sure the buffer the host refers to exists locally
                    // before deserializing anchors into it.
                    let target_buffer = this
                        .update(&mut cx, |this, cx| {
                            this.wait_for_remote_buffer(location.buffer_id, cx)
                        })?
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                // Group all ranges per buffer, then emit one message each.
                for (buffer, ranges) in result {
                    let _ = tx.send((buffer, ranges)).await;
                }
                Result::<(), anyhow::Error>::Ok(())
            })
            .detach_and_log_err(cx);
            rx
        } else {
            unimplemented!();
        }
    }
5524
    /// Searches all visible local worktrees and open buffers for `query`,
    /// returning a channel of `(buffer, match ranges)` pairs in sorted
    /// buffer-path order. See the phase description below for the pipeline.
    pub fn search_local(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Receiver<(Handle<Buffer>, Vec<Range<Anchor>>)> {
        // Local search is split into several phases.
        // TL;DR is that we do 2 passes; initial pass to pick files which contain at least one match
        // and the second phase that finds positions of all the matches found in the candidate files.
        // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
        //
        // It gets a bit hairy though, because we must account for files that do not have a persistent representation
        // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
        //    Then, we go through a worktree and check for files that do match a predicate. If the file had an opened version, we skip the scan
        //    of FS version for that file altogether - after all, what we have in memory is more up-to-date than what's in FS.
        // 2. At this point, we have a list of all potentially matching buffers/files.
        //    We sort that list by buffer path - this list is retained for later use.
        //    We ensure that all buffers are now opened and available in project.
        // 3. We run a scan over all the candidate buffers on multiple background threads.
        //    We cannot assume that there will even be a match - while at least one match
        //    is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
        //    There is also an auxilliary background thread responsible for result gathering.
        //    This is where the sorted list of buffers comes into play to maintain sorted order; Whenever this background thread receives a notification (buffer has/doesn't have matches),
        //    it keeps it around. It reports matches in sorted order, though it accepts them in unsorted order as well.
        //    As soon as the match info on next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
        //    entry - which might already be available thanks to out-of-order processing.
        //
        // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // This however would mean that project search (that is the main user of this function) would have to do the sorting itself, on the go.
        // This isn't as straightforward as running an insertion sort sadly, and would also mean that it would have to care about maintaining match index
        // in face of constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
        let snapshots = self
            .visible_worktrees(cx)
            .filter_map(|tree| {
                let tree = tree.read(cx).as_local()?;
                Some(tree.snapshot())
            })
            .collect::<Vec<_>>();

        let background = cx.executor().clone();
        let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
        if path_count == 0 {
            // Dropping the sender closes the channel right away, so the
            // caller observes an empty result stream.
            let (_, rx) = smol::channel::bounded(1024);
            return rx;
        }
        let workers = background.num_cpus().min(path_count);
        let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
        let mut unnamed_files = vec![];
        // Split open buffers into path-backed ones (searched in place of the
        // FS contents) and unnamed/untitled ones.
        let opened_buffers = self
            .opened_buffers
            .iter()
            .filter_map(|(_, b)| {
                let buffer = b.upgrade()?;
                let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                if let Some(path) = snapshot.file().map(|file| file.path()) {
                    Some((path.clone(), (buffer, snapshot)))
                } else {
                    unnamed_files.push(buffer);
                    None
                }
            })
            .collect();
        // Phase 1: produce match candidates in the background.
        cx.executor()
            .spawn(Self::background_search(
                unnamed_files,
                opened_buffers,
                cx.executor().clone(),
                self.fs.clone(),
                workers,
                query.clone(),
                path_count,
                snapshots,
                matching_paths_tx,
            ))
            .detach();

        // Phase 2: sort the candidates and open a buffer for each.
        let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
        let background = cx.executor().clone();
        let (result_tx, result_rx) = smol::channel::bounded(1024);
        // Phase 3: scan candidate buffers on worker tasks, then report the
        // results in sorted candidate order.
        cx.executor()
            .spawn(async move {
                let Ok(buffers) = buffers.await else {
                    return;
                };

                let buffers_len = buffers.len();
                if buffers_len == 0 {
                    return;
                }
                let query = &query;
                let (finished_tx, mut finished_rx) = smol::channel::unbounded();
                background
                    .scoped(|scope| {
                        #[derive(Clone)]
                        struct FinishedStatus {
                            entry: Option<(Handle<Buffer>, Vec<Range<Anchor>>)>,
                            buffer_index: SearchMatchCandidateIndex,
                        }

                        for _ in 0..workers {
                            let finished_tx = finished_tx.clone();
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((entry, buffer_index)) = buffers_rx.next().await {
                                    let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
                                    {
                                        if query.file_matches(
                                            snapshot.file().map(|file| file.path().as_ref()),
                                        ) {
                                            query
                                                .search(&snapshot, None)
                                                .await
                                                .iter()
                                                .map(|range| {
                                                    snapshot.anchor_before(range.start)
                                                        ..snapshot.anchor_after(range.end)
                                                })
                                                .collect()
                                        } else {
                                            Vec::new()
                                        }
                                    } else {
                                        Vec::new()
                                    };

                                    // Every candidate reports a status, even
                                    // without matches, so the sorter below can
                                    // advance past it.
                                    let status = if !buffer_matches.is_empty() {
                                        let entry = if let Some((buffer, _)) = entry.as_ref() {
                                            Some((buffer.clone(), buffer_matches))
                                        } else {
                                            None
                                        };
                                        FinishedStatus {
                                            entry,
                                            buffer_index,
                                        }
                                    } else {
                                        FinishedStatus {
                                            entry: None,
                                            buffer_index,
                                        }
                                    };
                                    if finished_tx.send(status).await.is_err() {
                                        break;
                                    }
                                }
                            });
                        }
                        // Report sorted matches
                        scope.spawn(async move {
                            let mut current_index = 0;
                            let mut scratch = vec![None; buffers_len];
                            while let Some(status) = finished_rx.next().await {
                                debug_assert!(
                                    scratch[status.buffer_index].is_none(),
                                    "Got match status of position {} twice",
                                    status.buffer_index
                                );
                                let index = status.buffer_index;
                                scratch[index] = Some(status);
                                while current_index < buffers_len {
                                    let Some(current_entry) = scratch[current_index].take() else {
                                        // We intentionally **do not** increment `current_index` here. When next element arrives
                                        // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
                                        // this time.
                                        break;
                                    };
                                    if let Some(entry) = current_entry.entry {
                                        result_tx.send(entry).await.log_err();
                                    }
                                    current_index += 1;
                                }
                                if current_index == buffers_len {
                                    break;
                                }
                            }
                        });
                    })
                    .await;
            })
            .detach();
        result_rx
    }
5709
    /// Pick paths that might potentially contain a match of a given search query.
    ///
    /// Sends one `SearchMatchCandidate` into `matching_paths_tx` for every
    /// unnamed buffer and every opened (path-backed) buffer unconditionally,
    /// then scans worktree files in parallel (`workers` tasks, each owning a
    /// contiguous index range) and sends a candidate for each on-disk file
    /// whose contents match the query.
    async fn background_search(
        unnamed_buffers: Vec<Handle<Buffer>>,
        opened_buffers: HashMap<Arc<Path>, (Handle<Buffer>, BufferSnapshot)>,
        executor: Executor,
        fs: Arc<dyn Fs>,
        workers: usize,
        query: SearchQuery,
        path_count: usize,
        snapshots: Vec<LocalSnapshot>,
        matching_paths_tx: Sender<SearchMatchCandidate>,
    ) {
        let fs = &fs;
        let query = &query;
        let matching_paths_tx = &matching_paths_tx;
        let snapshots = &snapshots;
        // Ceiling division: each worker handles at most this many paths.
        let paths_per_worker = (path_count + workers - 1) / workers;
        for buffer in unnamed_buffers {
            matching_paths_tx
                .send(SearchMatchCandidate::OpenBuffer {
                    buffer: buffer.clone(),
                    path: None,
                })
                .await
                .log_err();
        }
        for (path, (buffer, _)) in opened_buffers.iter() {
            matching_paths_tx
                .send(SearchMatchCandidate::OpenBuffer {
                    buffer: buffer.clone(),
                    path: Some(path.clone()),
                })
                .await
                .log_err();
        }
        executor
            .scoped(|scope| {
                for worker_ix in 0..workers {
                    let worker_start_ix = worker_ix * paths_per_worker;
                    let worker_end_ix = worker_start_ix + paths_per_worker;
                    // NOTE(review): despite the name, this is a clone of
                    // `opened_buffers` (path-backed open buffers); it is used
                    // below to skip re-scanning files already open in memory.
                    let unnamed_buffers = opened_buffers.clone();
                    scope.spawn(async move {
                        let mut snapshot_start_ix = 0;
                        // Reused scratch buffer for absolute paths.
                        let mut abs_path = PathBuf::new();
                        for snapshot in snapshots {
                            let snapshot_end_ix = snapshot_start_ix + snapshot.visible_file_count();
                            if worker_end_ix <= snapshot_start_ix {
                                // Past this worker's range; later snapshots
                                // start even further out.
                                break;
                            } else if worker_start_ix > snapshot_end_ix {
                                snapshot_start_ix = snapshot_end_ix;
                                continue;
                            } else {
                                // This worker's range overlaps this snapshot.
                                let start_in_snapshot =
                                    worker_start_ix.saturating_sub(snapshot_start_ix);
                                let end_in_snapshot =
                                    cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;

                                for entry in snapshot
                                    .files(false, start_in_snapshot)
                                    .take(end_in_snapshot - start_in_snapshot)
                                {
                                    if matching_paths_tx.is_closed() {
                                        break;
                                    }
                                    // The open-buffer version was already sent
                                    // above; skip the stale FS copy.
                                    if unnamed_buffers.contains_key(&entry.path) {
                                        continue;
                                    }
                                    let matches = if query.file_matches(Some(&entry.path)) {
                                        abs_path.clear();
                                        abs_path.push(&snapshot.abs_path());
                                        abs_path.push(&entry.path);
                                        if let Some(file) = fs.open_sync(&abs_path).await.log_err()
                                        {
                                            query.detect(file).unwrap_or(false)
                                        } else {
                                            false
                                        }
                                    } else {
                                        false
                                    };

                                    if matches {
                                        let project_path = SearchMatchCandidate::Path {
                                            worktree_id: snapshot.id(),
                                            path: entry.path.clone(),
                                        };
                                        if matching_paths_tx.send(project_path).await.is_err() {
                                            break;
                                        }
                                    }
                                }

                                snapshot_start_ix = snapshot_end_ix;
                            }
                        }
                    });
                }
            })
            .await;
    }
5810
    /// Issues `request` for the given buffer against the language server
    /// selected by `server`.
    ///
    /// Locally: serializes the request, sends it to the chosen server, and
    /// converts the LSP response back into `R::Response`. Remotely: forwards
    /// the request to the host via `send_lsp_proto_request`. Returns
    /// `R::Response::default()` when no applicable server exists or the
    /// server's capabilities don't cover the request.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: Handle<Buffer>,
        server: LanguageServerToQuery,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp2::request::Request>::Result: Send,
        <R::LspRequest as lsp2::request::Request>::Params: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            let language_server = match server {
                LanguageServerToQuery::Primary => {
                    match self.primary_language_server_for_buffer(buffer, cx) {
                        Some((_, server)) => Some(Arc::clone(server)),
                        // No primary server: succeed with an empty response.
                        None => return Task::ready(Ok(Default::default())),
                    }
                }
                LanguageServerToQuery::Other(id) => self
                    .language_server_for_buffer(buffer, id, cx)
                    .map(|(_, server)| Arc::clone(server)),
            };
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let (Some(file), Some(language_server)) = (file, language_server) {
                let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
                return cx.spawn(move |this, cx| async move {
                    // Capability check happens inside the task so it reflects
                    // the server's state when the request actually runs.
                    if !request.check_capabilities(language_server.capabilities()) {
                        return Ok(Default::default());
                    }

                    let result = language_server.request::<R::LspRequest>(lsp_params).await;
                    let response = match result {
                        Ok(response) => response,

                        Err(err) => {
                            log::warn!(
                                "Generic lsp request to {} failed: {}",
                                language_server.name(),
                                err
                            );
                            return Err(err);
                        }
                    };

                    request
                        .response_from_lsp(
                            response,
                            this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
                            buffer_handle,
                            language_server.server_id(),
                            cx,
                        )
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
        }

        // No usable server and no remote project: empty default response.
        Task::ready(Ok(Default::default()))
    }
5874
    /// Forwards an `LspCommand` request to the host of a remote project over
    /// RPC and converts the proto response back into `R::Response`.
    ///
    /// Fails if the project is dropped before/after the round trip or if the
    /// project became read-only (disconnected) while the request was in
    /// flight.
    fn send_lsp_proto_request<R: LspCommand>(
        &self,
        buffer: Handle<Buffer>,
        project_id: u64,
        request: R,
        cx: &mut ModelContext<'_, Project>,
    ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
        let rpc = self.client.clone();
        let message = request.to_proto(project_id, buffer.read(cx));
        cx.spawn(move |this, mut cx| async move {
            // Ensure the project is still alive by the time the task
            // is scheduled.
            this.upgrade().context("project dropped")?;
            let response = rpc.request(message).await?;
            let this = this.upgrade().context("project dropped")?;
            if this.update(&mut cx, |this, _| this.is_read_only())? {
                // The connection was lost mid-request; the response can no
                // longer be trusted.
                Err(anyhow!("disconnected before completing request"))
            } else {
                request
                    .response_from_proto(response, this, buffer, cx)
                    .await
            }
        })
    }
5899
    /// Drains all search-match candidates from `matching_paths_rx`, sorts
    /// them by path, and opens a buffer for each candidate concurrently.
    ///
    /// Returns a oneshot receiver carrying the full sorted candidate list,
    /// plus a channel yielding `(buffer + snapshot, candidate index)` pairs
    /// as buffers finish opening — possibly out of the sorted order; the
    /// index lets the consumer restore it.
    fn sort_candidates_and_open_buffers(
        mut matching_paths_rx: Receiver<SearchMatchCandidate>,
        cx: &mut ModelContext<Self>,
    ) -> (
        futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
        Receiver<(
            Option<(Handle<Buffer>, BufferSnapshot)>,
            SearchMatchCandidateIndex,
        )>,
    ) {
        let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
        let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
        cx.spawn(move |this, cx| async move {
            let mut buffers = vec![];
            while let Some(entry) = matching_paths_rx.next().await {
                buffers.push(entry);
            }
            buffers.sort_by_key(|candidate| candidate.path());
            let matching_paths = buffers.clone();
            let _ = sorted_buffers_tx.send(buffers);
            for (index, candidate) in matching_paths.into_iter().enumerate() {
                if buffers_tx.is_closed() {
                    break;
                }
                let this = this.clone();
                let buffers_tx = buffers_tx.clone();
                // Open each candidate's buffer in its own detached task.
                cx.spawn(move |mut cx| async move {
                    let buffer = match candidate {
                        SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
                        SearchMatchCandidate::Path { worktree_id, path } => this
                            .update(&mut cx, |this, cx| {
                                this.open_buffer((worktree_id, path), cx)
                            })?
                            .await
                            .log_err(),
                    };
                    // A `None` entry still carries the index so the consumer
                    // can advance past candidates that failed to open.
                    if let Some(buffer) = buffer {
                        let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
                        buffers_tx
                            .send((Some((buffer, snapshot)), index))
                            .await
                            .log_err();
                    } else {
                        buffers_tx.send((None, index)).await.log_err();
                    }

                    Ok::<_, anyhow::Error>(())
                })
                .detach();
            }
        })
        .detach();
        (sorted_buffers_rx, buffers_rx)
    }
5954
5955 pub fn find_or_create_local_worktree(
5956 &mut self,
5957 abs_path: impl AsRef<Path>,
5958 visible: bool,
5959 cx: &mut ModelContext<Self>,
5960 ) -> Task<Result<(Handle<Worktree>, PathBuf)>> {
5961 let abs_path = abs_path.as_ref();
5962 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
5963 Task::ready(Ok((tree, relative_path)))
5964 } else {
5965 let worktree = self.create_local_worktree(abs_path, visible, cx);
5966 cx.executor()
5967 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
5968 }
5969 }
5970
5971 pub fn find_local_worktree(
5972 &self,
5973 abs_path: &Path,
5974 cx: &AppContext,
5975 ) -> Option<(Handle<Worktree>, PathBuf)> {
5976 for tree in &self.worktrees {
5977 if let Some(tree) = tree.upgrade() {
5978 if let Some(relative_path) = tree
5979 .read(cx)
5980 .as_local()
5981 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
5982 {
5983 return Some((tree.clone(), relative_path.into()));
5984 }
5985 }
5986 }
5987 None
5988 }
5989
5990 pub fn is_shared(&self) -> bool {
5991 match &self.client_state {
5992 Some(ProjectClientState::Local { .. }) => true,
5993 _ => false,
5994 }
5995 }
5996
    /// Creates a local worktree rooted at `abs_path`, or joins an in-flight
    /// creation for the same path.
    ///
    /// Concurrent calls for the same path share a single `Worktree::local`
    /// task through the `loading_local_worktrees` map; the entry is removed
    /// once the task resolves, whether it succeeded or failed.
    fn create_local_worktree(
        &mut self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Handle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let next_entry_id = self.next_entry_id.clone();
        let path: Arc<Path> = abs_path.as_ref().into();
        let task = self
            .loading_local_worktrees
            .entry(path.clone())
            .or_insert_with(|| {
                cx.spawn(move |project, mut cx| {
                    async move {
                        let worktree = Worktree::local(
                            client.clone(),
                            path.clone(),
                            visible,
                            fs,
                            next_entry_id,
                            &mut cx,
                        )
                        .await;

                        // Drop the pending entry regardless of the outcome so
                        // a later call can retry.
                        project.update(&mut cx, |project, _| {
                            project.loading_local_worktrees.remove(&path);
                        });

                        let worktree = worktree?;
                        project.update(&mut cx, |project, cx| project.add_worktree(&worktree, cx));
                        Ok(worktree)
                    }
                    // The shared future is cloned by every caller, so the
                    // error type must be cloneable.
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();
        cx.executor().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                // Re-wrap the shared Arc'd error into a fresh anyhow error.
                Err(err) => Err(anyhow!("{}", err)),
            }
        })
    }
6043
6044 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6045 self.worktrees.retain(|worktree| {
6046 if let Some(worktree) = worktree.upgrade() {
6047 let id = worktree.read(cx).id();
6048 if id == id_to_remove {
6049 cx.emit(Event::WorktreeRemoved(id));
6050 false
6051 } else {
6052 true
6053 }
6054 } else {
6055 false
6056 }
6057 });
6058 self.metadata_changed(cx);
6059 }
6060
    /// Registers `worktree` with the project: forwards its entry/git events,
    /// stores a strong or weak handle, and wires up cleanup of its local
    /// settings when the worktree model is released.
    fn add_worktree(&mut self, worktree: &Handle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            cx.subscribe(worktree, |this, worktree, event, cx| match event {
                worktree::Event::UpdatedEntries(changes) => {
                    this.update_local_worktree_buffers(&worktree, changes, cx);
                    this.update_local_worktree_language_servers(&worktree, changes, cx);
                    this.update_local_worktree_settings(&worktree, changes, cx);
                    this.update_prettier_settings(&worktree, changes, cx);
                    cx.emit(Event::WorktreeUpdatedEntries(
                        worktree.read(cx).id(),
                        changes.clone(),
                    ));
                }
                worktree::Event::UpdatedGitRepositories(updated_repos) => {
                    this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
                }
            })
            .detach();
        }

        // Invisible local worktrees of an unshared project are held weakly so
        // they can be dropped once nothing else references them.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }

        let handle_id = worktree.entity_id();
        cx.observe_release(worktree, move |this, worktree, cx| {
            let _ = this.remove_worktree(worktree.id(), cx);
            // Purge worktree-local settings registered under this handle id.
            cx.update_global::<SettingsStore, _>(|store, cx| {
                store
                    .clear_local_settings(handle_id.as_u64() as usize, cx)
                    .log_err()
            });
        })
        .detach();

        cx.emit(Event::WorktreeAdded);
        self.metadata_changed(cx);
    }
6108
    /// Reconciles open buffers with filesystem changes reported by a local
    /// worktree: refreshes each affected buffer's `File` (entry id, path,
    /// mtime, deleted flag), keeps the buffer-id lookup maps in sync,
    /// notifies collaborators of file changes, and re-registers renamed
    /// buffers with their language servers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: &Handle<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();

        let mut renamed_buffers = Vec::new();
        for (path, entry_id, _) in changes {
            let worktree_id = worktree_handle.read(cx).id();
            let project_path = ProjectPath {
                worktree_id,
                path: path.clone(),
            };

            // Resolve the affected buffer by entry id first, falling back to
            // its project path.
            let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
                Some(&buffer_id) => buffer_id,
                None => match self.local_buffer_ids_by_path.get(&project_path) {
                    Some(&buffer_id) => buffer_id,
                    None => {
                        continue;
                    }
                },
            };

            let open_buffer = self.opened_buffers.get(&buffer_id);
            let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
                buffer
            } else {
                // The buffer has been dropped; remove its stale index entries.
                self.opened_buffers.remove(&buffer_id);
                self.local_buffer_ids_by_path.remove(&project_path);
                self.local_buffer_ids_by_entry_id.remove(entry_id);
                continue;
            };

            buffer.update(cx, |buffer, cx| {
                if let Some(old_file) = File::from_dyn(buffer.file()) {
                    if old_file.worktree != *worktree_handle {
                        return;
                    }

                    // Build the buffer's new file state: resolve by entry id,
                    // then by path; if neither resolves the file is deleted.
                    let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
                        File {
                            is_local: true,
                            entry_id: entry.id,
                            mtime: entry.mtime,
                            path: entry.path.clone(),
                            worktree: worktree_handle.clone(),
                            is_deleted: false,
                        }
                    } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
                        File {
                            is_local: true,
                            entry_id: entry.id,
                            mtime: entry.mtime,
                            path: entry.path.clone(),
                            worktree: worktree_handle.clone(),
                            is_deleted: false,
                        }
                    } else {
                        File {
                            is_local: true,
                            entry_id: old_file.entry_id,
                            path: old_file.path().clone(),
                            mtime: old_file.mtime(),
                            worktree: worktree_handle.clone(),
                            is_deleted: true,
                        }
                    };

                    let old_path = old_file.abs_path(cx);
                    if new_file.abs_path(cx) != old_path {
                        // The buffer was renamed; remember it for language
                        // server re-registration below and re-key the path map.
                        renamed_buffers.push((cx.handle(), old_file.clone()));
                        self.local_buffer_ids_by_path.remove(&project_path);
                        self.local_buffer_ids_by_path.insert(
                            ProjectPath {
                                worktree_id,
                                path: path.clone(),
                            },
                            buffer_id,
                        );
                    }

                    if new_file.entry_id != *entry_id {
                        self.local_buffer_ids_by_entry_id.remove(entry_id);
                        self.local_buffer_ids_by_entry_id
                            .insert(new_file.entry_id, buffer_id);
                    }

                    if new_file != *old_file {
                        // Broadcast the updated file metadata when shared.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }

                        buffer.file_updated(Arc::new(new_file), cx).detach();
                    }
                }
            });
        }

        // A rename can change which language (and servers) apply to a buffer,
        // so re-run detection and registration for each renamed one.
        for (buffer, old_file) in renamed_buffers {
            self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
            self.detect_language_for_buffer(&buffer, cx);
            self.register_buffer_with_language_servers(&buffer, cx);
        }
    }
6222
    /// Notifies every language server attached to this worktree about file
    /// changes via `workspace/didChangeWatchedFiles`, filtered to the paths
    /// the server registered to watch.
    fn update_local_worktree_language_servers(
        &mut self,
        worktree_handle: &Handle<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut ModelContext<Self>,
    ) {
        if changes.is_empty() {
            return;
        }

        let worktree_id = worktree_handle.read(cx).id();
        // Collect the distinct servers running for this worktree.
        let mut language_server_ids = self
            .language_server_ids
            .iter()
            .filter_map(|((server_worktree_id, _), server_id)| {
                (*server_worktree_id == worktree_id).then_some(*server_id)
            })
            .collect::<Vec<_>>();
        language_server_ids.sort();
        language_server_ids.dedup();

        let abs_path = worktree_handle.read(cx).abs_path();
        for server_id in &language_server_ids {
            if let Some(LanguageServerState::Running {
                server,
                watched_paths,
                ..
            }) = self.language_servers.get(server_id)
            {
                if let Some(watched_paths) = watched_paths.get(&worktree_id) {
                    let params = lsp2::DidChangeWatchedFilesParams {
                        changes: changes
                            .iter()
                            .filter_map(|(path, _, change)| {
                                // Only report paths the server asked to watch.
                                if !watched_paths.is_match(&path) {
                                    return None;
                                }
                                let typ = match change {
                                    // `Loaded` is an initial-scan event, not a
                                    // filesystem change; don't report it.
                                    PathChange::Loaded => return None,
                                    PathChange::Added => lsp2::FileChangeType::CREATED,
                                    PathChange::Removed => lsp2::FileChangeType::DELETED,
                                    PathChange::Updated => lsp2::FileChangeType::CHANGED,
                                    PathChange::AddedOrUpdated => lsp2::FileChangeType::CHANGED,
                                };
                                Some(lsp2::FileEvent {
                                    uri: lsp2::Url::from_file_path(abs_path.join(path)).unwrap(),
                                    typ,
                                })
                            })
                            .collect(),
                    };

                    if !params.changes.is_empty() {
                        server
                            .notify::<lsp2::notification::DidChangeWatchedFiles>(params)
                            .log_err();
                    }
                }
            }
        }
    }
6284
    /// Reloads the git diff base for every open (or still-loading) buffer
    /// whose containing repository changed, and broadcasts the new diff
    /// bases to collaborators when the project is shared.
    fn update_local_worktree_buffers_git_repos(
        &mut self,
        worktree_handle: Handle<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut ModelContext<Self>,
    ) {
        debug_assert!(worktree_handle.read(cx).is_local());

        // Identify the loading buffers whose containing repository that has changed.
        let future_buffers = self
            .loading_buffers_by_path
            .iter()
            .filter_map(|(project_path, receiver)| {
                if project_path.worktree_id != worktree_handle.read(cx).id() {
                    return None;
                }
                let path = &project_path.path;
                changed_repos
                    .iter()
                    .find(|(work_dir, _)| path.starts_with(work_dir))?;
                let receiver = receiver.clone();
                let path = path.clone();
                Some(async move {
                    wait_for_loading_buffer(receiver)
                        .await
                        .ok()
                        .map(|buffer| (buffer, path))
                })
            })
            .collect::<FuturesUnordered<_>>();

        // Identify the current buffers whose containing repository has changed.
        let current_buffers = self
            .opened_buffers
            .values()
            .filter_map(|buffer| {
                let buffer = buffer.upgrade()?;
                let file = File::from_dyn(buffer.read(cx).file())?;
                if file.worktree != worktree_handle {
                    return None;
                }
                let path = file.path();
                changed_repos
                    .iter()
                    .find(|(work_dir, _)| path.starts_with(work_dir))?;
                Some((buffer, path.clone()))
            })
            .collect::<Vec<_>>();

        if future_buffers.len() + current_buffers.len() == 0 {
            return;
        }

        let remote_id = self.remote_id();
        let client = self.client.clone();
        cx.spawn(move |_, mut cx| async move {
            // Wait for all of the buffers to load.
            let future_buffers = future_buffers.collect::<Vec<_>>().await;

            // Reload the diff base for every buffer whose containing git repository has changed.
            let snapshot =
                worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
            let diff_bases_by_buffer = cx
                .executor()
                .spawn(async move {
                    future_buffers
                        .into_iter()
                        .filter_map(|e| e)
                        .chain(current_buffers)
                        .filter_map(|(buffer, path)| {
                            // Read the file's index contents from its repo to
                            // use as the new diff base.
                            let (work_directory, repo) =
                                snapshot.repository_and_work_directory_for_path(&path)?;
                            let repo = snapshot.get_local_repo(&repo)?;
                            let relative_path = path.strip_prefix(&work_directory).ok()?;
                            let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
                            Some((buffer, base_text))
                        })
                        .collect::<Vec<_>>()
                })
                .await;

            // Assign the new diff bases on all of the buffers.
            for (buffer, diff_base) in diff_bases_by_buffer {
                let buffer_id = buffer.update(&mut cx, |buffer, cx| {
                    buffer.set_diff_base(diff_base.clone(), cx);
                    buffer.remote_id()
                })?;
                if let Some(project_id) = remote_id {
                    client
                        .send(proto::UpdateDiffBase {
                            project_id,
                            buffer_id,
                            diff_base,
                        })
                        .log_err();
                }
            }

            anyhow::Ok(())
        })
        .detach();
    }
6387
    /// Reacts to changes of local settings files (those at
    /// `LOCAL_SETTINGS_RELATIVE_PATH`) inside a worktree: reloads their
    /// contents, applies them to the global `SettingsStore`, and forwards
    /// them to collaborators when the project is shared. A removed file is
    /// represented by `None` content.
    fn update_local_worktree_settings(
        &mut self,
        worktree: &Handle<Worktree>,
        changes: &UpdatedEntriesSet,
        cx: &mut ModelContext<Self>,
    ) {
        let project_id = self.remote_id();
        let worktree_id = worktree.entity_id();
        let worktree = worktree.read(cx).as_local().unwrap();
        let remote_worktree_id = worktree.id();

        let mut settings_contents = Vec::new();
        for (path, _, change) in changes.iter() {
            if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
                // The directory the settings file applies to is the ancestor
                // above the settings file's relative path components.
                let settings_dir = Arc::from(
                    path.ancestors()
                        .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
                        .unwrap(),
                );
                let fs = self.fs.clone();
                let removed = *change == PathChange::Removed;
                let abs_path = worktree.absolutize(path);
                settings_contents.push(async move {
                    // `None` content marks the settings file as removed.
                    (settings_dir, (!removed).then_some(fs.load(&abs_path).await))
                });
            }
        }

        if settings_contents.is_empty() {
            return;
        }

        let client = self.client.clone();
        cx.spawn(move |_, cx| async move {
            let settings_contents: Vec<(Arc<Path>, _)> =
                futures::future::join_all(settings_contents).await;
            cx.update(|cx| {
                cx.update_global::<SettingsStore, _>(|store, cx| {
                    for (directory, file_content) in settings_contents {
                        let file_content = file_content.and_then(|content| content.log_err());
                        store
                            .set_local_settings(
                                worktree_id.as_u64() as usize,
                                directory.clone(),
                                file_content.as_ref().map(String::as_str),
                                cx,
                            )
                            .log_err();
                        if let Some(remote_id) = project_id {
                            client
                                .send(proto::UpdateWorktreeSettings {
                                    project_id: remote_id,
                                    worktree_id: remote_worktree_id.to_proto(),
                                    path: directory.to_string_lossy().into_owned(),
                                    content: file_content,
                                })
                                .log_err();
                        }
                    }
                });
            });
        })
        .detach();
    }
6452
    /// Watches worktree entry changes for prettier config files and, when one
    /// is created/updated/removed, clears the cache of every prettier instance
    /// that could be affected by it.
    fn update_prettier_settings(
        &self,
        worktree: &Handle<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut ModelContext<'_, Project>,
    ) {
        let prettier_config_files = Prettier::CONFIG_FILE_NAMES
            .iter()
            .map(Path::new)
            .collect::<HashSet<_>>();

        // A config file counts as changed when the change is not a mere load,
        // the file is not inside node_modules, and its path is one of the
        // known prettier config file names.
        let prettier_config_file_changed = changes
            .iter()
            .filter(|(_, _, change)| !matches!(change, PathChange::Loaded))
            .filter(|(path, _, _)| {
                !path
                    .components()
                    .any(|component| component.as_os_str().to_string_lossy() == "node_modules")
            })
            .find(|(path, _, _)| prettier_config_files.contains(path.as_ref()));
        let current_worktree_id = worktree.read(cx).id();
        if let Some((config_path, _, _)) = prettier_config_file_changed {
            log::info!(
                "Prettier config file {config_path:?} changed, reloading prettier instances for worktree {current_worktree_id}"
            );
            // Reload prettiers bound to this worktree, plus ones with no
            // worktree association (`worktree_id.is_none()`).
            let prettiers_to_reload = self
                .prettier_instances
                .iter()
                .filter_map(|((worktree_id, prettier_path), prettier_task)| {
                    if worktree_id.is_none() || worktree_id == &Some(current_worktree_id) {
                        Some((*worktree_id, prettier_path.clone(), prettier_task.clone()))
                    } else {
                        None
                    }
                })
                .collect::<Vec<_>>();

            cx.executor()
                .spawn(async move {
                    // A failure to clear one instance's cache is logged and
                    // does not prevent clearing the others.
                    for task_result in future::join_all(prettiers_to_reload.into_iter().map(|(worktree_id, prettier_path, prettier_task)| {
                        async move {
                            prettier_task.await?
                                .clear_cache()
                                .await
                                .with_context(|| {
                                    format!(
                                        "clearing prettier {prettier_path:?} cache for worktree {worktree_id:?} on prettier settings update"
                                    )
                                })
                                .map_err(Arc::new)
                        }
                    }))
                    .await
                    {
                        if let Err(e) = task_result {
                            log::error!("Failed to clear cache for prettier: {e:#}");
                        }
                    }
                })
                .detach();
        }
    }
6515
6516 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6517 let new_active_entry = entry.and_then(|project_path| {
6518 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6519 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6520 Some(entry.id)
6521 });
6522 if new_active_entry != self.active_entry {
6523 self.active_entry = new_active_entry;
6524 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6525 }
6526 }
6527
6528 pub fn language_servers_running_disk_based_diagnostics(
6529 &self,
6530 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6531 self.language_server_statuses
6532 .iter()
6533 .filter_map(|(id, status)| {
6534 if status.has_pending_diagnostic_updates {
6535 Some(*id)
6536 } else {
6537 None
6538 }
6539 })
6540 }
6541
6542 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
6543 let mut summary = DiagnosticSummary::default();
6544 for (_, _, path_summary) in self.diagnostic_summaries(cx) {
6545 summary.error_count += path_summary.error_count;
6546 summary.warning_count += path_summary.warning_count;
6547 }
6548 summary
6549 }
6550
6551 pub fn diagnostic_summaries<'a>(
6552 &'a self,
6553 cx: &'a AppContext,
6554 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6555 self.visible_worktrees(cx).flat_map(move |worktree| {
6556 let worktree = worktree.read(cx);
6557 let worktree_id = worktree.id();
6558 worktree
6559 .diagnostic_summaries()
6560 .map(move |(path, server_id, summary)| {
6561 (ProjectPath { worktree_id, path }, server_id, summary)
6562 })
6563 })
6564 }
6565
    /// Notifies subscribers that the given language server has started a
    /// disk-based diagnostics pass.
    pub fn disk_based_diagnostics_started(
        &mut self,
        language_server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) {
        cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
    }
6573
    /// Notifies subscribers that the given language server has finished a
    /// disk-based diagnostics pass.
    pub fn disk_based_diagnostics_finished(
        &mut self,
        language_server_id: LanguageServerId,
        cx: &mut ModelContext<Self>,
    ) {
        cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
    }
6581
    /// Returns the most recently activated project entry, if any.
    pub fn active_entry(&self) -> Option<ProjectEntryId> {
        self.active_entry
    }
6585
6586 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6587 self.worktree_for_id(path.worktree_id, cx)?
6588 .read(cx)
6589 .entry_for_path(&path.path)
6590 .cloned()
6591 }
6592
6593 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6594 let worktree = self.worktree_for_entry(entry_id, cx)?;
6595 let worktree = worktree.read(cx);
6596 let worktree_id = worktree.id();
6597 let path = worktree.entry_for_id(entry_id)?.path.clone();
6598 Some(ProjectPath { worktree_id, path })
6599 }
6600
6601 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6602 let workspace_root = self
6603 .worktree_for_id(project_path.worktree_id, cx)?
6604 .read(cx)
6605 .abs_path();
6606 let project_path = project_path.path.as_ref();
6607
6608 Some(if project_path == Path::new("") {
6609 workspace_root.to_path_buf()
6610 } else {
6611 workspace_root.join(project_path)
6612 })
6613 }
6614
6615 // RPC message handlers
6616
6617 async fn handle_unshare_project(
6618 this: Handle<Self>,
6619 _: TypedEnvelope<proto::UnshareProject>,
6620 _: Arc<Client>,
6621 mut cx: AsyncAppContext,
6622 ) -> Result<()> {
6623 this.update(&mut cx, |this, cx| {
6624 if this.is_local() {
6625 this.unshare(cx)?;
6626 } else {
6627 this.disconnected_from_host(cx);
6628 }
6629 Ok(())
6630 })?
6631 }
6632
6633 async fn handle_add_collaborator(
6634 this: Handle<Self>,
6635 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6636 _: Arc<Client>,
6637 mut cx: AsyncAppContext,
6638 ) -> Result<()> {
6639 let collaborator = envelope
6640 .payload
6641 .collaborator
6642 .take()
6643 .ok_or_else(|| anyhow!("empty collaborator"))?;
6644
6645 let collaborator = Collaborator::from_proto(collaborator)?;
6646 this.update(&mut cx, |this, cx| {
6647 this.shared_buffers.remove(&collaborator.peer_id);
6648 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6649 this.collaborators
6650 .insert(collaborator.peer_id, collaborator);
6651 cx.notify();
6652 });
6653
6654 Ok(())
6655 }
6656
    /// RPC handler: a collaborator reconnected under a new peer id. Moves
    /// their collaborator record and shared-buffer set from the old id to the
    /// new one, and triggers a buffer resync when that collaborator is the host.
    async fn handle_update_project_collaborator(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let old_peer_id = envelope
            .payload
            .old_peer_id
            .ok_or_else(|| anyhow!("missing old peer id"))?;
        let new_peer_id = envelope
            .payload
            .new_peer_id
            .ok_or_else(|| anyhow!("missing new peer id"))?;
        this.update(&mut cx, |this, cx| {
            let collaborator = this
                .collaborators
                .remove(&old_peer_id)
                .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
            // Replica id 0 identifies the host.
            let is_host = collaborator.replica_id == 0;
            this.collaborators.insert(new_peer_id, collaborator);

            let buffers = this.shared_buffers.remove(&old_peer_id);
            log::info!(
                "peer {} became {}. moving buffers {:?}",
                old_peer_id,
                new_peer_id,
                &buffers
            );
            if let Some(buffers) = buffers {
                this.shared_buffers.insert(new_peer_id, buffers);
            }

            if is_host {
                // The host reconnected: drop operations buffered for
                // not-yet-opened buffers and request a full resync.
                this.opened_buffers
                    .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
                this.buffer_ordered_messages_tx
                    .unbounded_send(BufferOrderedMessage::Resync)
                    .unwrap();
            }

            cx.emit(Event::CollaboratorUpdated {
                old_peer_id,
                new_peer_id,
            });
            cx.notify();
            Ok(())
        })?
    }
6706
    /// RPC handler: a collaborator left the project. Removes their record,
    /// clears their replica from every open buffer, and drops the buffers that
    /// were shared with them.
    async fn handle_remove_collaborator(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let peer_id = envelope
                .payload
                .peer_id
                .ok_or_else(|| anyhow!("invalid peer id"))?;
            let replica_id = this
                .collaborators
                .remove(&peer_id)
                .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
                .replica_id;
            // Purge the departed peer's replica state from all open buffers.
            for buffer in this.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade() {
                    buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
                }
            }
            this.shared_buffers.remove(&peer_id);

            cx.emit(Event::CollaboratorLeft(peer_id));
            cx.notify();
            Ok(())
        })?
    }
6735
6736 async fn handle_update_project(
6737 this: Handle<Self>,
6738 envelope: TypedEnvelope<proto::UpdateProject>,
6739 _: Arc<Client>,
6740 mut cx: AsyncAppContext,
6741 ) -> Result<()> {
6742 this.update(&mut cx, |this, cx| {
6743 // Don't handle messages that were sent before the response to us joining the project
6744 if envelope.message_id > this.join_project_response_message_id {
6745 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6746 }
6747 Ok(())
6748 })?
6749 }
6750
6751 async fn handle_update_worktree(
6752 this: Handle<Self>,
6753 envelope: TypedEnvelope<proto::UpdateWorktree>,
6754 _: Arc<Client>,
6755 mut cx: AsyncAppContext,
6756 ) -> Result<()> {
6757 this.update(&mut cx, |this, cx| {
6758 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6759 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6760 worktree.update(cx, |worktree, _| {
6761 let worktree = worktree.as_remote_mut().unwrap();
6762 worktree.update_from_remote(envelope.payload);
6763 });
6764 }
6765 Ok(())
6766 })?
6767 }
6768
    /// RPC handler: applies local settings received from the host for a
    /// worktree directory to this guest's global `SettingsStore`.
    async fn handle_update_worktree_settings(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                cx.update_global::<SettingsStore, _>(|store, cx| {
                    store
                        .set_local_settings(
                            worktree.entity_id().as_u64() as usize,
                            PathBuf::from(&envelope.payload.path).into(),
                            // `None` content clears the settings for this path.
                            envelope.payload.content.as_ref().map(String::as_str),
                            cx,
                        )
                        .log_err();
                });
            }
            Ok(())
        })?
    }
6792
    /// RPC handler: creates a file or directory in a local worktree on behalf
    /// of a remote collaborator and returns the resulting entry.
    async fn handle_create_project_entry(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::CreateProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let worktree = this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.worktree_for_id(worktree_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        // Capture the scan id before mutating, so the caller can order this
        // response against worktree updates.
        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                let path = PathBuf::from(envelope.payload.path);
                worktree.create_entry(path, envelope.payload.is_directory, cx)
            })?
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
6817
    /// RPC handler: renames a worktree entry on behalf of a remote
    /// collaborator and returns the renamed entry.
    async fn handle_rename_project_entry(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::RenameProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        // Capture the scan id before mutating, so the caller can order this
        // response against worktree updates.
        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(envelope.payload.new_path);
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })??
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
6845
    /// RPC handler: copies a worktree entry to a new path on behalf of a
    /// remote collaborator and returns the new entry.
    async fn handle_copy_project_entry(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::CopyProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        // Capture the scan id before mutating, so the caller can order this
        // response against worktree updates.
        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
        let entry = worktree
            .update(&mut cx, |worktree, cx| {
                let new_path = PathBuf::from(envelope.payload.new_path);
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })??
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: Some((&entry).into()),
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
6873
    /// RPC handler: deletes a worktree entry on behalf of a remote
    /// collaborator.
    async fn handle_delete_project_entry(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::DeleteProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);

        // The event is emitted before the deletion is validated or performed,
        // so listeners fire even when the entry turns out to be invalid below.
        this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)));

        let worktree = this.update(&mut cx, |this, cx| {
            this.worktree_for_entry(entry_id, cx)
                .ok_or_else(|| anyhow!("worktree not found"))
        })??;
        // Capture the scan id before mutating, so the caller can order this
        // response against worktree updates.
        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .delete_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })??
            .await?;
        Ok(proto::ProjectEntryResponse {
            entry: None,
            worktree_scan_id: worktree_scan_id as u64,
        })
    }
6903
    /// RPC handler: expands (loads the children of) a directory entry on
    /// behalf of a remote collaborator, returning the scan id after expansion.
    async fn handle_expand_project_entry(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::ExpandProjectEntry>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ExpandProjectEntryResponse> {
        let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
        let worktree = this
            .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
            .ok_or_else(|| anyhow!("invalid request"))?;
        worktree
            .update(&mut cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .expand_entry(entry_id, cx)
                    .ok_or_else(|| anyhow!("invalid entry"))
            })??
            .await?;
        // Unlike the mutation handlers above, the scan id is read *after* the
        // expansion completes.
        let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
        Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
    }
6926
    /// RPC handler: stores a per-path diagnostic summary received from the
    /// host on the matching remote worktree and notifies subscribers.
    async fn handle_update_diagnostic_summary(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated {
                        language_server_id: LanguageServerId(summary.language_server_id as usize),
                        path: project_path,
                    });
                }
            }
            Ok(())
        })?
    }
6956
6957 async fn handle_start_language_server(
6958 this: Handle<Self>,
6959 envelope: TypedEnvelope<proto::StartLanguageServer>,
6960 _: Arc<Client>,
6961 mut cx: AsyncAppContext,
6962 ) -> Result<()> {
6963 let server = envelope
6964 .payload
6965 .server
6966 .ok_or_else(|| anyhow!("invalid server"))?;
6967 this.update(&mut cx, |this, cx| {
6968 this.language_server_statuses.insert(
6969 LanguageServerId(server.id as usize),
6970 LanguageServerStatus {
6971 name: server.name,
6972 pending_work: Default::default(),
6973 has_pending_diagnostic_updates: false,
6974 progress_tokens: Default::default(),
6975 },
6976 );
6977 cx.notify();
6978 });
6979 Ok(())
6980 }
6981
    /// RPC handler: routes a language-server status update from the host to
    /// the matching local bookkeeping method (work progress or disk-based
    /// diagnostics state).
    async fn handle_update_language_server(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::UpdateLanguageServer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);

            match envelope
                .payload
                .variant
                .ok_or_else(|| anyhow!("invalid variant"))?
            {
                // A new unit of server work began (identified by its token).
                proto::update_language_server::Variant::WorkStart(payload) => {
                    this.on_lsp_work_start(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                }

                // An existing unit of work reported progress.
                proto::update_language_server::Variant::WorkProgress(payload) => {
                    this.on_lsp_work_progress(
                        language_server_id,
                        payload.token,
                        LanguageServerProgress {
                            message: payload.message,
                            percentage: payload.percentage.map(|p| p as usize),
                            last_update_at: Instant::now(),
                        },
                        cx,
                    );
                }

                proto::update_language_server::Variant::WorkEnd(payload) => {
                    this.on_lsp_work_end(language_server_id, payload.token, cx);
                }

                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
                    this.disk_based_diagnostics_started(language_server_id, cx);
                }

                proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
                    this.disk_based_diagnostics_finished(language_server_id, cx)
                }
            }

            Ok(())
        })?
    }
7038
    /// RPC handler: applies remote buffer operations. Operations for buffers
    /// that aren't open yet are queued so they can be applied once the buffer
    /// arrives (only valid on guests, where buffers stream in from the host).
    async fn handle_update_buffer(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::UpdateBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::Ack> {
        this.update(&mut cx, |this, cx| {
            // NOTE(review): this clones the whole payload (including all
            // operations) even though only `envelope.original_sender_id` is
            // needed afterwards — presumably to avoid a partial move; confirm.
            let payload = envelope.payload.clone();
            let buffer_id = payload.buffer_id;
            let ops = payload
                .operations
                .into_iter()
                .map(language2::proto::deserialize_operation)
                .collect::<Result<Vec<_>, _>>()?;
            let is_remote = this.is_remote();
            match this.opened_buffers.entry(buffer_id) {
                hash_map::Entry::Occupied(mut e) => match e.get_mut() {
                    // Fully open buffer: apply the operations directly.
                    OpenBuffer::Strong(buffer) => {
                        buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
                    }
                    // Buffer still loading: queue the operations for later.
                    OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
                    // Weakly-held buffer that was dropped: nothing to do.
                    OpenBuffer::Weak(_) => {}
                },
                hash_map::Entry::Vacant(e) => {
                    // Only guests may receive operations for unknown buffers.
                    assert!(
                        is_remote,
                        "received buffer update from {:?}",
                        envelope.original_sender_id
                    );
                    e.insert(OpenBuffer::Operations(ops));
                }
            }
            Ok(proto::Ack {})
        })?
    }
7074
    /// RPC handler: receives a buffer streamed from the host. The buffer
    /// arrives as an initial `State` message followed by zero or more `Chunk`
    /// messages of operations; after the final chunk it is registered as open.
    async fn handle_create_buffer_for_peer(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            match envelope
                .payload
                .variant
                .ok_or_else(|| anyhow!("missing variant"))?
            {
                // Initial buffer state: construct the buffer and park it in
                // `incomplete_remote_buffers` until all chunks have arrived.
                proto::create_buffer_for_peer::Variant::State(mut state) => {
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = WorktreeId::from_proto(file.worktree_id);
                        let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                            anyhow!("no worktree found for id {}", file.worktree_id)
                        })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language2::File>);
                    }

                    let buffer_id = state.id;
                    let buffer = cx.entity(|_| {
                        Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
                    });
                    this.incomplete_remote_buffers
                        .insert(buffer_id, Some(buffer));
                }
                // Follow-up chunk: apply its operations to the incomplete
                // buffer; the last chunk promotes it to a registered buffer.
                proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                    let buffer = this
                        .incomplete_remote_buffers
                        .get(&chunk.buffer_id)
                        .cloned()
                        .flatten()
                        .ok_or_else(|| {
                            anyhow!(
                                "received chunk for buffer {} without initial state",
                                chunk.buffer_id
                            )
                        })?;
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language2::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;

                    if chunk.is_last {
                        this.incomplete_remote_buffers.remove(&chunk.buffer_id);
                        this.register_buffer(&buffer, cx)?;
                    }
                }
            }

            Ok(())
        })?
    }
7134
7135 async fn handle_update_diff_base(
7136 this: Handle<Self>,
7137 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7138 _: Arc<Client>,
7139 mut cx: AsyncAppContext,
7140 ) -> Result<()> {
7141 this.update(&mut cx, |this, cx| {
7142 let buffer_id = envelope.payload.buffer_id;
7143 let diff_base = envelope.payload.diff_base;
7144 if let Some(buffer) = this
7145 .opened_buffers
7146 .get_mut(&buffer_id)
7147 .and_then(|b| b.upgrade())
7148 .or_else(|| {
7149 this.incomplete_remote_buffers
7150 .get(&buffer_id)
7151 .cloned()
7152 .flatten()
7153 })
7154 {
7155 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7156 }
7157 Ok(())
7158 })?
7159 }
7160
    /// RPC handler: a buffer's underlying file changed on the host (e.g. it
    /// was renamed or saved-as). Updates the buffer's file and re-runs
    /// language detection, since the new path may imply a different language.
    async fn handle_update_buffer_file(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::UpdateBufferFile>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        let buffer_id = envelope.payload.buffer_id;

        this.update(&mut cx, |this, cx| {
            let payload = envelope.payload.clone();
            // The buffer may be fully open or still streaming in from the host.
            if let Some(buffer) = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|b| b.upgrade())
                .or_else(|| {
                    this.incomplete_remote_buffers
                        .get(&buffer_id)
                        .cloned()
                        .flatten()
                })
            {
                let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
                let worktree = this
                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
                    .ok_or_else(|| anyhow!("no such worktree"))?;
                let file = File::from_proto(file, worktree, cx)?;
                buffer.update(cx, |buffer, cx| {
                    buffer.file_updated(Arc::new(file), cx).detach();
                });
                this.detect_language_for_buffer(&buffer, cx);
            }
            Ok(())
        })?
    }
7195
    /// RPC handler: saves a buffer on behalf of a remote collaborator. Waits
    /// until the buffer has caught up to the requester's version before
    /// saving, then reports the saved version, mtime and fingerprint.
    async fn handle_save_buffer(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            anyhow::Ok((project_id, buffer))
        })??;
        // Don't save until this replica has seen everything the requester has.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
            })?
            .await?;
        let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;

        this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
            .await?;
        Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id,
            version: serialize_version(buffer.saved_version()),
            mtime: Some(buffer.saved_mtime().into()),
            fingerprint: language2::proto::serialize_fingerprint(
                buffer.saved_version_fingerprint(),
            ),
        })?)
    }
7231
    /// RPC handler: reloads a set of buffers from disk on behalf of a remote
    /// collaborator and returns the resulting transaction, serialized for the
    /// original requester.
    async fn handle_reload_buffers(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let reload = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade())
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the requester records the transaction
            // on their side via the returned project transaction.
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })??;

        let project_transaction = reload.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        })?;
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
7260
    /// RPC handler: a reconnecting guest asks to resynchronize its buffers.
    /// For each buffer the guest reports, this re-registers it as shared,
    /// returns our version, and pushes file/diff-base/save-state updates plus
    /// any operations the guest is missing.
    async fn handle_synchronize_buffers(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };

        this.update(&mut cx, |this, cx| {
            let Some(guest_id) = envelope.original_sender_id else {
                error!("missing original_sender_id on SynchronizeBuffers request");
                return;
            };

            // Rebuild the guest's shared-buffer set from scratch based on what
            // they report having open.
            this.shared_buffers.entry(guest_id).or_default().clear();
            for buffer in envelope.payload.buffers {
                let buffer_id = buffer.id;
                let remote_version = language2::proto::deserialize_version(&buffer.version);
                if let Some(buffer) = this.buffer_for_id(buffer_id) {
                    this.shared_buffers
                        .entry(guest_id)
                        .or_default()
                        .insert(buffer_id);

                    let buffer = buffer.read(cx);
                    response.buffers.push(proto::BufferVersion {
                        id: buffer_id,
                        version: language2::proto::serialize_version(&buffer.version),
                    });

                    // Serialize only the operations the guest hasn't seen yet.
                    let operations = buffer.serialize_ops(Some(remote_version), cx);
                    let client = this.client.clone();
                    if let Some(file) = buffer.file() {
                        client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id as u64,
                                file: Some(file.to_proto()),
                            })
                            .log_err();
                    }

                    client
                        .send(proto::UpdateDiffBase {
                            project_id,
                            buffer_id: buffer_id as u64,
                            diff_base: buffer.diff_base().map(Into::into),
                        })
                        .log_err();

                    client
                        .send(proto::BufferReloaded {
                            project_id,
                            buffer_id,
                            version: language2::proto::serialize_version(buffer.saved_version()),
                            mtime: Some(buffer.saved_mtime().into()),
                            fingerprint: language2::proto::serialize_fingerprint(
                                buffer.saved_version_fingerprint(),
                            ),
                            line_ending: language2::proto::serialize_line_ending(
                                buffer.line_ending(),
                            ) as i32,
                        })
                        .log_err();

                    // Send the missing operations in the background, split
                    // into chunks to respect message size limits.
                    cx.executor()
                        .spawn(
                            async move {
                                let operations = operations.await;
                                for chunk in split_operations(operations) {
                                    client
                                        .request(proto::UpdateBuffer {
                                            project_id,
                                            buffer_id,
                                            operations: chunk,
                                        })
                                        .await?;
                                }
                                anyhow::Ok(())
                            }
                            .log_err(),
                        )
                        .detach();
                }
            }
        });

        Ok(response)
    }
7353
    /// RPC handler: formats a set of buffers on behalf of a remote
    /// collaborator and returns the resulting transaction, serialized for the
    /// original requester.
    async fn handle_format_buffers(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .and_then(|buffer| buffer.upgrade())
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
            // `push_to_history: false` — the requester records the transaction
            // on their side via the returned project transaction.
            Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
        })??;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        })?;
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
7383
    /// RPC handler: applies a completion's additional (non-primary) edits to a
    /// buffer on behalf of a remote collaborator and returns the resulting
    /// transaction, if any.
    async fn handle_apply_additional_edits_for_completion(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let (buffer, completion) = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // The buffer's language informs deserialization of the completion.
            let language = buffer.read(cx).language();
            let completion = language2::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language.cloned(),
            );
            Ok::<_, anyhow::Error>((buffer, completion))
        })??;

        let completion = completion.await?;

        // `push_to_history: false` — the requester records the transaction on
        // their side via the returned serialized transaction.
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            this.apply_additional_edits_for_completion(buffer, completion, false, cx)
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language2::proto::serialize_transaction),
        })
    }
7420
    /// RPC handler: applies an LSP code action to a buffer on behalf of a
    /// remote collaborator and returns the resulting project transaction,
    /// serialized for the original requester.
    async fn handle_apply_code_action(
        this: Handle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language2::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .and_then(|buffer| buffer.upgrade())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `push_to_history: false` — the requester records the transaction
            // on their side via the returned project transaction.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })??;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        })?;
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
7451
7452 async fn handle_on_type_formatting(
7453 this: Handle<Self>,
7454 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7455 _: Arc<Client>,
7456 mut cx: AsyncAppContext,
7457 ) -> Result<proto::OnTypeFormattingResponse> {
7458 let on_type_formatting = this.update(&mut cx, |this, cx| {
7459 let buffer = this
7460 .opened_buffers
7461 .get(&envelope.payload.buffer_id)
7462 .and_then(|buffer| buffer.upgrade())
7463 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7464 let position = envelope
7465 .payload
7466 .position
7467 .and_then(deserialize_anchor)
7468 .ok_or_else(|| anyhow!("invalid position"))?;
7469 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7470 buffer,
7471 position,
7472 envelope.payload.trigger.clone(),
7473 cx,
7474 ))
7475 })??;
7476
7477 let transaction = on_type_formatting
7478 .await?
7479 .as_ref()
7480 .map(language2::proto::serialize_transaction);
7481 Ok(proto::OnTypeFormattingResponse { transaction })
7482 }
7483
7484 async fn handle_inlay_hints(
7485 this: Handle<Self>,
7486 envelope: TypedEnvelope<proto::InlayHints>,
7487 _: Arc<Client>,
7488 mut cx: AsyncAppContext,
7489 ) -> Result<proto::InlayHintsResponse> {
7490 let sender_id = envelope.original_sender_id()?;
7491 let buffer = this.update(&mut cx, |this, _| {
7492 this.opened_buffers
7493 .get(&envelope.payload.buffer_id)
7494 .and_then(|buffer| buffer.upgrade())
7495 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7496 })??;
7497 let buffer_version = deserialize_version(&envelope.payload.version);
7498
7499 buffer
7500 .update(&mut cx, |buffer, _| {
7501 buffer.wait_for_version(buffer_version.clone())
7502 })?
7503 .await
7504 .with_context(|| {
7505 format!(
7506 "waiting for version {:?} for buffer {}",
7507 buffer_version,
7508 buffer.entity_id()
7509 )
7510 })?;
7511
7512 let start = envelope
7513 .payload
7514 .start
7515 .and_then(deserialize_anchor)
7516 .context("missing range start")?;
7517 let end = envelope
7518 .payload
7519 .end
7520 .and_then(deserialize_anchor)
7521 .context("missing range end")?;
7522 let buffer_hints = this
7523 .update(&mut cx, |project, cx| {
7524 project.inlay_hints(buffer, start..end, cx)
7525 })?
7526 .await
7527 .context("inlay hints fetch")?;
7528
7529 Ok(this.update(&mut cx, |project, cx| {
7530 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7531 })?)
7532 }
7533
7534 async fn handle_resolve_inlay_hint(
7535 this: Handle<Self>,
7536 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7537 _: Arc<Client>,
7538 mut cx: AsyncAppContext,
7539 ) -> Result<proto::ResolveInlayHintResponse> {
7540 let proto_hint = envelope
7541 .payload
7542 .hint
7543 .expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
7544 let hint = InlayHints::proto_to_project_hint(proto_hint)
7545 .context("resolved proto inlay hint conversion")?;
7546 let buffer = this.update(&mut cx, |this, _cx| {
7547 this.opened_buffers
7548 .get(&envelope.payload.buffer_id)
7549 .and_then(|buffer| buffer.upgrade())
7550 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7551 })??;
7552 let response_hint = this
7553 .update(&mut cx, |project, cx| {
7554 project.resolve_inlay_hint(
7555 hint,
7556 buffer,
7557 LanguageServerId(envelope.payload.language_server_id as usize),
7558 cx,
7559 )
7560 })?
7561 .await
7562 .context("inlay hints fetch")?;
7563 Ok(proto::ResolveInlayHintResponse {
7564 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7565 })
7566 }
7567
7568 async fn handle_refresh_inlay_hints(
7569 this: Handle<Self>,
7570 _: TypedEnvelope<proto::RefreshInlayHints>,
7571 _: Arc<Client>,
7572 mut cx: AsyncAppContext,
7573 ) -> Result<proto::Ack> {
7574 this.update(&mut cx, |_, cx| {
7575 cx.emit(Event::RefreshInlayHints);
7576 })?;
7577 Ok(proto::Ack {})
7578 }
7579
    /// Generic remote-request handler for any LSP-backed command `T`:
    /// deserializes the request, runs it against the buffer's primary language
    /// server, and serializes the response back for the original sender.
    async fn handle_lsp_command<T: LspCommand>(
        this: Handle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp2::request::Request>::Params: Send,
        <T::LspRequest as lsp2::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let buffer_id = T::buffer_id_from_proto(&envelope.payload);
        let buffer_handle = this.update(&mut cx, |this, _cx| {
            this.opened_buffers
                .get(&buffer_id)
                .and_then(|buffer| buffer.upgrade())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
        })??;
        // `from_proto` may itself await (e.g. waiting for a buffer version),
        // so it runs outside of any model update.
        let request = T::from_proto(
            envelope.payload,
            this.clone(),
            buffer_handle.clone(),
            cx.clone(),
        )
        .await?;
        // Capture the version before the request runs so the response can be
        // interpreted against the buffer state it was computed for.
        let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
        let response = this
            .update(&mut cx, |this, cx| {
                this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
            })?
            .await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })?
    }
7621
7622 async fn handle_get_project_symbols(
7623 this: Handle<Self>,
7624 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7625 _: Arc<Client>,
7626 mut cx: AsyncAppContext,
7627 ) -> Result<proto::GetProjectSymbolsResponse> {
7628 let symbols = this
7629 .update(&mut cx, |this, cx| {
7630 this.symbols(&envelope.payload.query, cx)
7631 })?
7632 .await?;
7633
7634 Ok(proto::GetProjectSymbolsResponse {
7635 symbols: symbols.iter().map(serialize_symbol).collect(),
7636 })
7637 }
7638
7639 async fn handle_search_project(
7640 this: Handle<Self>,
7641 envelope: TypedEnvelope<proto::SearchProject>,
7642 _: Arc<Client>,
7643 mut cx: AsyncAppContext,
7644 ) -> Result<proto::SearchProjectResponse> {
7645 let peer_id = envelope.original_sender_id()?;
7646 let query = SearchQuery::from_proto(envelope.payload)?;
7647 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7648
7649 cx.spawn(move |mut cx| async move {
7650 let mut locations = Vec::new();
7651 while let Some((buffer, ranges)) = result.next().await {
7652 for range in ranges {
7653 let start = serialize_anchor(&range.start);
7654 let end = serialize_anchor(&range.end);
7655 let buffer_id = this.update(&mut cx, |this, cx| {
7656 this.create_buffer_for_peer(&buffer, peer_id, cx)
7657 })?;
7658 locations.push(proto::Location {
7659 buffer_id,
7660 start: Some(start),
7661 end: Some(end),
7662 });
7663 }
7664 }
7665 Ok(proto::SearchProjectResponse { locations })
7666 })
7667 .await
7668 }
7669
7670 async fn handle_open_buffer_for_symbol(
7671 this: Handle<Self>,
7672 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7673 _: Arc<Client>,
7674 mut cx: AsyncAppContext,
7675 ) -> Result<proto::OpenBufferForSymbolResponse> {
7676 let peer_id = envelope.original_sender_id()?;
7677 let symbol = envelope
7678 .payload
7679 .symbol
7680 .ok_or_else(|| anyhow!("invalid symbol"))?;
7681 let symbol = this
7682 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
7683 .await?;
7684 let symbol = this.update(&mut cx, |this, _| {
7685 let signature = this.symbol_signature(&symbol.path);
7686 if signature == symbol.signature {
7687 Ok(symbol)
7688 } else {
7689 Err(anyhow!("invalid symbol signature"))
7690 }
7691 })??;
7692 let buffer = this
7693 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
7694 .await?;
7695
7696 Ok(proto::OpenBufferForSymbolResponse {
7697 buffer_id: this.update(&mut cx, |this, cx| {
7698 this.create_buffer_for_peer(&buffer, peer_id, cx)
7699 })?,
7700 })
7701 }
7702
    /// Produces a keyed SHA-256 digest of a project path so that symbols
    /// handed to peers can later be verified as originating from this project.
    /// The hash input order (worktree id, path, nonce) is load-bearing: both
    /// signing and verification must agree on it.
    fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
        let mut hasher = Sha256::new();
        hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
        hasher.update(project_path.path.to_string_lossy().as_bytes());
        // The per-project nonce prevents signatures from being replayed
        // across projects.
        hasher.update(self.nonce.to_be_bytes());
        hasher.finalize().as_slice().try_into().unwrap()
    }
7710
7711 async fn handle_open_buffer_by_id(
7712 this: Handle<Self>,
7713 envelope: TypedEnvelope<proto::OpenBufferById>,
7714 _: Arc<Client>,
7715 mut cx: AsyncAppContext,
7716 ) -> Result<proto::OpenBufferResponse> {
7717 let peer_id = envelope.original_sender_id()?;
7718 let buffer = this
7719 .update(&mut cx, |this, cx| {
7720 this.open_buffer_by_id(envelope.payload.id, cx)
7721 })?
7722 .await?;
7723 this.update(&mut cx, |this, cx| {
7724 Ok(proto::OpenBufferResponse {
7725 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7726 })
7727 })?
7728 }
7729
7730 async fn handle_open_buffer_by_path(
7731 this: Handle<Self>,
7732 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7733 _: Arc<Client>,
7734 mut cx: AsyncAppContext,
7735 ) -> Result<proto::OpenBufferResponse> {
7736 let peer_id = envelope.original_sender_id()?;
7737 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7738 let open_buffer = this.update(&mut cx, |this, cx| {
7739 this.open_buffer(
7740 ProjectPath {
7741 worktree_id,
7742 path: PathBuf::from(envelope.payload.path).into(),
7743 },
7744 cx,
7745 )
7746 })?;
7747
7748 let buffer = open_buffer.await?;
7749 this.update(&mut cx, |this, cx| {
7750 Ok(proto::OpenBufferResponse {
7751 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7752 })
7753 })?
7754 }
7755
7756 fn serialize_project_transaction_for_peer(
7757 &mut self,
7758 project_transaction: ProjectTransaction,
7759 peer_id: proto::PeerId,
7760 cx: &mut AppContext,
7761 ) -> proto::ProjectTransaction {
7762 let mut serialized_transaction = proto::ProjectTransaction {
7763 buffer_ids: Default::default(),
7764 transactions: Default::default(),
7765 };
7766 for (buffer, transaction) in project_transaction.0 {
7767 serialized_transaction
7768 .buffer_ids
7769 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7770 serialized_transaction
7771 .transactions
7772 .push(language2::proto::serialize_transaction(&transaction));
7773 }
7774 serialized_transaction
7775 }
7776
7777 fn deserialize_project_transaction(
7778 &mut self,
7779 message: proto::ProjectTransaction,
7780 push_to_history: bool,
7781 cx: &mut ModelContext<Self>,
7782 ) -> Task<Result<ProjectTransaction>> {
7783 cx.spawn(move |this, mut cx| async move {
7784 let mut project_transaction = ProjectTransaction::default();
7785 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
7786 {
7787 let buffer = this
7788 .update(&mut cx, |this, cx| {
7789 this.wait_for_remote_buffer(buffer_id, cx)
7790 })?
7791 .await?;
7792 let transaction = language2::proto::deserialize_transaction(transaction)?;
7793 project_transaction.0.insert(buffer, transaction);
7794 }
7795
7796 for (buffer, transaction) in &project_transaction.0 {
7797 buffer
7798 .update(&mut cx, |buffer, _| {
7799 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
7800 })?
7801 .await?;
7802
7803 if push_to_history {
7804 buffer.update(&mut cx, |buffer, _| {
7805 buffer.push_transaction(transaction.clone(), Instant::now());
7806 });
7807 }
7808 }
7809
7810 Ok(project_transaction)
7811 })
7812 }
7813
7814 fn create_buffer_for_peer(
7815 &mut self,
7816 buffer: &Handle<Buffer>,
7817 peer_id: proto::PeerId,
7818 cx: &mut AppContext,
7819 ) -> u64 {
7820 let buffer_id = buffer.read(cx).remote_id();
7821 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
7822 updates_tx
7823 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7824 .ok();
7825 }
7826 buffer_id
7827 }
7828
    /// Returns a task that resolves once the remote buffer with the given id
    /// has been fully received from the host.
    ///
    /// The task re-checks `opened_buffers` each time the `opened_buffer`
    /// channel signals, registering the id in `incomplete_remote_buffers` so
    /// that incoming buffer chunks have somewhere to accumulate. It fails if
    /// the project is dropped or becomes read-only (disconnected) while
    /// waiting.
    fn wait_for_remote_buffer(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Handle<Buffer>>> {
        let mut opened_buffer_rx = self.opened_buffer.1.clone();

        cx.spawn(move |this, mut cx| async move {
            let buffer = loop {
                let Some(this) = this.upgrade() else {
                    return Err(anyhow!("project dropped"));
                };

                let buffer = this.update(&mut cx, |this, _cx| {
                    this.opened_buffers
                        .get(&id)
                        .and_then(|buffer| buffer.upgrade())
                })?;

                if let Some(buffer) = buffer {
                    break buffer;
                } else if this.update(&mut cx, |this, _| this.is_read_only())? {
                    return Err(anyhow!("disconnected before buffer {} could be opened", id));
                }

                this.update(&mut cx, |this, _| {
                    this.incomplete_remote_buffers.entry(id).or_default();
                })?;
                // Release the strong handle before awaiting so this loop does
                // not keep the project alive on its own.
                drop(this);

                opened_buffer_rx
                    .next()
                    .await
                    .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
            };

            Ok(buffer)
        })
    }
7868
    /// Reconciles this remote project's open buffers with the host after a
    /// reconnect: reports local buffer versions, pushes any operations the
    /// host is missing, and re-requests buffers that never finished arriving.
    /// Errors immediately for local projects or when sharing has stopped.
    fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id = match self.client_state.as_ref() {
            Some(ProjectClientState::Remote {
                sharing_has_stopped,
                remote_id,
                ..
            }) => {
                if *sharing_has_stopped {
                    return Task::ready(Err(anyhow!(
                        "can't synchronize remote buffers on a readonly project"
                    )));
                } else {
                    *remote_id
                }
            }
            Some(ProjectClientState::Local { .. }) | None => {
                return Task::ready(Err(anyhow!(
                    "can't synchronize remote buffers on a local project"
                )))
            }
        };

        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            // Snapshot the versions of all live buffers, plus the ids of
            // buffers that were still being transmitted when we disconnected.
            let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
                let buffers = this
                    .opened_buffers
                    .iter()
                    .filter_map(|(id, buffer)| {
                        let buffer = buffer.upgrade()?;
                        Some(proto::BufferVersion {
                            id: *id,
                            version: language2::proto::serialize_version(&buffer.read(cx).version),
                        })
                    })
                    .collect();
                let incomplete_buffer_ids = this
                    .incomplete_remote_buffers
                    .keys()
                    .copied()
                    .collect::<Vec<_>>();

                (buffers, incomplete_buffer_ids)
            })?;
            let response = client
                .request(proto::SynchronizeBuffers {
                    project_id,
                    buffers,
                })
                .await?;

            // For each buffer the host reports, send any operations it is
            // missing relative to our replica, split into bounded-size chunks.
            let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
                response
                    .buffers
                    .into_iter()
                    .map(|buffer| {
                        let client = client.clone();
                        let buffer_id = buffer.id;
                        let remote_version = language2::proto::deserialize_version(&buffer.version);
                        if let Some(buffer) = this.buffer_for_id(buffer_id) {
                            let operations =
                                buffer.read(cx).serialize_ops(Some(remote_version), cx);
                            cx.executor().spawn(async move {
                                let operations = operations.await;
                                for chunk in split_operations(operations) {
                                    client
                                        .request(proto::UpdateBuffer {
                                            project_id,
                                            buffer_id,
                                            operations: chunk,
                                        })
                                        .await?;
                                }
                                anyhow::Ok(())
                            })
                        } else {
                            Task::ready(Ok(()))
                        }
                    })
                    .collect::<Vec<_>>()
            })?;

            // Any incomplete buffers have open requests waiting. Request that the host sends
            // creates these buffers for us again to unblock any waiting futures.
            for id in incomplete_buffer_ids {
                cx.executor()
                    .spawn(client.request(proto::OpenBufferById { project_id, id }))
                    .detach();
            }

            // Fail the whole synchronization if any per-buffer update failed.
            futures::future::join_all(send_updates_for_buffers)
                .await
                .into_iter()
                .collect()
        })
    }
7965
7966 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
7967 self.worktrees()
7968 .map(|worktree| {
7969 let worktree = worktree.read(cx);
7970 proto::WorktreeMetadata {
7971 id: worktree.id().to_proto(),
7972 root_name: worktree.root_name().into(),
7973 visible: worktree.is_visible(),
7974 abs_path: worktree.abs_path().to_string_lossy().into(),
7975 }
7976 })
7977 .collect()
7978 }
7979
    /// Replaces this remote project's worktrees with the set described by the
    /// host, reusing existing worktree handles whose ids still match and
    /// emitting `WorktreeRemoved` for the ones that disappeared.
    fn set_worktrees_from_proto(
        &mut self,
        worktrees: Vec<proto::WorktreeMetadata>,
        cx: &mut ModelContext<Project>,
    ) -> Result<()> {
        let replica_id = self.replica_id();
        let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;

        // Index the current worktrees by id; entries still present at the end
        // are the ones the host no longer has.
        let mut old_worktrees_by_id = self
            .worktrees
            .drain(..)
            .filter_map(|worktree| {
                let worktree = worktree.upgrade()?;
                Some((worktree.read(cx).id(), worktree))
            })
            .collect::<HashMap<_, _>>();

        for worktree in worktrees {
            if let Some(old_worktree) =
                old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
            {
                // Keep the existing worktree alive rather than rebuilding it.
                self.worktrees.push(WorktreeHandle::Strong(old_worktree));
            } else {
                let worktree =
                    Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
                let _ = self.add_worktree(&worktree, cx);
            }
        }

        self.metadata_changed(cx);
        for id in old_worktrees_by_id.keys() {
            cx.emit(Event::WorktreeRemoved(*id));
        }

        Ok(())
    }
8016
8017 fn set_collaborators_from_proto(
8018 &mut self,
8019 messages: Vec<proto::Collaborator>,
8020 cx: &mut ModelContext<Self>,
8021 ) -> Result<()> {
8022 let mut collaborators = HashMap::default();
8023 for message in messages {
8024 let collaborator = Collaborator::from_proto(message)?;
8025 collaborators.insert(collaborator.peer_id, collaborator);
8026 }
8027 for old_peer_id in self.collaborators.keys() {
8028 if !collaborators.contains_key(old_peer_id) {
8029 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8030 }
8031 }
8032 self.collaborators = collaborators;
8033 Ok(())
8034 }
8035
    /// Reconstructs a [`Symbol`] from its wire representation, resolving the
    /// language for its path so a syntax-aware label can be produced (falling
    /// back to a plain label when no language matches).
    fn deserialize_symbol(
        &self,
        serialized_symbol: proto::Symbol,
    ) -> impl Future<Output = Result<Symbol>> {
        let languages = self.languages.clone();
        async move {
            let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
            let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
            let start = serialized_symbol
                .start
                .ok_or_else(|| anyhow!("invalid start"))?;
            let end = serialized_symbol
                .end
                .ok_or_else(|| anyhow!("invalid end"))?;
            // NOTE(review): `kind` arrives over the network as a raw integer
            // and is transmuted without validation — an out-of-range value is
            // undefined behavior. Consider a checked conversion; confirm the
            // valid range of the target kind type.
            let kind = unsafe { mem::transmute(serialized_symbol.kind) };
            let path = ProjectPath {
                worktree_id,
                path: PathBuf::from(serialized_symbol.path).into(),
            };
            // Best-effort language detection; errors are logged and treated
            // as "no language".
            let language = languages
                .language_for_file(&path.path, None)
                .await
                .log_err();
            Ok(Symbol {
                language_server_name: LanguageServerName(
                    serialized_symbol.language_server_name.into(),
                ),
                source_worktree_id,
                path,
                label: {
                    match language {
                        Some(language) => {
                            language
                                .label_for_symbol(&serialized_symbol.name, kind)
                                .await
                        }
                        None => None,
                    }
                    .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
                },

                name: serialized_symbol.name,
                range: Unclipped(PointUtf16::new(start.row, start.column))
                    ..Unclipped(PointUtf16::new(end.row, end.column)),
                kind,
                signature: serialized_symbol
                    .signature
                    .try_into()
                    .map_err(|_| anyhow!("invalid signature"))?,
            })
        }
    }
8088
8089 async fn handle_buffer_saved(
8090 this: Handle<Self>,
8091 envelope: TypedEnvelope<proto::BufferSaved>,
8092 _: Arc<Client>,
8093 mut cx: AsyncAppContext,
8094 ) -> Result<()> {
8095 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8096 let version = deserialize_version(&envelope.payload.version);
8097 let mtime = envelope
8098 .payload
8099 .mtime
8100 .ok_or_else(|| anyhow!("missing mtime"))?
8101 .into();
8102
8103 this.update(&mut cx, |this, cx| {
8104 let buffer = this
8105 .opened_buffers
8106 .get(&envelope.payload.buffer_id)
8107 .and_then(|buffer| buffer.upgrade())
8108 .or_else(|| {
8109 this.incomplete_remote_buffers
8110 .get(&envelope.payload.buffer_id)
8111 .and_then(|b| b.clone())
8112 });
8113 if let Some(buffer) = buffer {
8114 buffer.update(cx, |buffer, cx| {
8115 buffer.did_save(version, fingerprint, mtime, cx);
8116 });
8117 }
8118 Ok(())
8119 })?
8120 }
8121
8122 async fn handle_buffer_reloaded(
8123 this: Handle<Self>,
8124 envelope: TypedEnvelope<proto::BufferReloaded>,
8125 _: Arc<Client>,
8126 mut cx: AsyncAppContext,
8127 ) -> Result<()> {
8128 let payload = envelope.payload;
8129 let version = deserialize_version(&payload.version);
8130 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8131 let line_ending = deserialize_line_ending(
8132 proto::LineEnding::from_i32(payload.line_ending)
8133 .ok_or_else(|| anyhow!("missing line ending"))?,
8134 );
8135 let mtime = payload
8136 .mtime
8137 .ok_or_else(|| anyhow!("missing mtime"))?
8138 .into();
8139 this.update(&mut cx, |this, cx| {
8140 let buffer = this
8141 .opened_buffers
8142 .get(&payload.buffer_id)
8143 .and_then(|buffer| buffer.upgrade())
8144 .or_else(|| {
8145 this.incomplete_remote_buffers
8146 .get(&payload.buffer_id)
8147 .cloned()
8148 .flatten()
8149 });
8150 if let Some(buffer) = buffer {
8151 buffer.update(cx, |buffer, cx| {
8152 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8153 });
8154 }
8155 Ok(())
8156 })?
8157 }
8158
    /// Converts a set of LSP text edits into anchored buffer edits against the
    /// snapshot that corresponds to `version` (or the current text when
    /// `version` is `None`).
    ///
    /// Adjacent and newline-separated LSP edits are merged, and multi-line
    /// edits are re-diffed against the old text so anchors in unchanged
    /// regions are preserved.
    #[allow(clippy::type_complexity)]
    fn edits_from_lsp(
        &mut self,
        buffer: &Handle<Buffer>,
        lsp_edits: impl 'static + Send + IntoIterator<Item = lsp2::TextEdit>,
        server_id: LanguageServerId,
        version: Option<i32>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
        let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
        cx.executor().spawn(async move {
            let snapshot = snapshot?;
            // LSP servers may send edits in any order; sort so merging below
            // can assume ascending start positions.
            let mut lsp_edits = lsp_edits
                .into_iter()
                .map(|edit| (range_from_lsp(edit.range), edit.new_text))
                .collect::<Vec<_>>();
            lsp_edits.sort_by_key(|(range, _)| range.start);

            let mut lsp_edits = lsp_edits.into_iter().peekable();
            let mut edits = Vec::new();
            while let Some((range, mut new_text)) = lsp_edits.next() {
                // Clip invalid ranges provided by the language server.
                let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
                    ..snapshot.clip_point_utf16(range.end, Bias::Left);

                // Combine any LSP edits that are adjacent.
                //
                // Also, combine LSP edits that are separated from each other by only
                // a newline. This is important because for some code actions,
                // Rust-analyzer rewrites the entire buffer via a series of edits that
                // are separated by unchanged newline characters.
                //
                // In order for the diffing logic below to work properly, any edits that
                // cancel each other out must be combined into one.
                while let Some((next_range, next_text)) = lsp_edits.peek() {
                    if next_range.start.0 > range.end {
                        if next_range.start.0.row > range.end.row + 1
                            || next_range.start.0.column > 0
                            || snapshot.clip_point_utf16(
                                Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
                                Bias::Left,
                            ) > range.end
                        {
                            break;
                        }
                        new_text.push('\n');
                    }
                    range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
                    new_text.push_str(next_text);
                    lsp_edits.next();
                }

                // For multiline edits, perform a diff of the old and new text so that
                // we can identify the changes more precisely, preserving the locations
                // of any anchors positioned in the unchanged regions.
                if range.end.row > range.start.row {
                    let mut offset = range.start.to_offset(&snapshot);
                    let old_text = snapshot.text_for_range(range).collect::<String>();

                    let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
                    let mut moved_since_edit = true;
                    for change in diff.iter_all_changes() {
                        let tag = change.tag();
                        let value = change.value();
                        match tag {
                            ChangeTag::Equal => {
                                offset += value.len();
                                moved_since_edit = true;
                            }
                            ChangeTag::Delete => {
                                let start = snapshot.anchor_after(offset);
                                let end = snapshot.anchor_before(offset + value.len());
                                if moved_since_edit {
                                    edits.push((start..end, String::new()));
                                } else {
                                    // Extend the previous edit instead of
                                    // starting a new one for consecutive
                                    // deletions.
                                    edits.last_mut().unwrap().0.end = end;
                                }
                                offset += value.len();
                                moved_since_edit = false;
                            }
                            ChangeTag::Insert => {
                                if moved_since_edit {
                                    let anchor = snapshot.anchor_after(offset);
                                    edits.push((anchor..anchor, value.to_string()));
                                } else {
                                    edits.last_mut().unwrap().1.push_str(value);
                                }
                                moved_since_edit = false;
                            }
                        }
                    }
                } else if range.end == range.start {
                    // Pure insertion.
                    let anchor = snapshot.anchor_after(range.start);
                    edits.push((anchor..anchor, new_text));
                } else {
                    // Single-line replacement.
                    let edit_start = snapshot.anchor_after(range.start);
                    let edit_end = snapshot.anchor_before(range.end);
                    edits.push((edit_start..edit_end, new_text));
                }
            }

            Ok(edits)
        })
    }
8263
8264 fn buffer_snapshot_for_lsp_version(
8265 &mut self,
8266 buffer: &Handle<Buffer>,
8267 server_id: LanguageServerId,
8268 version: Option<i32>,
8269 cx: &AppContext,
8270 ) -> Result<TextBufferSnapshot> {
8271 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8272
8273 if let Some(version) = version {
8274 let buffer_id = buffer.read(cx).remote_id();
8275 let snapshots = self
8276 .buffer_snapshots
8277 .get_mut(&buffer_id)
8278 .and_then(|m| m.get_mut(&server_id))
8279 .ok_or_else(|| {
8280 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8281 })?;
8282
8283 let found_snapshot = snapshots
8284 .binary_search_by_key(&version, |e| e.version)
8285 .map(|ix| snapshots[ix].snapshot.clone())
8286 .map_err(|_| {
8287 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8288 })?;
8289
8290 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8291 Ok(found_snapshot)
8292 } else {
8293 Ok((buffer.read(cx)).text_snapshot())
8294 }
8295 }
8296
8297 pub fn language_servers(
8298 &self,
8299 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8300 self.language_server_ids
8301 .iter()
8302 .map(|((worktree_id, server_name), server_id)| {
8303 (*server_id, server_name.clone(), *worktree_id)
8304 })
8305 }
8306
    /// Iterates over the supplementary language servers (those registered via
    /// `supplementary_language_servers`, e.g. prettier instances), keyed by
    /// server id, alongside their display names.
    pub fn supplementary_language_servers(
        &self,
    ) -> impl '_
           + Iterator<
        Item = (
            &LanguageServerId,
            &(LanguageServerName, Arc<LanguageServer>),
        ),
    > {
        self.supplementary_language_servers.iter()
    }
8318
8319 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8320 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8321 Some(server.clone())
8322 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8323 Some(Arc::clone(server))
8324 } else {
8325 None
8326 }
8327 }
8328
8329 pub fn language_servers_for_buffer(
8330 &self,
8331 buffer: &Buffer,
8332 cx: &AppContext,
8333 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8334 self.language_server_ids_for_buffer(buffer, cx)
8335 .into_iter()
8336 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8337 LanguageServerState::Running {
8338 adapter, server, ..
8339 } => Some((adapter, server)),
8340 _ => None,
8341 })
8342 }
8343
    /// Returns the first running adapter/server pair for the buffer — the
    /// "primary" server that single-server queries are routed to.
    fn primary_language_server_for_buffer(
        &self,
        buffer: &Buffer,
        cx: &AppContext,
    ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
        self.language_servers_for_buffer(buffer, cx).next()
    }
8351
8352 pub fn language_server_for_buffer(
8353 &self,
8354 buffer: &Buffer,
8355 server_id: LanguageServerId,
8356 cx: &AppContext,
8357 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8358 self.language_servers_for_buffer(buffer, cx)
8359 .find(|(_, s)| s.server_id() == server_id)
8360 }
8361
8362 fn language_server_ids_for_buffer(
8363 &self,
8364 buffer: &Buffer,
8365 cx: &AppContext,
8366 ) -> Vec<LanguageServerId> {
8367 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8368 let worktree_id = file.worktree_id(cx);
8369 language
8370 .lsp_adapters()
8371 .iter()
8372 .flat_map(|adapter| {
8373 let key = (worktree_id, adapter.name.clone());
8374 self.language_server_ids.get(&key).copied()
8375 })
8376 .collect()
8377 } else {
8378 Vec::new()
8379 }
8380 }
8381
8382 fn prettier_instance_for_buffer(
8383 &mut self,
8384 buffer: &Handle<Buffer>,
8385 cx: &mut ModelContext<Self>,
8386 ) -> Task<Option<Shared<Task<Result<Arc<Prettier>, Arc<anyhow::Error>>>>>> {
8387 let buffer = buffer.read(cx);
8388 let buffer_file = buffer.file();
8389 let Some(buffer_language) = buffer.language() else {
8390 return Task::ready(None);
8391 };
8392 if !buffer_language
8393 .lsp_adapters()
8394 .iter()
8395 .flat_map(|adapter| adapter.enabled_formatters())
8396 .any(|formatter| matches!(formatter, BundledFormatter::Prettier { .. }))
8397 {
8398 return Task::ready(None);
8399 }
8400
8401 let buffer_file = File::from_dyn(buffer_file);
8402 let buffer_path = buffer_file.map(|file| Arc::clone(file.path()));
8403 let worktree_path = buffer_file
8404 .as_ref()
8405 .and_then(|file| Some(file.worktree.read(cx).abs_path()));
8406 let worktree_id = buffer_file.map(|file| file.worktree_id(cx));
8407 if self.is_local() || worktree_id.is_none() || worktree_path.is_none() {
8408 let Some(node) = self.node.as_ref().map(Arc::clone) else {
8409 return Task::ready(None);
8410 };
8411 let fs = self.fs.clone();
8412 cx.spawn(move |this, mut cx| async move {
8413 let prettier_dir = match cx
8414 .executor()
8415 .spawn(Prettier::locate(
8416 worktree_path.zip(buffer_path).map(
8417 |(worktree_root_path, starting_path)| LocateStart {
8418 worktree_root_path,
8419 starting_path,
8420 },
8421 ),
8422 fs,
8423 ))
8424 .await
8425 {
8426 Ok(path) => path,
8427 Err(e) => {
8428 return Some(
8429 Task::ready(Err(Arc::new(e.context(
8430 "determining prettier path for worktree {worktree_path:?}",
8431 ))))
8432 .shared(),
8433 );
8434 }
8435 };
8436
8437 if let Some(existing_prettier) = this
8438 .update(&mut cx, |project, _| {
8439 project
8440 .prettier_instances
8441 .get(&(worktree_id, prettier_dir.clone()))
8442 .cloned()
8443 })
8444 .ok()
8445 .flatten()
8446 {
8447 return Some(existing_prettier);
8448 }
8449
8450 log::info!("Found prettier in {prettier_dir:?}, starting.");
8451 let task_prettier_dir = prettier_dir.clone();
8452 let new_prettier_task = cx
8453 .spawn({
8454 let this = this.clone();
8455 move |mut cx| async move {
8456 let new_server_id = this.update(&mut cx, |this, _| {
8457 this.languages.next_language_server_id()
8458 })?;
8459 let prettier = Prettier::start(
8460 worktree_id.map(|id| id.to_usize()),
8461 new_server_id,
8462 task_prettier_dir,
8463 node,
8464 cx.clone(),
8465 )
8466 .await
8467 .context("prettier start")
8468 .map_err(Arc::new)?;
8469 log::info!("Started prettier in {:?}", prettier.prettier_dir());
8470
8471 if let Some(prettier_server) = prettier.server() {
8472 this.update(&mut cx, |project, cx| {
8473 let name = if prettier.is_default() {
8474 LanguageServerName(Arc::from("prettier (default)"))
8475 } else {
8476 let prettier_dir = prettier.prettier_dir();
8477 let worktree_path = prettier
8478 .worktree_id()
8479 .map(WorktreeId::from_usize)
8480 .and_then(|id| project.worktree_for_id(id, cx))
8481 .map(|worktree| worktree.read(cx).abs_path());
8482 match worktree_path {
8483 Some(worktree_path) => {
8484 if worktree_path.as_ref() == prettier_dir {
8485 LanguageServerName(Arc::from(format!(
8486 "prettier ({})",
8487 prettier_dir
8488 .file_name()
8489 .and_then(|name| name.to_str())
8490 .unwrap_or_default()
8491 )))
8492 } else {
8493 let dir_to_display = match prettier_dir
8494 .strip_prefix(&worktree_path)
8495 .ok()
8496 {
8497 Some(relative_path) => relative_path,
8498 None => prettier_dir,
8499 };
8500 LanguageServerName(Arc::from(format!(
8501 "prettier ({})",
8502 dir_to_display.display(),
8503 )))
8504 }
8505 }
8506 None => LanguageServerName(Arc::from(format!(
8507 "prettier ({})",
8508 prettier_dir.display(),
8509 ))),
8510 }
8511 };
8512
8513 project
8514 .supplementary_language_servers
8515 .insert(new_server_id, (name, Arc::clone(prettier_server)));
8516 cx.emit(Event::LanguageServerAdded(new_server_id));
8517 })?;
8518 }
8519 Ok(Arc::new(prettier)).map_err(Arc::new)
8520 }
8521 })
8522 .shared();
8523 this.update(&mut cx, |project, _| {
8524 project
8525 .prettier_instances
8526 .insert((worktree_id, prettier_dir), new_prettier_task.clone());
8527 });
8528 Some(new_prettier_task)
8529 })
8530 } else if self.remote_id().is_some() {
8531 return Task::ready(None);
8532 } else {
8533 Task::ready(Some(
8534 Task::ready(Err(Arc::new(anyhow!("project does not have a remote id")))).shared(),
8535 ))
8536 }
8537 }
8538
    /// Ensures the bundled ("default") prettier installation has everything the
    /// newly-added language needs, installing prettier plus any plugin packages
    /// via npm when the language's formatter settings call for prettier.
    ///
    /// Returns a task that resolves with `Ok(())` once installation completes,
    /// or immediately when nothing needs to be installed.
    fn install_default_formatters(
        &self,
        worktree: Option<WorktreeId>,
        new_language: &Language,
        language_settings: &LanguageSettings,
        cx: &mut ModelContext<Self>,
    ) -> Task<anyhow::Result<()>> {
        // Only the `Prettier` and `Auto` formatter settings can require prettier.
        match &language_settings.formatter {
            Formatter::Prettier { .. } | Formatter::Auto => {}
            Formatter::LanguageServer | Formatter::External { .. } => return Task::ready(Ok(())),
        };
        // Without a node runtime we cannot run npm, so there is nothing to do.
        let Some(node) = self.node.as_ref().cloned() else {
            return Task::ready(Ok(()));
        };

        // Collect the prettier plugin names requested by the language's LSP
        // adapters; `None` means no adapter asked for prettier at all.
        let mut prettier_plugins = None;
        for formatter in new_language
            .lsp_adapters()
            .into_iter()
            .flat_map(|adapter| adapter.enabled_formatters())
        {
            match formatter {
                BundledFormatter::Prettier { plugin_names, .. } => prettier_plugins
                    .get_or_insert_with(|| HashSet::default())
                    .extend(plugin_names),
            }
        }
        let Some(prettier_plugins) = prettier_plugins else {
            return Task::ready(Ok(()));
        };

        // A default prettier may already be running for this worktree; keep its
        // handle so we can clear its cache after new plugins are installed.
        let default_prettier_dir = DEFAULT_PRETTIER_DIR.as_path();
        let already_running_prettier = self
            .prettier_instances
            .get(&(worktree, default_prettier_dir.to_path_buf()))
            .cloned();

        let fs = Arc::clone(&self.fs);
        cx.executor()
            .spawn(async move {
                // Write (or refresh) the wrapper script prettier is launched through.
                let prettier_wrapper_path = default_prettier_dir.join(PRETTIER_SERVER_FILE);
                // method creates parent directory if it doesn't exist
                fs.save(&prettier_wrapper_path, &Rope::from(PRETTIER_SERVER_JS), LineEnding::Unix).await
                .with_context(|| format!("writing {PRETTIER_SERVER_FILE} file at {prettier_wrapper_path:?}"))?;

                // Resolve the latest published version of prettier itself plus
                // every requested plugin, concurrently.
                let packages_to_versions = future::try_join_all(
                    prettier_plugins
                        .iter()
                        .chain(Some(&"prettier"))
                        .map(|package_name| async {
                            let returned_package_name = package_name.to_string();
                            let latest_version = node.npm_package_latest_version(package_name)
                                .await
                                .with_context(|| {
                                    format!("fetching latest npm version for package {returned_package_name}")
                                })?;
                            anyhow::Ok((returned_package_name, latest_version))
                        }),
                )
                .await
                .context("fetching latest npm versions")?;

                log::info!("Fetching default prettier and plugins: {packages_to_versions:?}");
                let borrowed_packages = packages_to_versions.iter().map(|(package, version)| {
                    (package.as_str(), version.as_str())
                }).collect::<Vec<_>>();
                node.npm_install_packages(default_prettier_dir, &borrowed_packages).await.context("fetching formatter packages")?;

                // If a default prettier was already running, clear its module
                // cache so the freshly installed plugins are picked up.
                if !prettier_plugins.is_empty() {
                    if let Some(prettier) = already_running_prettier {
                        prettier.await.map_err(|e| anyhow::anyhow!("Default prettier startup await failure: {e:#}"))?.clear_cache().await.context("clearing default prettier cache after plugins install")?;
                    }
                }

                anyhow::Ok(())
            })
    }
8616}
8617
/// Subscribes `project` to Copilot events, registering Copilot's language
/// server as a supplementary language server once it starts and forwarding
/// its log messages to the project as `LanguageServerLog` events.
fn subscribe_for_copilot_events(
    copilot: &Handle<Copilot>,
    cx: &mut ModelContext<'_, Project>,
) -> gpui2::Subscription {
    cx.subscribe(
        copilot,
        |project, copilot, copilot_event, cx| match copilot_event {
            copilot2::Event::CopilotLanguageServerStarted => {
                match copilot.read(cx).language_server() {
                    Some((name, copilot_server)) => {
                        // Another event wants to re-add the server that was already added and subscribed to, avoid doing it again.
                        if !copilot_server.has_notification_handler::<copilot2::request::LogMessage>() {
                            let new_server_id = copilot_server.server_id();
                            // Use a weak handle so the log subscription does not
                            // keep the project alive on its own.
                            let weak_project = cx.weak_handle();
                            let copilot_log_subscription = copilot_server
                                .on_notification::<copilot2::request::LogMessage, _>(
                                    move |params, mut cx| {
                                        // `.ok()`: the project may already be dropped; ignore in that case.
                                        weak_project.update(&mut cx, |_, cx| {
                                            cx.emit(Event::LanguageServerLog(
                                                new_server_id,
                                                params.message,
                                            ));
                                        }).ok();
                                    },
                                );
                            project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
                            project.copilot_log_subscription = Some(copilot_log_subscription);
                            cx.emit(Event::LanguageServerAdded(new_server_id));
                        }
                    }
                    None => debug_panic!("Received Copilot language server started event, but no language server is running"),
                }
            }
        },
    )
}
8654
/// Returns the leading part of `glob` containing no glob metacharacters
/// (`*`, `?`, `{`, `}`): the literal directory prefix that can be matched
/// verbatim against the filesystem.
///
/// The prefix is measured in whole path components — scanning stops at the
/// first component containing a metacharacter, and the separator before that
/// component is excluded. E.g. `"foo/bar/*.rs"` yields `"foo/bar"`, while
/// `"**/*.rs"` yields `""`.
fn glob_literal_prefix(glob: &str) -> &str {
    let mut literal_end = 0;
    for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
        if part.contains(&['*', '?', '{', '}']) {
            break;
        }
        if i > 0 {
            // Account for the separator preceding this component.
            literal_end += path::MAIN_SEPARATOR.len_utf8();
        }
        literal_end += part.len();
    }
    &glob[..literal_end]
}
8670
8671impl WorktreeHandle {
8672 pub fn upgrade(&self) -> Option<Handle<Worktree>> {
8673 match self {
8674 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8675 WorktreeHandle::Weak(handle) => handle.upgrade(),
8676 }
8677 }
8678
8679 pub fn handle_id(&self) -> usize {
8680 match self {
8681 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8682 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8683 }
8684 }
8685}
8686
8687impl OpenBuffer {
8688 pub fn upgrade(&self) -> Option<Handle<Buffer>> {
8689 match self {
8690 OpenBuffer::Strong(handle) => Some(handle.clone()),
8691 OpenBuffer::Weak(handle) => handle.upgrade(),
8692 OpenBuffer::Operations(_) => None,
8693 }
8694 }
8695}
8696
/// A set of file paths from a single worktree snapshot, used as the candidate
/// source for fuzzy path matching.
pub struct PathMatchCandidateSet {
    pub snapshot: Snapshot,
    // Whether gitignored files are included among the candidates.
    pub include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    pub include_root_name: bool,
}
8702
8703impl<'a> fuzzy2::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8704 type Candidates = PathMatchCandidateSetIter<'a>;
8705
8706 fn id(&self) -> usize {
8707 self.snapshot.id().to_usize()
8708 }
8709
8710 fn len(&self) -> usize {
8711 if self.include_ignored {
8712 self.snapshot.file_count()
8713 } else {
8714 self.snapshot.visible_file_count()
8715 }
8716 }
8717
8718 fn prefix(&self) -> Arc<str> {
8719 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8720 self.snapshot.root_name().into()
8721 } else if self.include_root_name {
8722 format!("{}/", self.snapshot.root_name()).into()
8723 } else {
8724 "".into()
8725 }
8726 }
8727
8728 fn candidates(&'a self, start: usize) -> Self::Candidates {
8729 PathMatchCandidateSetIter {
8730 traversal: self.snapshot.files(self.include_ignored, start),
8731 }
8732 }
8733}
8734
/// Iterator over the file entries of a `PathMatchCandidateSet`.
pub struct PathMatchCandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
8738
8739impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8740 type Item = fuzzy2::PathMatchCandidate<'a>;
8741
8742 fn next(&mut self) -> Option<Self::Item> {
8743 self.traversal.next().map(|entry| {
8744 if let EntryKind::File(char_bag) = entry.kind {
8745 fuzzy2::PathMatchCandidate {
8746 path: &entry.path,
8747 char_bag,
8748 }
8749 } else {
8750 unreachable!()
8751 }
8752 })
8753 }
8754}
8755
// Project broadcasts `Event` values to its observers.
impl EventEmitter for Project {
    type Event = Event;
}
8759
8760impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8761 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8762 Self {
8763 worktree_id,
8764 path: path.as_ref().into(),
8765 }
8766 }
8767}
8768
8769impl ProjectLspAdapterDelegate {
8770 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8771 Arc::new(Self {
8772 project: cx.handle(),
8773 http_client: project.client.http_client(),
8774 })
8775 }
8776}
8777
8778impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8779 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8780 self.project
8781 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8782 }
8783
8784 fn http_client(&self) -> Arc<dyn HttpClient> {
8785 self.http_client.clone()
8786 }
8787}
8788
/// Converts a `Symbol` into its protobuf wire representation.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        language_server_name: symbol.language_server_name.0.to_string(),
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.path.worktree_id.to_proto(),
        // Paths are sent lossily as UTF-8 strings over the wire.
        path: symbol.path.path.to_string_lossy().to_string(),
        name: symbol.name.clone(),
        // SAFETY: reinterprets the symbol-kind enum as the proto field's
        // integer representation; assumes the two types share layout.
        // NOTE(review): a checked conversion would be safer than `transmute` —
        // confirm the representations actually match.
        kind: unsafe { mem::transmute(symbol.kind) },
        start: Some(proto::PointUtf16 {
            row: symbol.range.start.0.row,
            column: symbol.range.start.0.column,
        }),
        end: Some(proto::PointUtf16 {
            row: symbol.range.end.0.row,
            column: symbol.range.end.0.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
8808
/// Computes `path` expressed relative to `base`, emitting `..` components
/// wherever `path` is not located beneath `base`.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target_parts = path.components();
    let mut base_parts = base.components();
    let mut relative: Vec<Component> = Vec::new();
    loop {
        match (target_parts.next(), base_parts.next()) {
            // Both paths exhausted: the relative path is complete.
            (None, None) => break,
            // Base exhausted: append everything left in the target as-is.
            (Some(part), None) => {
                relative.push(part);
                relative.extend(target_parts.by_ref());
                break;
            }
            // Target exhausted: climb up once per remaining base component.
            (None, _) => relative.push(Component::ParentDir),
            // Still inside the shared prefix: skip matching components, but
            // only while nothing has been emitted yet.
            (Some(ours), Some(theirs)) if relative.is_empty() && ours == theirs => (),
            // A `.` in the base consumes nothing; keep our component.
            (Some(ours), Some(theirs)) if theirs == Component::CurDir => relative.push(ours),
            // Paths diverge: climb out of the rest of the base, then descend.
            (Some(ours), Some(_)) => {
                relative.push(Component::ParentDir);
                for _ in base_parts.by_ref() {
                    relative.push(Component::ParentDir);
                }
                relative.push(ours);
                relative.extend(target_parts.by_ref());
                break;
            }
        }
    }
    relative.into_iter().map(|part| part.as_os_str()).collect()
}
8837
8838impl Item for Buffer {
8839 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8840 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8841 }
8842
8843 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8844 File::from_dyn(self.file()).map(|file| ProjectPath {
8845 worktree_id: file.worktree_id(cx),
8846 path: file.path().clone(),
8847 })
8848 }
8849}
8850
8851async fn wait_for_loading_buffer(
8852 mut receiver: postage::watch::Receiver<Option<Result<Handle<Buffer>, Arc<anyhow::Error>>>>,
8853) -> Result<Handle<Buffer>, Arc<anyhow::Error>> {
8854 loop {
8855 if let Some(result) = receiver.borrow().as_ref() {
8856 match result {
8857 Ok(buffer) => return Ok(buffer.to_owned()),
8858 Err(e) => return Err(e.to_owned()),
8859 }
8860 }
8861 receiver.next().await;
8862 }
8863}
8864
8865fn include_text(server: &lsp2::LanguageServer) -> bool {
8866 server
8867 .capabilities()
8868 .text_document_sync
8869 .as_ref()
8870 .and_then(|sync| match sync {
8871 lsp2::TextDocumentSyncCapability::Kind(_) => None,
8872 lsp2::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8873 })
8874 .and_then(|save_options| match save_options {
8875 lsp2::TextDocumentSyncSaveOptions::Supported(_) => None,
8876 lsp2::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8877 })
8878 .unwrap_or(false)
8879}