1mod ignore;
2pub mod lsp_command;
3pub mod lsp_ext_command;
4mod prettier_support;
5pub mod project_settings;
6pub mod search;
7pub mod terminals;
8pub mod worktree;
9
10#[cfg(test)]
11mod project_tests;
12#[cfg(test)]
13mod worktree_tests;
14
15use anyhow::{anyhow, Context as _, Result};
16use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
17use clock::ReplicaId;
18use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
19use copilot::Copilot;
20use futures::{
21 channel::{
22 mpsc::{self, UnboundedReceiver},
23 oneshot,
24 },
25 future::{try_join_all, Shared},
26 stream::FuturesUnordered,
27 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
28};
29use globset::{Glob, GlobSet, GlobSetBuilder};
30use gpui::{
31 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
32 Model, ModelContext, Task, WeakModel,
33};
34use itertools::Itertools;
35use language::{
36 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
37 markdown, point_to_lsp,
38 proto::{
39 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
40 serialize_anchor, serialize_version, split_operations,
41 },
42 range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability,
43 CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
44 Documentation, Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName,
45 LocalFile, LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer,
46 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
47};
48use log::error;
49use lsp::{
50 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
51 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
52};
53use lsp_command::*;
54use node_runtime::NodeRuntime;
55use parking_lot::{Mutex, RwLock};
56use postage::watch;
57use prettier_support::{DefaultPrettier, PrettierInstance};
58use project_settings::{LspSettings, ProjectSettings};
59use rand::prelude::*;
60use search::SearchQuery;
61use serde::Serialize;
62use settings::{Settings, SettingsStore};
63use sha2::{Digest, Sha256};
64use similar::{ChangeTag, TextDiff};
65use smol::channel::{Receiver, Sender};
66use smol::lock::Semaphore;
67use std::{
68 cmp::{self, Ordering},
69 convert::TryInto,
70 hash::Hash,
71 mem,
72 num::NonZeroU32,
73 ops::Range,
74 path::{self, Component, Path, PathBuf},
75 process::Stdio,
76 str,
77 sync::{
78 atomic::{AtomicUsize, Ordering::SeqCst},
79 Arc,
80 },
81 time::{Duration, Instant},
82};
83use terminals::Terminals;
84use text::Anchor;
85use util::{
86 debug_panic, defer, http::HttpClient, merge_json_value_into,
87 paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
88};
89
90pub use fs::*;
91#[cfg(any(test, feature = "test-support"))]
92pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
93pub use worktree::*;
94
95const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
96
97pub trait Item {
98 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
99 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
100}
101
102pub struct Project {
103 worktrees: Vec<WorktreeHandle>,
104 active_entry: Option<ProjectEntryId>,
105 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
106 languages: Arc<LanguageRegistry>,
107 supplementary_language_servers:
108 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
109 language_servers: HashMap<LanguageServerId, LanguageServerState>,
110 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
111 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
112 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
113 client: Arc<client::Client>,
114 next_entry_id: Arc<AtomicUsize>,
115 join_project_response_message_id: u32,
116 next_diagnostic_group_id: usize,
117 user_store: Model<UserStore>,
118 fs: Arc<dyn Fs>,
119 client_state: ProjectClientState,
120 collaborators: HashMap<proto::PeerId, Collaborator>,
121 client_subscriptions: Vec<client::Subscription>,
122 _subscriptions: Vec<gpui::Subscription>,
123 next_buffer_id: u64,
124 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
125 shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
126 #[allow(clippy::type_complexity)]
127 loading_buffers_by_path: HashMap<
128 ProjectPath,
129 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
130 >,
131 #[allow(clippy::type_complexity)]
132 loading_local_worktrees:
133 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
134 opened_buffers: HashMap<u64, OpenBuffer>,
135 local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
136 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
137 /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
138 /// Used for re-issuing buffer requests when peers temporarily disconnect
139 incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>,
140 buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
141 buffers_being_formatted: HashSet<u64>,
142 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
143 git_diff_debouncer: DelayedDebounced,
144 nonce: u128,
145 _maintain_buffer_languages: Task<()>,
146 _maintain_workspace_config: Task<Result<()>>,
147 terminals: Terminals,
148 copilot_lsp_subscription: Option<gpui::Subscription>,
149 copilot_log_subscription: Option<lsp::Subscription>,
150 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
151 node: Option<Arc<dyn NodeRuntime>>,
152 default_prettier: DefaultPrettier,
153 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
154 prettier_instances: HashMap<PathBuf, PrettierInstance>,
155}
156
157struct DelayedDebounced {
158 task: Option<Task<()>>,
159 cancel_channel: Option<oneshot::Sender<()>>,
160}
161
162pub enum LanguageServerToQuery {
163 Primary,
164 Other(LanguageServerId),
165}
166
167impl DelayedDebounced {
168 fn new() -> DelayedDebounced {
169 DelayedDebounced {
170 task: None,
171 cancel_channel: None,
172 }
173 }
174
175 fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
176 where
177 F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
178 {
179 if let Some(channel) = self.cancel_channel.take() {
180 _ = channel.send(());
181 }
182
183 let (sender, mut receiver) = oneshot::channel::<()>();
184 self.cancel_channel = Some(sender);
185
186 let previous_task = self.task.take();
187 self.task = Some(cx.spawn(move |project, mut cx| async move {
188 let mut timer = cx.background_executor().timer(delay).fuse();
189 if let Some(previous_task) = previous_task {
190 previous_task.await;
191 }
192
193 futures::select_biased! {
194 _ = receiver => return,
195 _ = timer => {}
196 }
197
198 if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
199 task.await;
200 }
201 }));
202 }
203}
204
205struct LspBufferSnapshot {
206 version: i32,
207 snapshot: TextBufferSnapshot,
208}
209
210/// Message ordered with respect to buffer operations
211enum BufferOrderedMessage {
212 Operation {
213 buffer_id: u64,
214 operation: proto::Operation,
215 },
216 LanguageServerUpdate {
217 language_server_id: LanguageServerId,
218 message: proto::update_language_server::Variant,
219 },
220 Resync,
221}
222
223enum LocalProjectUpdate {
224 WorktreesChanged,
225 CreateBufferForPeer {
226 peer_id: proto::PeerId,
227 buffer_id: u64,
228 },
229}
230
231enum OpenBuffer {
232 Strong(Model<Buffer>),
233 Weak(WeakModel<Buffer>),
234 Operations(Vec<Operation>),
235}
236
237#[derive(Clone)]
238enum WorktreeHandle {
239 Strong(Model<Worktree>),
240 Weak(WeakModel<Worktree>),
241}
242
243#[derive(Debug)]
244enum ProjectClientState {
245 Local,
246 Shared {
247 remote_id: u64,
248 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
249 _send_updates: Task<Result<()>>,
250 },
251 Remote {
252 sharing_has_stopped: bool,
253 capability: Capability,
254 remote_id: u64,
255 replica_id: ReplicaId,
256 },
257}
258
259#[derive(Clone, Debug, PartialEq)]
260pub enum Event {
261 LanguageServerAdded(LanguageServerId),
262 LanguageServerRemoved(LanguageServerId),
263 LanguageServerLog(LanguageServerId, String),
264 Notification(String),
265 ActiveEntryChanged(Option<ProjectEntryId>),
266 ActivateProjectPanel,
267 WorktreeAdded,
268 WorktreeRemoved(WorktreeId),
269 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
270 DiskBasedDiagnosticsStarted {
271 language_server_id: LanguageServerId,
272 },
273 DiskBasedDiagnosticsFinished {
274 language_server_id: LanguageServerId,
275 },
276 DiagnosticsUpdated {
277 path: ProjectPath,
278 language_server_id: LanguageServerId,
279 },
280 RemoteIdChanged(Option<u64>),
281 DisconnectedFromHost,
282 Closed,
283 DeletedEntry(ProjectEntryId),
284 CollaboratorUpdated {
285 old_peer_id: proto::PeerId,
286 new_peer_id: proto::PeerId,
287 },
288 CollaboratorJoined(proto::PeerId),
289 CollaboratorLeft(proto::PeerId),
290 RefreshInlayHints,
291 RevealInProjectPanel(ProjectEntryId),
292}
293
294pub enum LanguageServerState {
295 Starting(Task<Option<Arc<LanguageServer>>>),
296
297 Running {
298 language: Arc<Language>,
299 adapter: Arc<CachedLspAdapter>,
300 server: Arc<LanguageServer>,
301 watched_paths: HashMap<WorktreeId, GlobSet>,
302 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
303 },
304}
305
306#[derive(Serialize)]
307pub struct LanguageServerStatus {
308 pub name: String,
309 pub pending_work: BTreeMap<String, LanguageServerProgress>,
310 pub has_pending_diagnostic_updates: bool,
311 progress_tokens: HashSet<String>,
312}
313
314#[derive(Clone, Debug, Serialize)]
315pub struct LanguageServerProgress {
316 pub message: Option<String>,
317 pub percentage: Option<usize>,
318 #[serde(skip_serializing)]
319 pub last_update_at: Instant,
320}
321
322#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
323pub struct ProjectPath {
324 pub worktree_id: WorktreeId,
325 pub path: Arc<Path>,
326}
327
328#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
329pub struct DiagnosticSummary {
330 pub error_count: usize,
331 pub warning_count: usize,
332}
333
334#[derive(Debug, Clone, PartialEq, Eq, Hash)]
335pub struct Location {
336 pub buffer: Model<Buffer>,
337 pub range: Range<language::Anchor>,
338}
339
340#[derive(Debug, Clone, PartialEq, Eq)]
341pub struct InlayHint {
342 pub position: language::Anchor,
343 pub label: InlayHintLabel,
344 pub kind: Option<InlayHintKind>,
345 pub padding_left: bool,
346 pub padding_right: bool,
347 pub tooltip: Option<InlayHintTooltip>,
348 pub resolve_state: ResolveState,
349}
350
351#[derive(Debug, Clone, PartialEq, Eq)]
352pub enum ResolveState {
353 Resolved,
354 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
355 Resolving,
356}
357
358impl InlayHint {
359 pub fn text(&self) -> String {
360 match &self.label {
361 InlayHintLabel::String(s) => s.to_owned(),
362 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
363 }
364 }
365}
366
367#[derive(Debug, Clone, PartialEq, Eq)]
368pub enum InlayHintLabel {
369 String(String),
370 LabelParts(Vec<InlayHintLabelPart>),
371}
372
373#[derive(Debug, Clone, PartialEq, Eq)]
374pub struct InlayHintLabelPart {
375 pub value: String,
376 pub tooltip: Option<InlayHintLabelPartTooltip>,
377 pub location: Option<(LanguageServerId, lsp::Location)>,
378}
379
380#[derive(Debug, Clone, PartialEq, Eq)]
381pub enum InlayHintTooltip {
382 String(String),
383 MarkupContent(MarkupContent),
384}
385
386#[derive(Debug, Clone, PartialEq, Eq)]
387pub enum InlayHintLabelPartTooltip {
388 String(String),
389 MarkupContent(MarkupContent),
390}
391
392#[derive(Debug, Clone, PartialEq, Eq)]
393pub struct MarkupContent {
394 pub kind: HoverBlockKind,
395 pub value: String,
396}
397
398#[derive(Debug, Clone)]
399pub struct LocationLink {
400 pub origin: Option<Location>,
401 pub target: Location,
402}
403
404#[derive(Debug)]
405pub struct DocumentHighlight {
406 pub range: Range<language::Anchor>,
407 pub kind: DocumentHighlightKind,
408}
409
410#[derive(Clone, Debug)]
411pub struct Symbol {
412 pub language_server_name: LanguageServerName,
413 pub source_worktree_id: WorktreeId,
414 pub path: ProjectPath,
415 pub label: CodeLabel,
416 pub name: String,
417 pub kind: lsp::SymbolKind,
418 pub range: Range<Unclipped<PointUtf16>>,
419 pub signature: [u8; 32],
420}
421
422#[derive(Clone, Debug, PartialEq)]
423pub struct HoverBlock {
424 pub text: String,
425 pub kind: HoverBlockKind,
426}
427
428#[derive(Clone, Debug, PartialEq, Eq)]
429pub enum HoverBlockKind {
430 PlainText,
431 Markdown,
432 Code { language: String },
433}
434
435#[derive(Debug)]
436pub struct Hover {
437 pub contents: Vec<HoverBlock>,
438 pub range: Option<Range<language::Anchor>>,
439 pub language: Option<Arc<Language>>,
440}
441
442impl Hover {
443 pub fn is_empty(&self) -> bool {
444 self.contents.iter().all(|block| block.text.is_empty())
445 }
446}
447
448#[derive(Default)]
449pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
450
451impl DiagnosticSummary {
452 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
453 let mut this = Self {
454 error_count: 0,
455 warning_count: 0,
456 };
457
458 for entry in diagnostics {
459 if entry.diagnostic.is_primary {
460 match entry.diagnostic.severity {
461 DiagnosticSeverity::ERROR => this.error_count += 1,
462 DiagnosticSeverity::WARNING => this.warning_count += 1,
463 _ => {}
464 }
465 }
466 }
467
468 this
469 }
470
471 pub fn is_empty(&self) -> bool {
472 self.error_count == 0 && self.warning_count == 0
473 }
474
475 pub fn to_proto(
476 &self,
477 language_server_id: LanguageServerId,
478 path: &Path,
479 ) -> proto::DiagnosticSummary {
480 proto::DiagnosticSummary {
481 path: path.to_string_lossy().to_string(),
482 language_server_id: language_server_id.0 as u64,
483 error_count: self.error_count as u32,
484 warning_count: self.warning_count as u32,
485 }
486 }
487}
488
489#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
490pub struct ProjectEntryId(usize);
491
492impl ProjectEntryId {
493 pub const MAX: Self = Self(usize::MAX);
494
495 pub fn new(counter: &AtomicUsize) -> Self {
496 Self(counter.fetch_add(1, SeqCst))
497 }
498
499 pub fn from_proto(id: u64) -> Self {
500 Self(id as usize)
501 }
502
503 pub fn to_proto(&self) -> u64 {
504 self.0 as u64
505 }
506
507 pub fn to_usize(&self) -> usize {
508 self.0
509 }
510}
511
512#[derive(Debug, Clone, Copy, PartialEq, Eq)]
513pub enum FormatTrigger {
514 Save,
515 Manual,
516}
517
518struct ProjectLspAdapterDelegate {
519 project: Model<Project>,
520 http_client: Arc<dyn HttpClient>,
521}
522
523// Currently, formatting operations are represented differently depending on
524// whether they come from a language server or an external command.
525enum FormatOperation {
526 Lsp(Vec<(Range<Anchor>, String)>),
527 External(Diff),
528 Prettier(Diff),
529}
530
531impl FormatTrigger {
532 fn from_proto(value: i32) -> FormatTrigger {
533 match value {
534 0 => FormatTrigger::Save,
535 1 => FormatTrigger::Manual,
536 _ => FormatTrigger::Save,
537 }
538 }
539}
540#[derive(Clone, Debug, PartialEq)]
541enum SearchMatchCandidate {
542 OpenBuffer {
543 buffer: Model<Buffer>,
544 // This might be an unnamed file without representation on filesystem
545 path: Option<Arc<Path>>,
546 },
547 Path {
548 worktree_id: WorktreeId,
549 is_ignored: bool,
550 path: Arc<Path>,
551 },
552}
553
554type SearchMatchCandidateIndex = usize;
555impl SearchMatchCandidate {
556 fn path(&self) -> Option<Arc<Path>> {
557 match self {
558 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
559 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
560 }
561 }
562}
563
564impl Project {
565 pub fn init_settings(cx: &mut AppContext) {
566 ProjectSettings::register(cx);
567 }
568
569 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
570 Self::init_settings(cx);
571
572 client.add_model_message_handler(Self::handle_add_collaborator);
573 client.add_model_message_handler(Self::handle_update_project_collaborator);
574 client.add_model_message_handler(Self::handle_remove_collaborator);
575 client.add_model_message_handler(Self::handle_buffer_reloaded);
576 client.add_model_message_handler(Self::handle_buffer_saved);
577 client.add_model_message_handler(Self::handle_start_language_server);
578 client.add_model_message_handler(Self::handle_update_language_server);
579 client.add_model_message_handler(Self::handle_update_project);
580 client.add_model_message_handler(Self::handle_unshare_project);
581 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
582 client.add_model_message_handler(Self::handle_update_buffer_file);
583 client.add_model_request_handler(Self::handle_update_buffer);
584 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
585 client.add_model_message_handler(Self::handle_update_worktree);
586 client.add_model_message_handler(Self::handle_update_worktree_settings);
587 client.add_model_request_handler(Self::handle_create_project_entry);
588 client.add_model_request_handler(Self::handle_rename_project_entry);
589 client.add_model_request_handler(Self::handle_copy_project_entry);
590 client.add_model_request_handler(Self::handle_delete_project_entry);
591 client.add_model_request_handler(Self::handle_expand_project_entry);
592 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
593 client.add_model_request_handler(Self::handle_resolve_completion_documentation);
594 client.add_model_request_handler(Self::handle_apply_code_action);
595 client.add_model_request_handler(Self::handle_on_type_formatting);
596 client.add_model_request_handler(Self::handle_inlay_hints);
597 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
598 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
599 client.add_model_request_handler(Self::handle_reload_buffers);
600 client.add_model_request_handler(Self::handle_synchronize_buffers);
601 client.add_model_request_handler(Self::handle_format_buffers);
602 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
603 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
604 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
605 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
606 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
607 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
608 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
609 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
610 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
611 client.add_model_request_handler(Self::handle_search_project);
612 client.add_model_request_handler(Self::handle_get_project_symbols);
613 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
614 client.add_model_request_handler(Self::handle_open_buffer_by_id);
615 client.add_model_request_handler(Self::handle_open_buffer_by_path);
616 client.add_model_request_handler(Self::handle_save_buffer);
617 client.add_model_message_handler(Self::handle_update_diff_base);
618 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
619 }
620
621 pub fn local(
622 client: Arc<Client>,
623 node: Arc<dyn NodeRuntime>,
624 user_store: Model<UserStore>,
625 languages: Arc<LanguageRegistry>,
626 fs: Arc<dyn Fs>,
627 cx: &mut AppContext,
628 ) -> Model<Self> {
629 cx.new_model(|cx: &mut ModelContext<Self>| {
630 let (tx, rx) = mpsc::unbounded();
631 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
632 .detach();
633 let copilot_lsp_subscription =
634 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
635 Self {
636 worktrees: Vec::new(),
637 buffer_ordered_messages_tx: tx,
638 collaborators: Default::default(),
639 next_buffer_id: 0,
640 opened_buffers: Default::default(),
641 shared_buffers: Default::default(),
642 incomplete_remote_buffers: Default::default(),
643 loading_buffers_by_path: Default::default(),
644 loading_local_worktrees: Default::default(),
645 local_buffer_ids_by_path: Default::default(),
646 local_buffer_ids_by_entry_id: Default::default(),
647 buffer_snapshots: Default::default(),
648 join_project_response_message_id: 0,
649 client_state: ProjectClientState::Local,
650 opened_buffer: watch::channel(),
651 client_subscriptions: Vec::new(),
652 _subscriptions: vec![
653 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
654 cx.on_release(Self::release),
655 cx.on_app_quit(Self::shutdown_language_servers),
656 ],
657 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
658 _maintain_workspace_config: Self::maintain_workspace_config(cx),
659 active_entry: None,
660 languages,
661 client,
662 user_store,
663 fs,
664 next_entry_id: Default::default(),
665 next_diagnostic_group_id: Default::default(),
666 supplementary_language_servers: HashMap::default(),
667 language_servers: Default::default(),
668 language_server_ids: HashMap::default(),
669 language_server_statuses: Default::default(),
670 last_workspace_edits_by_language_server: Default::default(),
671 buffers_being_formatted: Default::default(),
672 buffers_needing_diff: Default::default(),
673 git_diff_debouncer: DelayedDebounced::new(),
674 nonce: StdRng::from_entropy().gen(),
675 terminals: Terminals {
676 local_handles: Vec::new(),
677 },
678 copilot_lsp_subscription,
679 copilot_log_subscription: None,
680 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
681 node: Some(node),
682 default_prettier: DefaultPrettier::default(),
683 prettiers_per_worktree: HashMap::default(),
684 prettier_instances: HashMap::default(),
685 }
686 })
687 }
688
689 pub async fn remote(
690 remote_id: u64,
691 client: Arc<Client>,
692 user_store: Model<UserStore>,
693 languages: Arc<LanguageRegistry>,
694 fs: Arc<dyn Fs>,
695 role: proto::ChannelRole,
696 mut cx: AsyncAppContext,
697 ) -> Result<Model<Self>> {
698 client.authenticate_and_connect(true, &cx).await?;
699
700 let subscription = client.subscribe_to_entity(remote_id)?;
701 let response = client
702 .request_envelope(proto::JoinProject {
703 project_id: remote_id,
704 })
705 .await?;
706 let this = cx.new_model(|cx| {
707 let replica_id = response.payload.replica_id as ReplicaId;
708
709 let mut worktrees = Vec::new();
710 for worktree in response.payload.worktrees {
711 let worktree =
712 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
713 worktrees.push(worktree);
714 }
715
716 let (tx, rx) = mpsc::unbounded();
717 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
718 .detach();
719 let copilot_lsp_subscription =
720 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
721 let mut this = Self {
722 worktrees: Vec::new(),
723 buffer_ordered_messages_tx: tx,
724 loading_buffers_by_path: Default::default(),
725 next_buffer_id: 0,
726 opened_buffer: watch::channel(),
727 shared_buffers: Default::default(),
728 incomplete_remote_buffers: Default::default(),
729 loading_local_worktrees: Default::default(),
730 local_buffer_ids_by_path: Default::default(),
731 local_buffer_ids_by_entry_id: Default::default(),
732 active_entry: None,
733 collaborators: Default::default(),
734 join_project_response_message_id: response.message_id,
735 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
736 _maintain_workspace_config: Self::maintain_workspace_config(cx),
737 languages,
738 user_store: user_store.clone(),
739 fs,
740 next_entry_id: Default::default(),
741 next_diagnostic_group_id: Default::default(),
742 client_subscriptions: Default::default(),
743 _subscriptions: vec![
744 cx.on_release(Self::release),
745 cx.on_app_quit(Self::shutdown_language_servers),
746 ],
747 client: client.clone(),
748 client_state: ProjectClientState::Remote {
749 sharing_has_stopped: false,
750 capability: Capability::ReadWrite,
751 remote_id,
752 replica_id,
753 },
754 supplementary_language_servers: HashMap::default(),
755 language_servers: Default::default(),
756 language_server_ids: HashMap::default(),
757 language_server_statuses: response
758 .payload
759 .language_servers
760 .into_iter()
761 .map(|server| {
762 (
763 LanguageServerId(server.id as usize),
764 LanguageServerStatus {
765 name: server.name,
766 pending_work: Default::default(),
767 has_pending_diagnostic_updates: false,
768 progress_tokens: Default::default(),
769 },
770 )
771 })
772 .collect(),
773 last_workspace_edits_by_language_server: Default::default(),
774 opened_buffers: Default::default(),
775 buffers_being_formatted: Default::default(),
776 buffers_needing_diff: Default::default(),
777 git_diff_debouncer: DelayedDebounced::new(),
778 buffer_snapshots: Default::default(),
779 nonce: StdRng::from_entropy().gen(),
780 terminals: Terminals {
781 local_handles: Vec::new(),
782 },
783 copilot_lsp_subscription,
784 copilot_log_subscription: None,
785 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
786 node: None,
787 default_prettier: DefaultPrettier::default(),
788 prettiers_per_worktree: HashMap::default(),
789 prettier_instances: HashMap::default(),
790 };
791 this.set_role(role, cx);
792 for worktree in worktrees {
793 let _ = this.add_worktree(&worktree, cx);
794 }
795 this
796 })?;
797 let subscription = subscription.set_model(&this, &mut cx);
798
799 let user_ids = response
800 .payload
801 .collaborators
802 .iter()
803 .map(|peer| peer.user_id)
804 .collect();
805 user_store
806 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
807 .await?;
808
809 this.update(&mut cx, |this, cx| {
810 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
811 this.client_subscriptions.push(subscription);
812 anyhow::Ok(())
813 })??;
814
815 Ok(this)
816 }
817
818 fn release(&mut self, cx: &mut AppContext) {
819 match &self.client_state {
820 ProjectClientState::Local => {}
821 ProjectClientState::Shared { .. } => {
822 let _ = self.unshare_internal(cx);
823 }
824 ProjectClientState::Remote { remote_id, .. } => {
825 let _ = self.client.send(proto::LeaveProject {
826 project_id: *remote_id,
827 });
828 self.disconnected_from_host_internal(cx);
829 }
830 }
831 }
832
833 fn shutdown_language_servers(
834 &mut self,
835 _cx: &mut ModelContext<Self>,
836 ) -> impl Future<Output = ()> {
837 let shutdown_futures = self
838 .language_servers
839 .drain()
840 .map(|(_, server_state)| async {
841 use LanguageServerState::*;
842 match server_state {
843 Running { server, .. } => server.shutdown()?.await,
844 Starting(task) => task.await?.shutdown()?.await,
845 }
846 })
847 .collect::<Vec<_>>();
848
849 async move {
850 futures::future::join_all(shutdown_futures).await;
851 }
852 }
853
854 #[cfg(any(test, feature = "test-support"))]
855 pub async fn test(
856 fs: Arc<dyn Fs>,
857 root_paths: impl IntoIterator<Item = &Path>,
858 cx: &mut gpui::TestAppContext,
859 ) -> Model<Project> {
860 let mut languages = LanguageRegistry::test();
861 languages.set_executor(cx.executor());
862 let http_client = util::http::FakeHttpClient::with_404_response();
863 let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
864 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
865 let project = cx.update(|cx| {
866 Project::local(
867 client,
868 node_runtime::FakeNodeRuntime::new(),
869 user_store,
870 Arc::new(languages),
871 fs,
872 cx,
873 )
874 });
875 for path in root_paths {
876 let (tree, _) = project
877 .update(cx, |project, cx| {
878 project.find_or_create_local_worktree(path, true, cx)
879 })
880 .await
881 .unwrap();
882 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
883 .await;
884 }
885 project
886 }
887
888 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
889 let mut language_servers_to_start = Vec::new();
890 let mut language_formatters_to_check = Vec::new();
891 for buffer in self.opened_buffers.values() {
892 if let Some(buffer) = buffer.upgrade() {
893 let buffer = buffer.read(cx);
894 let buffer_file = File::from_dyn(buffer.file());
895 let buffer_language = buffer.language();
896 let settings = language_settings(buffer_language, buffer.file(), cx);
897 if let Some(language) = buffer_language {
898 if settings.enable_language_server {
899 if let Some(file) = buffer_file {
900 language_servers_to_start
901 .push((file.worktree.clone(), Arc::clone(language)));
902 }
903 }
904 language_formatters_to_check.push((
905 buffer_file.map(|f| f.worktree_id(cx)),
906 Arc::clone(language),
907 settings.clone(),
908 ));
909 }
910 }
911 }
912
913 let mut language_servers_to_stop = Vec::new();
914 let mut language_servers_to_restart = Vec::new();
915 let languages = self.languages.to_vec();
916
917 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
918 let current_lsp_settings = &self.current_lsp_settings;
919 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
920 let language = languages.iter().find_map(|l| {
921 let adapter = l
922 .lsp_adapters()
923 .iter()
924 .find(|adapter| &adapter.name == started_lsp_name)?;
925 Some((l, adapter))
926 });
927 if let Some((language, adapter)) = language {
928 let worktree = self.worktree_for_id(*worktree_id, cx);
929 let file = worktree.as_ref().and_then(|tree| {
930 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
931 });
932 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
933 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
934 } else if let Some(worktree) = worktree {
935 let server_name = &adapter.name.0;
936 match (
937 current_lsp_settings.get(server_name),
938 new_lsp_settings.get(server_name),
939 ) {
940 (None, None) => {}
941 (Some(_), None) | (None, Some(_)) => {
942 language_servers_to_restart.push((worktree, Arc::clone(language)));
943 }
944 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
945 if current_lsp_settings != new_lsp_settings {
946 language_servers_to_restart.push((worktree, Arc::clone(language)));
947 }
948 }
949 }
950 }
951 }
952 }
953 self.current_lsp_settings = new_lsp_settings;
954
955 // Stop all newly-disabled language servers.
956 for (worktree_id, adapter_name) in language_servers_to_stop {
957 self.stop_language_server(worktree_id, adapter_name, cx)
958 .detach();
959 }
960
961 let mut prettier_plugins_by_worktree = HashMap::default();
962 for (worktree, language, settings) in language_formatters_to_check {
963 if let Some(plugins) =
964 prettier_support::prettier_plugins_for_language(&language, &settings)
965 {
966 prettier_plugins_by_worktree
967 .entry(worktree)
968 .or_insert_with(|| HashSet::default())
969 .extend(plugins);
970 }
971 }
972 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
973 self.install_default_prettier(worktree, prettier_plugins, cx);
974 }
975
976 // Start all the newly-enabled language servers.
977 for (worktree, language) in language_servers_to_start {
978 self.start_language_servers(&worktree, language, cx);
979 }
980
981 // Restart all language servers with changed initialization options.
982 for (worktree, language) in language_servers_to_restart {
983 self.restart_language_servers(worktree, language, cx);
984 }
985
986 if self.copilot_lsp_subscription.is_none() {
987 if let Some(copilot) = Copilot::global(cx) {
988 for buffer in self.opened_buffers.values() {
989 if let Some(buffer) = buffer.upgrade() {
990 self.register_buffer_with_copilot(&buffer, cx);
991 }
992 }
993 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
994 }
995 }
996
997 cx.notify();
998 }
999
1000 pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> {
1001 self.opened_buffers
1002 .get(&remote_id)
1003 .and_then(|buffer| buffer.upgrade())
1004 }
1005
1006 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1007 &self.languages
1008 }
1009
1010 pub fn client(&self) -> Arc<Client> {
1011 self.client.clone()
1012 }
1013
1014 pub fn user_store(&self) -> Model<UserStore> {
1015 self.user_store.clone()
1016 }
1017
1018 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1019 self.opened_buffers
1020 .values()
1021 .filter_map(|b| b.upgrade())
1022 .collect()
1023 }
1024
1025 #[cfg(any(test, feature = "test-support"))]
1026 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1027 let path = path.into();
1028 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1029 self.opened_buffers.iter().any(|(_, buffer)| {
1030 if let Some(buffer) = buffer.upgrade() {
1031 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1032 if file.worktree == worktree && file.path() == &path.path {
1033 return true;
1034 }
1035 }
1036 }
1037 false
1038 })
1039 } else {
1040 false
1041 }
1042 }
1043
1044 pub fn fs(&self) -> &Arc<dyn Fs> {
1045 &self.fs
1046 }
1047
1048 pub fn remote_id(&self) -> Option<u64> {
1049 match self.client_state {
1050 ProjectClientState::Local => None,
1051 ProjectClientState::Shared { remote_id, .. }
1052 | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
1053 }
1054 }
1055
1056 pub fn replica_id(&self) -> ReplicaId {
1057 match self.client_state {
1058 ProjectClientState::Remote { replica_id, .. } => replica_id,
1059 _ => 0,
1060 }
1061 }
1062
1063 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1064 if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
1065 updates_tx
1066 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1067 .ok();
1068 }
1069 cx.notify();
1070 }
1071
1072 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1073 &self.collaborators
1074 }
1075
1076 pub fn host(&self) -> Option<&Collaborator> {
1077 self.collaborators.values().find(|c| c.replica_id == 0)
1078 }
1079
1080 /// Collect all worktrees, including ones that don't appear in the project panel
1081 pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1082 self.worktrees
1083 .iter()
1084 .filter_map(move |worktree| worktree.upgrade())
1085 }
1086
1087 /// Collect all user-visible worktrees, the ones that appear in the project panel
1088 pub fn visible_worktrees<'a>(
1089 &'a self,
1090 cx: &'a AppContext,
1091 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1092 self.worktrees.iter().filter_map(|worktree| {
1093 worktree.upgrade().and_then(|worktree| {
1094 if worktree.read(cx).is_visible() {
1095 Some(worktree)
1096 } else {
1097 None
1098 }
1099 })
1100 })
1101 }
1102
1103 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1104 self.visible_worktrees(cx)
1105 .map(|tree| tree.read(cx).root_name())
1106 }
1107
1108 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1109 self.worktrees()
1110 .find(|worktree| worktree.read(cx).id() == id)
1111 }
1112
1113 pub fn worktree_for_entry(
1114 &self,
1115 entry_id: ProjectEntryId,
1116 cx: &AppContext,
1117 ) -> Option<Model<Worktree>> {
1118 self.worktrees()
1119 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1120 }
1121
1122 pub fn worktree_id_for_entry(
1123 &self,
1124 entry_id: ProjectEntryId,
1125 cx: &AppContext,
1126 ) -> Option<WorktreeId> {
1127 self.worktree_for_entry(entry_id, cx)
1128 .map(|worktree| worktree.read(cx).id())
1129 }
1130
1131 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1132 paths.iter().all(|path| self.contains_path(path, cx))
1133 }
1134
1135 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1136 for worktree in self.worktrees() {
1137 let worktree = worktree.read(cx).as_local();
1138 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1139 return true;
1140 }
1141 }
1142 false
1143 }
1144
1145 pub fn create_entry(
1146 &mut self,
1147 project_path: impl Into<ProjectPath>,
1148 is_directory: bool,
1149 cx: &mut ModelContext<Self>,
1150 ) -> Task<Result<Option<Entry>>> {
1151 let project_path = project_path.into();
1152 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1153 return Task::ready(Ok(None));
1154 };
1155 if self.is_local() {
1156 worktree.update(cx, |worktree, cx| {
1157 worktree
1158 .as_local_mut()
1159 .unwrap()
1160 .create_entry(project_path.path, is_directory, cx)
1161 })
1162 } else {
1163 let client = self.client.clone();
1164 let project_id = self.remote_id().unwrap();
1165 cx.spawn(move |_, mut cx| async move {
1166 let response = client
1167 .request(proto::CreateProjectEntry {
1168 worktree_id: project_path.worktree_id.to_proto(),
1169 project_id,
1170 path: project_path.path.to_string_lossy().into(),
1171 is_directory,
1172 })
1173 .await?;
1174 match response.entry {
1175 Some(entry) => worktree
1176 .update(&mut cx, |worktree, cx| {
1177 worktree.as_remote_mut().unwrap().insert_entry(
1178 entry,
1179 response.worktree_scan_id as usize,
1180 cx,
1181 )
1182 })?
1183 .await
1184 .map(Some),
1185 None => Ok(None),
1186 }
1187 })
1188 }
1189 }
1190
1191 pub fn copy_entry(
1192 &mut self,
1193 entry_id: ProjectEntryId,
1194 new_path: impl Into<Arc<Path>>,
1195 cx: &mut ModelContext<Self>,
1196 ) -> Task<Result<Option<Entry>>> {
1197 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1198 return Task::ready(Ok(None));
1199 };
1200 let new_path = new_path.into();
1201 if self.is_local() {
1202 worktree.update(cx, |worktree, cx| {
1203 worktree
1204 .as_local_mut()
1205 .unwrap()
1206 .copy_entry(entry_id, new_path, cx)
1207 })
1208 } else {
1209 let client = self.client.clone();
1210 let project_id = self.remote_id().unwrap();
1211
1212 cx.spawn(move |_, mut cx| async move {
1213 let response = client
1214 .request(proto::CopyProjectEntry {
1215 project_id,
1216 entry_id: entry_id.to_proto(),
1217 new_path: new_path.to_string_lossy().into(),
1218 })
1219 .await?;
1220 match response.entry {
1221 Some(entry) => worktree
1222 .update(&mut cx, |worktree, cx| {
1223 worktree.as_remote_mut().unwrap().insert_entry(
1224 entry,
1225 response.worktree_scan_id as usize,
1226 cx,
1227 )
1228 })?
1229 .await
1230 .map(Some),
1231 None => Ok(None),
1232 }
1233 })
1234 }
1235 }
1236
1237 pub fn rename_entry(
1238 &mut self,
1239 entry_id: ProjectEntryId,
1240 new_path: impl Into<Arc<Path>>,
1241 cx: &mut ModelContext<Self>,
1242 ) -> Task<Result<Option<Entry>>> {
1243 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1244 return Task::ready(Ok(None));
1245 };
1246 let new_path = new_path.into();
1247 if self.is_local() {
1248 worktree.update(cx, |worktree, cx| {
1249 worktree
1250 .as_local_mut()
1251 .unwrap()
1252 .rename_entry(entry_id, new_path, cx)
1253 })
1254 } else {
1255 let client = self.client.clone();
1256 let project_id = self.remote_id().unwrap();
1257
1258 cx.spawn(move |_, mut cx| async move {
1259 let response = client
1260 .request(proto::RenameProjectEntry {
1261 project_id,
1262 entry_id: entry_id.to_proto(),
1263 new_path: new_path.to_string_lossy().into(),
1264 })
1265 .await?;
1266 match response.entry {
1267 Some(entry) => worktree
1268 .update(&mut cx, |worktree, cx| {
1269 worktree.as_remote_mut().unwrap().insert_entry(
1270 entry,
1271 response.worktree_scan_id as usize,
1272 cx,
1273 )
1274 })?
1275 .await
1276 .map(Some),
1277 None => Ok(None),
1278 }
1279 })
1280 }
1281 }
1282
1283 pub fn delete_entry(
1284 &mut self,
1285 entry_id: ProjectEntryId,
1286 cx: &mut ModelContext<Self>,
1287 ) -> Option<Task<Result<()>>> {
1288 let worktree = self.worktree_for_entry(entry_id, cx)?;
1289
1290 cx.emit(Event::DeletedEntry(entry_id));
1291
1292 if self.is_local() {
1293 worktree.update(cx, |worktree, cx| {
1294 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1295 })
1296 } else {
1297 let client = self.client.clone();
1298 let project_id = self.remote_id().unwrap();
1299 Some(cx.spawn(move |_, mut cx| async move {
1300 let response = client
1301 .request(proto::DeleteProjectEntry {
1302 project_id,
1303 entry_id: entry_id.to_proto(),
1304 })
1305 .await?;
1306 worktree
1307 .update(&mut cx, move |worktree, cx| {
1308 worktree.as_remote_mut().unwrap().delete_entry(
1309 entry_id,
1310 response.worktree_scan_id as usize,
1311 cx,
1312 )
1313 })?
1314 .await
1315 }))
1316 }
1317 }
1318
1319 pub fn expand_entry(
1320 &mut self,
1321 worktree_id: WorktreeId,
1322 entry_id: ProjectEntryId,
1323 cx: &mut ModelContext<Self>,
1324 ) -> Option<Task<Result<()>>> {
1325 let worktree = self.worktree_for_id(worktree_id, cx)?;
1326 if self.is_local() {
1327 worktree.update(cx, |worktree, cx| {
1328 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1329 })
1330 } else {
1331 let worktree = worktree.downgrade();
1332 let request = self.client.request(proto::ExpandProjectEntry {
1333 project_id: self.remote_id().unwrap(),
1334 entry_id: entry_id.to_proto(),
1335 });
1336 Some(cx.spawn(move |_, mut cx| async move {
1337 let response = request.await?;
1338 if let Some(worktree) = worktree.upgrade() {
1339 worktree
1340 .update(&mut cx, |worktree, _| {
1341 worktree
1342 .as_remote_mut()
1343 .unwrap()
1344 .wait_for_snapshot(response.worktree_scan_id as usize)
1345 })?
1346 .await?;
1347 }
1348 Ok(())
1349 }))
1350 }
1351 }
1352
1353 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1354 if !matches!(self.client_state, ProjectClientState::Local) {
1355 return Err(anyhow!("project was already shared"));
1356 }
1357 self.client_subscriptions.push(
1358 self.client
1359 .subscribe_to_entity(project_id)?
1360 .set_model(&cx.handle(), &mut cx.to_async()),
1361 );
1362
1363 for open_buffer in self.opened_buffers.values_mut() {
1364 match open_buffer {
1365 OpenBuffer::Strong(_) => {}
1366 OpenBuffer::Weak(buffer) => {
1367 if let Some(buffer) = buffer.upgrade() {
1368 *open_buffer = OpenBuffer::Strong(buffer);
1369 }
1370 }
1371 OpenBuffer::Operations(_) => unreachable!(),
1372 }
1373 }
1374
1375 for worktree_handle in self.worktrees.iter_mut() {
1376 match worktree_handle {
1377 WorktreeHandle::Strong(_) => {}
1378 WorktreeHandle::Weak(worktree) => {
1379 if let Some(worktree) = worktree.upgrade() {
1380 *worktree_handle = WorktreeHandle::Strong(worktree);
1381 }
1382 }
1383 }
1384 }
1385
1386 for (server_id, status) in &self.language_server_statuses {
1387 self.client
1388 .send(proto::StartLanguageServer {
1389 project_id,
1390 server: Some(proto::LanguageServer {
1391 id: server_id.0 as u64,
1392 name: status.name.clone(),
1393 }),
1394 })
1395 .log_err();
1396 }
1397
1398 let store = cx.global::<SettingsStore>();
1399 for worktree in self.worktrees() {
1400 let worktree_id = worktree.read(cx).id().to_proto();
1401 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1402 self.client
1403 .send(proto::UpdateWorktreeSettings {
1404 project_id,
1405 worktree_id,
1406 path: path.to_string_lossy().into(),
1407 content: Some(content),
1408 })
1409 .log_err();
1410 }
1411 }
1412
1413 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1414 let client = self.client.clone();
1415 self.client_state = ProjectClientState::Shared {
1416 remote_id: project_id,
1417 updates_tx,
1418 _send_updates: cx.spawn(move |this, mut cx| async move {
1419 while let Some(update) = updates_rx.next().await {
1420 match update {
1421 LocalProjectUpdate::WorktreesChanged => {
1422 let worktrees = this.update(&mut cx, |this, _cx| {
1423 this.worktrees().collect::<Vec<_>>()
1424 })?;
1425 let update_project = this
1426 .update(&mut cx, |this, cx| {
1427 this.client.request(proto::UpdateProject {
1428 project_id,
1429 worktrees: this.worktree_metadata_protos(cx),
1430 })
1431 })?
1432 .await;
1433 if update_project.is_ok() {
1434 for worktree in worktrees {
1435 worktree.update(&mut cx, |worktree, cx| {
1436 let worktree = worktree.as_local_mut().unwrap();
1437 worktree.share(project_id, cx).detach_and_log_err(cx)
1438 })?;
1439 }
1440 }
1441 }
1442 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1443 let buffer = this.update(&mut cx, |this, _| {
1444 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1445 let shared_buffers =
1446 this.shared_buffers.entry(peer_id).or_default();
1447 if shared_buffers.insert(buffer_id) {
1448 if let OpenBuffer::Strong(buffer) = buffer {
1449 Some(buffer.clone())
1450 } else {
1451 None
1452 }
1453 } else {
1454 None
1455 }
1456 })?;
1457
1458 let Some(buffer) = buffer else { continue };
1459 let operations =
1460 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1461 let operations = operations.await;
1462 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1463
1464 let initial_state = proto::CreateBufferForPeer {
1465 project_id,
1466 peer_id: Some(peer_id),
1467 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1468 };
1469 if client.send(initial_state).log_err().is_some() {
1470 let client = client.clone();
1471 cx.background_executor()
1472 .spawn(async move {
1473 let mut chunks = split_operations(operations).peekable();
1474 while let Some(chunk) = chunks.next() {
1475 let is_last = chunks.peek().is_none();
1476 client.send(proto::CreateBufferForPeer {
1477 project_id,
1478 peer_id: Some(peer_id),
1479 variant: Some(
1480 proto::create_buffer_for_peer::Variant::Chunk(
1481 proto::BufferChunk {
1482 buffer_id,
1483 operations: chunk,
1484 is_last,
1485 },
1486 ),
1487 ),
1488 })?;
1489 }
1490 anyhow::Ok(())
1491 })
1492 .await
1493 .log_err();
1494 }
1495 }
1496 }
1497 }
1498 Ok(())
1499 }),
1500 };
1501
1502 self.metadata_changed(cx);
1503 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1504 cx.notify();
1505 Ok(())
1506 }
1507
1508 pub fn reshared(
1509 &mut self,
1510 message: proto::ResharedProject,
1511 cx: &mut ModelContext<Self>,
1512 ) -> Result<()> {
1513 self.shared_buffers.clear();
1514 self.set_collaborators_from_proto(message.collaborators, cx)?;
1515 self.metadata_changed(cx);
1516 Ok(())
1517 }
1518
1519 pub fn rejoined(
1520 &mut self,
1521 message: proto::RejoinedProject,
1522 message_id: u32,
1523 cx: &mut ModelContext<Self>,
1524 ) -> Result<()> {
1525 cx.update_global::<SettingsStore, _>(|store, cx| {
1526 for worktree in &self.worktrees {
1527 store
1528 .clear_local_settings(worktree.handle_id(), cx)
1529 .log_err();
1530 }
1531 });
1532
1533 self.join_project_response_message_id = message_id;
1534 self.set_worktrees_from_proto(message.worktrees, cx)?;
1535 self.set_collaborators_from_proto(message.collaborators, cx)?;
1536 self.language_server_statuses = message
1537 .language_servers
1538 .into_iter()
1539 .map(|server| {
1540 (
1541 LanguageServerId(server.id as usize),
1542 LanguageServerStatus {
1543 name: server.name,
1544 pending_work: Default::default(),
1545 has_pending_diagnostic_updates: false,
1546 progress_tokens: Default::default(),
1547 },
1548 )
1549 })
1550 .collect();
1551 self.buffer_ordered_messages_tx
1552 .unbounded_send(BufferOrderedMessage::Resync)
1553 .unwrap();
1554 cx.notify();
1555 Ok(())
1556 }
1557
1558 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1559 self.unshare_internal(cx)?;
1560 self.metadata_changed(cx);
1561 cx.notify();
1562 Ok(())
1563 }
1564
1565 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1566 if self.is_remote() {
1567 return Err(anyhow!("attempted to unshare a remote project"));
1568 }
1569
1570 if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
1571 self.client_state = ProjectClientState::Local;
1572 self.collaborators.clear();
1573 self.shared_buffers.clear();
1574 self.client_subscriptions.clear();
1575
1576 for worktree_handle in self.worktrees.iter_mut() {
1577 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1578 let is_visible = worktree.update(cx, |worktree, _| {
1579 worktree.as_local_mut().unwrap().unshare();
1580 worktree.is_visible()
1581 });
1582 if !is_visible {
1583 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1584 }
1585 }
1586 }
1587
1588 for open_buffer in self.opened_buffers.values_mut() {
1589 // Wake up any tasks waiting for peers' edits to this buffer.
1590 if let Some(buffer) = open_buffer.upgrade() {
1591 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1592 }
1593
1594 if let OpenBuffer::Strong(buffer) = open_buffer {
1595 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1596 }
1597 }
1598
1599 self.client.send(proto::UnshareProject {
1600 project_id: remote_id,
1601 })?;
1602
1603 Ok(())
1604 } else {
1605 Err(anyhow!("attempted to unshare an unshared project"))
1606 }
1607 }
1608
1609 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1610 self.disconnected_from_host_internal(cx);
1611 cx.emit(Event::DisconnectedFromHost);
1612 cx.notify();
1613 }
1614
1615 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1616 let new_capability =
1617 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1618 Capability::ReadWrite
1619 } else {
1620 Capability::ReadOnly
1621 };
1622 if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
1623 if *capability == new_capability {
1624 return;
1625 }
1626
1627 *capability = new_capability;
1628 for buffer in self.opened_buffers() {
1629 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1630 }
1631 }
1632 }
1633
1634 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1635 if let ProjectClientState::Remote {
1636 sharing_has_stopped,
1637 ..
1638 } = &mut self.client_state
1639 {
1640 *sharing_has_stopped = true;
1641
1642 self.collaborators.clear();
1643
1644 for worktree in &self.worktrees {
1645 if let Some(worktree) = worktree.upgrade() {
1646 worktree.update(cx, |worktree, _| {
1647 if let Some(worktree) = worktree.as_remote_mut() {
1648 worktree.disconnected_from_host();
1649 }
1650 });
1651 }
1652 }
1653
1654 for open_buffer in self.opened_buffers.values_mut() {
1655 // Wake up any tasks waiting for peers' edits to this buffer.
1656 if let Some(buffer) = open_buffer.upgrade() {
1657 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1658 }
1659
1660 if let OpenBuffer::Strong(buffer) = open_buffer {
1661 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1662 }
1663 }
1664
1665 // Wake up all futures currently waiting on a buffer to get opened,
1666 // to give them a chance to fail now that we've disconnected.
1667 *self.opened_buffer.0.borrow_mut() = ();
1668 }
1669 }
1670
1671 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1672 cx.emit(Event::Closed);
1673 }
1674
1675 pub fn is_disconnected(&self) -> bool {
1676 match &self.client_state {
1677 ProjectClientState::Remote {
1678 sharing_has_stopped,
1679 ..
1680 } => *sharing_has_stopped,
1681 _ => false,
1682 }
1683 }
1684
1685 pub fn capability(&self) -> Capability {
1686 match &self.client_state {
1687 ProjectClientState::Remote { capability, .. } => *capability,
1688 ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
1689 }
1690 }
1691
1692 pub fn is_read_only(&self) -> bool {
1693 self.is_disconnected() || self.capability() == Capability::ReadOnly
1694 }
1695
1696 pub fn is_local(&self) -> bool {
1697 match &self.client_state {
1698 ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
1699 ProjectClientState::Remote { .. } => false,
1700 }
1701 }
1702
1703 pub fn is_remote(&self) -> bool {
1704 !self.is_local()
1705 }
1706
1707 pub fn create_buffer(
1708 &mut self,
1709 text: &str,
1710 language: Option<Arc<Language>>,
1711 cx: &mut ModelContext<Self>,
1712 ) -> Result<Model<Buffer>> {
1713 if self.is_remote() {
1714 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1715 }
1716 let id = post_inc(&mut self.next_buffer_id);
1717 let buffer = cx.new_model(|cx| {
1718 Buffer::new(self.replica_id(), id, text)
1719 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1720 });
1721 self.register_buffer(&buffer, cx)?;
1722 Ok(buffer)
1723 }
1724
1725 pub fn open_path(
1726 &mut self,
1727 path: ProjectPath,
1728 cx: &mut ModelContext<Self>,
1729 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1730 let task = self.open_buffer(path.clone(), cx);
1731 cx.spawn(move |_, cx| async move {
1732 let buffer = task.await?;
1733 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1734 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1735 })?;
1736
1737 let buffer: &AnyModel = &buffer;
1738 Ok((project_entry_id, buffer.clone()))
1739 })
1740 }
1741
1742 pub fn open_local_buffer(
1743 &mut self,
1744 abs_path: impl AsRef<Path>,
1745 cx: &mut ModelContext<Self>,
1746 ) -> Task<Result<Model<Buffer>>> {
1747 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1748 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1749 } else {
1750 Task::ready(Err(anyhow!("no such path")))
1751 }
1752 }
1753
1754 pub fn open_buffer(
1755 &mut self,
1756 path: impl Into<ProjectPath>,
1757 cx: &mut ModelContext<Self>,
1758 ) -> Task<Result<Model<Buffer>>> {
1759 let project_path = path.into();
1760 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1761 worktree
1762 } else {
1763 return Task::ready(Err(anyhow!("no such worktree")));
1764 };
1765
1766 // If there is already a buffer for the given path, then return it.
1767 let existing_buffer = self.get_open_buffer(&project_path, cx);
1768 if let Some(existing_buffer) = existing_buffer {
1769 return Task::ready(Ok(existing_buffer));
1770 }
1771
1772 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1773 // If the given path is already being loaded, then wait for that existing
1774 // task to complete and return the same buffer.
1775 hash_map::Entry::Occupied(e) => e.get().clone(),
1776
1777 // Otherwise, record the fact that this path is now being loaded.
1778 hash_map::Entry::Vacant(entry) => {
1779 let (mut tx, rx) = postage::watch::channel();
1780 entry.insert(rx.clone());
1781
1782 let load_buffer = if worktree.read(cx).is_local() {
1783 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1784 } else {
1785 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1786 };
1787
1788 let project_path = project_path.clone();
1789 cx.spawn(move |this, mut cx| async move {
1790 let load_result = load_buffer.await;
1791 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1792 // Record the fact that the buffer is no longer loading.
1793 this.loading_buffers_by_path.remove(&project_path);
1794 let buffer = load_result.map_err(Arc::new)?;
1795 Ok(buffer)
1796 })?);
1797 anyhow::Ok(())
1798 })
1799 .detach();
1800 rx
1801 }
1802 };
1803
1804 cx.background_executor().spawn(async move {
1805 wait_for_loading_buffer(loading_watch)
1806 .await
1807 .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
1808 })
1809 }
1810
1811 fn open_local_buffer_internal(
1812 &mut self,
1813 path: &Arc<Path>,
1814 worktree: &Model<Worktree>,
1815 cx: &mut ModelContext<Self>,
1816 ) -> Task<Result<Model<Buffer>>> {
1817 let buffer_id = post_inc(&mut self.next_buffer_id);
1818 let load_buffer = worktree.update(cx, |worktree, cx| {
1819 let worktree = worktree.as_local_mut().unwrap();
1820 worktree.load_buffer(buffer_id, path, cx)
1821 });
1822 cx.spawn(move |this, mut cx| async move {
1823 let buffer = load_buffer.await?;
1824 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1825 Ok(buffer)
1826 })
1827 }
1828
1829 fn open_remote_buffer_internal(
1830 &mut self,
1831 path: &Arc<Path>,
1832 worktree: &Model<Worktree>,
1833 cx: &mut ModelContext<Self>,
1834 ) -> Task<Result<Model<Buffer>>> {
1835 let rpc = self.client.clone();
1836 let project_id = self.remote_id().unwrap();
1837 let remote_worktree_id = worktree.read(cx).id();
1838 let path = path.clone();
1839 let path_string = path.to_string_lossy().to_string();
1840 cx.spawn(move |this, mut cx| async move {
1841 let response = rpc
1842 .request(proto::OpenBufferByPath {
1843 project_id,
1844 worktree_id: remote_worktree_id.to_proto(),
1845 path: path_string,
1846 })
1847 .await?;
1848 this.update(&mut cx, |this, cx| {
1849 this.wait_for_remote_buffer(response.buffer_id, cx)
1850 })?
1851 .await
1852 })
1853 }
1854
    /// LanguageServerName is owned because it is inserted into the `language_server_ids` map.
1856 pub fn open_local_buffer_via_lsp(
1857 &mut self,
1858 abs_path: lsp::Url,
1859 language_server_id: LanguageServerId,
1860 language_server_name: LanguageServerName,
1861 cx: &mut ModelContext<Self>,
1862 ) -> Task<Result<Model<Buffer>>> {
1863 cx.spawn(move |this, mut cx| async move {
1864 let abs_path = abs_path
1865 .to_file_path()
1866 .map_err(|_| anyhow!("can't convert URI to path"))?;
1867 let (worktree, relative_path) = if let Some(result) =
1868 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1869 {
1870 result
1871 } else {
1872 let worktree = this
1873 .update(&mut cx, |this, cx| {
1874 this.create_local_worktree(&abs_path, false, cx)
1875 })?
1876 .await?;
1877 this.update(&mut cx, |this, cx| {
1878 this.language_server_ids.insert(
1879 (worktree.read(cx).id(), language_server_name),
1880 language_server_id,
1881 );
1882 })
1883 .ok();
1884 (worktree, PathBuf::new())
1885 };
1886
1887 let project_path = ProjectPath {
1888 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1889 path: relative_path.into(),
1890 };
1891 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1892 .await
1893 })
1894 }
1895
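    /// Returns the buffer with the given remote id, requesting it from the host when this
    /// project is remote and the buffer isn't already open locally.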
1896 pub fn open_buffer_by_id(
1897 &mut self,
1898 id: u64,
1899 cx: &mut ModelContext<Self>,
1900 ) -> Task<Result<Model<Buffer>>> {
1901 if let Some(buffer) = self.buffer_for_id(id) {
1902 Task::ready(Ok(buffer))
1903 } else if self.is_local() {
1904 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1905 } else if let Some(project_id) = self.remote_id() {
1906 let request = self
1907 .client
1908 .request(proto::OpenBufferById { project_id, id });
1909 cx.spawn(move |this, mut cx| async move {
1910 let buffer_id = request.await?.buffer_id;
1911 this.update(&mut cx, |this, cx| {
1912 this.wait_for_remote_buffer(buffer_id, cx)
1913 })?
1914 .await
1915 })
1916 } else {
1917 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1918 }
1919 }
1920
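    /// Saves all of the given buffers, failing if any individual save fails.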
1921 pub fn save_buffers(
1922 &self,
1923 buffers: HashSet<Model<Buffer>>,
1924 cx: &mut ModelContext<Self>,
1925 ) -> Task<Result<()>> {
1926 cx.spawn(move |this, mut cx| async move {
1927 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1928 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1929 .ok()
1930 });
1931 try_join_all(save_tasks).await?;
1932 Ok(())
1933 })
1934 }
1935
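    /// Saves the buffer to its existing file, delegating to its local or remote worktree.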
1936 pub fn save_buffer(
1937 &self,
1938 buffer: Model<Buffer>,
1939 cx: &mut ModelContext<Self>,
1940 ) -> Task<Result<()>> {
1941 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1942 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1943 };
1944 let worktree = file.worktree.clone();
1945 let path = file.path.clone();
1946 worktree.update(cx, |worktree, cx| match worktree {
1947 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1948 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1949 })
1950 }
1951
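    /// Saves the buffer to a new path, unregistering it from language servers under its old
    /// file and re-registering it once the new file has been written.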
1952 pub fn save_buffer_as(
1953 &mut self,
1954 buffer: Model<Buffer>,
1955 abs_path: PathBuf,
1956 cx: &mut ModelContext<Self>,
1957 ) -> Task<Result<()>> {
1958 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1959 let old_file = File::from_dyn(buffer.read(cx).file())
1960 .filter(|f| f.is_local())
1961 .cloned();
1962 cx.spawn(move |this, mut cx| async move {
1963 if let Some(old_file) = &old_file {
1964 this.update(&mut cx, |this, cx| {
1965 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1966 })?;
1967 }
1968 let (worktree, path) = worktree_task.await?;
1969 worktree
1970 .update(&mut cx, |worktree, cx| match worktree {
1971 Worktree::Local(worktree) => {
1972 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1973 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1975 })?
1976 .await?;
1977
1978 this.update(&mut cx, |this, cx| {
1979 this.detect_language_for_buffer(&buffer, cx);
1980 this.register_buffer_with_language_servers(&buffer, cx);
1981 })?;
1982 Ok(())
1983 })
1984 }
1985
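    /// Returns the already-open buffer for the given project path, if there is one.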
1986 pub fn get_open_buffer(
1987 &mut self,
1988 path: &ProjectPath,
1989 cx: &mut ModelContext<Self>,
1990 ) -> Option<Model<Buffer>> {
1991 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1992 self.opened_buffers.values().find_map(|buffer| {
1993 let buffer = buffer.upgrade()?;
1994 let file = File::from_dyn(buffer.read(cx).file())?;
1995 if file.worktree == worktree && file.path() == &path.path {
1996 Some(buffer)
1997 } else {
1998 None
1999 }
2000 })
2001 }
2002
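    // Tracks a newly opened buffer: records it in `opened_buffers`, subscribes to its events
    // and release, and registers it with language servers and Copilot.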
2003 fn register_buffer(
2004 &mut self,
2005 buffer: &Model<Buffer>,
2006 cx: &mut ModelContext<Self>,
2007 ) -> Result<()> {
2008 self.request_buffer_diff_recalculation(buffer, cx);
2009 buffer.update(cx, |buffer, _| {
2010 buffer.set_language_registry(self.languages.clone())
2011 });
2012
2013 let remote_id = buffer.read(cx).remote_id();
2014 let is_remote = self.is_remote();
2015 let open_buffer = if is_remote || self.is_shared() {
2016 OpenBuffer::Strong(buffer.clone())
2017 } else {
2018 OpenBuffer::Weak(buffer.downgrade())
2019 };
2020
2021 match self.opened_buffers.entry(remote_id) {
2022 hash_map::Entry::Vacant(entry) => {
2023 entry.insert(open_buffer);
2024 }
2025 hash_map::Entry::Occupied(mut entry) => {
2026 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2027 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2028 } else if entry.get().upgrade().is_some() {
2029 if is_remote {
2030 return Ok(());
2031 } else {
2032 debug_panic!("buffer {} was already registered", remote_id);
2033 Err(anyhow!("buffer {} was already registered", remote_id))?;
2034 }
2035 }
2036 entry.insert(open_buffer);
2037 }
2038 }
2039 cx.subscribe(buffer, |this, buffer, event, cx| {
2040 this.on_buffer_event(buffer, event, cx);
2041 })
2042 .detach();
2043
2044 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2045 if file.is_local {
2046 self.local_buffer_ids_by_path.insert(
2047 ProjectPath {
2048 worktree_id: file.worktree_id(cx),
2049 path: file.path.clone(),
2050 },
2051 remote_id,
2052 );
2053
2054 if let Some(entry_id) = file.entry_id {
2055 self.local_buffer_ids_by_entry_id
2056 .insert(entry_id, remote_id);
2057 }
2058 }
2059 }
2060
2061 self.detect_language_for_buffer(buffer, cx);
2062 self.register_buffer_with_language_servers(buffer, cx);
2063 self.register_buffer_with_copilot(buffer, cx);
2064 cx.observe_release(buffer, |this, buffer, cx| {
2065 if let Some(file) = File::from_dyn(buffer.file()) {
2066 if file.is_local() {
2067 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2068 for server in this.language_servers_for_buffer(buffer, cx) {
2069 server
2070 .1
2071 .notify::<lsp::notification::DidCloseTextDocument>(
2072 lsp::DidCloseTextDocumentParams {
2073 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2074 },
2075 )
2076 .log_err();
2077 }
2078 }
2079 }
2080 })
2081 .detach();
2082
2083 *self.opened_buffer.0.borrow_mut() = ();
2084 Ok(())
2085 }
2086
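    // Sends `textDocument/didOpen` for this buffer to every running language server for its
    // language, and records an initial snapshot per server for incremental synchronization.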
2087 fn register_buffer_with_language_servers(
2088 &mut self,
2089 buffer_handle: &Model<Buffer>,
2090 cx: &mut ModelContext<Self>,
2091 ) {
2092 let buffer = buffer_handle.read(cx);
2093 let buffer_id = buffer.remote_id();
2094
2095 if let Some(file) = File::from_dyn(buffer.file()) {
2096 if !file.is_local() {
2097 return;
2098 }
2099
2100 let abs_path = file.abs_path(cx);
2101 let uri = lsp::Url::from_file_path(&abs_path)
2102 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2103 let initial_snapshot = buffer.text_snapshot();
2104 let language = buffer.language().cloned();
2105 let worktree_id = file.worktree_id(cx);
2106
2107 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2108 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2109 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2110 .log_err();
2111 }
2112 }
2113
2114 if let Some(language) = language {
2115 for adapter in language.lsp_adapters() {
2116 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2117 let server = self
2118 .language_server_ids
2119 .get(&(worktree_id, adapter.name.clone()))
2120 .and_then(|id| self.language_servers.get(id))
2121 .and_then(|server_state| {
2122 if let LanguageServerState::Running { server, .. } = server_state {
2123 Some(server.clone())
2124 } else {
2125 None
2126 }
2127 });
2128 let server = match server {
2129 Some(server) => server,
2130 None => continue,
2131 };
2132
2133 server
2134 .notify::<lsp::notification::DidOpenTextDocument>(
2135 lsp::DidOpenTextDocumentParams {
2136 text_document: lsp::TextDocumentItem::new(
2137 uri.clone(),
2138 language_id.unwrap_or_default(),
2139 0,
2140 initial_snapshot.text(),
2141 ),
2142 },
2143 )
2144 .log_err();
2145
2146 buffer_handle.update(cx, |buffer, cx| {
2147 buffer.set_completion_triggers(
2148 server
2149 .capabilities()
2150 .completion_provider
2151 .as_ref()
2152 .and_then(|provider| provider.trigger_characters.clone())
2153 .unwrap_or_default(),
2154 cx,
2155 );
2156 });
2157
2158 let snapshot = LspBufferSnapshot {
2159 version: 0,
2160 snapshot: initial_snapshot.clone(),
2161 };
2162 self.buffer_snapshots
2163 .entry(buffer_id)
2164 .or_default()
2165 .insert(server.server_id(), vec![snapshot]);
2166 }
2167 }
2168 }
2169 }
2170
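    // Clears the buffer's diagnostics and sends `textDocument/didClose` to its language servers.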
2171 fn unregister_buffer_from_language_servers(
2172 &mut self,
2173 buffer: &Model<Buffer>,
2174 old_file: &File,
2175 cx: &mut ModelContext<Self>,
2176 ) {
2177 let old_path = match old_file.as_local() {
2178 Some(local) => local.abs_path(cx),
2179 None => return,
2180 };
2181
2182 buffer.update(cx, |buffer, cx| {
2183 let worktree_id = old_file.worktree_id(cx);
2184 let ids = &self.language_server_ids;
2185
2186 let language = buffer.language().cloned();
2187 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2188 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2189 buffer.update_diagnostics(server_id, Default::default(), cx);
2190 }
2191
2192 self.buffer_snapshots.remove(&buffer.remote_id());
2193 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2194 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2195 language_server
2196 .notify::<lsp::notification::DidCloseTextDocument>(
2197 lsp::DidCloseTextDocumentParams {
2198 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2199 },
2200 )
2201 .log_err();
2202 }
2203 });
2204 }
2205
2206 fn register_buffer_with_copilot(
2207 &self,
2208 buffer_handle: &Model<Buffer>,
2209 cx: &mut ModelContext<Self>,
2210 ) {
2211 if let Some(copilot) = Copilot::global(cx) {
2212 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2213 }
2214 }
2215
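    // Drains buffer-ordered messages in batches, forwarding buffer operations and language
    // server updates to the remote peer in the order they were produced.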
2216 async fn send_buffer_ordered_messages(
2217 this: WeakModel<Self>,
2218 rx: UnboundedReceiver<BufferOrderedMessage>,
2219 mut cx: AsyncAppContext,
2220 ) -> Result<()> {
2221 const MAX_BATCH_SIZE: usize = 128;
2222
2223 let mut operations_by_buffer_id = HashMap::default();
2224 async fn flush_operations(
2225 this: &WeakModel<Project>,
2226 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2227 needs_resync_with_host: &mut bool,
2228 is_local: bool,
2229 cx: &mut AsyncAppContext,
2230 ) -> Result<()> {
2231 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2232 let request = this.update(cx, |this, _| {
2233 let project_id = this.remote_id()?;
2234 Some(this.client.request(proto::UpdateBuffer {
2235 buffer_id,
2236 project_id,
2237 operations,
2238 }))
2239 })?;
2240 if let Some(request) = request {
2241 if request.await.is_err() && !is_local {
2242 *needs_resync_with_host = true;
2243 break;
2244 }
2245 }
2246 }
2247 Ok(())
2248 }
2249
2250 let mut needs_resync_with_host = false;
2251 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2252
2253 while let Some(changes) = changes.next().await {
2254 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2255
2256 for change in changes {
2257 match change {
2258 BufferOrderedMessage::Operation {
2259 buffer_id,
2260 operation,
2261 } => {
2262 if needs_resync_with_host {
2263 continue;
2264 }
2265
2266 operations_by_buffer_id
2267 .entry(buffer_id)
                            .or_default()
2269 .push(operation);
2270 }
2271
2272 BufferOrderedMessage::Resync => {
2273 operations_by_buffer_id.clear();
2274 if this
2275 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2276 .await
2277 .is_ok()
2278 {
2279 needs_resync_with_host = false;
2280 }
2281 }
2282
2283 BufferOrderedMessage::LanguageServerUpdate {
2284 language_server_id,
2285 message,
2286 } => {
2287 flush_operations(
2288 &this,
2289 &mut operations_by_buffer_id,
2290 &mut needs_resync_with_host,
2291 is_local,
2292 &mut cx,
2293 )
2294 .await?;
2295
2296 this.update(&mut cx, |this, _| {
2297 if let Some(project_id) = this.remote_id() {
2298 this.client
2299 .send(proto::UpdateLanguageServer {
2300 project_id,
2301 language_server_id: language_server_id.0 as u64,
2302 variant: Some(message),
2303 })
2304 .log_err();
2305 }
2306 })?;
2307 }
2308 }
2309 }
2310
2311 flush_operations(
2312 &this,
2313 &mut operations_by_buffer_id,
2314 &mut needs_resync_with_host,
2315 is_local,
2316 &mut cx,
2317 )
2318 .await?;
2319 }
2320
2321 Ok(())
2322 }
2323
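    // Responds to buffer events: forwards operations to collaborators, keeps language servers
    // in sync via `didChange`/`didSave`, and updates path and entry-id indices on file changes.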
2324 fn on_buffer_event(
2325 &mut self,
2326 buffer: Model<Buffer>,
2327 event: &BufferEvent,
2328 cx: &mut ModelContext<Self>,
2329 ) -> Option<()> {
2330 if matches!(
2331 event,
2332 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2333 ) {
2334 self.request_buffer_diff_recalculation(&buffer, cx);
2335 }
2336
2337 match event {
2338 BufferEvent::Operation(operation) => {
2339 self.buffer_ordered_messages_tx
2340 .unbounded_send(BufferOrderedMessage::Operation {
2341 buffer_id: buffer.read(cx).remote_id(),
2342 operation: language::proto::serialize_operation(operation),
2343 })
2344 .ok();
2345 }
2346
2347 BufferEvent::Edited { .. } => {
2348 let buffer = buffer.read(cx);
2349 let file = File::from_dyn(buffer.file())?;
2350 let abs_path = file.as_local()?.abs_path(cx);
2351 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2352 let next_snapshot = buffer.text_snapshot();
2353
2354 let language_servers: Vec<_> = self
2355 .language_servers_for_buffer(buffer, cx)
2356 .map(|i| i.1.clone())
2357 .collect();
2358
2359 for language_server in language_servers {
2360 let language_server = language_server.clone();
2361
2362 let buffer_snapshots = self
2363 .buffer_snapshots
2364 .get_mut(&buffer.remote_id())
2365 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2366 let previous_snapshot = buffer_snapshots.last()?;
2367
2368 let build_incremental_change = || {
2369 buffer
2370 .edits_since::<(PointUtf16, usize)>(
2371 previous_snapshot.snapshot.version(),
2372 )
2373 .map(|edit| {
2374 let edit_start = edit.new.start.0;
2375 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2376 let new_text = next_snapshot
2377 .text_for_range(edit.new.start.1..edit.new.end.1)
2378 .collect();
2379 lsp::TextDocumentContentChangeEvent {
2380 range: Some(lsp::Range::new(
2381 point_to_lsp(edit_start),
2382 point_to_lsp(edit_end),
2383 )),
2384 range_length: None,
2385 text: new_text,
2386 }
2387 })
2388 .collect()
2389 };
2390
2391 let document_sync_kind = language_server
2392 .capabilities()
2393 .text_document_sync
2394 .as_ref()
2395 .and_then(|sync| match sync {
2396 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2397 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2398 });
2399
2400 let content_changes: Vec<_> = match document_sync_kind {
2401 Some(lsp::TextDocumentSyncKind::FULL) => {
2402 vec![lsp::TextDocumentContentChangeEvent {
2403 range: None,
2404 range_length: None,
2405 text: next_snapshot.text(),
2406 }]
2407 }
2408 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2409 _ => {
2410 #[cfg(any(test, feature = "test-support"))]
2411 {
2412 build_incremental_change()
2413 }
2414
2415 #[cfg(not(any(test, feature = "test-support")))]
2416 {
2417 continue;
2418 }
2419 }
2420 };
2421
2422 let next_version = previous_snapshot.version + 1;
2423
2424 buffer_snapshots.push(LspBufferSnapshot {
2425 version: next_version,
2426 snapshot: next_snapshot.clone(),
2427 });
2428
2429 language_server
2430 .notify::<lsp::notification::DidChangeTextDocument>(
2431 lsp::DidChangeTextDocumentParams {
2432 text_document: lsp::VersionedTextDocumentIdentifier::new(
2433 uri.clone(),
2434 next_version,
2435 ),
2436 content_changes,
2437 },
2438 )
2439 .log_err();
2440 }
2441 }
2442
2443 BufferEvent::Saved => {
2444 let file = File::from_dyn(buffer.read(cx).file())?;
2445 let worktree_id = file.worktree_id(cx);
2446 let abs_path = file.as_local()?.abs_path(cx);
2447 let text_document = lsp::TextDocumentIdentifier {
2448 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2449 };
2450
2451 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2452 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2453
2454 server
2455 .notify::<lsp::notification::DidSaveTextDocument>(
2456 lsp::DidSaveTextDocumentParams {
2457 text_document: text_document.clone(),
2458 text,
2459 },
2460 )
2461 .log_err();
2462 }
2463
2464 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2465 for language_server_id in language_server_ids {
2466 if let Some(LanguageServerState::Running {
2467 adapter,
2468 simulate_disk_based_diagnostics_completion,
2469 ..
2470 }) = self.language_servers.get_mut(&language_server_id)
2471 {
2472 // After saving a buffer using a language server that doesn't provide
2473 // a disk-based progress token, kick off a timer that will reset every
2474 // time the buffer is saved. If the timer eventually fires, simulate
2475 // disk-based diagnostics being finished so that other pieces of UI
2476 // (e.g., project diagnostics view, diagnostic status bar) can update.
2477 // We don't emit an event right away because the language server might take
2478 // some time to publish diagnostics.
2479 if adapter.disk_based_diagnostics_progress_token.is_none() {
2480 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2481 Duration::from_secs(1);
2482
2483 let task = cx.spawn(move |this, mut cx| async move {
2484 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2485 if let Some(this) = this.upgrade() {
2486 this.update(&mut cx, |this, cx| {
2487 this.disk_based_diagnostics_finished(
2488 language_server_id,
2489 cx,
2490 );
2491 this.buffer_ordered_messages_tx
2492 .unbounded_send(
2493 BufferOrderedMessage::LanguageServerUpdate {
2494 language_server_id,
                                                    message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2496 },
2497 )
2498 .ok();
2499 }).ok();
2500 }
2501 });
2502 *simulate_disk_based_diagnostics_completion = Some(task);
2503 }
2504 }
2505 }
2506 }
2507 BufferEvent::FileHandleChanged => {
2508 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2509 return None;
2510 };
2511
2512 let remote_id = buffer.read(cx).remote_id();
2513 if let Some(entry_id) = file.entry_id {
2514 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2515 Some(_) => {
2516 return None;
2517 }
2518 None => {
2519 self.local_buffer_ids_by_entry_id
2520 .insert(entry_id, remote_id);
2521 }
2522 }
2523 };
2524 self.local_buffer_ids_by_path.insert(
2525 ProjectPath {
2526 worktree_id: file.worktree_id(cx),
2527 path: file.path.clone(),
2528 },
2529 remote_id,
2530 );
2531 }
2532 _ => {}
2533 }
2534
2535 None
2536 }
2537
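    // Schedules a git diff recalculation for the buffer, debounced according to the
    // `git.gutter_debounce` setting when one is configured.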
2538 fn request_buffer_diff_recalculation(
2539 &mut self,
2540 buffer: &Model<Buffer>,
2541 cx: &mut ModelContext<Self>,
2542 ) {
2543 self.buffers_needing_diff.insert(buffer.downgrade());
2544 let first_insertion = self.buffers_needing_diff.len() == 1;
2545
2546 let settings = ProjectSettings::get_global(cx);
2547 let delay = if let Some(delay) = settings.git.gutter_debounce {
2548 delay
2549 } else {
2550 if first_insertion {
2551 let this = cx.weak_model();
2552 cx.defer(move |cx| {
2553 if let Some(this) = this.upgrade() {
2554 this.update(cx, |this, cx| {
2555 this.recalculate_buffer_diffs(cx).detach();
2556 });
2557 }
2558 });
2559 }
2560 return;
2561 };
2562
2563 const MIN_DELAY: u64 = 50;
2564 let delay = delay.max(MIN_DELAY);
2565 let duration = Duration::from_millis(delay);
2566
2567 self.git_diff_debouncer
2568 .fire_new(duration, cx, move |this, cx| {
2569 this.recalculate_buffer_diffs(cx)
2570 });
2571 }
2572
2573 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2574 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2575 cx.spawn(move |this, mut cx| async move {
2576 let tasks: Vec<_> = buffers
2577 .iter()
2578 .filter_map(|buffer| {
2579 let buffer = buffer.upgrade()?;
2580 buffer
2581 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2582 .ok()
2583 .flatten()
2584 })
2585 .collect();
2586
2587 futures::future::join_all(tasks).await;
2588
2589 this.update(&mut cx, |this, cx| {
2590 if !this.buffers_needing_diff.is_empty() {
2591 this.recalculate_buffer_diffs(cx).detach();
2592 } else {
2593 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2594 for buffer in buffers {
2595 if let Some(buffer) = buffer.upgrade() {
2596 buffer.update(cx, |_, cx| cx.notify());
2597 }
2598 }
2599 }
2600 })
2601 .ok();
2602 })
2603 }
2604
2605 fn language_servers_for_worktree(
2606 &self,
2607 worktree_id: WorktreeId,
2608 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2609 self.language_server_ids
2610 .iter()
2611 .filter_map(move |((language_server_worktree_id, _), id)| {
2612 if *language_server_worktree_id == worktree_id {
2613 if let Some(LanguageServerState::Running {
2614 adapter,
2615 language,
2616 server,
2617 ..
2618 }) = self.language_servers.get(id)
2619 {
2620 return Some((adapter, language, server));
2621 }
2622 }
2623 None
2624 })
2625 }
2626
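    // Watches the language registry and re-detects languages for open buffers whenever
    // languages are added or the registry is reloaded.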
2627 fn maintain_buffer_languages(
2628 languages: Arc<LanguageRegistry>,
2629 cx: &mut ModelContext<Project>,
2630 ) -> Task<()> {
2631 let mut subscription = languages.subscribe();
2632 let mut prev_reload_count = languages.reload_count();
2633 cx.spawn(move |project, mut cx| async move {
2634 while let Some(()) = subscription.next().await {
2635 if let Some(project) = project.upgrade() {
2636 // If the language registry has been reloaded, then remove and
2637 // re-assign the languages on all open buffers.
2638 let reload_count = languages.reload_count();
2639 if reload_count > prev_reload_count {
2640 prev_reload_count = reload_count;
2641 project
2642 .update(&mut cx, |this, cx| {
2643 let buffers = this
2644 .opened_buffers
2645 .values()
2646 .filter_map(|b| b.upgrade())
2647 .collect::<Vec<_>>();
2648 for buffer in buffers {
2649 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2650 {
2651 this.unregister_buffer_from_language_servers(
2652 &buffer, &f, cx,
2653 );
2654 buffer
2655 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2656 }
2657 }
2658 })
2659 .ok();
2660 }
2661
2662 project
2663 .update(&mut cx, |project, cx| {
2664 let mut plain_text_buffers = Vec::new();
2665 let mut buffers_with_unknown_injections = Vec::new();
2666 for buffer in project.opened_buffers.values() {
2667 if let Some(handle) = buffer.upgrade() {
2668 let buffer = &handle.read(cx);
2669 if buffer.language().is_none()
2670 || buffer.language() == Some(&*language::PLAIN_TEXT)
2671 {
2672 plain_text_buffers.push(handle);
2673 } else if buffer.contains_unknown_injections() {
2674 buffers_with_unknown_injections.push(handle);
2675 }
2676 }
2677 }
2678
2679 for buffer in plain_text_buffers {
2680 project.detect_language_for_buffer(&buffer, cx);
2681 project.register_buffer_with_language_servers(&buffer, cx);
2682 }
2683
2684 for buffer in buffers_with_unknown_injections {
2685 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2686 }
2687 })
2688 .ok();
2689 }
2690 }
2691 })
2692 }
2693
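    // Pushes updated workspace configuration to every running language server whenever
    // settings change.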
2694 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2695 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2696 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2697
2698 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2699 *settings_changed_tx.borrow_mut() = ();
2700 });
2701
2702 cx.spawn(move |this, mut cx| async move {
2703 while let Some(()) = settings_changed_rx.next().await {
2704 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2705 this.language_servers
2706 .values()
2707 .filter_map(|state| match state {
2708 LanguageServerState::Starting(_) => None,
2709 LanguageServerState::Running {
2710 adapter, server, ..
2711 } => Some((adapter.clone(), server.clone())),
2712 })
2713 .collect()
2714 })?;
2715
2716 for (adapter, server) in servers {
2717 let workspace_config =
2718 cx.update(|cx| adapter.workspace_configuration(server.root_path(), cx))?;
2719 server
2720 .notify::<lsp::notification::DidChangeConfiguration>(
2721 lsp::DidChangeConfigurationParams {
2722 settings: workspace_config.clone(),
2723 },
2724 )
2725 .ok();
2726 }
2727 }
2728
2729 drop(settings_observation);
2730 anyhow::Ok(())
2731 })
2732 }
2733
2734 fn detect_language_for_buffer(
2735 &mut self,
2736 buffer_handle: &Model<Buffer>,
2737 cx: &mut ModelContext<Self>,
2738 ) -> Option<()> {
2739 // If the buffer has a language, set it and start the language server if we haven't already.
2740 let buffer = buffer_handle.read(cx);
2741 let full_path = buffer.file()?.full_path(cx);
2742 let content = buffer.as_rope();
2743 let new_language = self
2744 .languages
2745 .language_for_file(&full_path, Some(content))
2746 .now_or_never()?
2747 .ok()?;
2748 self.set_language_for_buffer(buffer_handle, new_language, cx);
2749 None
2750 }
2751
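    /// Assigns the given language to the buffer if it differs from the current one, installs
    /// default prettier plugins for it, and starts matching language servers for local files.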
2752 pub fn set_language_for_buffer(
2753 &mut self,
2754 buffer: &Model<Buffer>,
2755 new_language: Arc<Language>,
2756 cx: &mut ModelContext<Self>,
2757 ) {
2758 buffer.update(cx, |buffer, cx| {
2759 if buffer.language().map_or(true, |old_language| {
2760 !Arc::ptr_eq(old_language, &new_language)
2761 }) {
2762 buffer.set_language(Some(new_language.clone()), cx);
2763 }
2764 });
2765
2766 let buffer_file = buffer.read(cx).file().cloned();
2767 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2768 let buffer_file = File::from_dyn(buffer_file.as_ref());
2769 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2770 if let Some(prettier_plugins) =
2771 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2772 {
2773 self.install_default_prettier(worktree, prettier_plugins, cx);
2774 };
2775 if let Some(file) = buffer_file {
2776 let worktree = file.worktree.clone();
2777 if worktree.read(cx).is_local() {
2778 self.start_language_servers(&worktree, new_language, cx);
2779 }
2780 }
2781 }
2782
2783 fn start_language_servers(
2784 &mut self,
2785 worktree: &Model<Worktree>,
2786 language: Arc<Language>,
2787 cx: &mut ModelContext<Self>,
2788 ) {
2789 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2790 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2791 if !settings.enable_language_server {
2792 return;
2793 }
2794
2795 for adapter in language.lsp_adapters() {
2796 self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
2797 }
2798 }
2799
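    // Spawns a language server for the given worktree and adapter unless one is already
    // registered for that key, falling back to a reinstall check if startup fails.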
2800 fn start_language_server(
2801 &mut self,
2802 worktree: &Model<Worktree>,
2803 adapter: Arc<CachedLspAdapter>,
2804 language: Arc<Language>,
2805 cx: &mut ModelContext<Self>,
2806 ) {
2807 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2808 return;
2809 }
2810
2811 let worktree = worktree.read(cx);
2812 let worktree_id = worktree.id();
2813 let worktree_path = worktree.abs_path();
2814 let key = (worktree_id, adapter.name.clone());
2815 if self.language_server_ids.contains_key(&key) {
2816 return;
2817 }
2818
2819 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2820 let pending_server = match self.languages.create_pending_language_server(
2821 stderr_capture.clone(),
2822 language.clone(),
2823 adapter.clone(),
2824 Arc::clone(&worktree_path),
2825 ProjectLspAdapterDelegate::new(self, cx),
2826 cx,
2827 ) {
2828 Some(pending_server) => pending_server,
2829 None => return,
2830 };
2831
2832 let project_settings = ProjectSettings::get_global(cx);
2833 let lsp = project_settings.lsp.get(&adapter.name.0);
        let override_options = lsp.and_then(|s| s.initialization_options.clone());
2835
2836 let server_id = pending_server.server_id;
2837 let container_dir = pending_server.container_dir.clone();
2838 let state = LanguageServerState::Starting({
2839 let adapter = adapter.clone();
2840 let server_name = adapter.name.0.clone();
2841 let language = language.clone();
2842 let key = key.clone();
2843
2844 cx.spawn(move |this, mut cx| async move {
2845 let result = Self::setup_and_insert_language_server(
2846 this.clone(),
2847 &worktree_path,
2848 override_options,
2849 pending_server,
2850 adapter.clone(),
2851 language.clone(),
2852 server_id,
2853 key,
2854 &mut cx,
2855 )
2856 .await;
2857
2858 match result {
2859 Ok(server) => {
2860 stderr_capture.lock().take();
2861 server
2862 }
2863
2864 Err(err) => {
2865 log::error!("failed to start language server {server_name:?}: {err}");
2866 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2867
2868 let this = this.upgrade()?;
2869 let container_dir = container_dir?;
2870
2871 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2872 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2873 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2874 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2875 return None;
2876 }
2877
2878 let installation_test_binary = adapter
2879 .installation_test_binary(container_dir.to_path_buf())
2880 .await;
2881
2882 this.update(&mut cx, |_, cx| {
2883 Self::check_errored_server(
2884 language,
2885 adapter,
2886 server_id,
2887 installation_test_binary,
2888 cx,
2889 )
2890 })
2891 .ok();
2892
2893 None
2894 }
2895 }
2896 })
2897 });
2898
2899 self.language_servers.insert(server_id, state);
2900 self.language_server_ids.insert(key, server_id);
2901 }
2902
2903 fn reinstall_language_server(
2904 &mut self,
2905 language: Arc<Language>,
2906 adapter: Arc<CachedLspAdapter>,
2907 server_id: LanguageServerId,
2908 cx: &mut ModelContext<Self>,
2909 ) -> Option<Task<()>> {
2910 log::info!("beginning to reinstall server");
2911
2912 let existing_server = match self.language_servers.remove(&server_id) {
2913 Some(LanguageServerState::Running { server, .. }) => Some(server),
2914 _ => None,
2915 };
2916
2917 for worktree in &self.worktrees {
2918 if let Some(worktree) = worktree.upgrade() {
2919 let key = (worktree.read(cx).id(), adapter.name.clone());
2920 self.language_server_ids.remove(&key);
2921 }
2922 }
2923
2924 Some(cx.spawn(move |this, mut cx| async move {
2925 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2926 log::info!("shutting down existing server");
2927 task.await;
2928 }
2929
            // TODO: This is race-safe with regard to preventing new instances from
            // starting while deleting, but existing instances in other projects are
            // going to be left in a very confused, broken state.
2933 let Some(task) = this
2934 .update(&mut cx, |this, cx| {
2935 this.languages.delete_server_container(adapter.clone(), cx)
2936 })
2937 .log_err()
2938 else {
2939 return;
2940 };
2941 task.await;
2942
2943 this.update(&mut cx, |this, cx| {
2944 let worktrees = this.worktrees.clone();
2945 for worktree in worktrees {
2946 if let Some(worktree) = worktree.upgrade() {
2947 this.start_language_server(
2948 &worktree,
2949 adapter.clone(),
2950 language.clone(),
2951 cx,
2952 );
2953 }
2954 }
2955 })
2956 .ok();
2957 }))
2958 }
2959
2960 async fn setup_and_insert_language_server(
2961 this: WeakModel<Self>,
2962 worktree_path: &Path,
2963 override_initialization_options: Option<serde_json::Value>,
2964 pending_server: PendingLanguageServer,
2965 adapter: Arc<CachedLspAdapter>,
2966 language: Arc<Language>,
2967 server_id: LanguageServerId,
2968 key: (WorktreeId, LanguageServerName),
2969 cx: &mut AsyncAppContext,
2970 ) -> Result<Option<Arc<LanguageServer>>> {
2971 let language_server = Self::setup_pending_language_server(
2972 this.clone(),
2973 override_initialization_options,
2974 pending_server,
2975 worktree_path,
2976 adapter.clone(),
2977 server_id,
2978 cx,
2979 )
2980 .await?;
2981
2982 let this = match this.upgrade() {
2983 Some(this) => this,
2984 None => return Err(anyhow!("failed to upgrade project handle")),
2985 };
2986
2987 this.update(cx, |this, cx| {
2988 this.insert_newly_running_language_server(
2989 language,
2990 adapter,
2991 language_server.clone(),
2992 server_id,
2993 key,
2994 cx,
2995 )
2996 })??;
2997
2998 Ok(Some(language_server))
2999 }
3000
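    // Waits for the pending server to start, installs its notification and request handlers,
    // initializes it, and sends the initial workspace configuration.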
3001 async fn setup_pending_language_server(
3002 this: WeakModel<Self>,
3003 override_options: Option<serde_json::Value>,
3004 pending_server: PendingLanguageServer,
3005 worktree_path: &Path,
3006 adapter: Arc<CachedLspAdapter>,
3007 server_id: LanguageServerId,
3008 cx: &mut AsyncAppContext,
3009 ) -> Result<Arc<LanguageServer>> {
3010 let workspace_config =
3011 cx.update(|cx| adapter.workspace_configuration(worktree_path, cx))?;
3012 let language_server = pending_server.task.await?;
3013
3014 language_server
3015 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3016 let adapter = adapter.clone();
3017 let this = this.clone();
3018 move |mut params, mut cx| {
3019 let adapter = adapter.clone();
3020 if let Some(this) = this.upgrade() {
3021 adapter.process_diagnostics(&mut params);
3022 this.update(&mut cx, |this, cx| {
3023 this.update_diagnostics(
3024 server_id,
3025 params,
3026 &adapter.disk_based_diagnostic_sources,
3027 cx,
3028 )
3029 .log_err();
3030 })
3031 .ok();
3032 }
3033 }
3034 })
3035 .detach();
3036
3037 language_server
3038 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3039 let adapter = adapter.clone();
3040 let worktree_path = worktree_path.to_path_buf();
3041 move |params, cx| {
3042 let adapter = adapter.clone();
3043 let worktree_path = worktree_path.clone();
3044 async move {
3045 let workspace_config =
3046 cx.update(|cx| adapter.workspace_configuration(&worktree_path, cx))?;
3047 Ok(params
3048 .items
3049 .into_iter()
3050 .map(|item| {
3051 if let Some(section) = &item.section {
3052 workspace_config
3053 .get(section)
3054 .cloned()
3055 .unwrap_or(serde_json::Value::Null)
3056 } else {
3057 workspace_config.clone()
3058 }
3059 })
3060 .collect())
3061 }
3062 }
3063 })
3064 .detach();
3065
3066 // Even though we don't have handling for these requests, respond to them to
3067 // avoid stalling any language server like `gopls` which waits for a response
3068 // to these requests when initializing.
3069 language_server
3070 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3071 let this = this.clone();
3072 move |params, mut cx| {
3073 let this = this.clone();
3074 async move {
3075 this.update(&mut cx, |this, _| {
3076 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3077 {
3078 if let lsp::NumberOrString::String(token) = params.token {
3079 status.progress_tokens.insert(token);
3080 }
3081 }
3082 })?;
3083
3084 Ok(())
3085 }
3086 }
3087 })
3088 .detach();
3089
3090 language_server
3091 .on_request::<lsp::request::RegisterCapability, _, _>({
3092 let this = this.clone();
3093 move |params, mut cx| {
3094 let this = this.clone();
3095 async move {
3096 for reg in params.registrations {
3097 if reg.method == "workspace/didChangeWatchedFiles" {
3098 if let Some(options) = reg.register_options {
3099 let options = serde_json::from_value(options)?;
3100 this.update(&mut cx, |this, cx| {
3101 this.on_lsp_did_change_watched_files(
3102 server_id, options, cx,
3103 );
3104 })?;
3105 }
3106 }
3107 }
3108 Ok(())
3109 }
3110 }
3111 })
3112 .detach();
3113
3114 language_server
3115 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3116 let adapter = adapter.clone();
3117 let this = this.clone();
3118 move |params, cx| {
3119 Self::on_lsp_workspace_edit(
3120 this.clone(),
3121 params,
3122 server_id,
3123 adapter.clone(),
3124 cx,
3125 )
3126 }
3127 })
3128 .detach();
3129
3130 language_server
3131 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3132 let this = this.clone();
3133 move |(), mut cx| {
3134 let this = this.clone();
3135 async move {
3136 this.update(&mut cx, |project, cx| {
3137 cx.emit(Event::RefreshInlayHints);
3138 project.remote_id().map(|project_id| {
3139 project.client.send(proto::RefreshInlayHints { project_id })
3140 })
3141 })?
3142 .transpose()?;
3143 Ok(())
3144 }
3145 }
3146 })
3147 .detach();
3148
3149 let disk_based_diagnostics_progress_token =
3150 adapter.disk_based_diagnostics_progress_token.clone();
3151
3152 language_server
3153 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3154 if let Some(this) = this.upgrade() {
3155 this.update(&mut cx, |this, cx| {
3156 this.on_lsp_progress(
3157 params,
3158 server_id,
3159 disk_based_diagnostics_progress_token.clone(),
3160 cx,
3161 );
3162 })
3163 .ok();
3164 }
3165 })
3166 .detach();
3167 let mut initialization_options = adapter.adapter.initialization_options();
3168 match (&mut initialization_options, override_options) {
3169 (Some(initialization_options), Some(override_options)) => {
3170 merge_json_value_into(override_options, initialization_options);
3171 }
3172 (None, override_options) => initialization_options = override_options,
3173 _ => {}
3174 }
3175 let language_server = language_server.initialize(initialization_options).await?;
3176
3177 language_server
3178 .notify::<lsp::notification::DidChangeConfiguration>(
3179 lsp::DidChangeConfigurationParams {
3180 settings: workspace_config,
3181 },
3182 )
3183 .ok();
3184
3185 Ok(language_server)
3186 }
3187
3188 fn insert_newly_running_language_server(
3189 &mut self,
3190 language: Arc<Language>,
3191 adapter: Arc<CachedLspAdapter>,
3192 language_server: Arc<LanguageServer>,
3193 server_id: LanguageServerId,
3194 key: (WorktreeId, LanguageServerName),
3195 cx: &mut ModelContext<Self>,
3196 ) -> Result<()> {
        // If the language server for this key doesn't match the server id, don't store the
        // server, which will cause it to be dropped, killing the process.
3199 if self
3200 .language_server_ids
3201 .get(&key)
3202 .map(|id| id != &server_id)
3203 .unwrap_or(false)
3204 {
3205 return Ok(());
3206 }
3207
        // Update the language_servers collection with the Running variant of LanguageServerState,
        // indicating that the server is up and ready to handle requests.
3210 self.language_servers.insert(
3211 server_id,
3212 LanguageServerState::Running {
3213 adapter: adapter.clone(),
3214 language: language.clone(),
3215 watched_paths: Default::default(),
3216 server: language_server.clone(),
3217 simulate_disk_based_diagnostics_completion: None,
3218 },
3219 );
3220
3221 self.language_server_statuses.insert(
3222 server_id,
3223 LanguageServerStatus {
3224 name: language_server.name().to_string(),
3225 pending_work: Default::default(),
3226 has_pending_diagnostic_updates: false,
3227 progress_tokens: Default::default(),
3228 },
3229 );
3230
3231 cx.emit(Event::LanguageServerAdded(server_id));
3232
3233 if let Some(project_id) = self.remote_id() {
3234 self.client.send(proto::StartLanguageServer {
3235 project_id,
3236 server: Some(proto::LanguageServer {
3237 id: server_id.0 as u64,
3238 name: language_server.name().to_string(),
3239 }),
3240 })?;
3241 }
3242
3243 // Tell the language server about every open buffer in the worktree that matches the language.
3244 for buffer in self.opened_buffers.values() {
3245 if let Some(buffer_handle) = buffer.upgrade() {
3246 let buffer = buffer_handle.read(cx);
3247 let file = match File::from_dyn(buffer.file()) {
3248 Some(file) => file,
3249 None => continue,
3250 };
3251 let language = match buffer.language() {
3252 Some(language) => language,
3253 None => continue,
3254 };
3255
3256 if file.worktree.read(cx).id() != key.0
3257 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3258 {
3259 continue;
3260 }
3261
3262 let file = match file.as_local() {
3263 Some(file) => file,
3264 None => continue,
3265 };
3266
3267 let versions = self
3268 .buffer_snapshots
3269 .entry(buffer.remote_id())
3270 .or_default()
3271 .entry(server_id)
3272 .or_insert_with(|| {
3273 vec![LspBufferSnapshot {
3274 version: 0,
3275 snapshot: buffer.text_snapshot(),
3276 }]
3277 });
3278
3279 let snapshot = versions.last().unwrap();
3280 let version = snapshot.version;
3281 let initial_snapshot = &snapshot.snapshot;
3282 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3283 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3284 lsp::DidOpenTextDocumentParams {
3285 text_document: lsp::TextDocumentItem::new(
3286 uri,
3287 adapter
3288 .language_ids
3289 .get(language.name().as_ref())
3290 .cloned()
3291 .unwrap_or_default(),
3292 version,
3293 initial_snapshot.text(),
3294 ),
3295 },
3296 )?;
3297
3298 buffer_handle.update(cx, |buffer, cx| {
3299 buffer.set_completion_triggers(
3300 language_server
3301 .capabilities()
3302 .completion_provider
3303 .as_ref()
3304 .and_then(|provider| provider.trigger_characters.clone())
3305 .unwrap_or_default(),
3306 cx,
3307 )
3308 });
3309 }
3310 }
3311
3312 cx.notify();
3313 Ok(())
3314 }
3315
    // Stops the language server for the given worktree and adapter, and returns the ids of
    // all worktrees that no longer have a language server as a result.
3318 fn stop_language_server(
3319 &mut self,
3320 worktree_id: WorktreeId,
3321 adapter_name: LanguageServerName,
3322 cx: &mut ModelContext<Self>,
3323 ) -> Task<Vec<WorktreeId>> {
3324 let key = (worktree_id, adapter_name);
3325 if let Some(server_id) = self.language_server_ids.remove(&key) {
3326 log::info!("stopping language server {}", key.1 .0);
3327
3328 // Remove other entries for this language server as well
3329 let mut orphaned_worktrees = vec![worktree_id];
3330 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3331 for other_key in other_keys {
3332 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3333 self.language_server_ids.remove(&other_key);
3334 orphaned_worktrees.push(other_key.0);
3335 }
3336 }
3337
3338 for buffer in self.opened_buffers.values() {
3339 if let Some(buffer) = buffer.upgrade() {
3340 buffer.update(cx, |buffer, cx| {
3341 buffer.update_diagnostics(server_id, Default::default(), cx);
3342 });
3343 }
3344 }
3345 for worktree in &self.worktrees {
3346 if let Some(worktree) = worktree.upgrade() {
3347 worktree.update(cx, |worktree, cx| {
3348 if let Some(worktree) = worktree.as_local_mut() {
3349 worktree.clear_diagnostics_for_language_server(server_id, cx);
3350 }
3351 });
3352 }
3353 }
3354
3355 self.language_server_statuses.remove(&server_id);
3356 cx.notify();
3357
3358 let server_state = self.language_servers.remove(&server_id);
3359 cx.emit(Event::LanguageServerRemoved(server_id));
3360 cx.spawn(move |this, mut cx| async move {
3361 let server = match server_state {
3362 Some(LanguageServerState::Starting(task)) => task.await,
3363 Some(LanguageServerState::Running { server, .. }) => Some(server),
3364 None => None,
3365 };
3366
3367 if let Some(server) = server {
3368 if let Some(shutdown) = server.shutdown() {
3369 shutdown.await;
3370 }
3371 }
3372
3373 if let Some(this) = this.upgrade() {
3374 this.update(&mut cx, |this, cx| {
3375 this.language_server_statuses.remove(&server_id);
3376 cx.notify();
3377 })
3378 .ok();
3379 }
3380
3381 orphaned_worktrees
3382 })
3383 } else {
3384 Task::ready(Vec::new())
3385 }
3386 }
3387
3388 pub fn restart_language_servers_for_buffers(
3389 &mut self,
3390 buffers: impl IntoIterator<Item = Model<Buffer>>,
3391 cx: &mut ModelContext<Self>,
3392 ) -> Option<()> {
3393 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3394 .into_iter()
3395 .filter_map(|buffer| {
3396 let buffer = buffer.read(cx);
3397 let file = File::from_dyn(buffer.file())?;
3398 let full_path = file.full_path(cx);
3399 let language = self
3400 .languages
3401 .language_for_file(&full_path, Some(buffer.as_rope()))
3402 .now_or_never()?
3403 .ok()?;
3404 Some((file.worktree.clone(), language))
3405 })
3406 .collect();
3407 for (worktree, language) in language_server_lookup_info {
3408 self.restart_language_servers(worktree, language, cx);
3409 }
3410
3411 None
3412 }
3413
3414 fn restart_language_servers(
3415 &mut self,
3416 worktree: Model<Worktree>,
3417 language: Arc<Language>,
3418 cx: &mut ModelContext<Self>,
3419 ) {
3420 let worktree_id = worktree.read(cx).id();
3421
3422 let stop_tasks = language
3423 .lsp_adapters()
3424 .iter()
3425 .map(|adapter| {
3426 let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
3427 (stop_task, adapter.name.clone())
3428 })
3429 .collect::<Vec<_>>();
3430 if stop_tasks.is_empty() {
3431 return;
3432 }
3433
3434 cx.spawn(move |this, mut cx| async move {
3435 // For each stopped language server, record all of the worktrees with which
3436 // it was associated.
3437 let mut affected_worktrees = Vec::new();
3438 for (stop_task, language_server_name) in stop_tasks {
3439 for affected_worktree_id in stop_task.await {
3440 affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
3441 }
3442 }
3443
3444 this.update(&mut cx, |this, cx| {
3445 // Restart the language server for the given worktree.
3446 this.start_language_servers(&worktree, language.clone(), cx);
3447
3448 // Lookup new server ids and set them for each of the orphaned worktrees
3449 for (affected_worktree_id, language_server_name) in affected_worktrees {
3450 if let Some(new_server_id) = this
3451 .language_server_ids
3452 .get(&(worktree_id, language_server_name.clone()))
3453 .cloned()
3454 {
3455 this.language_server_ids
3456 .insert((affected_worktree_id, language_server_name), new_server_id);
3457 }
3458 }
3459 })
3460 .ok();
3461 })
3462 .detach();
3463 }
3464
3465 fn check_errored_server(
3466 language: Arc<Language>,
3467 adapter: Arc<CachedLspAdapter>,
3468 server_id: LanguageServerId,
3469 installation_test_binary: Option<LanguageServerBinary>,
3470 cx: &mut ModelContext<Self>,
3471 ) {
3472 if !adapter.can_be_reinstalled() {
3473 log::info!(
3474 "Validation check requested for {:?} but it cannot be reinstalled",
3475 adapter.name.0
3476 );
3477 return;
3478 }
3479
3480 cx.spawn(move |this, mut cx| async move {
3481 log::info!("About to spawn test binary");
3482
3483 // A lack of test binary counts as a failure
3484 let process = installation_test_binary.and_then(|binary| {
3485 smol::process::Command::new(&binary.path)
3486 .current_dir(&binary.path)
3487 .args(binary.arguments)
3488 .stdin(Stdio::piped())
3489 .stdout(Stdio::piped())
3490 .stderr(Stdio::inherit())
3491 .kill_on_drop(true)
3492 .spawn()
3493 .ok()
3494 });
3495
3496 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3497 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3498
3499 let mut errored = false;
3500 if let Some(mut process) = process {
3501 futures::select! {
3502 status = process.status().fuse() => match status {
3503 Ok(status) => errored = !status.success(),
3504 Err(_) => errored = true,
3505 },
3506
3507 _ = timeout => {
                        log::info!("test binary timed out, this counts as a success");
3509 _ = process.kill();
3510 }
3511 }
3512 } else {
3513 log::warn!("test binary failed to launch");
3514 errored = true;
3515 }
3516
3517 if errored {
3518 log::warn!("test binary check failed");
3519 let task = this
3520 .update(&mut cx, move |this, cx| {
3521 this.reinstall_language_server(language, adapter, server_id, cx)
3522 })
3523 .ok()
3524 .flatten();
3525
3526 if let Some(task) = task {
3527 task.await;
3528 }
3529 }
3530 })
3531 .detach();
3532 }
3533
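    // Handles `$/progress` notifications, translating them into work-status updates and
    // disk-based diagnostics events that are also forwarded to collaborators.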
3534 fn on_lsp_progress(
3535 &mut self,
3536 progress: lsp::ProgressParams,
3537 language_server_id: LanguageServerId,
3538 disk_based_diagnostics_progress_token: Option<String>,
3539 cx: &mut ModelContext<Self>,
3540 ) {
3541 let token = match progress.token {
3542 lsp::NumberOrString::String(token) => token,
3543 lsp::NumberOrString::Number(token) => {
3544 log::info!("skipping numeric progress token {}", token);
3545 return;
3546 }
3547 };
3548 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3549 let language_server_status =
3550 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3551 status
3552 } else {
3553 return;
3554 };
3555
3556 if !language_server_status.progress_tokens.contains(&token) {
3557 return;
3558 }
3559
3560 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3561 .as_ref()
3562 .map_or(false, |disk_based_token| {
3563 token.starts_with(disk_based_token)
3564 });
3565
3566 match progress {
3567 lsp::WorkDoneProgress::Begin(report) => {
3568 if is_disk_based_diagnostics_progress {
3569 language_server_status.has_pending_diagnostic_updates = true;
3570 self.disk_based_diagnostics_started(language_server_id, cx);
3571 self.buffer_ordered_messages_tx
3572 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3573 language_server_id,
3574 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3575 })
3576 .ok();
3577 } else {
3578 self.on_lsp_work_start(
3579 language_server_id,
3580 token.clone(),
3581 LanguageServerProgress {
3582 message: report.message.clone(),
3583 percentage: report.percentage.map(|p| p as usize),
3584 last_update_at: Instant::now(),
3585 },
3586 cx,
3587 );
3588 self.buffer_ordered_messages_tx
3589 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3590 language_server_id,
3591 message: proto::update_language_server::Variant::WorkStart(
3592 proto::LspWorkStart {
3593 token,
3594 message: report.message,
3595 percentage: report.percentage.map(|p| p as u32),
3596 },
3597 ),
3598 })
3599 .ok();
3600 }
3601 }
3602 lsp::WorkDoneProgress::Report(report) => {
3603 if !is_disk_based_diagnostics_progress {
3604 self.on_lsp_work_progress(
3605 language_server_id,
3606 token.clone(),
3607 LanguageServerProgress {
3608 message: report.message.clone(),
3609 percentage: report.percentage.map(|p| p as usize),
3610 last_update_at: Instant::now(),
3611 },
3612 cx,
3613 );
3614 self.buffer_ordered_messages_tx
3615 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3616 language_server_id,
3617 message: proto::update_language_server::Variant::WorkProgress(
3618 proto::LspWorkProgress {
3619 token,
3620 message: report.message,
3621 percentage: report.percentage.map(|p| p as u32),
3622 },
3623 ),
3624 })
3625 .ok();
3626 }
3627 }
3628 lsp::WorkDoneProgress::End(_) => {
3629 language_server_status.progress_tokens.remove(&token);
3630
3631 if is_disk_based_diagnostics_progress {
3632 language_server_status.has_pending_diagnostic_updates = false;
3633 self.disk_based_diagnostics_finished(language_server_id, cx);
3634 self.buffer_ordered_messages_tx
3635 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3636 language_server_id,
3637 message:
3638 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3639 Default::default(),
3640 ),
3641 })
3642 .ok();
3643 } else {
3644 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3645 self.buffer_ordered_messages_tx
3646 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3647 language_server_id,
3648 message: proto::update_language_server::Variant::WorkEnd(
3649 proto::LspWorkEnd { token },
3650 ),
3651 })
3652 .ok();
3653 }
3654 }
3655 }
3656 }
3657
3658 fn on_lsp_work_start(
3659 &mut self,
3660 language_server_id: LanguageServerId,
3661 token: String,
3662 progress: LanguageServerProgress,
3663 cx: &mut ModelContext<Self>,
3664 ) {
3665 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3666 status.pending_work.insert(token, progress);
3667 cx.notify();
3668 }
3669 }
3670
3671 fn on_lsp_work_progress(
3672 &mut self,
3673 language_server_id: LanguageServerId,
3674 token: String,
3675 progress: LanguageServerProgress,
3676 cx: &mut ModelContext<Self>,
3677 ) {
3678 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3679 let entry = status
3680 .pending_work
3681 .entry(token)
3682 .or_insert(LanguageServerProgress {
3683 message: Default::default(),
3684 percentage: Default::default(),
3685 last_update_at: progress.last_update_at,
3686 });
3687 if progress.message.is_some() {
3688 entry.message = progress.message;
3689 }
3690 if progress.percentage.is_some() {
3691 entry.percentage = progress.percentage;
3692 }
3693 entry.last_update_at = progress.last_update_at;
3694 cx.notify();
3695 }
3696 }
3697
3698 fn on_lsp_work_end(
3699 &mut self,
3700 language_server_id: LanguageServerId,
3701 token: String,
3702 cx: &mut ModelContext<Self>,
3703 ) {
3704 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3705 cx.emit(Event::RefreshInlayHints);
3706 status.pending_work.remove(&token);
3707 cx.notify();
3708 }
3709 }
3710
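    // Converts the server's `workspace/didChangeWatchedFiles` registrations into per-worktree
    // glob sets and stores them in the running server's `watched_paths`.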
3711 fn on_lsp_did_change_watched_files(
3712 &mut self,
3713 language_server_id: LanguageServerId,
3714 params: DidChangeWatchedFilesRegistrationOptions,
3715 cx: &mut ModelContext<Self>,
3716 ) {
3717 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3718 self.language_servers.get_mut(&language_server_id)
3719 {
3720 let mut builders = HashMap::default();
3721 for watcher in params.watchers {
3722 for worktree in &self.worktrees {
3723 if let Some(worktree) = worktree.upgrade() {
3724 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3725 if let Some(abs_path) = tree.abs_path().to_str() {
3726 let relative_glob_pattern = match &watcher.glob_pattern {
3727 lsp::GlobPattern::String(s) => s
3728 .strip_prefix(abs_path)
3729 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3730 lsp::GlobPattern::Relative(rp) => {
3731 let base_uri = match &rp.base_uri {
3732 lsp::OneOf::Left(workspace_folder) => {
3733 &workspace_folder.uri
3734 }
3735 lsp::OneOf::Right(base_uri) => base_uri,
3736 };
3737 base_uri.to_file_path().ok().and_then(|file_path| {
3738 (file_path.to_str() == Some(abs_path))
3739 .then_some(rp.pattern.as_str())
3740 })
3741 }
3742 };
3743 if let Some(relative_glob_pattern) = relative_glob_pattern {
3744 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
3745 tree.as_local_mut()
3746 .unwrap()
3747 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3748 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3749 builders
3750 .entry(tree.id())
3751 .or_insert_with(|| GlobSetBuilder::new())
3752 .add(glob);
3753 }
3754 return true;
3755 }
3756 }
3757 false
3758 });
3759 if glob_is_inside_worktree {
3760 break;
3761 }
3762 }
3763 }
3764 }
3765
3766 watched_paths.clear();
3767 for (worktree_id, builder) in builders {
3768 if let Ok(globset) = builder.build() {
3769 watched_paths.insert(worktree_id, globset);
3770 }
3771 }
3772
3773 cx.notify();
3774 }
3775 }
3776
3777 async fn on_lsp_workspace_edit(
3778 this: WeakModel<Self>,
3779 params: lsp::ApplyWorkspaceEditParams,
3780 server_id: LanguageServerId,
3781 adapter: Arc<CachedLspAdapter>,
3782 mut cx: AsyncAppContext,
3783 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3784 let this = this
3785 .upgrade()
            .ok_or_else(|| anyhow!("project closed"))?;
3787 let language_server = this
3788 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3789 .ok_or_else(|| anyhow!("language server not found"))?;
3790 let transaction = Self::deserialize_workspace_edit(
3791 this.clone(),
3792 params.edit,
3793 true,
3794 adapter.clone(),
3795 language_server.clone(),
3796 &mut cx,
3797 )
3798 .await
3799 .log_err();
3800 this.update(&mut cx, |this, _| {
3801 if let Some(transaction) = transaction {
3802 this.last_workspace_edits_by_language_server
3803 .insert(server_id, transaction);
3804 }
3805 })?;
3806 Ok(lsp::ApplyWorkspaceEditResponse {
3807 applied: true,
3808 failed_change: None,
3809 failure_reason: None,
3810 })
3811 }
3812
3813 pub fn language_server_statuses(
3814 &self,
3815 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3816 self.language_server_statuses.values()
3817 }
3818
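    /// Converts a `textDocument/publishDiagnostics` notification into Zed's diagnostic
    /// entries: each primary diagnostic starts a new group, its related information is
    /// added to that group as secondary entries, and severities reported by supporting
    /// diagnostics are folded back into the matching secondary entries before the set
    /// is stored for the file's path.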
3819 pub fn update_diagnostics(
3820 &mut self,
3821 language_server_id: LanguageServerId,
3822 mut params: lsp::PublishDiagnosticsParams,
3823 disk_based_sources: &[String],
3824 cx: &mut ModelContext<Self>,
3825 ) -> Result<()> {
3826 let abs_path = params
3827 .uri
3828 .to_file_path()
3829 .map_err(|_| anyhow!("URI is not a file"))?;
3830 let mut diagnostics = Vec::default();
3831 let mut primary_diagnostic_group_ids = HashMap::default();
3832 let mut sources_by_group_id = HashMap::default();
3833 let mut supporting_diagnostics = HashMap::default();
3834
3835 // Ensure that primary diagnostics are always the most severe
3836 params.diagnostics.sort_by_key(|item| item.severity);
3837
        for diagnostic in &params.diagnostics {
3839 let source = diagnostic.source.as_ref();
3840 let code = diagnostic.code.as_ref().map(|code| match code {
3841 lsp::NumberOrString::Number(code) => code.to_string(),
3842 lsp::NumberOrString::String(code) => code.clone(),
3843 });
3844 let range = range_from_lsp(diagnostic.range);
3845 let is_supporting = diagnostic
3846 .related_information
3847 .as_ref()
3848 .map_or(false, |infos| {
3849 infos.iter().any(|info| {
3850 primary_diagnostic_group_ids.contains_key(&(
3851 source,
3852 code.clone(),
3853 range_from_lsp(info.location.range),
3854 ))
3855 })
3856 });
3857
3858 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3859 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3860 });
3861
3862 if is_supporting {
3863 supporting_diagnostics.insert(
3864 (source, code.clone(), range),
3865 (diagnostic.severity, is_unnecessary),
3866 );
3867 } else {
3868 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3869 let is_disk_based =
3870 source.map_or(false, |source| disk_based_sources.contains(source));
3871
3872 sources_by_group_id.insert(group_id, source);
3873 primary_diagnostic_group_ids
3874 .insert((source, code.clone(), range.clone()), group_id);
3875
3876 diagnostics.push(DiagnosticEntry {
3877 range,
3878 diagnostic: Diagnostic {
3879 source: diagnostic.source.clone(),
3880 code: code.clone(),
3881 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3882 message: diagnostic.message.trim().to_string(),
3883 group_id,
3884 is_primary: true,
3885 is_disk_based,
3886 is_unnecessary,
3887 },
3888 });
3889 if let Some(infos) = &diagnostic.related_information {
3890 for info in infos {
3891 if info.location.uri == params.uri && !info.message.is_empty() {
3892 let range = range_from_lsp(info.location.range);
3893 diagnostics.push(DiagnosticEntry {
3894 range,
3895 diagnostic: Diagnostic {
3896 source: diagnostic.source.clone(),
3897 code: code.clone(),
3898 severity: DiagnosticSeverity::INFORMATION,
3899 message: info.message.trim().to_string(),
3900 group_id,
3901 is_primary: false,
3902 is_disk_based,
3903 is_unnecessary: false,
3904 },
3905 });
3906 }
3907 }
3908 }
3909 }
3910 }
3911
3912 for entry in &mut diagnostics {
3913 let diagnostic = &mut entry.diagnostic;
3914 if !diagnostic.is_primary {
3915 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3916 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3917 source,
3918 diagnostic.code.clone(),
3919 entry.range.clone(),
3920 )) {
3921 if let Some(severity) = severity {
3922 diagnostic.severity = severity;
3923 }
3924 diagnostic.is_unnecessary = is_unnecessary;
3925 }
3926 }
3927 }
3928
3929 self.update_diagnostic_entries(
3930 language_server_id,
3931 abs_path,
3932 params.version,
3933 diagnostics,
3934 cx,
3935 )?;
3936 Ok(())
3937 }
3938
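    /// Stores a new set of diagnostics for a file: updates the open buffer for that
    /// path (if any), records the entries on the local worktree, and emits
    /// `Event::DiagnosticsUpdated` when the worktree reports a change.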
3939 pub fn update_diagnostic_entries(
3940 &mut self,
3941 server_id: LanguageServerId,
3942 abs_path: PathBuf,
3943 version: Option<i32>,
3944 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3945 cx: &mut ModelContext<Project>,
3946 ) -> Result<(), anyhow::Error> {
3947 let (worktree, relative_path) = self
3948 .find_local_worktree(&abs_path, cx)
3949 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3950
3951 let project_path = ProjectPath {
3952 worktree_id: worktree.read(cx).id(),
3953 path: relative_path.into(),
3954 };
3955
3956 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3957 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3958 }
3959
3960 let updated = worktree.update(cx, |worktree, cx| {
3961 worktree
3962 .as_local_mut()
3963 .ok_or_else(|| anyhow!("not a local worktree"))?
3964 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3965 })?;
3966 if updated {
3967 cx.emit(Event::DiagnosticsUpdated {
3968 language_server_id: server_id,
3969 path: project_path,
3970 });
3971 }
3972 Ok(())
3973 }
3974
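    /// Applies diagnostics to an open buffer, translating disk-based diagnostics
    /// through any unsaved edits, clipping their ranges to valid positions, and
    /// expanding empty ranges by one codepoint.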
3975 fn update_buffer_diagnostics(
3976 &mut self,
3977 buffer: &Model<Buffer>,
3978 server_id: LanguageServerId,
3979 version: Option<i32>,
3980 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3981 cx: &mut ModelContext<Self>,
3982 ) -> Result<()> {
3983 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3984 Ordering::Equal
3985 .then_with(|| b.is_primary.cmp(&a.is_primary))
3986 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3987 .then_with(|| a.severity.cmp(&b.severity))
3988 .then_with(|| a.message.cmp(&b.message))
3989 }
3990
3991 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3992
3993 diagnostics.sort_unstable_by(|a, b| {
3994 Ordering::Equal
3995 .then_with(|| a.range.start.cmp(&b.range.start))
3996 .then_with(|| b.range.end.cmp(&a.range.end))
3997 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3998 });
3999
4000 let mut sanitized_diagnostics = Vec::new();
4001 let edits_since_save = Patch::new(
4002 snapshot
4003 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4004 .collect(),
4005 );
4006 for entry in diagnostics {
4007 let start;
4008 let end;
4009 if entry.diagnostic.is_disk_based {
4010 // Some diagnostics are based on files on disk instead of buffers'
4011 // current contents. Adjust these diagnostics' ranges to reflect
4012 // any unsaved edits.
4013 start = edits_since_save.old_to_new(entry.range.start);
4014 end = edits_since_save.old_to_new(entry.range.end);
4015 } else {
4016 start = entry.range.start;
4017 end = entry.range.end;
4018 }
4019
4020 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4021 ..snapshot.clip_point_utf16(end, Bias::Right);
4022
4023 // Expand empty ranges by one codepoint
4024 if range.start == range.end {
                // This will go to the next boundary when clipped
4026 range.end.column += 1;
4027 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4028 if range.start == range.end && range.end.column > 0 {
4029 range.start.column -= 1;
4030 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
4031 }
4032 }
4033
4034 sanitized_diagnostics.push(DiagnosticEntry {
4035 range,
4036 diagnostic: entry.diagnostic,
4037 });
4038 }
4039 drop(edits_since_save);
4040
4041 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4042 buffer.update(cx, |buffer, cx| {
4043 buffer.update_diagnostics(server_id, set, cx)
4044 });
4045 Ok(())
4046 }
4047
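    /// Reloads the given buffers from disk, skipping buffers that aren't dirty. Local
    /// buffers are reloaded directly; remote buffers are reloaded through a
    /// `ReloadBuffers` request to the host project.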
4048 pub fn reload_buffers(
4049 &self,
4050 buffers: HashSet<Model<Buffer>>,
4051 push_to_history: bool,
4052 cx: &mut ModelContext<Self>,
4053 ) -> Task<Result<ProjectTransaction>> {
4054 let mut local_buffers = Vec::new();
4055 let mut remote_buffers = None;
4056 for buffer_handle in buffers {
4057 let buffer = buffer_handle.read(cx);
4058 if buffer.is_dirty() {
4059 if let Some(file) = File::from_dyn(buffer.file()) {
4060 if file.is_local() {
4061 local_buffers.push(buffer_handle);
4062 } else {
4063 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4064 }
4065 }
4066 }
4067 }
4068
4069 let remote_buffers = self.remote_id().zip(remote_buffers);
4070 let client = self.client.clone();
4071
4072 cx.spawn(move |this, mut cx| async move {
4073 let mut project_transaction = ProjectTransaction::default();
4074
4075 if let Some((project_id, remote_buffers)) = remote_buffers {
4076 let response = client
4077 .request(proto::ReloadBuffers {
4078 project_id,
4079 buffer_ids: remote_buffers
4080 .iter()
4081 .filter_map(|buffer| {
4082 buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
4083 })
4084 .collect(),
4085 })
4086 .await?
4087 .transaction
4088 .ok_or_else(|| anyhow!("missing transaction"))?;
4089 project_transaction = this
4090 .update(&mut cx, |this, cx| {
4091 this.deserialize_project_transaction(response, push_to_history, cx)
4092 })?
4093 .await?;
4094 }
4095
4096 for buffer in local_buffers {
4097 let transaction = buffer
4098 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4099 .await?;
4100 buffer.update(&mut cx, |buffer, cx| {
4101 if let Some(transaction) = transaction {
4102 if !push_to_history {
4103 buffer.forget_transaction(transaction.id);
4104 }
4105 project_transaction.0.insert(cx.handle(), transaction);
4106 }
4107 })?;
4108 }
4109
4110 Ok(project_transaction)
4111 })
4112 }
4113
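    /// Formats the given buffers. Locally, whitespace fixups (trailing-whitespace
    /// removal and final-newline insertion) are applied first in their own transaction,
    /// then the configured formatter (language server, external command, or prettier)
    /// runs, and both steps are grouped into a single undo entry. Remote projects
    /// forward a `FormatBuffers` request to the host.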
4114 pub fn format(
4115 &mut self,
4116 buffers: HashSet<Model<Buffer>>,
4117 push_to_history: bool,
4118 trigger: FormatTrigger,
4119 cx: &mut ModelContext<Project>,
4120 ) -> Task<anyhow::Result<ProjectTransaction>> {
4121 if self.is_local() {
4122 let mut buffers_with_paths_and_servers = buffers
4123 .into_iter()
4124 .filter_map(|buffer_handle| {
4125 let buffer = buffer_handle.read(cx);
4126 let file = File::from_dyn(buffer.file())?;
4127 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4128 let server = self
4129 .primary_language_server_for_buffer(buffer, cx)
4130 .map(|s| s.1.clone());
4131 Some((buffer_handle, buffer_abs_path, server))
4132 })
4133 .collect::<Vec<_>>();
4134
4135 cx.spawn(move |project, mut cx| async move {
4136 // Do not allow multiple concurrent formatting requests for the
4137 // same buffer.
4138 project.update(&mut cx, |this, cx| {
4139 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4140 this.buffers_being_formatted
4141 .insert(buffer.read(cx).remote_id())
4142 });
4143 })?;
4144
4145 let _cleanup = defer({
4146 let this = project.clone();
4147 let mut cx = cx.clone();
4148 let buffers = &buffers_with_paths_and_servers;
4149 move || {
4150 this.update(&mut cx, |this, cx| {
4151 for (buffer, _, _) in buffers {
4152 this.buffers_being_formatted
4153 .remove(&buffer.read(cx).remote_id());
4154 }
4155 })
4156 .ok();
4157 }
4158 });
4159
4160 let mut project_transaction = ProjectTransaction::default();
4161 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4162 let settings = buffer.update(&mut cx, |buffer, cx| {
4163 language_settings(buffer.language(), buffer.file(), cx).clone()
4164 })?;
4165
4166 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4167 let ensure_final_newline = settings.ensure_final_newline_on_save;
4168 let tab_size = settings.tab_size;
4169
4170 // First, format buffer's whitespace according to the settings.
4171 let trailing_whitespace_diff = if remove_trailing_whitespace {
4172 Some(
4173 buffer
4174 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4175 .await,
4176 )
4177 } else {
4178 None
4179 };
4180 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4181 buffer.finalize_last_transaction();
4182 buffer.start_transaction();
4183 if let Some(diff) = trailing_whitespace_diff {
4184 buffer.apply_diff(diff, cx);
4185 }
4186 if ensure_final_newline {
4187 buffer.ensure_final_newline(cx);
4188 }
4189 buffer.end_transaction(cx)
4190 })?;
4191
4192 // Apply language-specific formatting using either a language server
4193 // or external command.
4194 let mut format_operation = None;
4195 match (&settings.formatter, &settings.format_on_save) {
4196 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4197
4198 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4199 | (_, FormatOnSave::LanguageServer) => {
4200 if let Some((language_server, buffer_abs_path)) =
4201 language_server.as_ref().zip(buffer_abs_path.as_ref())
4202 {
4203 format_operation = Some(FormatOperation::Lsp(
4204 Self::format_via_lsp(
4205 &project,
4206 buffer,
4207 buffer_abs_path,
4208 language_server,
4209 tab_size,
4210 &mut cx,
4211 )
4212 .await
4213 .context("failed to format via language server")?,
4214 ));
4215 }
4216 }
4217
4218 (
4219 Formatter::External { command, arguments },
4220 FormatOnSave::On | FormatOnSave::Off,
4221 )
4222 | (_, FormatOnSave::External { command, arguments }) => {
4223 if let Some(buffer_abs_path) = buffer_abs_path {
4224 format_operation = Self::format_via_external_command(
4225 buffer,
4226 buffer_abs_path,
4227 command,
4228 arguments,
4229 &mut cx,
4230 )
4231 .await
4232 .context(format!(
4233 "failed to format via external command {:?}",
4234 command
4235 ))?
4236 .map(FormatOperation::External);
4237 }
4238 }
4239 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4240 if let Some(new_operation) =
4241 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4242 .await
4243 {
4244 format_operation = Some(new_operation);
4245 } else if let Some((language_server, buffer_abs_path)) =
4246 language_server.as_ref().zip(buffer_abs_path.as_ref())
4247 {
4248 format_operation = Some(FormatOperation::Lsp(
4249 Self::format_via_lsp(
4250 &project,
4251 buffer,
4252 buffer_abs_path,
4253 language_server,
4254 tab_size,
4255 &mut cx,
4256 )
4257 .await
4258 .context("failed to format via language server")?,
4259 ));
4260 }
4261 }
4262 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4263 if let Some(new_operation) =
4264 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4265 .await
4266 {
4267 format_operation = Some(new_operation);
4268 }
4269 }
4270 };
4271
4272 buffer.update(&mut cx, |b, cx| {
4273 // If the buffer had its whitespace formatted and was edited while the language-specific
4274 // formatting was being computed, avoid applying the language-specific formatting, because
4275 // it can't be grouped with the whitespace formatting in the undo history.
4276 if let Some(transaction_id) = whitespace_transaction_id {
4277 if b.peek_undo_stack()
4278 .map_or(true, |e| e.transaction_id() != transaction_id)
4279 {
4280 format_operation.take();
4281 }
4282 }
4283
4284 // Apply any language-specific formatting, and group the two formatting operations
4285 // in the buffer's undo history.
4286 if let Some(operation) = format_operation {
4287 match operation {
4288 FormatOperation::Lsp(edits) => {
4289 b.edit(edits, None, cx);
4290 }
4291 FormatOperation::External(diff) => {
4292 b.apply_diff(diff, cx);
4293 }
4294 FormatOperation::Prettier(diff) => {
4295 b.apply_diff(diff, cx);
4296 }
4297 }
4298
4299 if let Some(transaction_id) = whitespace_transaction_id {
4300 b.group_until_transaction(transaction_id);
4301 }
4302 }
4303
4304 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4305 if !push_to_history {
4306 b.forget_transaction(transaction.id);
4307 }
4308 project_transaction.0.insert(buffer.clone(), transaction);
4309 }
4310 })?;
4311 }
4312
4313 Ok(project_transaction)
4314 })
4315 } else {
4316 let remote_id = self.remote_id();
4317 let client = self.client.clone();
4318 cx.spawn(move |this, mut cx| async move {
4319 let mut project_transaction = ProjectTransaction::default();
4320 if let Some(project_id) = remote_id {
4321 let response = client
4322 .request(proto::FormatBuffers {
4323 project_id,
4324 trigger: trigger as i32,
4325 buffer_ids: buffers
4326 .iter()
4327 .map(|buffer| {
4328 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4329 })
4330 .collect::<Result<_>>()?,
4331 })
4332 .await?
4333 .transaction
4334 .ok_or_else(|| anyhow!("missing transaction"))?;
4335 project_transaction = this
4336 .update(&mut cx, |this, cx| {
4337 this.deserialize_project_transaction(response, push_to_history, cx)
4338 })?
4339 .await?;
4340 }
4341 Ok(project_transaction)
4342 })
4343 }
4344 }
4345
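    /// Requests formatting edits from a language server, preferring
    /// `textDocument/formatting` and falling back to `textDocument/rangeFormatting`
    /// over the whole buffer when only range formatting is supported.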
4346 async fn format_via_lsp(
4347 this: &WeakModel<Self>,
4348 buffer: &Model<Buffer>,
4349 abs_path: &Path,
4350 language_server: &Arc<LanguageServer>,
4351 tab_size: NonZeroU32,
4352 cx: &mut AsyncAppContext,
4353 ) -> Result<Vec<(Range<Anchor>, String)>> {
4354 let uri = lsp::Url::from_file_path(abs_path)
4355 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4356 let text_document = lsp::TextDocumentIdentifier::new(uri);
4357 let capabilities = &language_server.capabilities();
4358
4359 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4360 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4361
4362 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4363 language_server
4364 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4365 text_document,
4366 options: lsp_command::lsp_formatting_options(tab_size.get()),
4367 work_done_progress_params: Default::default(),
4368 })
4369 .await?
4370 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4371 let buffer_start = lsp::Position::new(0, 0);
4372 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4373
4374 language_server
4375 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4376 text_document,
4377 range: lsp::Range::new(buffer_start, buffer_end),
4378 options: lsp_command::lsp_formatting_options(tab_size.get()),
4379 work_done_progress_params: Default::default(),
4380 })
4381 .await?
4382 } else {
4383 None
4384 };
4385
4386 if let Some(lsp_edits) = lsp_edits {
4387 this.update(cx, |this, cx| {
4388 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4389 })?
4390 .await
4391 } else {
4392 Ok(Vec::new())
4393 }
4394 }
4395
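    /// Formats a buffer by piping its contents through an external command run in the
    /// buffer's worktree directory. Occurrences of `{buffer_path}` in the arguments are
    /// replaced with the buffer's absolute path, and the command's stdout is diffed
    /// against the current buffer contents.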
4396 async fn format_via_external_command(
4397 buffer: &Model<Buffer>,
4398 buffer_abs_path: &Path,
4399 command: &str,
4400 arguments: &[String],
4401 cx: &mut AsyncAppContext,
4402 ) -> Result<Option<Diff>> {
4403 let working_dir_path = buffer.update(cx, |buffer, cx| {
4404 let file = File::from_dyn(buffer.file())?;
4405 let worktree = file.worktree.read(cx).as_local()?;
4406 let mut worktree_path = worktree.abs_path().to_path_buf();
4407 if worktree.root_entry()?.is_file() {
4408 worktree_path.pop();
4409 }
4410 Some(worktree_path)
4411 })?;
4412
4413 if let Some(working_dir_path) = working_dir_path {
4414 let mut child =
4415 smol::process::Command::new(command)
4416 .args(arguments.iter().map(|arg| {
4417 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4418 }))
4419 .current_dir(&working_dir_path)
4420 .stdin(smol::process::Stdio::piped())
4421 .stdout(smol::process::Stdio::piped())
4422 .stderr(smol::process::Stdio::piped())
4423 .spawn()?;
4424 let stdin = child
4425 .stdin
4426 .as_mut()
4427 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4428 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4429 for chunk in text.chunks() {
4430 stdin.write_all(chunk.as_bytes()).await?;
4431 }
4432 stdin.flush().await?;
4433
4434 let output = child.output().await?;
4435 if !output.status.success() {
4436 return Err(anyhow!(
4437 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4438 output.status.code(),
4439 String::from_utf8_lossy(&output.stdout),
4440 String::from_utf8_lossy(&output.stderr),
4441 ));
4442 }
4443
4444 let stdout = String::from_utf8(output.stdout)?;
4445 Ok(Some(
4446 buffer
4447 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4448 .await,
4449 ))
4450 } else {
4451 Ok(None)
4452 }
4453 }
4454
4455 pub fn definition<T: ToPointUtf16>(
4456 &self,
4457 buffer: &Model<Buffer>,
4458 position: T,
4459 cx: &mut ModelContext<Self>,
4460 ) -> Task<Result<Vec<LocationLink>>> {
4461 let position = position.to_point_utf16(buffer.read(cx));
4462 self.request_lsp(
4463 buffer.clone(),
4464 LanguageServerToQuery::Primary,
4465 GetDefinition { position },
4466 cx,
4467 )
4468 }
4469
4470 pub fn type_definition<T: ToPointUtf16>(
4471 &self,
4472 buffer: &Model<Buffer>,
4473 position: T,
4474 cx: &mut ModelContext<Self>,
4475 ) -> Task<Result<Vec<LocationLink>>> {
4476 let position = position.to_point_utf16(buffer.read(cx));
4477 self.request_lsp(
4478 buffer.clone(),
4479 LanguageServerToQuery::Primary,
4480 GetTypeDefinition { position },
4481 cx,
4482 )
4483 }
4484
4485 pub fn references<T: ToPointUtf16>(
4486 &self,
4487 buffer: &Model<Buffer>,
4488 position: T,
4489 cx: &mut ModelContext<Self>,
4490 ) -> Task<Result<Vec<Location>>> {
4491 let position = position.to_point_utf16(buffer.read(cx));
4492 self.request_lsp(
4493 buffer.clone(),
4494 LanguageServerToQuery::Primary,
4495 GetReferences { position },
4496 cx,
4497 )
4498 }
4499
4500 pub fn document_highlights<T: ToPointUtf16>(
4501 &self,
4502 buffer: &Model<Buffer>,
4503 position: T,
4504 cx: &mut ModelContext<Self>,
4505 ) -> Task<Result<Vec<DocumentHighlight>>> {
4506 let position = position.to_point_utf16(buffer.read(cx));
4507 self.request_lsp(
4508 buffer.clone(),
4509 LanguageServerToQuery::Primary,
4510 GetDocumentHighlights { position },
4511 cx,
4512 )
4513 }
4514
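    /// Queries every running language server for workspace symbols matching `query`,
    /// resolving each result to a project path and a language-appropriate label.
    /// Remote projects forward the query to the host via `GetProjectSymbols`.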
4515 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4516 if self.is_local() {
4517 let mut requests = Vec::new();
4518 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4519 let worktree_id = *worktree_id;
4520 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4521 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4522 Some(worktree) => worktree,
4523 None => continue,
4524 };
4525 let worktree_abs_path = worktree.abs_path().clone();
4526
4527 let (adapter, language, server) = match self.language_servers.get(server_id) {
4528 Some(LanguageServerState::Running {
4529 adapter,
4530 language,
4531 server,
4532 ..
4533 }) => (adapter.clone(), language.clone(), server),
4534
4535 _ => continue,
4536 };
4537
4538 requests.push(
4539 server
4540 .request::<lsp::request::WorkspaceSymbolRequest>(
4541 lsp::WorkspaceSymbolParams {
4542 query: query.to_string(),
4543 ..Default::default()
4544 },
4545 )
4546 .log_err()
4547 .map(move |response| {
4548 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4549 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4550 flat_responses.into_iter().map(|lsp_symbol| {
4551 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4552 }).collect::<Vec<_>>()
4553 }
4554 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4555 nested_responses.into_iter().filter_map(|lsp_symbol| {
4556 let location = match lsp_symbol.location {
4557 OneOf::Left(location) => location,
4558 OneOf::Right(_) => {
4559 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4560 return None
4561 }
4562 };
4563 Some((lsp_symbol.name, lsp_symbol.kind, location))
4564 }).collect::<Vec<_>>()
4565 }
4566 }).unwrap_or_default();
4567
4568 (
4569 adapter,
4570 language,
4571 worktree_id,
4572 worktree_abs_path,
4573 lsp_symbols,
4574 )
4575 }),
4576 );
4577 }
4578
4579 cx.spawn(move |this, mut cx| async move {
4580 let responses = futures::future::join_all(requests).await;
4581 let this = match this.upgrade() {
4582 Some(this) => this,
4583 None => return Ok(Vec::new()),
4584 };
4585
4586 let symbols = this.update(&mut cx, |this, cx| {
4587 let mut symbols = Vec::new();
4588 for (
4589 adapter,
4590 adapter_language,
4591 source_worktree_id,
4592 worktree_abs_path,
4593 lsp_symbols,
4594 ) in responses
4595 {
4596 symbols.extend(lsp_symbols.into_iter().filter_map(
4597 |(symbol_name, symbol_kind, symbol_location)| {
4598 let abs_path = symbol_location.uri.to_file_path().ok()?;
4599 let mut worktree_id = source_worktree_id;
4600 let path;
4601 if let Some((worktree, rel_path)) =
4602 this.find_local_worktree(&abs_path, cx)
4603 {
4604 worktree_id = worktree.read(cx).id();
4605 path = rel_path;
4606 } else {
4607 path = relativize_path(&worktree_abs_path, &abs_path);
4608 }
4609
4610 let project_path = ProjectPath {
4611 worktree_id,
4612 path: path.into(),
4613 };
4614 let signature = this.symbol_signature(&project_path);
4615 let adapter_language = adapter_language.clone();
4616 let language = this
4617 .languages
4618 .language_for_file(&project_path.path, None)
4619 .unwrap_or_else(move |_| adapter_language);
4620 let language_server_name = adapter.name.clone();
4621 Some(async move {
4622 let language = language.await;
4623 let label =
4624 language.label_for_symbol(&symbol_name, symbol_kind).await;
4625
4626 Symbol {
4627 language_server_name,
4628 source_worktree_id,
4629 path: project_path,
4630 label: label.unwrap_or_else(|| {
4631 CodeLabel::plain(symbol_name.clone(), None)
4632 }),
4633 kind: symbol_kind,
4634 name: symbol_name,
4635 range: range_from_lsp(symbol_location.range),
4636 signature,
4637 }
4638 })
4639 },
4640 ));
4641 }
4642
4643 symbols
4644 })?;
4645
4646 Ok(futures::future::join_all(symbols).await)
4647 })
4648 } else if let Some(project_id) = self.remote_id() {
4649 let request = self.client.request(proto::GetProjectSymbols {
4650 project_id,
4651 query: query.to_string(),
4652 });
4653 cx.spawn(move |this, mut cx| async move {
4654 let response = request.await?;
4655 let mut symbols = Vec::new();
4656 if let Some(this) = this.upgrade() {
4657 let new_symbols = this.update(&mut cx, |this, _| {
4658 response
4659 .symbols
4660 .into_iter()
4661 .map(|symbol| this.deserialize_symbol(symbol))
4662 .collect::<Vec<_>>()
4663 })?;
4664 symbols = futures::future::join_all(new_symbols)
4665 .await
4666 .into_iter()
4667 .filter_map(|symbol| symbol.log_err())
4668 .collect::<Vec<_>>();
4669 }
4670 Ok(symbols)
4671 })
4672 } else {
4673 Task::ready(Ok(Default::default()))
4674 }
4675 }
4676
4677 pub fn open_buffer_for_symbol(
4678 &mut self,
4679 symbol: &Symbol,
4680 cx: &mut ModelContext<Self>,
4681 ) -> Task<Result<Model<Buffer>>> {
4682 if self.is_local() {
4683 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4684 symbol.source_worktree_id,
4685 symbol.language_server_name.clone(),
4686 )) {
4687 *id
4688 } else {
4689 return Task::ready(Err(anyhow!(
4690 "language server for worktree and language not found"
4691 )));
4692 };
4693
4694 let worktree_abs_path = if let Some(worktree_abs_path) = self
4695 .worktree_for_id(symbol.path.worktree_id, cx)
4696 .and_then(|worktree| worktree.read(cx).as_local())
4697 .map(|local_worktree| local_worktree.abs_path())
4698 {
4699 worktree_abs_path
4700 } else {
4701 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4702 };
4703
4704 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
4705 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4706 uri
4707 } else {
4708 return Task::ready(Err(anyhow!("invalid symbol path")));
4709 };
4710
4711 self.open_local_buffer_via_lsp(
4712 symbol_uri,
4713 language_server_id,
4714 symbol.language_server_name.clone(),
4715 cx,
4716 )
4717 } else if let Some(project_id) = self.remote_id() {
4718 let request = self.client.request(proto::OpenBufferForSymbol {
4719 project_id,
4720 symbol: Some(serialize_symbol(symbol)),
4721 });
4722 cx.spawn(move |this, mut cx| async move {
4723 let response = request.await?;
4724 this.update(&mut cx, |this, cx| {
4725 this.wait_for_remote_buffer(response.buffer_id, cx)
4726 })?
4727 .await
4728 })
4729 } else {
4730 Task::ready(Err(anyhow!("project does not have a remote id")))
4731 }
4732 }
4733
4734 pub fn hover<T: ToPointUtf16>(
4735 &self,
4736 buffer: &Model<Buffer>,
4737 position: T,
4738 cx: &mut ModelContext<Self>,
4739 ) -> Task<Result<Option<Hover>>> {
4740 let position = position.to_point_utf16(buffer.read(cx));
4741 self.request_lsp(
4742 buffer.clone(),
4743 LanguageServerToQuery::Primary,
4744 GetHover { position },
4745 cx,
4746 )
4747 }
4748
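    /// Requests completions at the given position from every language server that is
    /// registered for the buffer, advertises a completion provider, and is allowed by
    /// the language scope at that position, concatenating all of the results.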
4749 pub fn completions<T: ToOffset + ToPointUtf16>(
4750 &self,
4751 buffer: &Model<Buffer>,
4752 position: T,
4753 cx: &mut ModelContext<Self>,
4754 ) -> Task<Result<Vec<Completion>>> {
4755 let position = position.to_point_utf16(buffer.read(cx));
4756 if self.is_local() {
4757 let snapshot = buffer.read(cx).snapshot();
4758 let offset = position.to_offset(&snapshot);
4759 let scope = snapshot.language_scope_at(offset);
4760
4761 let server_ids: Vec<_> = self
4762 .language_servers_for_buffer(buffer.read(cx), cx)
4763 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4764 .filter(|(adapter, _)| {
4765 scope
4766 .as_ref()
4767 .map(|scope| scope.language_allowed(&adapter.name))
4768 .unwrap_or(true)
4769 })
4770 .map(|(_, server)| server.server_id())
4771 .collect();
4772
4773 let buffer = buffer.clone();
4774 cx.spawn(move |this, mut cx| async move {
4775 let mut tasks = Vec::with_capacity(server_ids.len());
4776 this.update(&mut cx, |this, cx| {
4777 for server_id in server_ids {
4778 tasks.push(this.request_lsp(
4779 buffer.clone(),
4780 LanguageServerToQuery::Other(server_id),
4781 GetCompletions { position },
4782 cx,
4783 ));
4784 }
4785 })?;
4786
4787 let mut completions = Vec::new();
4788 for task in tasks {
4789 if let Ok(new_completions) = task.await {
4790 completions.extend_from_slice(&new_completions);
4791 }
4792 }
4793
4794 Ok(completions)
4795 })
4796 } else if let Some(project_id) = self.remote_id() {
4797 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4798 } else {
4799 Task::ready(Ok(Default::default()))
4800 }
4801 }
4802
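    /// Resolves documentation for the completions at the given indices, skipping
    /// entries that already have documentation. Returns whether any resolution work
    /// was performed.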
4803 pub fn resolve_completions(
4804 &self,
4805 completion_indices: Vec<usize>,
4806 completions: Arc<RwLock<Box<[Completion]>>>,
4807 cx: &mut ModelContext<Self>,
4808 ) -> Task<Result<bool>> {
4809 let client = self.client();
4810 let language_registry = self.languages().clone();
4811
4812 let is_remote = self.is_remote();
4813 let project_id = self.remote_id();
4814
4815 cx.spawn(move |this, mut cx| async move {
4816 let mut did_resolve = false;
4817 if is_remote {
4818 let project_id =
4819 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
4820
4821 for completion_index in completion_indices {
4822 let completions_guard = completions.read();
4823 let completion = &completions_guard[completion_index];
4824 if completion.documentation.is_some() {
4825 continue;
4826 }
4827
4828 did_resolve = true;
4829 let server_id = completion.server_id;
4830 let completion = completion.lsp_completion.clone();
4831 drop(completions_guard);
4832
4833 Self::resolve_completion_documentation_remote(
4834 project_id,
4835 server_id,
4836 completions.clone(),
4837 completion_index,
4838 completion,
4839 client.clone(),
4840 language_registry.clone(),
4841 )
4842 .await;
4843 }
4844 } else {
4845 for completion_index in completion_indices {
4846 let completions_guard = completions.read();
4847 let completion = &completions_guard[completion_index];
4848 if completion.documentation.is_some() {
4849 continue;
4850 }
4851
4852 let server_id = completion.server_id;
4853 let completion = completion.lsp_completion.clone();
4854 drop(completions_guard);
4855
4856 let server = this
4857 .read_with(&mut cx, |project, _| {
4858 project.language_server_for_id(server_id)
4859 })
4860 .ok()
4861 .flatten();
4862 let Some(server) = server else {
4863 continue;
4864 };
4865
4866 did_resolve = true;
4867 Self::resolve_completion_documentation_local(
4868 server,
4869 completions.clone(),
4870 completion_index,
4871 completion,
4872 language_registry.clone(),
4873 )
4874 .await;
4875 }
4876 }
4877
4878 Ok(did_resolve)
4879 })
4880 }
4881
4882 async fn resolve_completion_documentation_local(
4883 server: Arc<lsp::LanguageServer>,
4884 completions: Arc<RwLock<Box<[Completion]>>>,
4885 completion_index: usize,
4886 completion: lsp::CompletionItem,
4887 language_registry: Arc<LanguageRegistry>,
4888 ) {
4889 let can_resolve = server
4890 .capabilities()
4891 .completion_provider
4892 .as_ref()
4893 .and_then(|options| options.resolve_provider)
4894 .unwrap_or(false);
4895 if !can_resolve {
4896 return;
4897 }
4898
4899 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
4900 let Some(completion_item) = request.await.log_err() else {
4901 return;
4902 };
4903
4904 if let Some(lsp_documentation) = completion_item.documentation {
4905 let documentation = language::prepare_completion_documentation(
4906 &lsp_documentation,
4907 &language_registry,
4908 None, // TODO: Try to reasonably work out which language the completion is for
4909 )
4910 .await;
4911
4912 let mut completions = completions.write();
4913 let completion = &mut completions[completion_index];
4914 completion.documentation = Some(documentation);
4915 } else {
4916 let mut completions = completions.write();
4917 let completion = &mut completions[completion_index];
4918 completion.documentation = Some(Documentation::Undocumented);
4919 }
4920 }
4921
4922 async fn resolve_completion_documentation_remote(
4923 project_id: u64,
4924 server_id: LanguageServerId,
4925 completions: Arc<RwLock<Box<[Completion]>>>,
4926 completion_index: usize,
4927 completion: lsp::CompletionItem,
4928 client: Arc<Client>,
4929 language_registry: Arc<LanguageRegistry>,
4930 ) {
4931 let request = proto::ResolveCompletionDocumentation {
4932 project_id,
4933 language_server_id: server_id.0 as u64,
4934 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
4935 };
4936
4937 let Some(response) = client
4938 .request(request)
4939 .await
4940 .context("completion documentation resolve proto request")
4941 .log_err()
4942 else {
4943 return;
4944 };
4945
        if response.text.is_empty() {
            let mut completions = completions.write();
            let completion = &mut completions[completion_index];
            completion.documentation = Some(Documentation::Undocumented);
            return;
        }
4951
4952 let documentation = if response.is_markdown {
4953 Documentation::MultiLineMarkdown(
4954 markdown::parse_markdown(&response.text, &language_registry, None).await,
4955 )
4956 } else if response.text.lines().count() <= 1 {
4957 Documentation::SingleLine(response.text)
4958 } else {
4959 Documentation::MultiLinePlainText(response.text)
4960 };
4961
4962 let mut completions = completions.write();
4963 let completion = &mut completions[completion_index];
4964 completion.documentation = Some(documentation);
4965 }
4966
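    /// Applies a completion's `additional_text_edits`, resolving the completion item
    /// first when the server supports `completionItem/resolve`. Edits that overlap the
    /// primary completion range are skipped.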
4967 pub fn apply_additional_edits_for_completion(
4968 &self,
4969 buffer_handle: Model<Buffer>,
4970 completion: Completion,
4971 push_to_history: bool,
4972 cx: &mut ModelContext<Self>,
4973 ) -> Task<Result<Option<Transaction>>> {
4974 let buffer = buffer_handle.read(cx);
4975 let buffer_id = buffer.remote_id();
4976
4977 if self.is_local() {
4978 let server_id = completion.server_id;
4979 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4980 Some((_, server)) => server.clone(),
4981 _ => return Task::ready(Ok(Default::default())),
4982 };
4983
4984 cx.spawn(move |this, mut cx| async move {
4985 let can_resolve = lang_server
4986 .capabilities()
4987 .completion_provider
4988 .as_ref()
4989 .and_then(|options| options.resolve_provider)
4990 .unwrap_or(false);
4991 let additional_text_edits = if can_resolve {
4992 lang_server
4993 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4994 .await?
4995 .additional_text_edits
4996 } else {
4997 completion.lsp_completion.additional_text_edits
4998 };
4999 if let Some(edits) = additional_text_edits {
5000 let edits = this
5001 .update(&mut cx, |this, cx| {
5002 this.edits_from_lsp(
5003 &buffer_handle,
5004 edits,
5005 lang_server.server_id(),
5006 None,
5007 cx,
5008 )
5009 })?
5010 .await?;
5011
5012 buffer_handle.update(&mut cx, |buffer, cx| {
5013 buffer.finalize_last_transaction();
5014 buffer.start_transaction();
5015
5016 for (range, text) in edits {
5017 let primary = &completion.old_range;
5018 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5019 && primary.end.cmp(&range.start, buffer).is_ge();
5020 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5021 && range.end.cmp(&primary.end, buffer).is_ge();
5022
                        // Skip additional edits which overlap with the primary completion edit
                        // https://github.com/zed-industries/zed/pull/1871
5025 if !start_within && !end_within {
5026 buffer.edit([(range, text)], None, cx);
5027 }
5028 }
5029
5030 let transaction = if buffer.end_transaction(cx).is_some() {
5031 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5032 if !push_to_history {
5033 buffer.forget_transaction(transaction.id);
5034 }
5035 Some(transaction)
5036 } else {
5037 None
5038 };
5039 Ok(transaction)
5040 })?
5041 } else {
5042 Ok(None)
5043 }
5044 })
5045 } else if let Some(project_id) = self.remote_id() {
5046 let client = self.client.clone();
5047 cx.spawn(move |_, mut cx| async move {
5048 let response = client
5049 .request(proto::ApplyCompletionAdditionalEdits {
5050 project_id,
5051 buffer_id,
5052 completion: Some(language::proto::serialize_completion(&completion)),
5053 })
5054 .await?;
5055
5056 if let Some(transaction) = response.transaction {
5057 let transaction = language::proto::deserialize_transaction(transaction)?;
5058 buffer_handle
5059 .update(&mut cx, |buffer, _| {
5060 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5061 })?
5062 .await?;
5063 if push_to_history {
5064 buffer_handle.update(&mut cx, |buffer, _| {
5065 buffer.push_transaction(transaction.clone(), Instant::now());
5066 })?;
5067 }
5068 Ok(Some(transaction))
5069 } else {
5070 Ok(None)
5071 }
5072 })
5073 } else {
5074 Task::ready(Err(anyhow!("project does not have a remote id")))
5075 }
5076 }
5077
5078 pub fn code_actions<T: Clone + ToOffset>(
5079 &self,
5080 buffer_handle: &Model<Buffer>,
5081 range: Range<T>,
5082 cx: &mut ModelContext<Self>,
5083 ) -> Task<Result<Vec<CodeAction>>> {
5084 let buffer = buffer_handle.read(cx);
5085 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5086 self.request_lsp(
5087 buffer_handle.clone(),
5088 LanguageServerToQuery::Primary,
5089 GetCodeActions { range },
5090 cx,
5091 )
5092 }
5093
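    /// Applies a code action: the action is resolved (or re-requested) if needed, its
    /// workspace edit is applied when present, and otherwise its command is executed,
    /// returning whatever workspace edits the server sent back while the command ran.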
5094 pub fn apply_code_action(
5095 &self,
5096 buffer_handle: Model<Buffer>,
5097 mut action: CodeAction,
5098 push_to_history: bool,
5099 cx: &mut ModelContext<Self>,
5100 ) -> Task<Result<ProjectTransaction>> {
5101 if self.is_local() {
5102 let buffer = buffer_handle.read(cx);
5103 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5104 self.language_server_for_buffer(buffer, action.server_id, cx)
5105 {
5106 (adapter.clone(), server.clone())
5107 } else {
5108 return Task::ready(Ok(Default::default()));
5109 };
5110 let range = action.range.to_point_utf16(buffer);
5111
5112 cx.spawn(move |this, mut cx| async move {
5113 if let Some(lsp_range) = action
5114 .lsp_action
5115 .data
5116 .as_mut()
5117 .and_then(|d| d.get_mut("codeActionParams"))
5118 .and_then(|d| d.get_mut("range"))
5119 {
5120 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
5121 action.lsp_action = lang_server
5122 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
5123 .await?;
5124 } else {
5125 let actions = this
5126 .update(&mut cx, |this, cx| {
5127 this.code_actions(&buffer_handle, action.range, cx)
5128 })?
5129 .await?;
5130 action.lsp_action = actions
5131 .into_iter()
5132 .find(|a| a.lsp_action.title == action.lsp_action.title)
5133 .ok_or_else(|| anyhow!("code action is outdated"))?
5134 .lsp_action;
5135 }
5136
5137 if let Some(edit) = action.lsp_action.edit {
5138 if edit.changes.is_some() || edit.document_changes.is_some() {
5139 return Self::deserialize_workspace_edit(
5140 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5141 edit,
5142 push_to_history,
5143 lsp_adapter.clone(),
5144 lang_server.clone(),
5145 &mut cx,
5146 )
5147 .await;
5148 }
5149 }
5150
5151 if let Some(command) = action.lsp_action.command {
5152 this.update(&mut cx, |this, _| {
5153 this.last_workspace_edits_by_language_server
5154 .remove(&lang_server.server_id());
5155 })?;
5156
5157 let result = lang_server
5158 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5159 command: command.command,
5160 arguments: command.arguments.unwrap_or_default(),
5161 ..Default::default()
5162 })
5163 .await;
5164
5165 if let Err(err) = result {
5166 // TODO: LSP ERROR
5167 return Err(err);
5168 }
5169
5170 return Ok(this.update(&mut cx, |this, _| {
5171 this.last_workspace_edits_by_language_server
5172 .remove(&lang_server.server_id())
5173 .unwrap_or_default()
5174 })?);
5175 }
5176
5177 Ok(ProjectTransaction::default())
5178 })
5179 } else if let Some(project_id) = self.remote_id() {
5180 let client = self.client.clone();
5181 let request = proto::ApplyCodeAction {
5182 project_id,
5183 buffer_id: buffer_handle.read(cx).remote_id(),
5184 action: Some(language::proto::serialize_code_action(&action)),
5185 };
5186 cx.spawn(move |this, mut cx| async move {
5187 let response = client
5188 .request(request)
5189 .await?
5190 .transaction
5191 .ok_or_else(|| anyhow!("missing transaction"))?;
5192 this.update(&mut cx, |this, cx| {
5193 this.deserialize_project_transaction(response, push_to_history, cx)
5194 })?
5195 .await
5196 })
5197 } else {
5198 Task::ready(Err(anyhow!("project does not have a remote id")))
5199 }
5200 }
5201
5202 fn apply_on_type_formatting(
5203 &self,
5204 buffer: Model<Buffer>,
5205 position: Anchor,
5206 trigger: String,
5207 cx: &mut ModelContext<Self>,
5208 ) -> Task<Result<Option<Transaction>>> {
5209 if self.is_local() {
5210 cx.spawn(move |this, mut cx| async move {
5211 // Do not allow multiple concurrent formatting requests for the
5212 // same buffer.
5213 this.update(&mut cx, |this, cx| {
5214 this.buffers_being_formatted
5215 .insert(buffer.read(cx).remote_id())
5216 })?;
5217
5218 let _cleanup = defer({
5219 let this = this.clone();
5220 let mut cx = cx.clone();
5221 let closure_buffer = buffer.clone();
5222 move || {
5223 this.update(&mut cx, |this, cx| {
5224 this.buffers_being_formatted
5225 .remove(&closure_buffer.read(cx).remote_id());
5226 })
5227 .ok();
5228 }
5229 });
5230
5231 buffer
5232 .update(&mut cx, |buffer, _| {
5233 buffer.wait_for_edits(Some(position.timestamp))
5234 })?
5235 .await?;
5236 this.update(&mut cx, |this, cx| {
5237 let position = position.to_point_utf16(buffer.read(cx));
5238 this.on_type_format(buffer, position, trigger, false, cx)
5239 })?
5240 .await
5241 })
5242 } else if let Some(project_id) = self.remote_id() {
5243 let client = self.client.clone();
5244 let request = proto::OnTypeFormatting {
5245 project_id,
5246 buffer_id: buffer.read(cx).remote_id(),
5247 position: Some(serialize_anchor(&position)),
5248 trigger,
5249 version: serialize_version(&buffer.read(cx).version()),
5250 };
5251 cx.spawn(move |_, _| async move {
5252 client
5253 .request(request)
5254 .await?
5255 .transaction
5256 .map(language::proto::deserialize_transaction)
5257 .transpose()
5258 })
5259 } else {
5260 Task::ready(Err(anyhow!("project does not have a remote id")))
5261 }
5262 }
5263
5264 async fn deserialize_edits(
5265 this: Model<Self>,
5266 buffer_to_edit: Model<Buffer>,
5267 edits: Vec<lsp::TextEdit>,
5268 push_to_history: bool,
5269 _: Arc<CachedLspAdapter>,
5270 language_server: Arc<LanguageServer>,
5271 cx: &mut AsyncAppContext,
5272 ) -> Result<Option<Transaction>> {
5273 let edits = this
5274 .update(cx, |this, cx| {
5275 this.edits_from_lsp(
5276 &buffer_to_edit,
5277 edits,
5278 language_server.server_id(),
5279 None,
5280 cx,
5281 )
5282 })?
5283 .await?;
5284
5285 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5286 buffer.finalize_last_transaction();
5287 buffer.start_transaction();
5288 for (range, text) in edits {
5289 buffer.edit([(range, text)], None, cx);
5290 }
5291
5292 if buffer.end_transaction(cx).is_some() {
5293 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5294 if !push_to_history {
5295 buffer.forget_transaction(transaction.id);
5296 }
5297 Some(transaction)
5298 } else {
5299 None
5300 }
5301 })?;
5302
5303 Ok(transaction)
5304 }
5305
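    /// Applies an LSP `WorkspaceEdit` to the project: resource operations (create,
    /// rename, delete) go through the fs, text edits are applied to the corresponding
    /// buffers, and every resulting transaction is collected into a
    /// `ProjectTransaction`.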
5306 async fn deserialize_workspace_edit(
5307 this: Model<Self>,
5308 edit: lsp::WorkspaceEdit,
5309 push_to_history: bool,
5310 lsp_adapter: Arc<CachedLspAdapter>,
5311 language_server: Arc<LanguageServer>,
5312 cx: &mut AsyncAppContext,
5313 ) -> Result<ProjectTransaction> {
5314 let fs = this.update(cx, |this, _| this.fs.clone())?;
5315 let mut operations = Vec::new();
5316 if let Some(document_changes) = edit.document_changes {
5317 match document_changes {
5318 lsp::DocumentChanges::Edits(edits) => {
5319 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5320 }
5321 lsp::DocumentChanges::Operations(ops) => operations = ops,
5322 }
5323 } else if let Some(changes) = edit.changes {
5324 operations.extend(changes.into_iter().map(|(uri, edits)| {
5325 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5326 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5327 uri,
5328 version: None,
5329 },
5330 edits: edits.into_iter().map(OneOf::Left).collect(),
5331 })
5332 }));
5333 }
5334
5335 let mut project_transaction = ProjectTransaction::default();
5336 for operation in operations {
5337 match operation {
5338 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5339 let abs_path = op
5340 .uri
5341 .to_file_path()
5342 .map_err(|_| anyhow!("can't convert URI to path"))?;
5343
5344 if let Some(parent_path) = abs_path.parent() {
5345 fs.create_dir(parent_path).await?;
5346 }
5347 if abs_path.ends_with("/") {
5348 fs.create_dir(&abs_path).await?;
5349 } else {
5350 fs.create_file(
5351 &abs_path,
5352 op.options
5353 .map(|options| fs::CreateOptions {
5354 overwrite: options.overwrite.unwrap_or(false),
5355 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5356 })
5357 .unwrap_or_default(),
5358 )
5359 .await?;
5360 }
5361 }
5362
5363 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5364 let source_abs_path = op
5365 .old_uri
5366 .to_file_path()
5367 .map_err(|_| anyhow!("can't convert URI to path"))?;
5368 let target_abs_path = op
5369 .new_uri
5370 .to_file_path()
5371 .map_err(|_| anyhow!("can't convert URI to path"))?;
5372 fs.rename(
5373 &source_abs_path,
5374 &target_abs_path,
5375 op.options
5376 .map(|options| fs::RenameOptions {
5377 overwrite: options.overwrite.unwrap_or(false),
5378 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5379 })
5380 .unwrap_or_default(),
5381 )
5382 .await?;
5383 }
5384
5385 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5386 let abs_path = op
5387 .uri
5388 .to_file_path()
5389 .map_err(|_| anyhow!("can't convert URI to path"))?;
5390 let options = op
5391 .options
5392 .map(|options| fs::RemoveOptions {
5393 recursive: options.recursive.unwrap_or(false),
5394 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5395 })
5396 .unwrap_or_default();
5397 if abs_path.ends_with("/") {
5398 fs.remove_dir(&abs_path, options).await?;
5399 } else {
5400 fs.remove_file(&abs_path, options).await?;
5401 }
5402 }
5403
5404 lsp::DocumentChangeOperation::Edit(op) => {
5405 let buffer_to_edit = this
5406 .update(cx, |this, cx| {
5407 this.open_local_buffer_via_lsp(
5408 op.text_document.uri,
5409 language_server.server_id(),
5410 lsp_adapter.name.clone(),
5411 cx,
5412 )
5413 })?
5414 .await?;
5415
5416 let edits = this
5417 .update(cx, |this, cx| {
5418 let edits = op.edits.into_iter().map(|edit| match edit {
5419 OneOf::Left(edit) => edit,
5420 OneOf::Right(edit) => edit.text_edit,
5421 });
5422 this.edits_from_lsp(
5423 &buffer_to_edit,
5424 edits,
5425 language_server.server_id(),
5426 op.text_document.version,
5427 cx,
5428 )
5429 })?
5430 .await?;
5431
5432 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5433 buffer.finalize_last_transaction();
5434 buffer.start_transaction();
5435 for (range, text) in edits {
5436 buffer.edit([(range, text)], None, cx);
5437 }
5438 let transaction = if buffer.end_transaction(cx).is_some() {
5439 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5440 if !push_to_history {
5441 buffer.forget_transaction(transaction.id);
5442 }
5443 Some(transaction)
5444 } else {
5445 None
5446 };
5447
5448 transaction
5449 })?;
5450 if let Some(transaction) = transaction {
5451 project_transaction.0.insert(buffer_to_edit, transaction);
5452 }
5453 }
5454 }
5455 }
5456
5457 Ok(project_transaction)
5458 }
5459
5460 pub fn prepare_rename<T: ToPointUtf16>(
5461 &self,
5462 buffer: Model<Buffer>,
5463 position: T,
5464 cx: &mut ModelContext<Self>,
5465 ) -> Task<Result<Option<Range<Anchor>>>> {
5466 let position = position.to_point_utf16(buffer.read(cx));
5467 self.request_lsp(
5468 buffer,
5469 LanguageServerToQuery::Primary,
5470 PrepareRename { position },
5471 cx,
5472 )
5473 }
5474
5475 pub fn perform_rename<T: ToPointUtf16>(
5476 &self,
5477 buffer: Model<Buffer>,
5478 position: T,
5479 new_name: String,
5480 push_to_history: bool,
5481 cx: &mut ModelContext<Self>,
5482 ) -> Task<Result<ProjectTransaction>> {
5483 let position = position.to_point_utf16(buffer.read(cx));
5484 self.request_lsp(
5485 buffer,
5486 LanguageServerToQuery::Primary,
5487 PerformRename {
5488 position,
5489 new_name,
5490 push_to_history,
5491 },
5492 cx,
5493 )
5494 }
5495
5496 pub fn on_type_format<T: ToPointUtf16>(
5497 &self,
5498 buffer: Model<Buffer>,
5499 position: T,
5500 trigger: String,
5501 push_to_history: bool,
5502 cx: &mut ModelContext<Self>,
5503 ) -> Task<Result<Option<Transaction>>> {
5504 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5505 let position = position.to_point_utf16(buffer);
5506 (
5507 position,
5508 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5509 .tab_size,
5510 )
5511 });
5512 self.request_lsp(
5513 buffer.clone(),
5514 LanguageServerToQuery::Primary,
5515 OnTypeFormatting {
5516 position,
5517 trigger,
5518 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5519 push_to_history,
5520 },
5521 cx,
5522 )
5523 }
5524
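    /// Requests inlay hints for the given buffer range, waiting for the edits that
    /// produced the range's anchors before issuing the LSP request. Remote projects
    /// send a proto request to the host instead.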
5525 pub fn inlay_hints<T: ToOffset>(
5526 &self,
5527 buffer_handle: Model<Buffer>,
5528 range: Range<T>,
5529 cx: &mut ModelContext<Self>,
5530 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5531 let buffer = buffer_handle.read(cx);
5532 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5533 let range_start = range.start;
5534 let range_end = range.end;
5535 let buffer_id = buffer.remote_id();
5536 let buffer_version = buffer.version().clone();
5537 let lsp_request = InlayHints { range };
5538
5539 if self.is_local() {
5540 let lsp_request_task = self.request_lsp(
5541 buffer_handle.clone(),
5542 LanguageServerToQuery::Primary,
5543 lsp_request,
5544 cx,
5545 );
5546 cx.spawn(move |_, mut cx| async move {
5547 buffer_handle
5548 .update(&mut cx, |buffer, _| {
5549 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5550 })?
5551 .await
5552 .context("waiting for inlay hint request range edits")?;
5553 lsp_request_task.await.context("inlay hints LSP request")
5554 })
5555 } else if let Some(project_id) = self.remote_id() {
5556 let client = self.client.clone();
5557 let request = proto::InlayHints {
5558 project_id,
5559 buffer_id,
5560 start: Some(serialize_anchor(&range_start)),
5561 end: Some(serialize_anchor(&range_end)),
5562 version: serialize_version(&buffer_version),
5563 };
5564 cx.spawn(move |project, cx| async move {
5565 let response = client
5566 .request(request)
5567 .await
5568 .context("inlay hints proto request")?;
5569 let hints_request_result = LspCommand::response_from_proto(
5570 lsp_request,
5571 response,
5572 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5573 buffer_handle.clone(),
5574 cx,
5575 )
5576 .await;
5577
5578 hints_request_result.context("inlay hints proto response conversion")
5579 })
5580 } else {
5581 Task::ready(Err(anyhow!("project does not have a remote id")))
5582 }
5583 }
5584
5585 pub fn resolve_inlay_hint(
5586 &self,
5587 hint: InlayHint,
5588 buffer_handle: Model<Buffer>,
5589 server_id: LanguageServerId,
5590 cx: &mut ModelContext<Self>,
5591 ) -> Task<anyhow::Result<InlayHint>> {
5592 if self.is_local() {
5593 let buffer = buffer_handle.read(cx);
5594 let (_, lang_server) = if let Some((adapter, server)) =
5595 self.language_server_for_buffer(buffer, server_id, cx)
5596 {
5597 (adapter.clone(), server.clone())
5598 } else {
5599 return Task::ready(Ok(hint));
5600 };
5601 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5602 return Task::ready(Ok(hint));
5603 }
5604
5605 let buffer_snapshot = buffer.snapshot();
5606 cx.spawn(move |_, mut cx| async move {
5607 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5608 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5609 );
5610 let resolved_hint = resolve_task
5611 .await
5612 .context("inlay hint resolve LSP request")?;
5613 let resolved_hint = InlayHints::lsp_to_project_hint(
5614 resolved_hint,
5615 &buffer_handle,
5616 server_id,
5617 ResolveState::Resolved,
5618 false,
5619 &mut cx,
5620 )
5621 .await?;
5622 Ok(resolved_hint)
5623 })
5624 } else if let Some(project_id) = self.remote_id() {
5625 let client = self.client.clone();
5626 let request = proto::ResolveInlayHint {
5627 project_id,
5628 buffer_id: buffer_handle.read(cx).remote_id(),
5629 language_server_id: server_id.0 as u64,
5630 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5631 };
5632 cx.spawn(move |_, _| async move {
5633 let response = client
5634 .request(request)
5635 .await
5636 .context("inlay hints proto request")?;
5637 match response.hint {
5638 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5639 .context("inlay hints proto resolve response conversion"),
5640 None => Ok(hint),
5641 }
5642 })
5643 } else {
5644 Task::ready(Err(anyhow!("project does not have a remote id")))
5645 }
5646 }
5647
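    /// Searches the project for `query`. Local projects run the multi-phase search in
    /// `search_local`; remote projects forward the query to the host and stream the
    /// reported ranges back per buffer.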
5648 #[allow(clippy::type_complexity)]
5649 pub fn search(
5650 &self,
5651 query: SearchQuery,
5652 cx: &mut ModelContext<Self>,
5653 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5654 if self.is_local() {
5655 self.search_local(query, cx)
5656 } else if let Some(project_id) = self.remote_id() {
5657 let (tx, rx) = smol::channel::unbounded();
5658 let request = self.client.request(query.to_proto(project_id));
5659 cx.spawn(move |this, mut cx| async move {
5660 let response = request.await?;
5661 let mut result = HashMap::default();
5662 for location in response.locations {
5663 let target_buffer = this
5664 .update(&mut cx, |this, cx| {
5665 this.wait_for_remote_buffer(location.buffer_id, cx)
5666 })?
5667 .await?;
5668 let start = location
5669 .start
5670 .and_then(deserialize_anchor)
5671 .ok_or_else(|| anyhow!("missing target start"))?;
5672 let end = location
5673 .end
5674 .and_then(deserialize_anchor)
5675 .ok_or_else(|| anyhow!("missing target end"))?;
5676 result
5677 .entry(target_buffer)
5678 .or_insert(Vec::new())
5679 .push(start..end)
5680 }
5681 for (buffer, ranges) in result {
5682 let _ = tx.send((buffer, ranges)).await;
5683 }
5684 Result::<(), anyhow::Error>::Ok(())
5685 })
5686 .detach_and_log_err(cx);
5687 rx
5688 } else {
5689 unimplemented!();
5690 }
5691 }
5692
5693 pub fn search_local(
5694 &self,
5695 query: SearchQuery,
5696 cx: &mut ModelContext<Self>,
5697 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
        // Local search is split into several phases.
        // TL;DR is that we do 2 passes; an initial pass to pick files which contain at least one match,
        // and a second pass that finds the positions of all the matches within those candidate files.
        // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
        //
        // It gets a bit hairy though, because we must account for files that do not have a persistent representation
        // on the FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan those too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
        //    Then we go through the worktrees and check for files that match the query. If a file has an opened version, we skip scanning
        //    its FS version altogether - after all, what we have in memory is more up-to-date than what's on the FS.
        // 2. At this point, we have a list of all potentially matching buffers/files.
        //    We sort that list by buffer path - this list is retained for later use.
        //    We ensure that all buffers are now opened and available in the project.
        // 3. We run a scan over all the candidate buffers on multiple background threads.
        //    We cannot assume that there will even be a match - while at least one match
        //    is guaranteed for files obtained from the FS, the buffers we got from memory (unsaved files / unnamed buffers) might not have a match at all.
        //    There is also an auxiliary background thread responsible for result gathering.
        //    This is where the sorted list of buffers comes into play to maintain sorted order: whenever this background thread receives a notification
        //    (buffer has/doesn't have matches), it records it. It reports matches in sorted order, though it accepts them in unsorted order as well.
        //    As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
        //    entry - which might already be available thanks to out-of-order processing.
        //
        // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // That however would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
        // This isn't as straightforward as running an insertion sort, sadly, and would also mean that it would have to care about maintaining a match index
        // in the face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since the matches are already reported in sorted order.
5726 let snapshots = self
5727 .visible_worktrees(cx)
5728 .filter_map(|tree| {
5729 let tree = tree.read(cx).as_local()?;
5730 Some(tree.snapshot())
5731 })
5732 .collect::<Vec<_>>();
5733
5734 let background = cx.background_executor().clone();
5735 let path_count: usize = snapshots
5736 .iter()
5737 .map(|s| {
5738 if query.include_ignored() {
5739 s.file_count()
5740 } else {
5741 s.visible_file_count()
5742 }
5743 })
5744 .sum();
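        // With no candidate paths, return a receiver whose sender is dropped immediately,
        // so callers observe an empty, closed channel.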
5745 if path_count == 0 {
5746 let (_, rx) = smol::channel::bounded(1024);
5747 return rx;
5748 }
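        // Never spawn more workers than there are candidate paths.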
5749 let workers = background.num_cpus().min(path_count);
5750 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5751 let mut unnamed_files = vec![];
5752 let opened_buffers = self
5753 .opened_buffers
5754 .iter()
5755 .filter_map(|(_, b)| {
5756 let buffer = b.upgrade()?;
5757 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5758 let is_ignored = buffer
5759 .project_path(cx)
5760 .and_then(|path| self.entry_for_path(&path, cx))
5761 .map_or(false, |entry| entry.is_ignored);
5762 (is_ignored, buffer.snapshot())
5763 });
5764 if is_ignored && !query.include_ignored() {
5765 return None;
5766 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5767 Some((path.clone(), (buffer, snapshot)))
5768 } else {
5769 unnamed_files.push(buffer);
5770 None
5771 }
5772 })
5773 .collect();
5774 cx.background_executor()
5775 .spawn(Self::background_search(
5776 unnamed_files,
5777 opened_buffers,
5778 cx.background_executor().clone(),
5779 self.fs.clone(),
5780 workers,
5781 query.clone(),
5782 path_count,
5783 snapshots,
5784 matching_paths_tx,
5785 ))
5786 .detach();
5787
5788 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5789 let background = cx.background_executor().clone();
5790 let (result_tx, result_rx) = smol::channel::bounded(1024);
5791 cx.background_executor()
5792 .spawn(async move {
5793 let Ok(buffers) = buffers.await else {
5794 return;
5795 };
5796
5797 let buffers_len = buffers.len();
5798 if buffers_len == 0 {
5799 return;
5800 }
5801 let query = &query;
5802 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
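                // Each worker drains candidate buffers from `buffers_rx`, searches their snapshots,
                // and reports its (possibly empty) matches tagged with the candidate index so the
                // reporting task below can restore the sorted order.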
5803 background
5804 .scoped(|scope| {
5805 #[derive(Clone)]
5806 struct FinishedStatus {
5807 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
5808 buffer_index: SearchMatchCandidateIndex,
5809 }
5810
5811 for _ in 0..workers {
5812 let finished_tx = finished_tx.clone();
5813 let mut buffers_rx = buffers_rx.clone();
5814 scope.spawn(async move {
5815 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5816 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5817 {
5818 if query.file_matches(
5819 snapshot.file().map(|file| file.path().as_ref()),
5820 ) {
5821 query
5822 .search(snapshot, None)
5823 .await
5824 .iter()
5825 .map(|range| {
5826 snapshot.anchor_before(range.start)
5827 ..snapshot.anchor_after(range.end)
5828 })
5829 .collect()
5830 } else {
5831 Vec::new()
5832 }
5833 } else {
5834 Vec::new()
5835 };
5836
5837 let status = if !buffer_matches.is_empty() {
5838 let entry = if let Some((buffer, _)) = entry.as_ref() {
5839 Some((buffer.clone(), buffer_matches))
5840 } else {
5841 None
5842 };
5843 FinishedStatus {
5844 entry,
5845 buffer_index,
5846 }
5847 } else {
5848 FinishedStatus {
5849 entry: None,
5850 buffer_index,
5851 }
5852 };
5853 if finished_tx.send(status).await.is_err() {
5854 break;
5855 }
5856 }
5857 });
5858 }
5859 // Report sorted matches
5860 scope.spawn(async move {
5861 let mut current_index = 0;
5862 let mut scratch = vec![None; buffers_len];
5863 while let Some(status) = finished_rx.next().await {
5864 debug_assert!(
5865 scratch[status.buffer_index].is_none(),
5866 "Got match status of position {} twice",
5867 status.buffer_index
5868 );
5869 let index = status.buffer_index;
5870 scratch[index] = Some(status);
5871 while current_index < buffers_len {
5872 let Some(current_entry) = scratch[current_index].take() else {
5873 // We intentionally **do not** increment `current_index` here. When next element arrives
5874 // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
5875 // this time.
5876 break;
5877 };
5878 if let Some(entry) = current_entry.entry {
5879 result_tx.send(entry).await.log_err();
5880 }
5881 current_index += 1;
5882 }
5883 if current_index == buffers_len {
5884 break;
5885 }
5886 }
5887 });
5888 })
5889 .await;
5890 })
5891 .detach();
5892 result_rx
5893 }
5894
    /// Pick the paths that might contain a match for the given search query.
5896 async fn background_search(
5897 unnamed_buffers: Vec<Model<Buffer>>,
5898 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
5899 executor: BackgroundExecutor,
5900 fs: Arc<dyn Fs>,
5901 workers: usize,
5902 query: SearchQuery,
5903 path_count: usize,
5904 snapshots: Vec<LocalSnapshot>,
5905 matching_paths_tx: Sender<SearchMatchCandidate>,
5906 ) {
5907 let fs = &fs;
5908 let query = &query;
5909 let matching_paths_tx = &matching_paths_tx;
5910 let snapshots = &snapshots;
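        // Split the paths across workers using ceiling division so that every path is assigned,
        // and report the in-memory buffers (unnamed and opened) as candidates up front.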
5911 let paths_per_worker = (path_count + workers - 1) / workers;
5912 for buffer in unnamed_buffers {
5913 matching_paths_tx
5914 .send(SearchMatchCandidate::OpenBuffer {
5915 buffer: buffer.clone(),
5916 path: None,
5917 })
5918 .await
5919 .log_err();
5920 }
5921 for (path, (buffer, _)) in opened_buffers.iter() {
5922 matching_paths_tx
5923 .send(SearchMatchCandidate::OpenBuffer {
5924 buffer: buffer.clone(),
5925 path: Some(path.clone()),
5926 })
5927 .await
5928 .log_err();
5929 }
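        // Scan the worktree snapshots on `workers` concurrent tasks, each handling a contiguous
        // slice of the candidate paths. Files that are already open are skipped here, since their
        // in-memory contents were reported above.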
5930 executor
5931 .scoped(|scope| {
5932 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5933
5934 for worker_ix in 0..workers {
5935 let worker_start_ix = worker_ix * paths_per_worker;
5936 let worker_end_ix = worker_start_ix + paths_per_worker;
                    let opened_buffers = opened_buffers.clone();
5938 let limiter = Arc::clone(&max_concurrent_workers);
5939 scope.spawn(async move {
5940 let _guard = limiter.acquire().await;
5941 let mut snapshot_start_ix = 0;
5942 let mut abs_path = PathBuf::new();
5943 for snapshot in snapshots {
5944 let snapshot_end_ix = snapshot_start_ix
5945 + if query.include_ignored() {
5946 snapshot.file_count()
5947 } else {
5948 snapshot.visible_file_count()
5949 };
5950 if worker_end_ix <= snapshot_start_ix {
5951 break;
5952 } else if worker_start_ix > snapshot_end_ix {
5953 snapshot_start_ix = snapshot_end_ix;
5954 continue;
5955 } else {
5956 let start_in_snapshot =
5957 worker_start_ix.saturating_sub(snapshot_start_ix);
5958 let end_in_snapshot =
5959 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5960
5961 for entry in snapshot
5962 .files(query.include_ignored(), start_in_snapshot)
5963 .take(end_in_snapshot - start_in_snapshot)
5964 {
5965 if matching_paths_tx.is_closed() {
5966 break;
5967 }
                                    if opened_buffers.contains_key(&entry.path) {
5969 continue;
5970 }
5971 let matches = if query.file_matches(Some(&entry.path)) {
5972 abs_path.clear();
5973 abs_path.push(&snapshot.abs_path());
5974 abs_path.push(&entry.path);
5975 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5976 {
5977 query.detect(file).unwrap_or(false)
5978 } else {
5979 false
5980 }
5981 } else {
5982 false
5983 };
5984
5985 if matches {
5986 let project_path = SearchMatchCandidate::Path {
5987 worktree_id: snapshot.id(),
5988 path: entry.path.clone(),
5989 is_ignored: entry.is_ignored,
5990 };
5991 if matching_paths_tx.send(project_path).await.is_err() {
5992 break;
5993 }
5994 }
5995 }
5996
5997 snapshot_start_ix = snapshot_end_ix;
5998 }
5999 }
6000 });
6001 }
6002
6003 if query.include_ignored() {
6004 for snapshot in snapshots {
6005 for ignored_entry in snapshot
6006 .entries(query.include_ignored())
6007 .filter(|e| e.is_ignored)
6008 {
6009 let limiter = Arc::clone(&max_concurrent_workers);
6010 scope.spawn(async move {
6011 let _guard = limiter.acquire().await;
6012 let mut ignored_paths_to_process =
6013 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
6014 while let Some(ignored_abs_path) =
6015 ignored_paths_to_process.pop_front()
6016 {
6017 if let Some(fs_metadata) = fs
6018 .metadata(&ignored_abs_path)
6019 .await
6020 .with_context(|| {
6021 format!("fetching fs metadata for {ignored_abs_path:?}")
6022 })
6023 .log_err()
6024 .flatten()
6025 {
6026 if fs_metadata.is_dir {
6027 if let Some(mut subfiles) = fs
6028 .read_dir(&ignored_abs_path)
6029 .await
6030 .with_context(|| {
6031 format!(
6032 "listing ignored path {ignored_abs_path:?}"
6033 )
6034 })
6035 .log_err()
6036 {
6037 while let Some(subfile) = subfiles.next().await {
6038 if let Some(subfile) = subfile.log_err() {
6039 ignored_paths_to_process.push_back(subfile);
6040 }
6041 }
6042 }
6043 } else if !fs_metadata.is_symlink {
6044 if !query.file_matches(Some(&ignored_abs_path))
6045 || snapshot.is_path_excluded(
6046 ignored_entry.path.to_path_buf(),
6047 )
6048 {
6049 continue;
6050 }
6051 let matches = if let Some(file) = fs
6052 .open_sync(&ignored_abs_path)
6053 .await
6054 .with_context(|| {
6055 format!(
6056 "Opening ignored path {ignored_abs_path:?}"
6057 )
6058 })
6059 .log_err()
6060 {
6061 query.detect(file).unwrap_or(false)
6062 } else {
6063 false
6064 };
6065 if matches {
6066 let project_path = SearchMatchCandidate::Path {
6067 worktree_id: snapshot.id(),
6068 path: Arc::from(
6069 ignored_abs_path
6070 .strip_prefix(snapshot.abs_path())
6071 .expect(
6072 "scanning worktree-related files",
6073 ),
6074 ),
6075 is_ignored: true,
6076 };
6077 if matching_paths_tx
6078 .send(project_path)
6079 .await
6080 .is_err()
6081 {
6082 return;
6083 }
6084 }
6085 }
6086 }
6087 }
6088 });
6089 }
6090 }
6091 }
6092 })
6093 .await;
6094 }
6095
6096 pub fn request_lsp<R: LspCommand>(
6097 &self,
6098 buffer_handle: Model<Buffer>,
6099 server: LanguageServerToQuery,
6100 request: R,
6101 cx: &mut ModelContext<Self>,
6102 ) -> Task<Result<R::Response>>
6103 where
6104 <R::LspRequest as lsp::request::Request>::Result: Send,
6105 <R::LspRequest as lsp::request::Request>::Params: Send,
6106 {
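        // Route the request directly to the buffer's language server when the project is local,
        // forward it over RPC when the project is remote, and otherwise resolve with the default response.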
6107 let buffer = buffer_handle.read(cx);
6108 if self.is_local() {
6109 let language_server = match server {
6110 LanguageServerToQuery::Primary => {
6111 match self.primary_language_server_for_buffer(buffer, cx) {
6112 Some((_, server)) => Some(Arc::clone(server)),
6113 None => return Task::ready(Ok(Default::default())),
6114 }
6115 }
6116 LanguageServerToQuery::Other(id) => self
6117 .language_server_for_buffer(buffer, id, cx)
6118 .map(|(_, server)| Arc::clone(server)),
6119 };
6120 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6121 if let (Some(file), Some(language_server)) = (file, language_server) {
6122 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6123 return cx.spawn(move |this, cx| async move {
6124 if !request.check_capabilities(language_server.capabilities()) {
6125 return Ok(Default::default());
6126 }
6127
6128 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6129 let response = match result {
6130 Ok(response) => response,
6131
6132 Err(err) => {
6133 log::warn!(
6134 "Generic lsp request to {} failed: {}",
6135 language_server.name(),
6136 err
6137 );
6138 return Err(err);
6139 }
6140 };
6141
6142 request
6143 .response_from_lsp(
6144 response,
6145 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6146 buffer_handle,
6147 language_server.server_id(),
6148 cx,
6149 )
6150 .await
6151 });
6152 }
6153 } else if let Some(project_id) = self.remote_id() {
6154 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6155 }
6156
6157 Task::ready(Ok(Default::default()))
6158 }
6159
6160 fn send_lsp_proto_request<R: LspCommand>(
6161 &self,
6162 buffer: Model<Buffer>,
6163 project_id: u64,
6164 request: R,
6165 cx: &mut ModelContext<'_, Project>,
6166 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6167 let rpc = self.client.clone();
6168 let message = request.to_proto(project_id, buffer.read(cx));
6169 cx.spawn(move |this, mut cx| async move {
6170 // Ensure the project is still alive by the time the task
6171 // is scheduled.
6172 this.upgrade().context("project dropped")?;
6173 let response = rpc.request(message).await?;
6174 let this = this.upgrade().context("project dropped")?;
6175 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6176 Err(anyhow!("disconnected before completing request"))
6177 } else {
6178 request
6179 .response_from_proto(response, this, buffer, cx)
6180 .await
6181 }
6182 })
6183 }
6184
6185 fn sort_candidates_and_open_buffers(
6186 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6187 cx: &mut ModelContext<Self>,
6188 ) -> (
6189 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6190 Receiver<(
6191 Option<(Model<Buffer>, BufferSnapshot)>,
6192 SearchMatchCandidateIndex,
6193 )>,
6194 ) {
6195 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6196 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6197 cx.spawn(move |this, cx| async move {
6198 let mut buffers = Vec::new();
6199 let mut ignored_buffers = Vec::new();
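            // Partition candidates into non-ignored and ignored groups so that ignored matches
            // are reported after everything else.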
6200 while let Some(entry) = matching_paths_rx.next().await {
6201 if matches!(
6202 entry,
6203 SearchMatchCandidate::Path {
6204 is_ignored: true,
6205 ..
6206 }
6207 ) {
6208 ignored_buffers.push(entry);
6209 } else {
6210 buffers.push(entry);
6211 }
6212 }
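            // Sort each group by path (candidates without a path sort first) and append the
            // ignored group so that it comes last.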
6213 buffers.sort_by_key(|candidate| candidate.path());
6214 ignored_buffers.sort_by_key(|candidate| candidate.path());
6215 buffers.extend(ignored_buffers);
6216 let matching_paths = buffers.clone();
6217 let _ = sorted_buffers_tx.send(buffers);
6218 for (index, candidate) in matching_paths.into_iter().enumerate() {
6219 if buffers_tx.is_closed() {
6220 break;
6221 }
6222 let this = this.clone();
6223 let buffers_tx = buffers_tx.clone();
6224 cx.spawn(move |mut cx| async move {
6225 let buffer = match candidate {
6226 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6227 SearchMatchCandidate::Path {
6228 worktree_id, path, ..
6229 } => this
6230 .update(&mut cx, |this, cx| {
6231 this.open_buffer((worktree_id, path), cx)
6232 })?
6233 .await
6234 .log_err(),
6235 };
6236 if let Some(buffer) = buffer {
6237 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6238 buffers_tx
6239 .send((Some((buffer, snapshot)), index))
6240 .await
6241 .log_err();
6242 } else {
6243 buffers_tx.send((None, index)).await.log_err();
6244 }
6245
6246 Ok::<_, anyhow::Error>(())
6247 })
6248 .detach();
6249 }
6250 })
6251 .detach();
6252 (sorted_buffers_rx, buffers_rx)
6253 }
6254
6255 pub fn find_or_create_local_worktree(
6256 &mut self,
6257 abs_path: impl AsRef<Path>,
6258 visible: bool,
6259 cx: &mut ModelContext<Self>,
6260 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
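        // Reuse an existing worktree that already contains this path; otherwise create a new
        // worktree rooted at the path itself, which yields an empty relative path.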
6261 let abs_path = abs_path.as_ref();
6262 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6263 Task::ready(Ok((tree, relative_path)))
6264 } else {
6265 let worktree = self.create_local_worktree(abs_path, visible, cx);
6266 cx.background_executor()
6267 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6268 }
6269 }
6270
6271 pub fn find_local_worktree(
6272 &self,
6273 abs_path: &Path,
6274 cx: &AppContext,
6275 ) -> Option<(Model<Worktree>, PathBuf)> {
6276 for tree in &self.worktrees {
6277 if let Some(tree) = tree.upgrade() {
6278 if let Some(relative_path) = tree
6279 .read(cx)
6280 .as_local()
6281 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6282 {
6283 return Some((tree.clone(), relative_path.into()));
6284 }
6285 }
6286 }
6287 None
6288 }
6289
6290 pub fn is_shared(&self) -> bool {
6291 match &self.client_state {
6292 ProjectClientState::Shared { .. } => true,
6293 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6294 }
6295 }
6296
6297 fn create_local_worktree(
6298 &mut self,
6299 abs_path: impl AsRef<Path>,
6300 visible: bool,
6301 cx: &mut ModelContext<Self>,
6302 ) -> Task<Result<Model<Worktree>>> {
6303 let fs = self.fs.clone();
6304 let client = self.client.clone();
6305 let next_entry_id = self.next_entry_id.clone();
6306 let path: Arc<Path> = abs_path.as_ref().into();
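        // Deduplicate concurrent requests for the same path by sharing a single in-flight
        // loading task per path.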
6307 let task = self
6308 .loading_local_worktrees
6309 .entry(path.clone())
6310 .or_insert_with(|| {
6311 cx.spawn(move |project, mut cx| {
6312 async move {
6313 let worktree = Worktree::local(
6314 client.clone(),
6315 path.clone(),
6316 visible,
6317 fs,
6318 next_entry_id,
6319 &mut cx,
6320 )
6321 .await;
6322
6323 project.update(&mut cx, |project, _| {
6324 project.loading_local_worktrees.remove(&path);
6325 })?;
6326
6327 let worktree = worktree?;
6328 project
6329 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6330 Ok(worktree)
6331 }
6332 .map_err(Arc::new)
6333 })
6334 .shared()
6335 })
6336 .clone();
6337 cx.background_executor().spawn(async move {
6338 match task.await {
6339 Ok(worktree) => Ok(worktree),
6340 Err(err) => Err(anyhow!("{}", err)),
6341 }
6342 })
6343 }
6344
6345 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
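        // Shut down language servers that are used only by the worktree being removed;
        // servers that still serve other worktrees are preserved.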
6346 let mut servers_to_remove = HashMap::default();
6347 let mut servers_to_preserve = HashSet::default();
6348 for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
6349 if worktree_id == &id_to_remove {
6350 servers_to_remove.insert(server_id, server_name.clone());
6351 } else {
6352 servers_to_preserve.insert(server_id);
6353 }
6354 }
6355 servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id));
6356 for (server_id_to_remove, server_name) in servers_to_remove {
6357 self.language_server_ids
6358 .remove(&(id_to_remove, server_name));
6359 self.language_server_statuses.remove(&server_id_to_remove);
6360 self.last_workspace_edits_by_language_server
6361 .remove(&server_id_to_remove);
6362 self.language_servers.remove(&server_id_to_remove);
6363 cx.emit(Event::LanguageServerRemoved(server_id_to_remove));
6364 }
6365
6366 let mut prettier_instances_to_clean = FuturesUnordered::new();
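        // Also clean up any prettier instances that were registered for paths in the removed
        // worktree, unregistering their supplementary language servers.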
6367 if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) {
6368 for path in prettier_paths.iter().flatten() {
6369 if let Some(prettier_instance) = self.prettier_instances.remove(path) {
6370 prettier_instances_to_clean.push(async move {
6371 prettier_instance
6372 .server()
6373 .await
6374 .map(|server| server.server_id())
6375 });
6376 }
6377 }
6378 }
6379 cx.spawn(|project, mut cx| async move {
6380 while let Some(prettier_server_id) = prettier_instances_to_clean.next().await {
6381 if let Some(prettier_server_id) = prettier_server_id {
6382 project
6383 .update(&mut cx, |project, cx| {
6384 project
6385 .supplementary_language_servers
6386 .remove(&prettier_server_id);
6387 cx.emit(Event::LanguageServerRemoved(prettier_server_id));
6388 })
6389 .ok();
6390 }
6391 }
6392 })
6393 .detach();
6394
6395 self.worktrees.retain(|worktree| {
6396 if let Some(worktree) = worktree.upgrade() {
6397 let id = worktree.read(cx).id();
6398 if id == id_to_remove {
6399 cx.emit(Event::WorktreeRemoved(id));
6400 false
6401 } else {
6402 true
6403 }
6404 } else {
6405 false
6406 }
6407 });
6408 self.metadata_changed(cx);
6409 }
6410
6411 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6412 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6413 if worktree.read(cx).is_local() {
6414 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6415 worktree::Event::UpdatedEntries(changes) => {
6416 this.update_local_worktree_buffers(&worktree, changes, cx);
6417 this.update_local_worktree_language_servers(&worktree, changes, cx);
6418 this.update_local_worktree_settings(&worktree, changes, cx);
6419 this.update_prettier_settings(&worktree, changes, cx);
6420 cx.emit(Event::WorktreeUpdatedEntries(
6421 worktree.read(cx).id(),
6422 changes.clone(),
6423 ));
6424 }
6425 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6426 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6427 }
6428 })
6429 .detach();
6430 }
6431
6432 let push_strong_handle = {
6433 let worktree = worktree.read(cx);
6434 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6435 };
6436 if push_strong_handle {
6437 self.worktrees
6438 .push(WorktreeHandle::Strong(worktree.clone()));
6439 } else {
6440 self.worktrees
6441 .push(WorktreeHandle::Weak(worktree.downgrade()));
6442 }
6443
6444 let handle_id = worktree.entity_id();
6445 cx.observe_release(worktree, move |this, worktree, cx| {
6446 let _ = this.remove_worktree(worktree.id(), cx);
6447 cx.update_global::<SettingsStore, _>(|store, cx| {
6448 store
6449 .clear_local_settings(handle_id.as_u64() as usize, cx)
6450 .log_err()
6451 });
6452 })
6453 .detach();
6454
6455 cx.emit(Event::WorktreeAdded);
6456 self.metadata_changed(cx);
6457 }
6458
6459 fn update_local_worktree_buffers(
6460 &mut self,
6461 worktree_handle: &Model<Worktree>,
6462 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6463 cx: &mut ModelContext<Self>,
6464 ) {
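        // Update the `File` metadata of open buffers affected by these changes, keep the
        // path and entry-id indices in sync, and re-register renamed buffers with their
        // language servers.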
6465 let snapshot = worktree_handle.read(cx).snapshot();
6466
6467 let mut renamed_buffers = Vec::new();
6468 for (path, entry_id, _) in changes {
6469 let worktree_id = worktree_handle.read(cx).id();
6470 let project_path = ProjectPath {
6471 worktree_id,
6472 path: path.clone(),
6473 };
6474
6475 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6476 Some(&buffer_id) => buffer_id,
6477 None => match self.local_buffer_ids_by_path.get(&project_path) {
6478 Some(&buffer_id) => buffer_id,
6479 None => {
6480 continue;
6481 }
6482 },
6483 };
6484
6485 let open_buffer = self.opened_buffers.get(&buffer_id);
6486 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6487 buffer
6488 } else {
6489 self.opened_buffers.remove(&buffer_id);
6490 self.local_buffer_ids_by_path.remove(&project_path);
6491 self.local_buffer_ids_by_entry_id.remove(entry_id);
6492 continue;
6493 };
6494
6495 buffer.update(cx, |buffer, cx| {
6496 if let Some(old_file) = File::from_dyn(buffer.file()) {
6497 if old_file.worktree != *worktree_handle {
6498 return;
6499 }
6500
6501 let new_file = if let Some(entry) = old_file
6502 .entry_id
6503 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6504 {
6505 File {
6506 is_local: true,
6507 entry_id: Some(entry.id),
6508 mtime: entry.mtime,
6509 path: entry.path.clone(),
6510 worktree: worktree_handle.clone(),
6511 is_deleted: false,
6512 }
6513 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6514 File {
6515 is_local: true,
6516 entry_id: Some(entry.id),
6517 mtime: entry.mtime,
6518 path: entry.path.clone(),
6519 worktree: worktree_handle.clone(),
6520 is_deleted: false,
6521 }
6522 } else {
6523 File {
6524 is_local: true,
6525 entry_id: old_file.entry_id,
6526 path: old_file.path().clone(),
6527 mtime: old_file.mtime(),
6528 worktree: worktree_handle.clone(),
6529 is_deleted: true,
6530 }
6531 };
6532
6533 let old_path = old_file.abs_path(cx);
6534 if new_file.abs_path(cx) != old_path {
6535 renamed_buffers.push((cx.handle(), old_file.clone()));
6536 self.local_buffer_ids_by_path.remove(&project_path);
6537 self.local_buffer_ids_by_path.insert(
6538 ProjectPath {
6539 worktree_id,
6540 path: path.clone(),
6541 },
6542 buffer_id,
6543 );
6544 }
6545
6546 if new_file.entry_id != Some(*entry_id) {
6547 self.local_buffer_ids_by_entry_id.remove(entry_id);
6548 if let Some(entry_id) = new_file.entry_id {
6549 self.local_buffer_ids_by_entry_id
6550 .insert(entry_id, buffer_id);
6551 }
6552 }
6553
6554 if new_file != *old_file {
6555 if let Some(project_id) = self.remote_id() {
6556 self.client
6557 .send(proto::UpdateBufferFile {
6558 project_id,
6559 buffer_id: buffer_id as u64,
6560 file: Some(new_file.to_proto()),
6561 })
6562 .log_err();
6563 }
6564
6565 buffer.file_updated(Arc::new(new_file), cx);
6566 }
6567 }
6568 });
6569 }
6570
6571 for (buffer, old_file) in renamed_buffers {
6572 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6573 self.detect_language_for_buffer(&buffer, cx);
6574 self.register_buffer_with_language_servers(&buffer, cx);
6575 }
6576 }
6577
6578 fn update_local_worktree_language_servers(
6579 &mut self,
6580 worktree_handle: &Model<Worktree>,
6581 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6582 cx: &mut ModelContext<Self>,
6583 ) {
6584 if changes.is_empty() {
6585 return;
6586 }
6587
6588 let worktree_id = worktree_handle.read(cx).id();
6589 let mut language_server_ids = self
6590 .language_server_ids
6591 .iter()
6592 .filter_map(|((server_worktree_id, _), server_id)| {
6593 (*server_worktree_id == worktree_id).then_some(*server_id)
6594 })
6595 .collect::<Vec<_>>();
6596 language_server_ids.sort();
6597 language_server_ids.dedup();
6598
6599 let abs_path = worktree_handle.read(cx).abs_path();
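        // Notify each running language server whose registered file watchers match the changed paths.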
6600 for server_id in &language_server_ids {
6601 if let Some(LanguageServerState::Running {
6602 server,
6603 watched_paths,
6604 ..
6605 }) = self.language_servers.get(server_id)
6606 {
6607 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
6608 let params = lsp::DidChangeWatchedFilesParams {
6609 changes: changes
6610 .iter()
6611 .filter_map(|(path, _, change)| {
6612 if !watched_paths.is_match(&path) {
6613 return None;
6614 }
6615 let typ = match change {
6616 PathChange::Loaded => return None,
6617 PathChange::Added => lsp::FileChangeType::CREATED,
6618 PathChange::Removed => lsp::FileChangeType::DELETED,
6619 PathChange::Updated => lsp::FileChangeType::CHANGED,
6620 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6621 };
6622 Some(lsp::FileEvent {
6623 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6624 typ,
6625 })
6626 })
6627 .collect(),
6628 };
6629
6630 if !params.changes.is_empty() {
6631 server
6632 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6633 .log_err();
6634 }
6635 }
6636 }
6637 }
6638 }
6639
6640 fn update_local_worktree_buffers_git_repos(
6641 &mut self,
6642 worktree_handle: Model<Worktree>,
6643 changed_repos: &UpdatedGitRepositoriesSet,
6644 cx: &mut ModelContext<Self>,
6645 ) {
6646 debug_assert!(worktree_handle.read(cx).is_local());
6647
        // Identify the loading buffers whose containing repository has changed.
6649 let future_buffers = self
6650 .loading_buffers_by_path
6651 .iter()
6652 .filter_map(|(project_path, receiver)| {
6653 if project_path.worktree_id != worktree_handle.read(cx).id() {
6654 return None;
6655 }
6656 let path = &project_path.path;
6657 changed_repos
6658 .iter()
6659 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6660 let receiver = receiver.clone();
6661 let path = path.clone();
6662 Some(async move {
6663 wait_for_loading_buffer(receiver)
6664 .await
6665 .ok()
6666 .map(|buffer| (buffer, path))
6667 })
6668 })
6669 .collect::<FuturesUnordered<_>>();
6670
6671 // Identify the current buffers whose containing repository has changed.
6672 let current_buffers = self
6673 .opened_buffers
6674 .values()
6675 .filter_map(|buffer| {
6676 let buffer = buffer.upgrade()?;
6677 let file = File::from_dyn(buffer.read(cx).file())?;
6678 if file.worktree != worktree_handle {
6679 return None;
6680 }
6681 let path = file.path();
6682 changed_repos
6683 .iter()
6684 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6685 Some((buffer, path.clone()))
6686 })
6687 .collect::<Vec<_>>();
6688
6689 if future_buffers.len() + current_buffers.len() == 0 {
6690 return;
6691 }
6692
6693 let remote_id = self.remote_id();
6694 let client = self.client.clone();
6695 cx.spawn(move |_, mut cx| async move {
6696 // Wait for all of the buffers to load.
6697 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6698
6699 // Reload the diff base for every buffer whose containing git repository has changed.
6700 let snapshot =
6701 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
6702 let diff_bases_by_buffer = cx
6703 .background_executor()
6704 .spawn(async move {
6705 future_buffers
6706 .into_iter()
                        .flatten()
6708 .chain(current_buffers)
6709 .filter_map(|(buffer, path)| {
6710 let (work_directory, repo) =
6711 snapshot.repository_and_work_directory_for_path(&path)?;
6712 let repo = snapshot.get_local_repo(&repo)?;
6713 let relative_path = path.strip_prefix(&work_directory).ok()?;
6714 let base_text = repo.repo_ptr.lock().load_index_text(relative_path);
6715 Some((buffer, base_text))
6716 })
6717 .collect::<Vec<_>>()
6718 })
6719 .await;
6720
6721 // Assign the new diff bases on all of the buffers.
6722 for (buffer, diff_base) in diff_bases_by_buffer {
6723 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6724 buffer.set_diff_base(diff_base.clone(), cx);
6725 buffer.remote_id()
6726 })?;
6727 if let Some(project_id) = remote_id {
6728 client
6729 .send(proto::UpdateDiffBase {
6730 project_id,
6731 buffer_id,
6732 diff_base,
6733 })
6734 .log_err();
6735 }
6736 }
6737
6738 anyhow::Ok(())
6739 })
6740 .detach();
6741 }
6742
6743 fn update_local_worktree_settings(
6744 &mut self,
6745 worktree: &Model<Worktree>,
6746 changes: &UpdatedEntriesSet,
6747 cx: &mut ModelContext<Self>,
6748 ) {
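        // Reload any local settings files touched by this batch of changes, apply them to the
        // settings store, and forward the new contents to remote collaborators when the project is shared.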
6749 let project_id = self.remote_id();
6750 let worktree_id = worktree.entity_id();
6751 let worktree = worktree.read(cx).as_local().unwrap();
6752 let remote_worktree_id = worktree.id();
6753
6754 let mut settings_contents = Vec::new();
6755 for (path, _, change) in changes.iter() {
6756 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6757 let settings_dir = Arc::from(
6758 path.ancestors()
6759 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6760 .unwrap(),
6761 );
6762 let fs = self.fs.clone();
6763 let removed = *change == PathChange::Removed;
6764 let abs_path = worktree.absolutize(path);
6765 settings_contents.push(async move {
6766 (
6767 settings_dir,
6768 if removed {
6769 None
6770 } else {
6771 Some(async move { fs.load(&abs_path?).await }.await)
6772 },
6773 )
6774 });
6775 }
6776 }
6777
6778 if settings_contents.is_empty() {
6779 return;
6780 }
6781
6782 let client = self.client.clone();
6783 cx.spawn(move |_, cx| async move {
6784 let settings_contents: Vec<(Arc<Path>, _)> =
6785 futures::future::join_all(settings_contents).await;
6786 cx.update(|cx| {
6787 cx.update_global::<SettingsStore, _>(|store, cx| {
6788 for (directory, file_content) in settings_contents {
6789 let file_content = file_content.and_then(|content| content.log_err());
6790 store
6791 .set_local_settings(
6792 worktree_id.as_u64() as usize,
6793 directory.clone(),
6794 file_content.as_ref().map(String::as_str),
6795 cx,
6796 )
6797 .log_err();
6798 if let Some(remote_id) = project_id {
6799 client
6800 .send(proto::UpdateWorktreeSettings {
6801 project_id: remote_id,
6802 worktree_id: remote_worktree_id.to_proto(),
6803 path: directory.to_string_lossy().into_owned(),
6804 content: file_content,
6805 })
6806 .log_err();
6807 }
6808 }
6809 });
6810 })
6811 .ok();
6812 })
6813 .detach();
6814 }
6815
6816 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6817 let new_active_entry = entry.and_then(|project_path| {
6818 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6819 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6820 Some(entry.id)
6821 });
6822 if new_active_entry != self.active_entry {
6823 self.active_entry = new_active_entry;
6824 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6825 }
6826 }
6827
6828 pub fn language_servers_running_disk_based_diagnostics(
6829 &self,
6830 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6831 self.language_server_statuses
6832 .iter()
6833 .filter_map(|(id, status)| {
6834 if status.has_pending_diagnostic_updates {
6835 Some(*id)
6836 } else {
6837 None
6838 }
6839 })
6840 }
6841
6842 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
6843 let mut summary = DiagnosticSummary::default();
6844 for (_, _, path_summary) in
6845 self.diagnostic_summaries(include_ignored, cx)
6846 .filter(|(path, _, _)| {
                    let entry_is_ignored =
                        self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                    include_ignored || entry_is_ignored == Some(false)
6849 })
6850 {
6851 summary.error_count += path_summary.error_count;
6852 summary.warning_count += path_summary.warning_count;
6853 }
6854 summary
6855 }
6856
6857 pub fn diagnostic_summaries<'a>(
6858 &'a self,
6859 include_ignored: bool,
6860 cx: &'a AppContext,
6861 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6862 self.visible_worktrees(cx)
6863 .flat_map(move |worktree| {
6864 let worktree = worktree.read(cx);
6865 let worktree_id = worktree.id();
6866 worktree
6867 .diagnostic_summaries()
6868 .map(move |(path, server_id, summary)| {
6869 (ProjectPath { worktree_id, path }, server_id, summary)
6870 })
6871 })
6872 .filter(move |(path, _, _)| {
                let entry_is_ignored =
                    self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                include_ignored || entry_is_ignored == Some(false)
6875 })
6876 }
6877
6878 pub fn disk_based_diagnostics_started(
6879 &mut self,
6880 language_server_id: LanguageServerId,
6881 cx: &mut ModelContext<Self>,
6882 ) {
6883 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6884 }
6885
6886 pub fn disk_based_diagnostics_finished(
6887 &mut self,
6888 language_server_id: LanguageServerId,
6889 cx: &mut ModelContext<Self>,
6890 ) {
6891 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6892 }
6893
6894 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6895 self.active_entry
6896 }
6897
6898 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6899 self.worktree_for_id(path.worktree_id, cx)?
6900 .read(cx)
6901 .entry_for_path(&path.path)
6902 .cloned()
6903 }
6904
6905 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6906 let worktree = self.worktree_for_entry(entry_id, cx)?;
6907 let worktree = worktree.read(cx);
6908 let worktree_id = worktree.id();
6909 let path = worktree.entry_for_id(entry_id)?.path.clone();
6910 Some(ProjectPath { worktree_id, path })
6911 }
6912
6913 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6914 let workspace_root = self
6915 .worktree_for_id(project_path.worktree_id, cx)?
6916 .read(cx)
6917 .abs_path();
6918 let project_path = project_path.path.as_ref();
6919
6920 Some(if project_path == Path::new("") {
6921 workspace_root.to_path_buf()
6922 } else {
6923 workspace_root.join(project_path)
6924 })
6925 }
6926
6927 // RPC message handlers
6928
6929 async fn handle_unshare_project(
6930 this: Model<Self>,
6931 _: TypedEnvelope<proto::UnshareProject>,
6932 _: Arc<Client>,
6933 mut cx: AsyncAppContext,
6934 ) -> Result<()> {
6935 this.update(&mut cx, |this, cx| {
6936 if this.is_local() {
6937 this.unshare(cx)?;
6938 } else {
6939 this.disconnected_from_host(cx);
6940 }
6941 Ok(())
6942 })?
6943 }
6944
6945 async fn handle_add_collaborator(
6946 this: Model<Self>,
6947 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6948 _: Arc<Client>,
6949 mut cx: AsyncAppContext,
6950 ) -> Result<()> {
6951 let collaborator = envelope
6952 .payload
6953 .collaborator
6954 .take()
6955 .ok_or_else(|| anyhow!("empty collaborator"))?;
6956
6957 let collaborator = Collaborator::from_proto(collaborator)?;
6958 this.update(&mut cx, |this, cx| {
6959 this.shared_buffers.remove(&collaborator.peer_id);
6960 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6961 this.collaborators
6962 .insert(collaborator.peer_id, collaborator);
6963 cx.notify();
6964 })?;
6965
6966 Ok(())
6967 }
6968
6969 async fn handle_update_project_collaborator(
6970 this: Model<Self>,
6971 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6972 _: Arc<Client>,
6973 mut cx: AsyncAppContext,
6974 ) -> Result<()> {
6975 let old_peer_id = envelope
6976 .payload
6977 .old_peer_id
6978 .ok_or_else(|| anyhow!("missing old peer id"))?;
6979 let new_peer_id = envelope
6980 .payload
6981 .new_peer_id
6982 .ok_or_else(|| anyhow!("missing new peer id"))?;
6983 this.update(&mut cx, |this, cx| {
6984 let collaborator = this
6985 .collaborators
6986 .remove(&old_peer_id)
6987 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6988 let is_host = collaborator.replica_id == 0;
6989 this.collaborators.insert(new_peer_id, collaborator);
6990
6991 let buffers = this.shared_buffers.remove(&old_peer_id);
6992 log::info!(
6993 "peer {} became {}. moving buffers {:?}",
6994 old_peer_id,
6995 new_peer_id,
6996 &buffers
6997 );
6998 if let Some(buffers) = buffers {
6999 this.shared_buffers.insert(new_peer_id, buffers);
7000 }
7001
7002 if is_host {
7003 this.opened_buffers
7004 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
7005 this.buffer_ordered_messages_tx
7006 .unbounded_send(BufferOrderedMessage::Resync)
7007 .unwrap();
7008 }
7009
7010 cx.emit(Event::CollaboratorUpdated {
7011 old_peer_id,
7012 new_peer_id,
7013 });
7014 cx.notify();
7015 Ok(())
7016 })?
7017 }
7018
7019 async fn handle_remove_collaborator(
7020 this: Model<Self>,
7021 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7022 _: Arc<Client>,
7023 mut cx: AsyncAppContext,
7024 ) -> Result<()> {
7025 this.update(&mut cx, |this, cx| {
7026 let peer_id = envelope
7027 .payload
7028 .peer_id
7029 .ok_or_else(|| anyhow!("invalid peer id"))?;
7030 let replica_id = this
7031 .collaborators
7032 .remove(&peer_id)
7033 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7034 .replica_id;
7035 for buffer in this.opened_buffers.values() {
7036 if let Some(buffer) = buffer.upgrade() {
7037 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7038 }
7039 }
7040 this.shared_buffers.remove(&peer_id);
7041
7042 cx.emit(Event::CollaboratorLeft(peer_id));
7043 cx.notify();
7044 Ok(())
7045 })?
7046 }
7047
7048 async fn handle_update_project(
7049 this: Model<Self>,
7050 envelope: TypedEnvelope<proto::UpdateProject>,
7051 _: Arc<Client>,
7052 mut cx: AsyncAppContext,
7053 ) -> Result<()> {
7054 this.update(&mut cx, |this, cx| {
            // Ignore messages that were sent before we received the response to our
            // request to join the project.
7056 if envelope.message_id > this.join_project_response_message_id {
7057 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7058 }
7059 Ok(())
7060 })?
7061 }
7062
7063 async fn handle_update_worktree(
7064 this: Model<Self>,
7065 envelope: TypedEnvelope<proto::UpdateWorktree>,
7066 _: Arc<Client>,
7067 mut cx: AsyncAppContext,
7068 ) -> Result<()> {
7069 this.update(&mut cx, |this, cx| {
7070 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7071 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7072 worktree.update(cx, |worktree, _| {
7073 let worktree = worktree.as_remote_mut().unwrap();
7074 worktree.update_from_remote(envelope.payload);
7075 });
7076 }
7077 Ok(())
7078 })?
7079 }
7080
7081 async fn handle_update_worktree_settings(
7082 this: Model<Self>,
7083 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7084 _: Arc<Client>,
7085 mut cx: AsyncAppContext,
7086 ) -> Result<()> {
7087 this.update(&mut cx, |this, cx| {
7088 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7089 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7090 cx.update_global::<SettingsStore, _>(|store, cx| {
7091 store
7092 .set_local_settings(
7093 worktree.entity_id().as_u64() as usize,
7094 PathBuf::from(&envelope.payload.path).into(),
7095 envelope.payload.content.as_ref().map(String::as_str),
7096 cx,
7097 )
7098 .log_err();
7099 });
7100 }
7101 Ok(())
7102 })?
7103 }
7104
7105 async fn handle_create_project_entry(
7106 this: Model<Self>,
7107 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7108 _: Arc<Client>,
7109 mut cx: AsyncAppContext,
7110 ) -> Result<proto::ProjectEntryResponse> {
7111 let worktree = this.update(&mut cx, |this, cx| {
7112 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7113 this.worktree_for_id(worktree_id, cx)
7114 .ok_or_else(|| anyhow!("worktree not found"))
7115 })??;
7116 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7117 let entry = worktree
7118 .update(&mut cx, |worktree, cx| {
7119 let worktree = worktree.as_local_mut().unwrap();
7120 let path = PathBuf::from(envelope.payload.path);
7121 worktree.create_entry(path, envelope.payload.is_directory, cx)
7122 })?
7123 .await?;
7124 Ok(proto::ProjectEntryResponse {
7125 entry: entry.as_ref().map(|e| e.into()),
7126 worktree_scan_id: worktree_scan_id as u64,
7127 })
7128 }
7129
7130 async fn handle_rename_project_entry(
7131 this: Model<Self>,
7132 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7133 _: Arc<Client>,
7134 mut cx: AsyncAppContext,
7135 ) -> Result<proto::ProjectEntryResponse> {
7136 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7137 let worktree = this.update(&mut cx, |this, cx| {
7138 this.worktree_for_entry(entry_id, cx)
7139 .ok_or_else(|| anyhow!("worktree not found"))
7140 })??;
7141 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7142 let entry = worktree
7143 .update(&mut cx, |worktree, cx| {
7144 let new_path = PathBuf::from(envelope.payload.new_path);
7145 worktree
7146 .as_local_mut()
7147 .unwrap()
7148 .rename_entry(entry_id, new_path, cx)
7149 })?
7150 .await?;
7151 Ok(proto::ProjectEntryResponse {
7152 entry: entry.as_ref().map(|e| e.into()),
7153 worktree_scan_id: worktree_scan_id as u64,
7154 })
7155 }
7156
7157 async fn handle_copy_project_entry(
7158 this: Model<Self>,
7159 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7160 _: Arc<Client>,
7161 mut cx: AsyncAppContext,
7162 ) -> Result<proto::ProjectEntryResponse> {
7163 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7164 let worktree = this.update(&mut cx, |this, cx| {
7165 this.worktree_for_entry(entry_id, cx)
7166 .ok_or_else(|| anyhow!("worktree not found"))
7167 })??;
7168 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7169 let entry = worktree
7170 .update(&mut cx, |worktree, cx| {
7171 let new_path = PathBuf::from(envelope.payload.new_path);
7172 worktree
7173 .as_local_mut()
7174 .unwrap()
7175 .copy_entry(entry_id, new_path, cx)
7176 })?
7177 .await?;
7178 Ok(proto::ProjectEntryResponse {
7179 entry: entry.as_ref().map(|e| e.into()),
7180 worktree_scan_id: worktree_scan_id as u64,
7181 })
7182 }
7183
7184 async fn handle_delete_project_entry(
7185 this: Model<Self>,
7186 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7187 _: Arc<Client>,
7188 mut cx: AsyncAppContext,
7189 ) -> Result<proto::ProjectEntryResponse> {
7190 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7191
7192 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7193
7194 let worktree = this.update(&mut cx, |this, cx| {
7195 this.worktree_for_entry(entry_id, cx)
7196 .ok_or_else(|| anyhow!("worktree not found"))
7197 })??;
7198 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7199 worktree
7200 .update(&mut cx, |worktree, cx| {
7201 worktree
7202 .as_local_mut()
7203 .unwrap()
7204 .delete_entry(entry_id, cx)
7205 .ok_or_else(|| anyhow!("invalid entry"))
7206 })??
7207 .await?;
7208 Ok(proto::ProjectEntryResponse {
7209 entry: None,
7210 worktree_scan_id: worktree_scan_id as u64,
7211 })
7212 }
7213
7214 async fn handle_expand_project_entry(
7215 this: Model<Self>,
7216 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7217 _: Arc<Client>,
7218 mut cx: AsyncAppContext,
7219 ) -> Result<proto::ExpandProjectEntryResponse> {
7220 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7221 let worktree = this
7222 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7223 .ok_or_else(|| anyhow!("invalid request"))?;
7224 worktree
7225 .update(&mut cx, |worktree, cx| {
7226 worktree
7227 .as_local_mut()
7228 .unwrap()
7229 .expand_entry(entry_id, cx)
7230 .ok_or_else(|| anyhow!("invalid entry"))
7231 })??
7232 .await?;
7233 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7234 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7235 }
7236
7237 async fn handle_update_diagnostic_summary(
7238 this: Model<Self>,
7239 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7240 _: Arc<Client>,
7241 mut cx: AsyncAppContext,
7242 ) -> Result<()> {
7243 this.update(&mut cx, |this, cx| {
7244 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7245 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7246 if let Some(summary) = envelope.payload.summary {
7247 let project_path = ProjectPath {
7248 worktree_id,
7249 path: Path::new(&summary.path).into(),
7250 };
7251 worktree.update(cx, |worktree, _| {
7252 worktree
7253 .as_remote_mut()
7254 .unwrap()
7255 .update_diagnostic_summary(project_path.path.clone(), &summary);
7256 });
7257 cx.emit(Event::DiagnosticsUpdated {
7258 language_server_id: LanguageServerId(summary.language_server_id as usize),
7259 path: project_path,
7260 });
7261 }
7262 }
7263 Ok(())
7264 })?
7265 }
7266
7267 async fn handle_start_language_server(
7268 this: Model<Self>,
7269 envelope: TypedEnvelope<proto::StartLanguageServer>,
7270 _: Arc<Client>,
7271 mut cx: AsyncAppContext,
7272 ) -> Result<()> {
7273 let server = envelope
7274 .payload
7275 .server
7276 .ok_or_else(|| anyhow!("invalid server"))?;
7277 this.update(&mut cx, |this, cx| {
7278 this.language_server_statuses.insert(
7279 LanguageServerId(server.id as usize),
7280 LanguageServerStatus {
7281 name: server.name,
7282 pending_work: Default::default(),
7283 has_pending_diagnostic_updates: false,
7284 progress_tokens: Default::default(),
7285 },
7286 );
7287 cx.notify();
7288 })?;
7289 Ok(())
7290 }
7291
7292 async fn handle_update_language_server(
7293 this: Model<Self>,
7294 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7295 _: Arc<Client>,
7296 mut cx: AsyncAppContext,
7297 ) -> Result<()> {
7298 this.update(&mut cx, |this, cx| {
7299 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7300
7301 match envelope
7302 .payload
7303 .variant
7304 .ok_or_else(|| anyhow!("invalid variant"))?
7305 {
7306 proto::update_language_server::Variant::WorkStart(payload) => {
7307 this.on_lsp_work_start(
7308 language_server_id,
7309 payload.token,
7310 LanguageServerProgress {
7311 message: payload.message,
7312 percentage: payload.percentage.map(|p| p as usize),
7313 last_update_at: Instant::now(),
7314 },
7315 cx,
7316 );
7317 }
7318
7319 proto::update_language_server::Variant::WorkProgress(payload) => {
7320 this.on_lsp_work_progress(
7321 language_server_id,
7322 payload.token,
7323 LanguageServerProgress {
7324 message: payload.message,
7325 percentage: payload.percentage.map(|p| p as usize),
7326 last_update_at: Instant::now(),
7327 },
7328 cx,
7329 );
7330 }
7331
7332 proto::update_language_server::Variant::WorkEnd(payload) => {
7333 this.on_lsp_work_end(language_server_id, payload.token, cx);
7334 }
7335
7336 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7337 this.disk_based_diagnostics_started(language_server_id, cx);
7338 }
7339
7340 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7341 this.disk_based_diagnostics_finished(language_server_id, cx)
7342 }
7343 }
7344
7345 Ok(())
7346 })?
7347 }
7348
7349 async fn handle_update_buffer(
7350 this: Model<Self>,
7351 envelope: TypedEnvelope<proto::UpdateBuffer>,
7352 _: Arc<Client>,
7353 mut cx: AsyncAppContext,
7354 ) -> Result<proto::Ack> {
7355 this.update(&mut cx, |this, cx| {
7356 let payload = envelope.payload.clone();
7357 let buffer_id = payload.buffer_id;
7358 let ops = payload
7359 .operations
7360 .into_iter()
7361 .map(language::proto::deserialize_operation)
7362 .collect::<Result<Vec<_>, _>>()?;
7363 let is_remote = this.is_remote();
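            // Apply the operations to the buffer if it is open; otherwise stash them until the
            // buffer is created. Only remote projects may receive updates for buffers that
            // haven't been opened yet.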
7364 match this.opened_buffers.entry(buffer_id) {
7365 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7366 OpenBuffer::Strong(buffer) => {
7367 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7368 }
7369 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7370 OpenBuffer::Weak(_) => {}
7371 },
7372 hash_map::Entry::Vacant(e) => {
7373 assert!(
7374 is_remote,
7375 "received buffer update from {:?}",
7376 envelope.original_sender_id
7377 );
7378 e.insert(OpenBuffer::Operations(ops));
7379 }
7380 }
7381 Ok(proto::Ack {})
7382 })?
7383 }
7384
7385 async fn handle_create_buffer_for_peer(
7386 this: Model<Self>,
7387 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7388 _: Arc<Client>,
7389 mut cx: AsyncAppContext,
7390 ) -> Result<()> {
7391 this.update(&mut cx, |this, cx| {
7392 match envelope
7393 .payload
7394 .variant
7395 .ok_or_else(|| anyhow!("missing variant"))?
7396 {
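                // A buffer arrives as an initial `State` message followed by `Chunk` messages;
                // it is registered with the project once the chunk marked `is_last` has been applied.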
7397 proto::create_buffer_for_peer::Variant::State(mut state) => {
7398 let mut buffer_file = None;
7399 if let Some(file) = state.file.take() {
7400 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7401 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7402 anyhow!("no worktree found for id {}", file.worktree_id)
7403 })?;
7404 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7405 as Arc<dyn language::File>);
7406 }
7407
7408 let buffer_id = state.id;
7409 let buffer = cx.new_model(|_| {
7410 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
7411 .unwrap()
7412 });
7413 this.incomplete_remote_buffers
7414 .insert(buffer_id, Some(buffer));
7415 }
7416 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7417 let buffer = this
7418 .incomplete_remote_buffers
7419 .get(&chunk.buffer_id)
7420 .cloned()
7421 .flatten()
7422 .ok_or_else(|| {
7423 anyhow!(
7424 "received chunk for buffer {} without initial state",
7425 chunk.buffer_id
7426 )
7427 })?;
7428 let operations = chunk
7429 .operations
7430 .into_iter()
7431 .map(language::proto::deserialize_operation)
7432 .collect::<Result<Vec<_>>>()?;
7433 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7434
7435 if chunk.is_last {
7436 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7437 this.register_buffer(&buffer, cx)?;
7438 }
7439 }
7440 }
7441
7442 Ok(())
7443 })?
7444 }
7445
7446 async fn handle_update_diff_base(
7447 this: Model<Self>,
7448 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7449 _: Arc<Client>,
7450 mut cx: AsyncAppContext,
7451 ) -> Result<()> {
7452 this.update(&mut cx, |this, cx| {
7453 let buffer_id = envelope.payload.buffer_id;
7454 let diff_base = envelope.payload.diff_base;
7455 if let Some(buffer) = this
7456 .opened_buffers
7457 .get_mut(&buffer_id)
7458 .and_then(|b| b.upgrade())
7459 .or_else(|| {
7460 this.incomplete_remote_buffers
7461 .get(&buffer_id)
7462 .cloned()
7463 .flatten()
7464 })
7465 {
7466 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7467 }
7468 Ok(())
7469 })?
7470 }
7471
7472 async fn handle_update_buffer_file(
7473 this: Model<Self>,
7474 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7475 _: Arc<Client>,
7476 mut cx: AsyncAppContext,
7477 ) -> Result<()> {
7478 let buffer_id = envelope.payload.buffer_id;
7479
7480 this.update(&mut cx, |this, cx| {
7481 let payload = envelope.payload.clone();
7482 if let Some(buffer) = this
7483 .opened_buffers
7484 .get(&buffer_id)
7485 .and_then(|b| b.upgrade())
7486 .or_else(|| {
7487 this.incomplete_remote_buffers
7488 .get(&buffer_id)
7489 .cloned()
7490 .flatten()
7491 })
7492 {
7493 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7494 let worktree = this
7495 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7496 .ok_or_else(|| anyhow!("no such worktree"))?;
7497 let file = File::from_proto(file, worktree, cx)?;
7498 buffer.update(cx, |buffer, cx| {
7499 buffer.file_updated(Arc::new(file), cx);
7500 });
7501 this.detect_language_for_buffer(&buffer, cx);
7502 }
7503 Ok(())
7504 })?
7505 }
7506
7507 async fn handle_save_buffer(
7508 this: Model<Self>,
7509 envelope: TypedEnvelope<proto::SaveBuffer>,
7510 _: Arc<Client>,
7511 mut cx: AsyncAppContext,
7512 ) -> Result<proto::BufferSaved> {
7513 let buffer_id = envelope.payload.buffer_id;
7514 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7515 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7516 let buffer = this
7517 .opened_buffers
7518 .get(&buffer_id)
7519 .and_then(|buffer| buffer.upgrade())
7520 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7521 anyhow::Ok((project_id, buffer))
7522 })??;
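        // Wait for this replica to catch up to the version the peer is asking to save before saving.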
7523 buffer
7524 .update(&mut cx, |buffer, _| {
7525 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7526 })?
7527 .await?;
7528 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
7529
7530 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
7531 .await?;
7532 Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
7533 project_id,
7534 buffer_id,
7535 version: serialize_version(buffer.saved_version()),
7536 mtime: Some(buffer.saved_mtime().into()),
7537 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7538 })?)
7539 }
7540
7541 async fn handle_reload_buffers(
7542 this: Model<Self>,
7543 envelope: TypedEnvelope<proto::ReloadBuffers>,
7544 _: Arc<Client>,
7545 mut cx: AsyncAppContext,
7546 ) -> Result<proto::ReloadBuffersResponse> {
7547 let sender_id = envelope.original_sender_id()?;
7548 let reload = this.update(&mut cx, |this, cx| {
7549 let mut buffers = HashSet::default();
7550 for buffer_id in &envelope.payload.buffer_ids {
7551 buffers.insert(
7552 this.opened_buffers
7553 .get(buffer_id)
7554 .and_then(|buffer| buffer.upgrade())
7555 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7556 );
7557 }
7558 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7559 })??;
7560
7561 let project_transaction = reload.await?;
7562 let project_transaction = this.update(&mut cx, |this, cx| {
7563 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7564 })?;
7565 Ok(proto::ReloadBuffersResponse {
7566 transaction: Some(project_transaction),
7567 })
7568 }
7569
7570 async fn handle_synchronize_buffers(
7571 this: Model<Self>,
7572 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7573 _: Arc<Client>,
7574 mut cx: AsyncAppContext,
7575 ) -> Result<proto::SynchronizeBuffersResponse> {
7576 let project_id = envelope.payload.project_id;
7577 let mut response = proto::SynchronizeBuffersResponse {
7578 buffers: Default::default(),
7579 };
7580
7581 this.update(&mut cx, |this, cx| {
7582 let Some(guest_id) = envelope.original_sender_id else {
7583 error!("missing original_sender_id on SynchronizeBuffers request");
7584 return;
7585 };
7586
7587 this.shared_buffers.entry(guest_id).or_default().clear();
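            // For each buffer the guest reports, mark it as shared again and resend its file,
            // diff base, saved state, and any operations the guest is missing.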
7588 for buffer in envelope.payload.buffers {
7589 let buffer_id = buffer.id;
7590 let remote_version = language::proto::deserialize_version(&buffer.version);
7591 if let Some(buffer) = this.buffer_for_id(buffer_id) {
7592 this.shared_buffers
7593 .entry(guest_id)
7594 .or_default()
7595 .insert(buffer_id);
7596
7597 let buffer = buffer.read(cx);
7598 response.buffers.push(proto::BufferVersion {
7599 id: buffer_id,
7600 version: language::proto::serialize_version(&buffer.version),
7601 });
7602
7603 let operations = buffer.serialize_ops(Some(remote_version), cx);
7604 let client = this.client.clone();
7605 if let Some(file) = buffer.file() {
7606 client
7607 .send(proto::UpdateBufferFile {
7608 project_id,
7609 buffer_id: buffer_id as u64,
7610 file: Some(file.to_proto()),
7611 })
7612 .log_err();
7613 }
7614
7615 client
7616 .send(proto::UpdateDiffBase {
7617 project_id,
7618 buffer_id: buffer_id as u64,
7619 diff_base: buffer.diff_base().map(Into::into),
7620 })
7621 .log_err();
7622
7623 client
7624 .send(proto::BufferReloaded {
7625 project_id,
7626 buffer_id,
7627 version: language::proto::serialize_version(buffer.saved_version()),
7628 mtime: Some(buffer.saved_mtime().into()),
7629 fingerprint: language::proto::serialize_fingerprint(
7630 buffer.saved_version_fingerprint(),
7631 ),
7632 line_ending: language::proto::serialize_line_ending(
7633 buffer.line_ending(),
7634 ) as i32,
7635 })
7636 .log_err();
7637
7638 cx.background_executor()
7639 .spawn(
7640 async move {
7641 let operations = operations.await;
7642 for chunk in split_operations(operations) {
7643 client
7644 .request(proto::UpdateBuffer {
7645 project_id,
7646 buffer_id,
7647 operations: chunk,
7648 })
7649 .await?;
7650 }
7651 anyhow::Ok(())
7652 }
7653 .log_err(),
7654 )
7655 .detach();
7656 }
7657 }
7658 })?;
7659
7660 Ok(response)
7661 }
7662
7663 async fn handle_format_buffers(
7664 this: Model<Self>,
7665 envelope: TypedEnvelope<proto::FormatBuffers>,
7666 _: Arc<Client>,
7667 mut cx: AsyncAppContext,
7668 ) -> Result<proto::FormatBuffersResponse> {
7669 let sender_id = envelope.original_sender_id()?;
7670 let format = this.update(&mut cx, |this, cx| {
7671 let mut buffers = HashSet::default();
7672 for buffer_id in &envelope.payload.buffer_ids {
7673 buffers.insert(
7674 this.opened_buffers
7675 .get(buffer_id)
7676 .and_then(|buffer| buffer.upgrade())
7677 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7678 );
7679 }
7680 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7681 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7682 })??;
7683
7684 let project_transaction = format.await?;
7685 let project_transaction = this.update(&mut cx, |this, cx| {
7686 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7687 })?;
7688 Ok(proto::FormatBuffersResponse {
7689 transaction: Some(project_transaction),
7690 })
7691 }
7692
7693 async fn handle_apply_additional_edits_for_completion(
7694 this: Model<Self>,
7695 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7696 _: Arc<Client>,
7697 mut cx: AsyncAppContext,
7698 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7699 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7700 let buffer = this
7701 .opened_buffers
7702 .get(&envelope.payload.buffer_id)
7703 .and_then(|buffer| buffer.upgrade())
7704 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7705 let language = buffer.read(cx).language();
7706 let completion = language::proto::deserialize_completion(
7707 envelope
7708 .payload
7709 .completion
7710 .ok_or_else(|| anyhow!("invalid completion"))?,
7711 language.cloned(),
7712 );
7713 Ok::<_, anyhow::Error>((buffer, completion))
7714 })??;
7715
7716 let completion = completion.await?;
7717
7718 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7719 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7720 })?;
7721
7722 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7723 transaction: apply_additional_edits
7724 .await?
7725 .as_ref()
7726 .map(language::proto::serialize_transaction),
7727 })
7728 }
7729
7730 async fn handle_resolve_completion_documentation(
7731 this: Model<Self>,
7732 envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
7733 _: Arc<Client>,
7734 mut cx: AsyncAppContext,
7735 ) -> Result<proto::ResolveCompletionDocumentationResponse> {
7736 let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
7737
7738 let completion = this
7739 .read_with(&mut cx, |this, _| {
7740 let id = LanguageServerId(envelope.payload.language_server_id as usize);
7741 let Some(server) = this.language_server_for_id(id) else {
7742 return Err(anyhow!("No language server {id}"));
7743 };
7744
7745 Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
7746 })??
7747 .await?;
7748
7749 let mut is_markdown = false;
7750 let text = match completion.documentation {
7751 Some(lsp::Documentation::String(text)) => text,
7752
7753 Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
7754 is_markdown = kind == lsp::MarkupKind::Markdown;
7755 value
7756 }
7757
7758 _ => String::new(),
7759 };
7760
7761 Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
7762 }
7763
7764 async fn handle_apply_code_action(
7765 this: Model<Self>,
7766 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7767 _: Arc<Client>,
7768 mut cx: AsyncAppContext,
7769 ) -> Result<proto::ApplyCodeActionResponse> {
7770 let sender_id = envelope.original_sender_id()?;
7771 let action = language::proto::deserialize_code_action(
7772 envelope
7773 .payload
7774 .action
7775 .ok_or_else(|| anyhow!("invalid action"))?,
7776 )?;
7777 let apply_code_action = this.update(&mut cx, |this, cx| {
7778 let buffer = this
7779 .opened_buffers
7780 .get(&envelope.payload.buffer_id)
7781 .and_then(|buffer| buffer.upgrade())
7782 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7783 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7784 })??;
7785
7786 let project_transaction = apply_code_action.await?;
7787 let project_transaction = this.update(&mut cx, |this, cx| {
7788 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7789 })?;
7790 Ok(proto::ApplyCodeActionResponse {
7791 transaction: Some(project_transaction),
7792 })
7793 }
7794
7795 async fn handle_on_type_formatting(
7796 this: Model<Self>,
7797 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7798 _: Arc<Client>,
7799 mut cx: AsyncAppContext,
7800 ) -> Result<proto::OnTypeFormattingResponse> {
7801 let on_type_formatting = this.update(&mut cx, |this, cx| {
7802 let buffer = this
7803 .opened_buffers
7804 .get(&envelope.payload.buffer_id)
7805 .and_then(|buffer| buffer.upgrade())
7806 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7807 let position = envelope
7808 .payload
7809 .position
7810 .and_then(deserialize_anchor)
7811 .ok_or_else(|| anyhow!("invalid position"))?;
7812 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7813 buffer,
7814 position,
7815 envelope.payload.trigger.clone(),
7816 cx,
7817 ))
7818 })??;
7819
7820 let transaction = on_type_formatting
7821 .await?
7822 .as_ref()
7823 .map(language::proto::serialize_transaction);
7824 Ok(proto::OnTypeFormattingResponse { transaction })
7825 }
7826
7827 async fn handle_inlay_hints(
7828 this: Model<Self>,
7829 envelope: TypedEnvelope<proto::InlayHints>,
7830 _: Arc<Client>,
7831 mut cx: AsyncAppContext,
7832 ) -> Result<proto::InlayHintsResponse> {
7833 let sender_id = envelope.original_sender_id()?;
7834 let buffer = this.update(&mut cx, |this, _| {
7835 this.opened_buffers
7836 .get(&envelope.payload.buffer_id)
7837 .and_then(|buffer| buffer.upgrade())
7838 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7839 })??;
7840 let buffer_version = deserialize_version(&envelope.payload.version);
7841
7842 buffer
7843 .update(&mut cx, |buffer, _| {
7844 buffer.wait_for_version(buffer_version.clone())
7845 })?
7846 .await
7847 .with_context(|| {
7848 format!(
7849 "waiting for version {:?} for buffer {}",
7850 buffer_version,
7851 buffer.entity_id()
7852 )
7853 })?;
7854
7855 let start = envelope
7856 .payload
7857 .start
7858 .and_then(deserialize_anchor)
7859 .context("missing range start")?;
7860 let end = envelope
7861 .payload
7862 .end
7863 .and_then(deserialize_anchor)
7864 .context("missing range end")?;
7865 let buffer_hints = this
7866 .update(&mut cx, |project, cx| {
7867 project.inlay_hints(buffer, start..end, cx)
7868 })?
7869 .await
7870 .context("inlay hints fetch")?;
7871
7872 Ok(this.update(&mut cx, |project, cx| {
7873 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7874 })?)
7875 }
7876
7877 async fn handle_resolve_inlay_hint(
7878 this: Model<Self>,
7879 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7880 _: Arc<Client>,
7881 mut cx: AsyncAppContext,
7882 ) -> Result<proto::ResolveInlayHintResponse> {
7883 let proto_hint = envelope
7884 .payload
7885 .hint
7886            .ok_or_else(|| anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint"))?;

7887 let hint = InlayHints::proto_to_project_hint(proto_hint)
7888 .context("resolved proto inlay hint conversion")?;
7889 let buffer = this.update(&mut cx, |this, _cx| {
7890 this.opened_buffers
7891 .get(&envelope.payload.buffer_id)
7892 .and_then(|buffer| buffer.upgrade())
7893 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7894 })??;
7895 let response_hint = this
7896 .update(&mut cx, |project, cx| {
7897 project.resolve_inlay_hint(
7898 hint,
7899 buffer,
7900 LanguageServerId(envelope.payload.language_server_id as usize),
7901 cx,
7902 )
7903 })?
7904 .await
7905 .context("inlay hints fetch")?;
7906 Ok(proto::ResolveInlayHintResponse {
7907 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7908 })
7909 }
7910
7911 async fn handle_refresh_inlay_hints(
7912 this: Model<Self>,
7913 _: TypedEnvelope<proto::RefreshInlayHints>,
7914 _: Arc<Client>,
7915 mut cx: AsyncAppContext,
7916 ) -> Result<proto::Ack> {
7917 this.update(&mut cx, |_, cx| {
7918 cx.emit(Event::RefreshInlayHints);
7919 })?;
7920 Ok(proto::Ack {})
7921 }
7922
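    // Generic handler for buffer-scoped LSP requests proxied from a remote peer: it looks
    // up the buffer, rebuilds the typed request via `T::from_proto`, runs it against the
    // primary language server, and serializes the response back into protobuf.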
7923 async fn handle_lsp_command<T: LspCommand>(
7924 this: Model<Self>,
7925 envelope: TypedEnvelope<T::ProtoRequest>,
7926 _: Arc<Client>,
7927 mut cx: AsyncAppContext,
7928 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7929 where
7930 <T::LspRequest as lsp::request::Request>::Params: Send,
7931 <T::LspRequest as lsp::request::Request>::Result: Send,
7932 {
7933 let sender_id = envelope.original_sender_id()?;
7934 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7935 let buffer_handle = this.update(&mut cx, |this, _cx| {
7936 this.opened_buffers
7937 .get(&buffer_id)
7938 .and_then(|buffer| buffer.upgrade())
7939 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7940 })??;
7941 let request = T::from_proto(
7942 envelope.payload,
7943 this.clone(),
7944 buffer_handle.clone(),
7945 cx.clone(),
7946 )
7947 .await?;
7948 let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
7949 let response = this
7950 .update(&mut cx, |this, cx| {
7951 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7952 })?
7953 .await?;
7954 this.update(&mut cx, |this, cx| {
7955 Ok(T::response_to_proto(
7956 response,
7957 this,
7958 sender_id,
7959 &buffer_version,
7960 cx,
7961 ))
7962 })?
7963 }
7964
7965 async fn handle_get_project_symbols(
7966 this: Model<Self>,
7967 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7968 _: Arc<Client>,
7969 mut cx: AsyncAppContext,
7970 ) -> Result<proto::GetProjectSymbolsResponse> {
7971 let symbols = this
7972 .update(&mut cx, |this, cx| {
7973 this.symbols(&envelope.payload.query, cx)
7974 })?
7975 .await?;
7976
7977 Ok(proto::GetProjectSymbolsResponse {
7978 symbols: symbols.iter().map(serialize_symbol).collect(),
7979 })
7980 }
7981
7982 async fn handle_search_project(
7983 this: Model<Self>,
7984 envelope: TypedEnvelope<proto::SearchProject>,
7985 _: Arc<Client>,
7986 mut cx: AsyncAppContext,
7987 ) -> Result<proto::SearchProjectResponse> {
7988 let peer_id = envelope.original_sender_id()?;
7989 let query = SearchQuery::from_proto(envelope.payload)?;
7990 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7991
7992 cx.spawn(move |mut cx| async move {
7993 let mut locations = Vec::new();
7994 while let Some((buffer, ranges)) = result.next().await {
7995 for range in ranges {
7996 let start = serialize_anchor(&range.start);
7997 let end = serialize_anchor(&range.end);
7998 let buffer_id = this.update(&mut cx, |this, cx| {
7999 this.create_buffer_for_peer(&buffer, peer_id, cx)
8000 })?;
8001 locations.push(proto::Location {
8002 buffer_id,
8003 start: Some(start),
8004 end: Some(end),
8005 });
8006 }
8007 }
8008 Ok(proto::SearchProjectResponse { locations })
8009 })
8010 .await
8011 }
8012
8013 async fn handle_open_buffer_for_symbol(
8014 this: Model<Self>,
8015 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
8016 _: Arc<Client>,
8017 mut cx: AsyncAppContext,
8018 ) -> Result<proto::OpenBufferForSymbolResponse> {
8019 let peer_id = envelope.original_sender_id()?;
8020 let symbol = envelope
8021 .payload
8022 .symbol
8023 .ok_or_else(|| anyhow!("invalid symbol"))?;
8024 let symbol = this
8025 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
8026 .await?;
8027 let symbol = this.update(&mut cx, |this, _| {
8028 let signature = this.symbol_signature(&symbol.path);
8029 if signature == symbol.signature {
8030 Ok(symbol)
8031 } else {
8032 Err(anyhow!("invalid symbol signature"))
8033 }
8034 })??;
8035 let buffer = this
8036 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
8037 .await?;
8038
8039 Ok(proto::OpenBufferForSymbolResponse {
8040 buffer_id: this.update(&mut cx, |this, cx| {
8041 this.create_buffer_for_peer(&buffer, peer_id, cx)
8042 })?,
8043 })
8044 }
8045
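    // Hashes a project path together with this project's private nonce. Peers echo this
    // signature back with symbols (see `handle_open_buffer_for_symbol`), letting the host
    // reject symbols it never handed out.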
8046 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
8047 let mut hasher = Sha256::new();
8048 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
8049 hasher.update(project_path.path.to_string_lossy().as_bytes());
8050 hasher.update(self.nonce.to_be_bytes());
8051 hasher.finalize().as_slice().try_into().unwrap()
8052 }
8053
8054 async fn handle_open_buffer_by_id(
8055 this: Model<Self>,
8056 envelope: TypedEnvelope<proto::OpenBufferById>,
8057 _: Arc<Client>,
8058 mut cx: AsyncAppContext,
8059 ) -> Result<proto::OpenBufferResponse> {
8060 let peer_id = envelope.original_sender_id()?;
8061 let buffer = this
8062 .update(&mut cx, |this, cx| {
8063 this.open_buffer_by_id(envelope.payload.id, cx)
8064 })?
8065 .await?;
8066 this.update(&mut cx, |this, cx| {
8067 Ok(proto::OpenBufferResponse {
8068 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
8069 })
8070 })?
8071 }
8072
8073 async fn handle_open_buffer_by_path(
8074 this: Model<Self>,
8075 envelope: TypedEnvelope<proto::OpenBufferByPath>,
8076 _: Arc<Client>,
8077 mut cx: AsyncAppContext,
8078 ) -> Result<proto::OpenBufferResponse> {
8079 let peer_id = envelope.original_sender_id()?;
8080 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8081 let open_buffer = this.update(&mut cx, |this, cx| {
8082 this.open_buffer(
8083 ProjectPath {
8084 worktree_id,
8085 path: PathBuf::from(envelope.payload.path).into(),
8086 },
8087 cx,
8088 )
8089 })?;
8090
8091 let buffer = open_buffer.await?;
8092 this.update(&mut cx, |this, cx| {
8093 Ok(proto::OpenBufferResponse {
8094 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
8095 })
8096 })?
8097 }
8098
8099 fn serialize_project_transaction_for_peer(
8100 &mut self,
8101 project_transaction: ProjectTransaction,
8102 peer_id: proto::PeerId,
8103 cx: &mut AppContext,
8104 ) -> proto::ProjectTransaction {
8105 let mut serialized_transaction = proto::ProjectTransaction {
8106 buffer_ids: Default::default(),
8107 transactions: Default::default(),
8108 };
8109 for (buffer, transaction) in project_transaction.0 {
8110 serialized_transaction
8111 .buffer_ids
8112 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
8113 serialized_transaction
8114 .transactions
8115 .push(language::proto::serialize_transaction(&transaction));
8116 }
8117 serialized_transaction
8118 }
8119
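    // Rebuilds a `ProjectTransaction` from its wire representation: waits for every
    // referenced remote buffer to arrive, then waits for each transaction's edits to be
    // applied before (optionally) pushing the transaction onto the buffer's history.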
8120 fn deserialize_project_transaction(
8121 &mut self,
8122 message: proto::ProjectTransaction,
8123 push_to_history: bool,
8124 cx: &mut ModelContext<Self>,
8125 ) -> Task<Result<ProjectTransaction>> {
8126 cx.spawn(move |this, mut cx| async move {
8127 let mut project_transaction = ProjectTransaction::default();
8128 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
8129 {
8130 let buffer = this
8131 .update(&mut cx, |this, cx| {
8132 this.wait_for_remote_buffer(buffer_id, cx)
8133 })?
8134 .await?;
8135 let transaction = language::proto::deserialize_transaction(transaction)?;
8136 project_transaction.0.insert(buffer, transaction);
8137 }
8138
8139 for (buffer, transaction) in &project_transaction.0 {
8140 buffer
8141 .update(&mut cx, |buffer, _| {
8142 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
8143 })?
8144 .await?;
8145
8146 if push_to_history {
8147 buffer.update(&mut cx, |buffer, _| {
8148 buffer.push_transaction(transaction.clone(), Instant::now());
8149 })?;
8150 }
8151 }
8152
8153 Ok(project_transaction)
8154 })
8155 }
8156
8157 fn create_buffer_for_peer(
8158 &mut self,
8159 buffer: &Model<Buffer>,
8160 peer_id: proto::PeerId,
8161 cx: &mut AppContext,
8162 ) -> u64 {
8163 let buffer_id = buffer.read(cx).remote_id();
8164 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
8165 updates_tx
8166 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
8167 .ok();
8168 }
8169 buffer_id
8170 }
8171
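    // Waits until a buffer with the given remote id has been registered, polling again each
    // time `opened_buffer` fires. Fails if the project is dropped or the connection to the
    // host is lost before the buffer arrives.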
8172 fn wait_for_remote_buffer(
8173 &mut self,
8174 id: u64,
8175 cx: &mut ModelContext<Self>,
8176 ) -> Task<Result<Model<Buffer>>> {
8177 let mut opened_buffer_rx = self.opened_buffer.1.clone();
8178
8179 cx.spawn(move |this, mut cx| async move {
8180 let buffer = loop {
8181 let Some(this) = this.upgrade() else {
8182 return Err(anyhow!("project dropped"));
8183 };
8184
8185 let buffer = this.update(&mut cx, |this, _cx| {
8186 this.opened_buffers
8187 .get(&id)
8188 .and_then(|buffer| buffer.upgrade())
8189 })?;
8190
8191 if let Some(buffer) = buffer {
8192 break buffer;
8193 } else if this.update(&mut cx, |this, _| this.is_disconnected())? {
8194 return Err(anyhow!("disconnected before buffer {} could be opened", id));
8195 }
8196
8197 this.update(&mut cx, |this, _| {
8198 this.incomplete_remote_buffers.entry(id).or_default();
8199 })?;
8200 drop(this);
8201
8202 opened_buffer_rx
8203 .next()
8204 .await
8205 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
8206 };
8207
8208 Ok(buffer)
8209 })
8210 }
8211
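    // Reconciles this (remote) project's buffers with the host: sends the host our buffer
    // versions, pushes any operations the host's response shows it is missing, and
    // re-requests buffers that were only partially created.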
8212 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8213 let project_id = match self.client_state {
8214 ProjectClientState::Remote {
8215 sharing_has_stopped,
8216 remote_id,
8217 ..
8218 } => {
8219 if sharing_has_stopped {
8220 return Task::ready(Err(anyhow!(
8221 "can't synchronize remote buffers on a readonly project"
8222 )));
8223 } else {
8224 remote_id
8225 }
8226 }
8227 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8228 return Task::ready(Err(anyhow!(
8229 "can't synchronize remote buffers on a local project"
8230 )))
8231 }
8232 };
8233
8234 let client = self.client.clone();
8235 cx.spawn(move |this, mut cx| async move {
8236 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8237 let buffers = this
8238 .opened_buffers
8239 .iter()
8240 .filter_map(|(id, buffer)| {
8241 let buffer = buffer.upgrade()?;
8242 Some(proto::BufferVersion {
8243 id: *id,
8244 version: language::proto::serialize_version(&buffer.read(cx).version),
8245 })
8246 })
8247 .collect();
8248 let incomplete_buffer_ids = this
8249 .incomplete_remote_buffers
8250 .keys()
8251 .copied()
8252 .collect::<Vec<_>>();
8253
8254 (buffers, incomplete_buffer_ids)
8255 })?;
8256 let response = client
8257 .request(proto::SynchronizeBuffers {
8258 project_id,
8259 buffers,
8260 })
8261 .await?;
8262
8263 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8264 response
8265 .buffers
8266 .into_iter()
8267 .map(|buffer| {
8268 let client = client.clone();
8269 let buffer_id = buffer.id;
8270 let remote_version = language::proto::deserialize_version(&buffer.version);
8271 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8272 let operations =
8273 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8274 cx.background_executor().spawn(async move {
8275 let operations = operations.await;
8276 for chunk in split_operations(operations) {
8277 client
8278 .request(proto::UpdateBuffer {
8279 project_id,
8280 buffer_id,
8281 operations: chunk,
8282 })
8283 .await?;
8284 }
8285 anyhow::Ok(())
8286 })
8287 } else {
8288 Task::ready(Ok(()))
8289 }
8290 })
8291 .collect::<Vec<_>>()
8292 })?;
8293
8294            // Any incomplete buffers have open requests waiting. Ask the host to re-create
8295            // these buffers for us so that those waiting futures can resolve.
8296 for id in incomplete_buffer_ids {
8297 cx.background_executor()
8298 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8299 .detach();
8300 }
8301
8302 futures::future::join_all(send_updates_for_buffers)
8303 .await
8304 .into_iter()
8305 .collect()
8306 })
8307 }
8308
8309 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8310 self.worktrees()
8311 .map(|worktree| {
8312 let worktree = worktree.read(cx);
8313 proto::WorktreeMetadata {
8314 id: worktree.id().to_proto(),
8315 root_name: worktree.root_name().into(),
8316 visible: worktree.is_visible(),
8317 abs_path: worktree.abs_path().to_string_lossy().into(),
8318 }
8319 })
8320 .collect()
8321 }
8322
8323 fn set_worktrees_from_proto(
8324 &mut self,
8325 worktrees: Vec<proto::WorktreeMetadata>,
8326 cx: &mut ModelContext<Project>,
8327 ) -> Result<()> {
8328 let replica_id = self.replica_id();
8329 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8330
8331 let mut old_worktrees_by_id = self
8332 .worktrees
8333 .drain(..)
8334 .filter_map(|worktree| {
8335 let worktree = worktree.upgrade()?;
8336 Some((worktree.read(cx).id(), worktree))
8337 })
8338 .collect::<HashMap<_, _>>();
8339
8340 for worktree in worktrees {
8341 if let Some(old_worktree) =
8342 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8343 {
8344 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8345 } else {
8346 let worktree =
8347 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8348 let _ = self.add_worktree(&worktree, cx);
8349 }
8350 }
8351
8352 self.metadata_changed(cx);
8353 for id in old_worktrees_by_id.keys() {
8354 cx.emit(Event::WorktreeRemoved(*id));
8355 }
8356
8357 Ok(())
8358 }
8359
8360 fn set_collaborators_from_proto(
8361 &mut self,
8362 messages: Vec<proto::Collaborator>,
8363 cx: &mut ModelContext<Self>,
8364 ) -> Result<()> {
8365 let mut collaborators = HashMap::default();
8366 for message in messages {
8367 let collaborator = Collaborator::from_proto(message)?;
8368 collaborators.insert(collaborator.peer_id, collaborator);
8369 }
8370 for old_peer_id in self.collaborators.keys() {
8371 if !collaborators.contains_key(old_peer_id) {
8372 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8373 }
8374 }
8375 self.collaborators = collaborators;
8376 Ok(())
8377 }
8378
8379 fn deserialize_symbol(
8380 &self,
8381 serialized_symbol: proto::Symbol,
8382 ) -> impl Future<Output = Result<Symbol>> {
8383 let languages = self.languages.clone();
8384 async move {
8385 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8386 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8387 let start = serialized_symbol
8388 .start
8389 .ok_or_else(|| anyhow!("invalid start"))?;
8390 let end = serialized_symbol
8391 .end
8392 .ok_or_else(|| anyhow!("invalid end"))?;
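            // Mirrors `serialize_symbol`, which stores the symbol kind via `mem::transmute`;
            // convert it back the same way.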
8393 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8394 let path = ProjectPath {
8395 worktree_id,
8396 path: PathBuf::from(serialized_symbol.path).into(),
8397 };
8398 let language = languages
8399 .language_for_file(&path.path, None)
8400 .await
8401 .log_err();
8402 Ok(Symbol {
8403 language_server_name: LanguageServerName(
8404 serialized_symbol.language_server_name.into(),
8405 ),
8406 source_worktree_id,
8407 path,
8408 label: {
8409 match language {
8410 Some(language) => {
8411 language
8412 .label_for_symbol(&serialized_symbol.name, kind)
8413 .await
8414 }
8415 None => None,
8416 }
8417 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8418 },
8419
8420 name: serialized_symbol.name,
8421 range: Unclipped(PointUtf16::new(start.row, start.column))
8422 ..Unclipped(PointUtf16::new(end.row, end.column)),
8423 kind,
8424 signature: serialized_symbol
8425 .signature
8426 .try_into()
8427 .map_err(|_| anyhow!("invalid signature"))?,
8428 })
8429 }
8430 }
8431
8432 async fn handle_buffer_saved(
8433 this: Model<Self>,
8434 envelope: TypedEnvelope<proto::BufferSaved>,
8435 _: Arc<Client>,
8436 mut cx: AsyncAppContext,
8437 ) -> Result<()> {
8438 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8439 let version = deserialize_version(&envelope.payload.version);
8440 let mtime = envelope
8441 .payload
8442 .mtime
8443 .ok_or_else(|| anyhow!("missing mtime"))?
8444 .into();
8445
8446 this.update(&mut cx, |this, cx| {
8447 let buffer = this
8448 .opened_buffers
8449 .get(&envelope.payload.buffer_id)
8450 .and_then(|buffer| buffer.upgrade())
8451 .or_else(|| {
8452 this.incomplete_remote_buffers
8453 .get(&envelope.payload.buffer_id)
8454 .and_then(|b| b.clone())
8455 });
8456 if let Some(buffer) = buffer {
8457 buffer.update(cx, |buffer, cx| {
8458 buffer.did_save(version, fingerprint, mtime, cx);
8459 });
8460 }
8461 Ok(())
8462 })?
8463 }
8464
8465 async fn handle_buffer_reloaded(
8466 this: Model<Self>,
8467 envelope: TypedEnvelope<proto::BufferReloaded>,
8468 _: Arc<Client>,
8469 mut cx: AsyncAppContext,
8470 ) -> Result<()> {
8471 let payload = envelope.payload;
8472 let version = deserialize_version(&payload.version);
8473 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8474 let line_ending = deserialize_line_ending(
8475 proto::LineEnding::from_i32(payload.line_ending)
8476 .ok_or_else(|| anyhow!("missing line ending"))?,
8477 );
8478 let mtime = payload
8479 .mtime
8480 .ok_or_else(|| anyhow!("missing mtime"))?
8481 .into();
8482 this.update(&mut cx, |this, cx| {
8483 let buffer = this
8484 .opened_buffers
8485 .get(&payload.buffer_id)
8486 .and_then(|buffer| buffer.upgrade())
8487 .or_else(|| {
8488 this.incomplete_remote_buffers
8489 .get(&payload.buffer_id)
8490 .cloned()
8491 .flatten()
8492 });
8493 if let Some(buffer) = buffer {
8494 buffer.update(cx, |buffer, cx| {
8495 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8496 });
8497 }
8498 Ok(())
8499 })?
8500 }
8501
8502 #[allow(clippy::type_complexity)]
8503 fn edits_from_lsp(
8504 &mut self,
8505 buffer: &Model<Buffer>,
8506 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8507 server_id: LanguageServerId,
8508 version: Option<i32>,
8509 cx: &mut ModelContext<Self>,
8510 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8511 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8512 cx.background_executor().spawn(async move {
8513 let snapshot = snapshot?;
8514 let mut lsp_edits = lsp_edits
8515 .into_iter()
8516 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8517 .collect::<Vec<_>>();
8518 lsp_edits.sort_by_key(|(range, _)| range.start);
8519
8520 let mut lsp_edits = lsp_edits.into_iter().peekable();
8521 let mut edits = Vec::new();
8522 while let Some((range, mut new_text)) = lsp_edits.next() {
8523 // Clip invalid ranges provided by the language server.
8524 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8525 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8526
8527 // Combine any LSP edits that are adjacent.
8528 //
8529 // Also, combine LSP edits that are separated from each other by only
8530 // a newline. This is important because for some code actions,
8531 // Rust-analyzer rewrites the entire buffer via a series of edits that
8532 // are separated by unchanged newline characters.
8533 //
8534 // In order for the diffing logic below to work properly, any edits that
8535 // cancel each other out must be combined into one.
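                // For example, if one edit replaces all of line 0 and the next edit starts
                // at column 0 of line 1, the only unchanged text between them is line 0's
                // trailing newline, so the two become a single edit whose replacement text
                // joins both strings with '\n'.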
8536 while let Some((next_range, next_text)) = lsp_edits.peek() {
8537 if next_range.start.0 > range.end {
8538 if next_range.start.0.row > range.end.row + 1
8539 || next_range.start.0.column > 0
8540 || snapshot.clip_point_utf16(
8541 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8542 Bias::Left,
8543 ) > range.end
8544 {
8545 break;
8546 }
8547 new_text.push('\n');
8548 }
8549 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8550 new_text.push_str(next_text);
8551 lsp_edits.next();
8552 }
8553
8554 // For multiline edits, perform a diff of the old and new text so that
8555 // we can identify the changes more precisely, preserving the locations
8556 // of any anchors positioned in the unchanged regions.
8557 if range.end.row > range.start.row {
8558 let mut offset = range.start.to_offset(&snapshot);
8559 let old_text = snapshot.text_for_range(range).collect::<String>();
8560
8561 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8562 let mut moved_since_edit = true;
8563 for change in diff.iter_all_changes() {
8564 let tag = change.tag();
8565 let value = change.value();
8566 match tag {
8567 ChangeTag::Equal => {
8568 offset += value.len();
8569 moved_since_edit = true;
8570 }
8571 ChangeTag::Delete => {
8572 let start = snapshot.anchor_after(offset);
8573 let end = snapshot.anchor_before(offset + value.len());
8574 if moved_since_edit {
8575 edits.push((start..end, String::new()));
8576 } else {
8577 edits.last_mut().unwrap().0.end = end;
8578 }
8579 offset += value.len();
8580 moved_since_edit = false;
8581 }
8582 ChangeTag::Insert => {
8583 if moved_since_edit {
8584 let anchor = snapshot.anchor_after(offset);
8585 edits.push((anchor..anchor, value.to_string()));
8586 } else {
8587 edits.last_mut().unwrap().1.push_str(value);
8588 }
8589 moved_since_edit = false;
8590 }
8591 }
8592 }
8593 } else if range.end == range.start {
8594 let anchor = snapshot.anchor_after(range.start);
8595 edits.push((anchor..anchor, new_text));
8596 } else {
8597 let edit_start = snapshot.anchor_after(range.start);
8598 let edit_end = snapshot.anchor_before(range.end);
8599 edits.push((edit_start..edit_end, new_text));
8600 }
8601 }
8602
8603 Ok(edits)
8604 })
8605 }
8606
8607 fn buffer_snapshot_for_lsp_version(
8608 &mut self,
8609 buffer: &Model<Buffer>,
8610 server_id: LanguageServerId,
8611 version: Option<i32>,
8612 cx: &AppContext,
8613 ) -> Result<TextBufferSnapshot> {
8614 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8615
8616 if let Some(version) = version {
8617 let buffer_id = buffer.read(cx).remote_id();
8618 let snapshots = self
8619 .buffer_snapshots
8620 .get_mut(&buffer_id)
8621 .and_then(|m| m.get_mut(&server_id))
8622 .ok_or_else(|| {
8623 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8624 })?;
8625
8626 let found_snapshot = snapshots
8627 .binary_search_by_key(&version, |e| e.version)
8628 .map(|ix| snapshots[ix].snapshot.clone())
8629 .map_err(|_| {
8630 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8631 })?;
8632
8633 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8634 Ok(found_snapshot)
8635 } else {
8636 Ok((buffer.read(cx)).text_snapshot())
8637 }
8638 }
8639
8640 pub fn language_servers(
8641 &self,
8642 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8643 self.language_server_ids
8644 .iter()
8645 .map(|((worktree_id, server_name), server_id)| {
8646 (*server_id, server_name.clone(), *worktree_id)
8647 })
8648 }
8649
8650 pub fn supplementary_language_servers(
8651 &self,
8652 ) -> impl '_
8653 + Iterator<
8654 Item = (
8655 &LanguageServerId,
8656 &(LanguageServerName, Arc<LanguageServer>),
8657 ),
8658 > {
8659 self.supplementary_language_servers.iter()
8660 }
8661
8662 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8663 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8664 Some(server.clone())
8665 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8666 Some(Arc::clone(server))
8667 } else {
8668 None
8669 }
8670 }
8671
8672 pub fn language_servers_for_buffer(
8673 &self,
8674 buffer: &Buffer,
8675 cx: &AppContext,
8676 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8677 self.language_server_ids_for_buffer(buffer, cx)
8678 .into_iter()
8679 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8680 LanguageServerState::Running {
8681 adapter, server, ..
8682 } => Some((adapter, server)),
8683 _ => None,
8684 })
8685 }
8686
8687 fn primary_language_server_for_buffer(
8688 &self,
8689 buffer: &Buffer,
8690 cx: &AppContext,
8691 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8692 self.language_servers_for_buffer(buffer, cx).next()
8693 }
8694
8695 pub fn language_server_for_buffer(
8696 &self,
8697 buffer: &Buffer,
8698 server_id: LanguageServerId,
8699 cx: &AppContext,
8700 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8701 self.language_servers_for_buffer(buffer, cx)
8702 .find(|(_, s)| s.server_id() == server_id)
8703 }
8704
8705 fn language_server_ids_for_buffer(
8706 &self,
8707 buffer: &Buffer,
8708 cx: &AppContext,
8709 ) -> Vec<LanguageServerId> {
8710 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8711 let worktree_id = file.worktree_id(cx);
8712 language
8713 .lsp_adapters()
8714 .iter()
8715 .flat_map(|adapter| {
8716 let key = (worktree_id, adapter.name.clone());
8717 self.language_server_ids.get(&key).copied()
8718 })
8719 .collect()
8720 } else {
8721 Vec::new()
8722 }
8723 }
8724}
8725
8726fn subscribe_for_copilot_events(
8727 copilot: &Model<Copilot>,
8728 cx: &mut ModelContext<'_, Project>,
8729) -> gpui::Subscription {
8730 cx.subscribe(
8731 copilot,
8732 |project, copilot, copilot_event, cx| match copilot_event {
8733 copilot::Event::CopilotLanguageServerStarted => {
8734 match copilot.read(cx).language_server() {
8735 Some((name, copilot_server)) => {
8736                        // The server may already have been added and subscribed to by an earlier event; avoid registering it again.
8737 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8738 let new_server_id = copilot_server.server_id();
8739 let weak_project = cx.weak_model();
8740 let copilot_log_subscription = copilot_server
8741 .on_notification::<copilot::request::LogMessage, _>(
8742 move |params, mut cx| {
8743 weak_project.update(&mut cx, |_, cx| {
8744 cx.emit(Event::LanguageServerLog(
8745 new_server_id,
8746 params.message,
8747 ));
8748 }).ok();
8749 },
8750 );
8751 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8752 project.copilot_log_subscription = Some(copilot_log_subscription);
8753 cx.emit(Event::LanguageServerAdded(new_server_id));
8754 }
8755 }
8756 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8757 }
8758 }
8759 },
8760 )
8761}
8762
8763fn glob_literal_prefix(glob: &str) -> &str {
8764 let mut literal_end = 0;
8765 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8766 if part.contains(&['*', '?', '{', '}']) {
8767 break;
8768 } else {
8769 if i > 0 {
8770 // Account for separator prior to this part
8771 literal_end += path::MAIN_SEPARATOR.len_utf8();
8772 }
8773 literal_end += part.len();
8774 }
8775 }
8776 &glob[..literal_end]
8777}
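
// A minimal sketch of the expected behavior of `glob_literal_prefix`, assuming '/' is the
// platform path separator (hence gated to Unix targets); module and test names here are
// illustrative only.
#[cfg(all(test, unix))]
mod glob_literal_prefix_examples {
    use super::glob_literal_prefix;

    #[test]
    fn stops_at_the_first_wildcard_component() {
        // The literal prefix ends before the first component containing a glob character.
        assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
        assert_eq!(glob_literal_prefix("src/generated/*.rs"), "src/generated");
        // A leading wildcard means there is no literal prefix at all.
        assert_eq!(glob_literal_prefix("**/*.rs"), "");
    }
}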
8778
8779impl WorktreeHandle {
8780 pub fn upgrade(&self) -> Option<Model<Worktree>> {
8781 match self {
8782 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8783 WorktreeHandle::Weak(handle) => handle.upgrade(),
8784 }
8785 }
8786
8787 pub fn handle_id(&self) -> usize {
8788 match self {
8789 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8790 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8791 }
8792 }
8793}
8794
8795impl OpenBuffer {
8796 pub fn upgrade(&self) -> Option<Model<Buffer>> {
8797 match self {
8798 OpenBuffer::Strong(handle) => Some(handle.clone()),
8799 OpenBuffer::Weak(handle) => handle.upgrade(),
8800 OpenBuffer::Operations(_) => None,
8801 }
8802 }
8803}
8804
8805pub struct PathMatchCandidateSet {
8806 pub snapshot: Snapshot,
8807 pub include_ignored: bool,
8808 pub include_root_name: bool,
8809}
8810
8811impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8812 type Candidates = PathMatchCandidateSetIter<'a>;
8813
8814 fn id(&self) -> usize {
8815 self.snapshot.id().to_usize()
8816 }
8817
8818 fn len(&self) -> usize {
8819 if self.include_ignored {
8820 self.snapshot.file_count()
8821 } else {
8822 self.snapshot.visible_file_count()
8823 }
8824 }
8825
8826 fn prefix(&self) -> Arc<str> {
8827 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8828 self.snapshot.root_name().into()
8829 } else if self.include_root_name {
8830 format!("{}/", self.snapshot.root_name()).into()
8831 } else {
8832 "".into()
8833 }
8834 }
8835
8836 fn candidates(&'a self, start: usize) -> Self::Candidates {
8837 PathMatchCandidateSetIter {
8838 traversal: self.snapshot.files(self.include_ignored, start),
8839 }
8840 }
8841}
8842
8843pub struct PathMatchCandidateSetIter<'a> {
8844 traversal: Traversal<'a>,
8845}
8846
8847impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8848 type Item = fuzzy::PathMatchCandidate<'a>;
8849
8850 fn next(&mut self) -> Option<Self::Item> {
8851 self.traversal.next().map(|entry| {
8852 if let EntryKind::File(char_bag) = entry.kind {
8853 fuzzy::PathMatchCandidate {
8854 path: &entry.path,
8855 char_bag,
8856 }
8857 } else {
8858 unreachable!()
8859 }
8860 })
8861 }
8862}
8863
8864impl EventEmitter<Event> for Project {}
8865
8866impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8867 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8868 Self {
8869 worktree_id,
8870 path: path.as_ref().into(),
8871 }
8872 }
8873}
8874
8875impl ProjectLspAdapterDelegate {
8876 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8877 Arc::new(Self {
8878 project: cx.handle(),
8879 http_client: project.client.http_client(),
8880 })
8881 }
8882}
8883
8884impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8885 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8886 self.project
8887 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8888 }
8889
8890 fn http_client(&self) -> Arc<dyn HttpClient> {
8891 self.http_client.clone()
8892 }
8893}
8894
8895fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8896 proto::Symbol {
8897 language_server_name: symbol.language_server_name.0.to_string(),
8898 source_worktree_id: symbol.source_worktree_id.to_proto(),
8899 worktree_id: symbol.path.worktree_id.to_proto(),
8900 path: symbol.path.path.to_string_lossy().to_string(),
8901 name: symbol.name.clone(),
8902 kind: unsafe { mem::transmute(symbol.kind) },
8903 start: Some(proto::PointUtf16 {
8904 row: symbol.range.start.0.row,
8905 column: symbol.range.start.0.column,
8906 }),
8907 end: Some(proto::PointUtf16 {
8908 row: symbol.range.end.0.row,
8909 column: symbol.range.end.0.column,
8910 }),
8911 signature: symbol.signature.to_vec(),
8912 }
8913}
8914
8915fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8916 let mut path_components = path.components();
8917 let mut base_components = base.components();
8918 let mut components: Vec<Component> = Vec::new();
8919 loop {
8920 match (path_components.next(), base_components.next()) {
8921 (None, None) => break,
8922 (Some(a), None) => {
8923 components.push(a);
8924 components.extend(path_components.by_ref());
8925 break;
8926 }
8927 (None, _) => components.push(Component::ParentDir),
8928 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8929 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8930 (Some(a), Some(_)) => {
8931 components.push(Component::ParentDir);
8932 for _ in base_components {
8933 components.push(Component::ParentDir);
8934 }
8935 components.push(a);
8936 components.extend(path_components.by_ref());
8937 break;
8938 }
8939 }
8940 }
8941 components.iter().map(|c| c.as_os_str()).collect()
8942}
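
// A minimal sketch of the expected behavior of `relativize_path`; module and test names
// here are illustrative only.
#[cfg(test)]
mod relativize_path_examples {
    use super::relativize_path;
    use std::path::Path;

    #[test]
    fn descends_or_backs_out_as_needed() {
        // A path underneath the base is expressed relative to it.
        assert_eq!(
            relativize_path(Path::new("a/b"), Path::new("a/b/c/d.rs")).as_path(),
            Path::new("c/d.rs")
        );
        // A sibling path requires backing out of the differing component.
        assert_eq!(
            relativize_path(Path::new("a/b"), Path::new("a/x/y.rs")).as_path(),
            Path::new("../x/y.rs")
        );
    }
}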
8943
8944fn resolve_path(base: &Path, path: &Path) -> PathBuf {
8945 let mut result = base.to_path_buf();
8946 for component in path.components() {
8947 match component {
8948 Component::ParentDir => {
8949 result.pop();
8950 }
8951 Component::CurDir => (),
8952 _ => result.push(component),
8953 }
8954 }
8955 result
8956}
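
// A minimal sketch of the expected behavior of `resolve_path`; module and test names here
// are illustrative only.
#[cfg(test)]
mod resolve_path_examples {
    use super::resolve_path;
    use std::path::Path;

    #[test]
    fn applies_current_and_parent_dir_components() {
        // `..` pops the last component of the base.
        assert_eq!(
            resolve_path(Path::new("a/b"), Path::new("../c")).as_path(),
            Path::new("a/c")
        );
        // `.` is ignored; all other components are appended.
        assert_eq!(
            resolve_path(Path::new("a/b"), Path::new("./c/d")).as_path(),
            Path::new("a/b/c/d")
        );
    }
}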
8957
8958impl Item for Buffer {
8959 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8960 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8961 }
8962
8963 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8964 File::from_dyn(self.file()).map(|file| ProjectPath {
8965 worktree_id: file.worktree_id(cx),
8966 path: file.path().clone(),
8967 })
8968 }
8969}
8970
8971async fn wait_for_loading_buffer(
8972 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
8973) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
8974 loop {
8975 if let Some(result) = receiver.borrow().as_ref() {
8976 match result {
8977 Ok(buffer) => return Ok(buffer.to_owned()),
8978 Err(e) => return Err(e.to_owned()),
8979 }
8980 }
8981 receiver.next().await;
8982 }
8983}
8984
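// Returns true when the server's declared text-document sync capabilities ask for the full
// text to be included in `textDocument/didSave` notifications.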
8985fn include_text(server: &lsp::LanguageServer) -> bool {
8986 server
8987 .capabilities()
8988 .text_document_sync
8989 .as_ref()
8990 .and_then(|sync| match sync {
8991 lsp::TextDocumentSyncCapability::Kind(_) => None,
8992 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8993 })
8994 .and_then(|save_options| match save_options {
8995 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
8996 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8997 })
8998 .unwrap_or(false)
8999}