mod ignore;
pub mod lsp_command;
pub mod lsp_ext_command;
mod prettier_support;
pub mod project_settings;
pub mod search;
pub mod terminals;
pub mod worktree;

#[cfg(test)]
mod project_tests;
#[cfg(test)]
mod worktree_tests;

use anyhow::{anyhow, Context as _, Result};
use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
use copilot::Copilot;
use futures::{
    channel::{
        mpsc::{self, UnboundedReceiver},
        oneshot,
    },
    future::{try_join_all, Shared},
    stream::FuturesUnordered,
    AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
    AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
    Model, ModelContext, Task, WeakModel,
};
use itertools::Itertools;
use language::{
    language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
    markdown, point_to_lsp,
    proto::{
        deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
        serialize_anchor, serialize_version, split_operations,
    },
    range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability,
    CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
    Documentation, Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName,
    LocalFile, LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
    DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
};
use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::{Mutex, RwLock};
use postage::watch;
use prettier_support::{DefaultPrettier, PrettierInstance};
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::{Settings, SettingsStore};
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use smol::channel::{Receiver, Sender};
use smol::lock::Semaphore;
use std::{
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    num::NonZeroU32,
    ops::Range,
    path::{self, Component, Path, PathBuf},
    process::Stdio,
    str,
    sync::{
        atomic::{AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::{Duration, Instant},
};
use terminals::Terminals;
use text::Anchor;
use util::{
    debug_panic, defer, http::HttpClient, merge_json_value_into,
    paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
};

pub use fs::*;
#[cfg(any(test, feature = "test-support"))]
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::*;

const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;

pub trait Item {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
}

pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
    languages: Arc<LanguageRegistry>,
    supplementary_language_servers:
        HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
    language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
    last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    join_project_response_message_id: u32,
    next_diagnostic_group_id: usize,
    user_store: Model<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<proto::PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    next_buffer_id: u64,
    opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
    shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
    #[allow(clippy::type_complexity)]
    loading_buffers_by_path: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
    >,
    #[allow(clippy::type_complexity)]
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A buffer ID mapped to `None` means that we've started waiting for the buffer but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
    incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>,
    buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
    buffers_being_formatted: HashSet<u64>,
    buffers_needing_diff: HashSet<WeakModel<Buffer>>,
    git_diff_debouncer: DelayedDebounced,
    nonce: u128,
    _maintain_buffer_languages: Task<()>,
    _maintain_workspace_config: Task<Result<()>>,
    terminals: Terminals,
    copilot_lsp_subscription: Option<gpui::Subscription>,
    copilot_log_subscription: Option<lsp::Subscription>,
    current_lsp_settings: HashMap<Arc<str>, LspSettings>,
    node: Option<Arc<dyn NodeRuntime>>,
    default_prettier: DefaultPrettier,
    prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
    prettier_instances: HashMap<PathBuf, PrettierInstance>,
}

struct DelayedDebounced {
    task: Option<Task<()>>,
    cancel_channel: Option<oneshot::Sender<()>>,
}

pub enum LanguageServerToQuery {
    Primary,
    Other(LanguageServerId),
}

impl DelayedDebounced {
    fn new() -> DelayedDebounced {
        DelayedDebounced {
            task: None,
            cancel_channel: None,
        }
    }

    fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
    where
        F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
    {
        if let Some(channel) = self.cancel_channel.take() {
            _ = channel.send(());
        }

        let (sender, mut receiver) = oneshot::channel::<()>();
        self.cancel_channel = Some(sender);

        let previous_task = self.task.take();
        self.task = Some(cx.spawn(move |project, mut cx| async move {
            let mut timer = cx.background_executor().timer(delay).fuse();
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }

            futures::select_biased! {
                _ = receiver => return,
                _ = timer => {}
            }

            if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
                task.await;
            }
        }));
    }
}
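
// Usage sketch (added commentary, not in the original source): `fire_new` cancels any pending
// run and restarts the delay, so only the last call within the window actually fires. A
// hypothetical call site, modeled on the git-diff debouncing done elsewhere in this file:
//
//     self.git_diff_debouncer
//         .fire_new(Duration::from_millis(250), cx, |project, cx| {
//             project.recalculate_buffer_diffs(cx) // hypothetical helper returning Task<()>
//         });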

struct LspBufferSnapshot {
    version: i32,
    snapshot: TextBufferSnapshot,
}

/// Message ordered with respect to buffer operations
enum BufferOrderedMessage {
    Operation {
        buffer_id: u64,
        operation: proto::Operation,
    },
    LanguageServerUpdate {
        language_server_id: LanguageServerId,
        message: proto::update_language_server::Variant,
    },
    Resync,
}

enum LocalProjectUpdate {
    WorktreesChanged,
    CreateBufferForPeer {
        peer_id: proto::PeerId,
        buffer_id: u64,
    },
}

enum OpenBuffer {
    Strong(Model<Buffer>),
    Weak(WeakModel<Buffer>),
    Operations(Vec<Operation>),
}

#[derive(Clone)]
enum WorktreeHandle {
    Strong(Model<Worktree>),
    Weak(WeakModel<Worktree>),
}

#[derive(Debug)]
enum ProjectClientState {
    Local,
    Shared {
        remote_id: u64,
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<Result<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        capability: Capability,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
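
// Added note (not in the original source): informally, a host project starts in `Local`, moves
// to `Shared` in `shared()` and back to `Local` in `unshare_internal()`; guests construct a
// `Remote` project via `Project::remote`, and `disconnected_from_host_internal()` flips
// `sharing_has_stopped` once the host goes away.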

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    LanguageServerAdded(LanguageServerId),
    LanguageServerRemoved(LanguageServerId),
    LanguageServerLog(LanguageServerId, String),
    Notification(String),
    ActiveEntryChanged(Option<ProjectEntryId>),
    ActivateProjectPanel,
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    DiskBasedDiagnosticsStarted {
        language_server_id: LanguageServerId,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: LanguageServerId,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: LanguageServerId,
    },
    RemoteIdChanged(Option<u64>),
    DisconnectedFromHost,
    Closed,
    DeletedEntry(ProjectEntryId),
    CollaboratorUpdated {
        old_peer_id: proto::PeerId,
        new_peer_id: proto::PeerId,
    },
    CollaboratorJoined(proto::PeerId),
    CollaboratorLeft(proto::PeerId),
    RefreshInlayHints,
    RevealInProjectPanel(ProjectEntryId),
}

pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),

    Running {
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server: Arc<LanguageServer>,
        watched_paths: HashMap<WorktreeId, GlobSet>,
        simulate_disk_based_diagnostics_completion: Option<Task<()>>,
    },
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Location {
    pub buffer: Model<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub position: language::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    pub padding_left: bool,
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    pub resolve_state: ResolveState,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    CanResolve(LanguageServerId, Option<lsp::LSPAny>),
    Resolving,
}

impl InlayHint {
    pub fn text(&self) -> String {
        match &self.label {
            InlayHintLabel::String(s) => s.to_owned(),
            InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
    String(String),
    LabelParts(Vec<InlayHintLabelPart>),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
    pub value: String,
    pub tooltip: Option<InlayHintLabelPartTooltip>,
    pub location: Option<(LanguageServerId, lsp::Location)>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
    pub kind: HoverBlockKind,
    pub value: String,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub language_server_name: LanguageServerName,
    pub source_worktree_id: WorktreeId,
    pub path: ProjectPath,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<Unclipped<PointUtf16>>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub kind: HoverBlockKind,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
    PlainText,
    Markdown,
    Code { language: String },
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
    pub language: Option<Arc<Language>>,
}

impl Hover {
    pub fn is_empty(&self) -> bool {
        self.contents.iter().all(|block| block.text.is_empty())
    }
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(
        &self,
        language_server_id: LanguageServerId,
        path: &Path,
    ) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: language_server_id.0 as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}
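
// Added commentary (not in the original source): only *primary* diagnostic entries are counted
// above, so grouped/related diagnostics do not inflate the totals. Rough sketch of the expected
// behavior, with `entries` being a hypothetical collection of `DiagnosticEntry` values:
//
//     // two primary errors and one non-primary warning => error_count == 2, warning_count == 0
//     let summary = DiagnosticSummary::new(entries.iter());
//     assert!(!summary.is_empty());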

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatTrigger {
    Save,
    Manual,
}

struct ProjectLspAdapterDelegate {
    project: Model<Project>,
    http_client: Arc<dyn HttpClient>,
}

// Currently, formatting operations are represented differently depending on
// whether they come from a language server or an external command.
enum FormatOperation {
    Lsp(Vec<(Range<Anchor>, String)>),
    External(Diff),
    Prettier(Diff),
}

impl FormatTrigger {
    fn from_proto(value: i32) -> FormatTrigger {
        match value {
            0 => FormatTrigger::Save,
            1 => FormatTrigger::Manual,
            _ => FormatTrigger::Save,
        }
    }
}

#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
    OpenBuffer {
        buffer: Model<Buffer>,
        // This might be an unnamed file without a representation on the filesystem
        path: Option<Arc<Path>>,
    },
    Path {
        worktree_id: WorktreeId,
        is_ignored: bool,
        path: Arc<Path>,
    },
}

type SearchMatchCandidateIndex = usize;

impl SearchMatchCandidate {
    fn path(&self) -> Option<Arc<Path>> {
        match self {
            SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
            SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
        }
    }
}

impl Project {
    pub fn init_settings(cx: &mut AppContext) {
        ProjectSettings::register(cx);
    }

    pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
        Self::init_settings(cx);

        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_update_project_collaborator);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_create_buffer_for_peer);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_request_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_message_handler(Self::handle_update_worktree_settings);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_on_type_formatting);
        client.add_model_request_handler(Self::handle_inlay_hints);
        client.add_model_request_handler(Self::handle_resolve_inlay_hint);
        client.add_model_request_handler(Self::handle_refresh_inlay_hints);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_synchronize_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
        client.add_model_message_handler(Self::handle_update_diff_base);
        client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
    }
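
    // Added note (not in the original source): handlers registered via
    // `add_model_message_handler` process one-way messages, while those registered via
    // `add_model_request_handler` are expected to produce a response for the requesting peer.
    // LSP-backed requests share the generic `handle_lsp_command::<T>` entry point,
    // parameterized by the request types defined in `lsp_command` and `lsp_ext_command`.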

    pub fn local(
        client: Arc<Client>,
        node: Arc<dyn NodeRuntime>,
        user_store: Model<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AppContext,
    ) -> Model<Self> {
        cx.new_model(|cx: &mut ModelContext<Self>| {
            let (tx, rx) = mpsc::unbounded();
            cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            Self {
                worktrees: Default::default(),
                buffer_ordered_messages_tx: tx,
                collaborators: Default::default(),
                next_buffer_id: 0,
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_buffers_by_path: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                buffer_snapshots: Default::default(),
                join_project_response_message_id: 0,
                client_state: ProjectClientState::Local,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![
                    cx.observe_global::<SettingsStore>(Self::on_settings_changed),
                    cx.on_release(Self::release),
                    cx.on_app_quit(Self::shutdown_language_servers),
                ],
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
                node: Some(node),
                default_prettier: DefaultPrettier::default(),
                prettiers_per_worktree: HashMap::default(),
                prettier_instances: HashMap::default(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: Model<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        role: proto::ChannelRole,
        mut cx: AsyncAppContext,
    ) -> Result<Model<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let subscription = client.subscribe_to_entity(remote_id)?;
        let response = client
            .request_envelope(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.new_model(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            let mut worktrees = Vec::new();
            for worktree in response.payload.worktrees {
                let worktree =
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                worktrees.push(worktree);
            }

            let (tx, rx) = mpsc::unbounded();
            cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            let mut this = Self {
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                loading_buffers_by_path: Default::default(),
                next_buffer_id: 0,
                opened_buffer: watch::channel(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                join_project_response_message_id: response.message_id,
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: Default::default(),
                _subscriptions: vec![
                    cx.on_release(Self::release),
                    cx.on_app_quit(Self::shutdown_language_servers),
                ],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    capability: Capability::ReadWrite,
                    remote_id,
                    replica_id,
                },
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: response
                    .payload
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            LanguageServerId(server.id as usize),
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                opened_buffers: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
                node: None,
                default_prettier: DefaultPrettier::default(),
                prettiers_per_worktree: HashMap::default(),
                prettier_instances: HashMap::default(),
            };
            this.set_role(role, cx);
            for worktree in worktrees {
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        })?;
        let subscription = subscription.set_model(&this, &mut cx);

        let user_ids = response
            .payload
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
            .await?;

        this.update(&mut cx, |this, cx| {
            this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
            this.client_subscriptions.push(subscription);
            anyhow::Ok(())
        })??;

        Ok(this)
    }

    fn release(&mut self, cx: &mut AppContext) {
        match &self.client_state {
            ProjectClientState::Local => {}
            ProjectClientState::Shared { .. } => {
                let _ = self.unshare_internal(cx);
            }
            ProjectClientState::Remote { remote_id, .. } => {
                let _ = self.client.send(proto::LeaveProject {
                    project_id: *remote_id,
                });
                self.disconnected_from_host_internal(cx);
            }
        }
    }

    fn shutdown_language_servers(
        &mut self,
        _cx: &mut ModelContext<Self>,
    ) -> impl Future<Output = ()> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .map(|(_, server_state)| async {
                use LanguageServerState::*;
                match server_state {
                    Running { server, .. } => server.shutdown()?.await,
                    Starting(task) => task.await?.shutdown()?.await,
                }
            })
            .collect::<Vec<_>>();

        async move {
            futures::future::join_all(shutdown_futures).await;
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> Model<Project> {
        let mut languages = LanguageRegistry::test();
        languages.set_executor(cx.executor());
        let http_client = util::http::FakeHttpClient::with_404_response();
        let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
        let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                node_runtime::FakeNodeRuntime::new(),
                user_store,
                Arc::new(languages),
                fs,
                cx,
            )
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
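
    // Usage sketch (hypothetical, added commentary): tests typically pair this constructor with
    // a fake filesystem and then operate on the returned model, e.g.
    //
    //     let fs = FakeFs::new(cx.executor());
    //     fs.insert_tree("/root", json!({ "main.rs": "" })).await;
    //     let project = Project::test(fs, ["/root".as_ref()], cx).await;
    //
    // The exact `FakeFs` construction above is an assumption; the relevant point is that each
    // entry in `root_paths` becomes a worktree whose initial scan completes before returning.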

    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let mut language_servers_to_start = Vec::new();
        let mut language_formatters_to_check = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade() {
                let buffer = buffer.read(cx);
                let buffer_file = File::from_dyn(buffer.file());
                let buffer_language = buffer.language();
                let settings = language_settings(buffer_language, buffer.file(), cx);
                if let Some(language) = buffer_language {
                    if settings.enable_language_server {
                        if let Some(file) = buffer_file {
                            language_servers_to_start
                                .push((file.worktree.clone(), Arc::clone(language)));
                        }
                    }
                    language_formatters_to_check.push((
                        buffer_file.map(|f| f.worktree_id(cx)),
                        Arc::clone(language),
                        settings.clone(),
                    ));
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

        let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            let language = languages.iter().find_map(|l| {
                let adapter = l
                    .lsp_adapters()
                    .iter()
                    .find(|adapter| &adapter.name == started_lsp_name)?;
                Some((l, adapter))
            });
            if let Some((language, adapter)) = language {
                let worktree = self.worktree_for_id(*worktree_id, cx);
                let file = worktree.as_ref().and_then(|tree| {
                    tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
                });
                if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
                    language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                } else if let Some(worktree) = worktree {
                    let server_name = &adapter.name.0;
                    match (
                        current_lsp_settings.get(server_name),
                        new_lsp_settings.get(server_name),
                    ) {
                        (None, None) => {}
                        (Some(_), None) | (None, Some(_)) => {
                            language_servers_to_restart.push((worktree, Arc::clone(language)));
                        }
                        (Some(current_lsp_settings), Some(new_lsp_settings)) => {
                            if current_lsp_settings != new_lsp_settings {
                                language_servers_to_restart.push((worktree, Arc::clone(language)));
                            }
                        }
                    }
                }
            }
        }
        self.current_lsp_settings = new_lsp_settings;

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        let mut prettier_plugins_by_worktree = HashMap::default();
        for (worktree, language, settings) in language_formatters_to_check {
            if let Some(plugins) =
                prettier_support::prettier_plugins_for_language(&language, &settings)
            {
                prettier_plugins_by_worktree
                    .entry(worktree)
                    .or_insert_with(|| HashSet::default())
                    .extend(plugins);
            }
        }
        for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
            self.install_default_prettier(worktree, prettier_plugins, cx);
        }

        // Start all the newly-enabled language servers.
        for (worktree, language) in language_servers_to_start {
            let worktree_path = worktree.read(cx).abs_path();
            self.start_language_servers(&worktree, worktree_path, language, cx);
        }

        // Restart all language servers with changed initialization options.
        for (worktree, language) in language_servers_to_restart {
            self.restart_language_servers(worktree, language, cx);
        }

        if self.copilot_lsp_subscription.is_none() {
            if let Some(copilot) = Copilot::global(cx) {
                for buffer in self.opened_buffers.values() {
                    if let Some(buffer) = buffer.upgrade() {
                        self.register_buffer_with_copilot(&buffer, cx);
                    }
                }
                self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
            }
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade())
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> Model<UserStore> {
        self.user_store.clone()
    }

    pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
        self.opened_buffers
            .values()
            .filter_map(|b| b.upgrade())
            .collect()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade() {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn remote_id(&self) -> Option<u64> {
        match self.client_state {
            ProjectClientState::Local => None,
            ProjectClientState::Shared { remote_id, .. }
            | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match self.client_state {
            ProjectClientState::Remote { replica_id, .. } => replica_id,
            _ => 0,
        }
    }

    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
            updates_tx
                .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                .ok();
        }
        cx.notify();
    }

    pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn host(&self) -> Option<&Collaborator> {
        self.collaborators.values().find(|c| c.replica_id == 0)
    }

    /// Collect all worktrees, including ones that don't appear in the project panel
    pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade())
    }

    /// Collect all user-visible worktrees, the ones that appear in the project panel
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade().and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees() {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Entry>>> {
        let project_path = project_path.into();
        let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(Ok(None));
        };
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.to_string_lossy().into(),
                        is_directory,
                    })
                    .await?;
                match response.entry {
                    Some(entry) => worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_remote_mut().unwrap().insert_entry(
                                entry,
                                response.worktree_scan_id as usize,
                                cx,
                            )
                        })?
                        .await
                        .map(Some),
                    None => Ok(None),
                }
            })
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Entry>>> {
        let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
            return Task::ready(Ok(None));
        };
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                match response.entry {
                    Some(entry) => worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_remote_mut().unwrap().insert_entry(
                                entry,
                                response.worktree_scan_id as usize,
                                cx,
                            )
                        })?
                        .await
                        .map(Some),
                    None => Ok(None),
                }
            })
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Entry>>> {
        let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
            return Task::ready(Ok(None));
        };
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                match response.entry {
                    Some(entry) => worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_remote_mut().unwrap().insert_entry(
                                entry,
                                response.worktree_scan_id as usize,
                                cx,
                            )
                        })?
                        .await
                        .map(Some),
                    None => Ok(None),
                }
            })
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;

        cx.emit(Event::DeletedEntry(entry_id));

        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })?
                    .await
            }))
        }
    }

    pub fn expand_entry(
        &mut self,
        worktree_id: WorktreeId,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_id(worktree_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
            })
        } else {
            let worktree = worktree.downgrade();
            let request = self.client.request(proto::ExpandProjectEntry {
                project_id: self.remote_id().unwrap(),
                entry_id: entry_id.to_proto(),
            });
            Some(cx.spawn(move |_, mut cx| async move {
                let response = request.await?;
                if let Some(worktree) = worktree.upgrade() {
                    worktree
                        .update(&mut cx, |worktree, _| {
                            worktree
                                .as_remote_mut()
                                .unwrap()
                                .wait_for_snapshot(response.worktree_scan_id as usize)
                        })?
                        .await?;
                }
                Ok(())
            }))
        }
    }

    pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
        if !matches!(self.client_state, ProjectClientState::Local) {
            return Err(anyhow!("project was already shared"));
        }
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
                .set_model(&cx.handle(), &mut cx.to_async()),
        );

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade() {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Operations(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: server_id.0 as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees() {
            let worktree_id = worktree.read(cx).id().to_proto();
            for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
                        worktree_id,
                        path: path.to_string_lossy().into(),
                        content: Some(content),
                    })
                    .log_err();
            }
        }

        let (updates_tx, mut updates_rx) = mpsc::unbounded();
        let client = self.client.clone();
        self.client_state = ProjectClientState::Shared {
            remote_id: project_id,
            updates_tx,
            _send_updates: cx.spawn(move |this, mut cx| async move {
                while let Some(update) = updates_rx.next().await {
                    match update {
                        LocalProjectUpdate::WorktreesChanged => {
                            let worktrees = this.update(&mut cx, |this, _cx| {
                                this.worktrees().collect::<Vec<_>>()
                            })?;
                            let update_project = this
                                .update(&mut cx, |this, cx| {
                                    this.client.request(proto::UpdateProject {
                                        project_id,
                                        worktrees: this.worktree_metadata_protos(cx),
                                    })
                                })?
                                .await;
                            if update_project.is_ok() {
                                for worktree in worktrees {
                                    worktree.update(&mut cx, |worktree, cx| {
                                        let worktree = worktree.as_local_mut().unwrap();
                                        worktree.share(project_id, cx).detach_and_log_err(cx)
                                    })?;
                                }
                            }
                        }
                        LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
                            let buffer = this.update(&mut cx, |this, _| {
                                let buffer = this.opened_buffers.get(&buffer_id).unwrap();
                                let shared_buffers =
                                    this.shared_buffers.entry(peer_id).or_default();
                                if shared_buffers.insert(buffer_id) {
                                    if let OpenBuffer::Strong(buffer) = buffer {
                                        Some(buffer.clone())
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            })?;

                            let Some(buffer) = buffer else { continue };
                            let operations =
                                buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
                            let operations = operations.await;
                            let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;

                            let initial_state = proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
                            };
                            if client.send(initial_state).log_err().is_some() {
                                let client = client.clone();
                                cx.background_executor()
                                    .spawn(async move {
                                        let mut chunks = split_operations(operations).peekable();
                                        while let Some(chunk) = chunks.next() {
                                            let is_last = chunks.peek().is_none();
                                            client.send(proto::CreateBufferForPeer {
                                                project_id,
                                                peer_id: Some(peer_id),
                                                variant: Some(
                                                    proto::create_buffer_for_peer::Variant::Chunk(
                                                        proto::BufferChunk {
                                                            buffer_id,
                                                            operations: chunk,
                                                            is_last,
                                                        },
                                                    ),
                                                ),
                                            })?;
                                        }
                                        anyhow::Ok(())
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                    }
                }
                Ok(())
            }),
        };

        self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();
        Ok(())
    }
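
    // Added note (not in the original source): `shared` upgrades weak buffer and worktree
    // handles to strong ones so their state is retained while guests are connected, then
    // forwards two kinds of updates through `updates_tx`: worktree metadata changes, and
    // `CreateBufferForPeer`, which sends a buffer's full state followed by any remaining
    // operations in chunks produced by `split_operations`.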

    pub fn reshared(
        &mut self,
        message: proto::ResharedProject,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.shared_buffers.clear();
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.metadata_changed(cx);
        Ok(())
    }

    pub fn rejoined(
        &mut self,
        message: proto::RejoinedProject,
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
                    .log_err();
            }
        });

        self.join_project_response_message_id = message_id;
        self.set_worktrees_from_proto(message.worktrees, cx)?;
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.language_server_statuses = message
            .language_servers
            .into_iter()
            .map(|server| {
                (
                    LanguageServerId(server.id as usize),
                    LanguageServerStatus {
                        name: server.name,
                        pending_work: Default::default(),
                        has_pending_diagnostic_updates: false,
                        progress_tokens: Default::default(),
                    },
                )
            })
            .collect();
        self.buffer_ordered_messages_tx
            .unbounded_send(BufferOrderedMessage::Resync)
            .unwrap();
        cx.notify();
        Ok(())
    }

    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
        self.unshare_internal(cx)?;
        self.metadata_changed(cx);
        cx.notify();
        Ok(())
    }

    fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
        if self.is_remote() {
            return Err(anyhow!("attempted to unshare a remote project"));
        }

        if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
            self.client_state = ProjectClientState::Local;
            self.collaborators.clear();
            self.shared_buffers.clear();
            self.client_subscriptions.clear();

            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade() {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            self.client.send(proto::UnshareProject {
                project_id: remote_id,
            })?;

            Ok(())
        } else {
            Err(anyhow!("attempted to unshare an unshared project"))
        }
    }

    pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
        self.disconnected_from_host_internal(cx);
        cx.emit(Event::DisconnectedFromHost);
        cx.notify();
    }

    pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
        let new_capability =
            if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
                Capability::ReadWrite
            } else {
                Capability::ReadOnly
            };
        if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
            if *capability == new_capability {
                return;
            }

            *capability = new_capability;
            for buffer in self.opened_buffers() {
                buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
            }
        }
    }

    fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;

            self.collaborators.clear();

            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade() {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade() {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            *self.opened_buffer.0.borrow_mut() = ();
        }
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn is_disconnected(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
            _ => false,
        }
    }

    pub fn capability(&self) -> Capability {
        match &self.client_state {
            ProjectClientState::Remote { capability, .. } => *capability,
            ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
        }
    }

    pub fn is_read_only(&self) -> bool {
        self.is_disconnected() || self.capability() == Capability::ReadOnly
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Model<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }
        let id = post_inc(&mut self.next_buffer_id);
        let buffer = cx.new_model(|cx| {
            Buffer::new(self.replica_id(), id, text)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
        let task = self.open_buffer(path.clone(), cx);
        cx.spawn(move |_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
                File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
            })?;

            let buffer: &AnyModel = &buffer;
            Ok((project_entry_id, buffer.clone()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                let project_path = project_path.clone();
                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers_by_path.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    })?);
                    anyhow::Ok(())
                })
                .detach();
                rx
            }
        };

        cx.background_executor().spawn(async move {
            wait_for_loading_buffer(loading_watch)
                .await
                .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
        })
    }
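
    // Added note (not in the original source): `open_buffer` deduplicates concurrent opens of
    // the same path. The first caller starts the load and stores a `postage::watch` receiver in
    // `loading_buffers_by_path`; subsequent callers clone that receiver, and all of them resolve
    // to the same buffer (or the same shared error) once loading finishes.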

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &Model<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Buffer>>> {
        let buffer_id = post_inc(&mut self.next_buffer_id);
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(buffer_id, path, cx)
        });
        cx.spawn(move |this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &Model<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(move |this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(response.buffer_id, cx)
            })?
            .await
        })
    }
1854
1855 /// LanguageServerName is owned, because it is inserted into a map
1856 pub fn open_local_buffer_via_lsp(
1857 &mut self,
1858 abs_path: lsp::Url,
1859 language_server_id: LanguageServerId,
1860 language_server_name: LanguageServerName,
1861 cx: &mut ModelContext<Self>,
1862 ) -> Task<Result<Model<Buffer>>> {
1863 cx.spawn(move |this, mut cx| async move {
1864 let abs_path = abs_path
1865 .to_file_path()
1866 .map_err(|_| anyhow!("can't convert URI to path"))?;
1867 let (worktree, relative_path) = if let Some(result) =
1868 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1869 {
1870 result
1871 } else {
1872 let worktree = this
1873 .update(&mut cx, |this, cx| {
1874 this.create_local_worktree(&abs_path, false, cx)
1875 })?
1876 .await?;
1877 this.update(&mut cx, |this, cx| {
1878 this.language_server_ids.insert(
1879 (worktree.read(cx).id(), language_server_name),
1880 language_server_id,
1881 );
1882 })
1883 .ok();
1884 (worktree, PathBuf::new())
1885 };
1886
1887 let project_path = ProjectPath {
1888 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1889 path: relative_path.into(),
1890 };
1891 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1892 .await
1893 })
1894 }
1895
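/// Opens a buffer by its remote id, requesting it from the host when this
/// project is remote and the buffer is not already open.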
1896 pub fn open_buffer_by_id(
1897 &mut self,
1898 id: u64,
1899 cx: &mut ModelContext<Self>,
1900 ) -> Task<Result<Model<Buffer>>> {
1901 if let Some(buffer) = self.buffer_for_id(id) {
1902 Task::ready(Ok(buffer))
1903 } else if self.is_local() {
1904 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1905 } else if let Some(project_id) = self.remote_id() {
1906 let request = self
1907 .client
1908 .request(proto::OpenBufferById { project_id, id });
1909 cx.spawn(move |this, mut cx| async move {
1910 let buffer_id = request.await?.buffer_id;
1911 this.update(&mut cx, |this, cx| {
1912 this.wait_for_remote_buffer(buffer_id, cx)
1913 })?
1914 .await
1915 })
1916 } else {
1917 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1918 }
1919 }
1920
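/// Saves all of the given buffers concurrently, failing if any save fails.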
1921 pub fn save_buffers(
1922 &self,
1923 buffers: HashSet<Model<Buffer>>,
1924 cx: &mut ModelContext<Self>,
1925 ) -> Task<Result<()>> {
1926 cx.spawn(move |this, mut cx| async move {
1927 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1928 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1929 .ok()
1930 });
1931 try_join_all(save_tasks).await?;
1932 Ok(())
1933 })
1934 }
1935
1936 pub fn save_buffer(
1937 &self,
1938 buffer: Model<Buffer>,
1939 cx: &mut ModelContext<Self>,
1940 ) -> Task<Result<()>> {
1941 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1942 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1943 };
1944 let worktree = file.worktree.clone();
1945 let path = file.path.clone();
1946 worktree.update(cx, |worktree, cx| match worktree {
1947 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1948 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1949 })
1950 }
1951
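/// Saves the buffer to a new path, re-registering it with the language servers
/// for its new location.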
1952 pub fn save_buffer_as(
1953 &mut self,
1954 buffer: Model<Buffer>,
1955 abs_path: PathBuf,
1956 cx: &mut ModelContext<Self>,
1957 ) -> Task<Result<()>> {
1958 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1959 let old_file = File::from_dyn(buffer.read(cx).file())
1960 .filter(|f| f.is_local())
1961 .cloned();
1962 cx.spawn(move |this, mut cx| async move {
1963 if let Some(old_file) = &old_file {
1964 this.update(&mut cx, |this, cx| {
1965 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1966 })?;
1967 }
1968 let (worktree, path) = worktree_task.await?;
1969 worktree
1970 .update(&mut cx, |worktree, cx| match worktree {
1971 Worktree::Local(worktree) => {
1972 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1973 }
1974 Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1975 })?
1976 .await?;
1977
1978 this.update(&mut cx, |this, cx| {
1979 this.detect_language_for_buffer(&buffer, cx);
1980 this.register_buffer_with_language_servers(&buffer, cx);
1981 })?;
1982 Ok(())
1983 })
1984 }
1985
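/// Returns the already-open buffer for the given project path, if any.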
1986 pub fn get_open_buffer(
1987 &mut self,
1988 path: &ProjectPath,
1989 cx: &mut ModelContext<Self>,
1990 ) -> Option<Model<Buffer>> {
1991 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1992 self.opened_buffers.values().find_map(|buffer| {
1993 let buffer = buffer.upgrade()?;
1994 let file = File::from_dyn(buffer.read(cx).file())?;
1995 if file.worktree == worktree && file.path() == &path.path {
1996 Some(buffer)
1997 } else {
1998 None
1999 }
2000 })
2001 }
2002
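// Records a newly opened buffer, wires up its event and release subscriptions,
// and registers it with the language servers and Copilot.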
2003 fn register_buffer(
2004 &mut self,
2005 buffer: &Model<Buffer>,
2006 cx: &mut ModelContext<Self>,
2007 ) -> Result<()> {
2008 self.request_buffer_diff_recalculation(buffer, cx);
2009 buffer.update(cx, |buffer, _| {
2010 buffer.set_language_registry(self.languages.clone())
2011 });
2012
2013 let remote_id = buffer.read(cx).remote_id();
2014 let is_remote = self.is_remote();
2015 let open_buffer = if is_remote || self.is_shared() {
2016 OpenBuffer::Strong(buffer.clone())
2017 } else {
2018 OpenBuffer::Weak(buffer.downgrade())
2019 };
2020
2021 match self.opened_buffers.entry(remote_id) {
2022 hash_map::Entry::Vacant(entry) => {
2023 entry.insert(open_buffer);
2024 }
2025 hash_map::Entry::Occupied(mut entry) => {
2026 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2027 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2028 } else if entry.get().upgrade().is_some() {
2029 if is_remote {
2030 return Ok(());
2031 } else {
2032 debug_panic!("buffer {} was already registered", remote_id);
2033 Err(anyhow!("buffer {} was already registered", remote_id))?;
2034 }
2035 }
2036 entry.insert(open_buffer);
2037 }
2038 }
2039 cx.subscribe(buffer, |this, buffer, event, cx| {
2040 this.on_buffer_event(buffer, event, cx);
2041 })
2042 .detach();
2043
2044 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2045 if file.is_local {
2046 self.local_buffer_ids_by_path.insert(
2047 ProjectPath {
2048 worktree_id: file.worktree_id(cx),
2049 path: file.path.clone(),
2050 },
2051 remote_id,
2052 );
2053
2054 if let Some(entry_id) = file.entry_id {
2055 self.local_buffer_ids_by_entry_id
2056 .insert(entry_id, remote_id);
2057 }
2058 }
2059 }
2060
2061 self.detect_language_for_buffer(buffer, cx);
2062 self.register_buffer_with_language_servers(buffer, cx);
2063 self.register_buffer_with_copilot(buffer, cx);
2064 cx.observe_release(buffer, |this, buffer, cx| {
2065 if let Some(file) = File::from_dyn(buffer.file()) {
2066 if file.is_local() {
2067 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2068 for server in this.language_servers_for_buffer(buffer, cx) {
2069 server
2070 .1
2071 .notify::<lsp::notification::DidCloseTextDocument>(
2072 lsp::DidCloseTextDocumentParams {
2073 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2074 },
2075 )
2076 .log_err();
2077 }
2078 }
2079 }
2080 })
2081 .detach();
2082
2083 *self.opened_buffer.0.borrow_mut() = ();
2084 Ok(())
2085 }
2086
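// Sends `textDocument/didOpen` for the buffer to every running language server
// for its language, seeding completion triggers and the per-server snapshot history.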
2087 fn register_buffer_with_language_servers(
2088 &mut self,
2089 buffer_handle: &Model<Buffer>,
2090 cx: &mut ModelContext<Self>,
2091 ) {
2092 let buffer = buffer_handle.read(cx);
2093 let buffer_id = buffer.remote_id();
2094
2095 if let Some(file) = File::from_dyn(buffer.file()) {
2096 if !file.is_local() {
2097 return;
2098 }
2099
2100 let abs_path = file.abs_path(cx);
2101 let uri = lsp::Url::from_file_path(&abs_path)
2102 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2103 let initial_snapshot = buffer.text_snapshot();
2104 let language = buffer.language().cloned();
2105 let worktree_id = file.worktree_id(cx);
2106
2107 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2108 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2109 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2110 .log_err();
2111 }
2112 }
2113
2114 if let Some(language) = language {
2115 for adapter in language.lsp_adapters() {
2116 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2117 let server = self
2118 .language_server_ids
2119 .get(&(worktree_id, adapter.name.clone()))
2120 .and_then(|id| self.language_servers.get(id))
2121 .and_then(|server_state| {
2122 if let LanguageServerState::Running { server, .. } = server_state {
2123 Some(server.clone())
2124 } else {
2125 None
2126 }
2127 });
2128 let server = match server {
2129 Some(server) => server,
2130 None => continue,
2131 };
2132
2133 server
2134 .notify::<lsp::notification::DidOpenTextDocument>(
2135 lsp::DidOpenTextDocumentParams {
2136 text_document: lsp::TextDocumentItem::new(
2137 uri.clone(),
2138 language_id.unwrap_or_default(),
2139 0,
2140 initial_snapshot.text(),
2141 ),
2142 },
2143 )
2144 .log_err();
2145
2146 buffer_handle.update(cx, |buffer, cx| {
2147 buffer.set_completion_triggers(
2148 server
2149 .capabilities()
2150 .completion_provider
2151 .as_ref()
2152 .and_then(|provider| provider.trigger_characters.clone())
2153 .unwrap_or_default(),
2154 cx,
2155 );
2156 });
2157
2158 let snapshot = LspBufferSnapshot {
2159 version: 0,
2160 snapshot: initial_snapshot.clone(),
2161 };
2162 self.buffer_snapshots
2163 .entry(buffer_id)
2164 .or_default()
2165 .insert(server.server_id(), vec![snapshot]);
2166 }
2167 }
2168 }
2169 }
2170
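// Clears the buffer's diagnostics and sends `textDocument/didClose` to the
// language servers that were tracking its old path.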
2171 fn unregister_buffer_from_language_servers(
2172 &mut self,
2173 buffer: &Model<Buffer>,
2174 old_file: &File,
2175 cx: &mut ModelContext<Self>,
2176 ) {
2177 let old_path = match old_file.as_local() {
2178 Some(local) => local.abs_path(cx),
2179 None => return,
2180 };
2181
2182 buffer.update(cx, |buffer, cx| {
2183 let worktree_id = old_file.worktree_id(cx);
2184 let ids = &self.language_server_ids;
2185
2186 let language = buffer.language().cloned();
2187 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2188 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2189 buffer.update_diagnostics(server_id, Default::default(), cx);
2190 }
2191
2192 self.buffer_snapshots.remove(&buffer.remote_id());
2193 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2194 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2195 language_server
2196 .notify::<lsp::notification::DidCloseTextDocument>(
2197 lsp::DidCloseTextDocumentParams {
2198 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2199 },
2200 )
2201 .log_err();
2202 }
2203 });
2204 }
2205
2206 fn register_buffer_with_copilot(
2207 &self,
2208 buffer_handle: &Model<Buffer>,
2209 cx: &mut ModelContext<Self>,
2210 ) {
2211 if let Some(copilot) = Copilot::global(cx) {
2212 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2213 }
2214 }
2215
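// Drains buffer-ordered messages in batches, forwarding buffer operations and
// language server updates to collaborators while preserving their relative order.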
2216 async fn send_buffer_ordered_messages(
2217 this: WeakModel<Self>,
2218 rx: UnboundedReceiver<BufferOrderedMessage>,
2219 mut cx: AsyncAppContext,
2220 ) -> Result<()> {
2221 const MAX_BATCH_SIZE: usize = 128;
2222
2223 let mut operations_by_buffer_id = HashMap::default();
2224 async fn flush_operations(
2225 this: &WeakModel<Project>,
2226 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2227 needs_resync_with_host: &mut bool,
2228 is_local: bool,
2229 cx: &mut AsyncAppContext,
2230 ) -> Result<()> {
2231 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2232 let request = this.update(cx, |this, _| {
2233 let project_id = this.remote_id()?;
2234 Some(this.client.request(proto::UpdateBuffer {
2235 buffer_id,
2236 project_id,
2237 operations,
2238 }))
2239 })?;
2240 if let Some(request) = request {
2241 if request.await.is_err() && !is_local {
2242 *needs_resync_with_host = true;
2243 break;
2244 }
2245 }
2246 }
2247 Ok(())
2248 }
2249
2250 let mut needs_resync_with_host = false;
2251 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2252
2253 while let Some(changes) = changes.next().await {
2254 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2255
2256 for change in changes {
2257 match change {
2258 BufferOrderedMessage::Operation {
2259 buffer_id,
2260 operation,
2261 } => {
2262 if needs_resync_with_host {
2263 continue;
2264 }
2265
2266 operations_by_buffer_id
2267 .entry(buffer_id)
2268 .or_insert(Vec::new())
2269 .push(operation);
2270 }
2271
2272 BufferOrderedMessage::Resync => {
2273 operations_by_buffer_id.clear();
2274 if this
2275 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2276 .await
2277 .is_ok()
2278 {
2279 needs_resync_with_host = false;
2280 }
2281 }
2282
2283 BufferOrderedMessage::LanguageServerUpdate {
2284 language_server_id,
2285 message,
2286 } => {
2287 flush_operations(
2288 &this,
2289 &mut operations_by_buffer_id,
2290 &mut needs_resync_with_host,
2291 is_local,
2292 &mut cx,
2293 )
2294 .await?;
2295
2296 this.update(&mut cx, |this, _| {
2297 if let Some(project_id) = this.remote_id() {
2298 this.client
2299 .send(proto::UpdateLanguageServer {
2300 project_id,
2301 language_server_id: language_server_id.0 as u64,
2302 variant: Some(message),
2303 })
2304 .log_err();
2305 }
2306 })?;
2307 }
2308 }
2309 }
2310
2311 flush_operations(
2312 &this,
2313 &mut operations_by_buffer_id,
2314 &mut needs_resync_with_host,
2315 is_local,
2316 &mut cx,
2317 )
2318 .await?;
2319 }
2320
2321 Ok(())
2322 }
2323
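// Reacts to buffer events: broadcasts operations to collaborators, keeps language
// servers in sync on edits and saves, and tracks file handle changes.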
2324 fn on_buffer_event(
2325 &mut self,
2326 buffer: Model<Buffer>,
2327 event: &BufferEvent,
2328 cx: &mut ModelContext<Self>,
2329 ) -> Option<()> {
2330 if matches!(
2331 event,
2332 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2333 ) {
2334 self.request_buffer_diff_recalculation(&buffer, cx);
2335 }
2336
2337 match event {
2338 BufferEvent::Operation(operation) => {
2339 self.buffer_ordered_messages_tx
2340 .unbounded_send(BufferOrderedMessage::Operation {
2341 buffer_id: buffer.read(cx).remote_id(),
2342 operation: language::proto::serialize_operation(operation),
2343 })
2344 .ok();
2345 }
2346
2347 BufferEvent::Edited { .. } => {
2348 let buffer = buffer.read(cx);
2349 let file = File::from_dyn(buffer.file())?;
2350 let abs_path = file.as_local()?.abs_path(cx);
2351 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2352 let next_snapshot = buffer.text_snapshot();
2353
2354 let language_servers: Vec<_> = self
2355 .language_servers_for_buffer(buffer, cx)
2356 .map(|i| i.1.clone())
2357 .collect();
2358
2359 for language_server in language_servers {
2360 let language_server = language_server.clone();
2361
2362 let buffer_snapshots = self
2363 .buffer_snapshots
2364 .get_mut(&buffer.remote_id())
2365 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2366 let previous_snapshot = buffer_snapshots.last()?;
2367
2368 let build_incremental_change = || {
2369 buffer
2370 .edits_since::<(PointUtf16, usize)>(
2371 previous_snapshot.snapshot.version(),
2372 )
2373 .map(|edit| {
2374 let edit_start = edit.new.start.0;
2375 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2376 let new_text = next_snapshot
2377 .text_for_range(edit.new.start.1..edit.new.end.1)
2378 .collect();
2379 lsp::TextDocumentContentChangeEvent {
2380 range: Some(lsp::Range::new(
2381 point_to_lsp(edit_start),
2382 point_to_lsp(edit_end),
2383 )),
2384 range_length: None,
2385 text: new_text,
2386 }
2387 })
2388 .collect()
2389 };
2390
2391 let document_sync_kind = language_server
2392 .capabilities()
2393 .text_document_sync
2394 .as_ref()
2395 .and_then(|sync| match sync {
2396 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2397 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2398 });
2399
2400 let content_changes: Vec<_> = match document_sync_kind {
2401 Some(lsp::TextDocumentSyncKind::FULL) => {
2402 vec![lsp::TextDocumentContentChangeEvent {
2403 range: None,
2404 range_length: None,
2405 text: next_snapshot.text(),
2406 }]
2407 }
2408 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2409 _ => {
2410 #[cfg(any(test, feature = "test-support"))]
2411 {
2412 build_incremental_change()
2413 }
2414
2415 #[cfg(not(any(test, feature = "test-support")))]
2416 {
2417 continue;
2418 }
2419 }
2420 };
2421
2422 let next_version = previous_snapshot.version + 1;
2423
2424 buffer_snapshots.push(LspBufferSnapshot {
2425 version: next_version,
2426 snapshot: next_snapshot.clone(),
2427 });
2428
2429 language_server
2430 .notify::<lsp::notification::DidChangeTextDocument>(
2431 lsp::DidChangeTextDocumentParams {
2432 text_document: lsp::VersionedTextDocumentIdentifier::new(
2433 uri.clone(),
2434 next_version,
2435 ),
2436 content_changes,
2437 },
2438 )
2439 .log_err();
2440 }
2441 }
2442
2443 BufferEvent::Saved => {
2444 let file = File::from_dyn(buffer.read(cx).file())?;
2445 let worktree_id = file.worktree_id(cx);
2446 let abs_path = file.as_local()?.abs_path(cx);
2447 let text_document = lsp::TextDocumentIdentifier {
2448 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2449 };
2450
2451 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2452 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2453
2454 server
2455 .notify::<lsp::notification::DidSaveTextDocument>(
2456 lsp::DidSaveTextDocumentParams {
2457 text_document: text_document.clone(),
2458 text,
2459 },
2460 )
2461 .log_err();
2462 }
2463
2464 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2465 for language_server_id in language_server_ids {
2466 if let Some(LanguageServerState::Running {
2467 adapter,
2468 simulate_disk_based_diagnostics_completion,
2469 ..
2470 }) = self.language_servers.get_mut(&language_server_id)
2471 {
2472 // After saving a buffer using a language server that doesn't provide
2473 // a disk-based progress token, kick off a timer that will reset every
2474 // time the buffer is saved. If the timer eventually fires, simulate
2475 // disk-based diagnostics being finished so that other pieces of UI
2476 // (e.g., project diagnostics view, diagnostic status bar) can update.
2477 // We don't emit an event right away because the language server might take
2478 // some time to publish diagnostics.
2479 if adapter.disk_based_diagnostics_progress_token.is_none() {
2480 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2481 Duration::from_secs(1);
2482
2483 let task = cx.spawn(move |this, mut cx| async move {
2484 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2485 if let Some(this) = this.upgrade() {
2486 this.update(&mut cx, |this, cx| {
2487 this.disk_based_diagnostics_finished(
2488 language_server_id,
2489 cx,
2490 );
2491 this.buffer_ordered_messages_tx
2492 .unbounded_send(
2493 BufferOrderedMessage::LanguageServerUpdate {
2494 language_server_id,
2495 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2496 },
2497 )
2498 .ok();
2499 }).ok();
2500 }
2501 });
2502 *simulate_disk_based_diagnostics_completion = Some(task);
2503 }
2504 }
2505 }
2506 }
2507 BufferEvent::FileHandleChanged => {
2508 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2509 return None;
2510 };
2511
2512 let remote_id = buffer.read(cx).remote_id();
2513 if let Some(entry_id) = file.entry_id {
2514 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2515 Some(_) => {
2516 return None;
2517 }
2518 None => {
2519 self.local_buffer_ids_by_entry_id
2520 .insert(entry_id, remote_id);
2521 }
2522 }
2523 };
2524 self.local_buffer_ids_by_path.insert(
2525 ProjectPath {
2526 worktree_id: file.worktree_id(cx),
2527 path: file.path.clone(),
2528 },
2529 remote_id,
2530 );
2531 }
2532 _ => {}
2533 }
2534
2535 None
2536 }
2537
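// Schedules a git diff recalculation for the buffer, debounced according to the
// `git.gutter_debounce` project setting.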
2538 fn request_buffer_diff_recalculation(
2539 &mut self,
2540 buffer: &Model<Buffer>,
2541 cx: &mut ModelContext<Self>,
2542 ) {
2543 self.buffers_needing_diff.insert(buffer.downgrade());
2544 let first_insertion = self.buffers_needing_diff.len() == 1;
2545
2546 let settings = ProjectSettings::get_global(cx);
2547 let delay = if let Some(delay) = settings.git.gutter_debounce {
2548 delay
2549 } else {
2550 if first_insertion {
2551 let this = cx.weak_model();
2552 cx.defer(move |cx| {
2553 if let Some(this) = this.upgrade() {
2554 this.update(cx, |this, cx| {
2555 this.recalculate_buffer_diffs(cx).detach();
2556 });
2557 }
2558 });
2559 }
2560 return;
2561 };
2562
2563 const MIN_DELAY: u64 = 50;
2564 let delay = delay.max(MIN_DELAY);
2565 let duration = Duration::from_millis(delay);
2566
2567 self.git_diff_debouncer
2568 .fire_new(duration, cx, move |this, cx| {
2569 this.recalculate_buffer_diffs(cx)
2570 });
2571 }
2572
2573 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2574 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2575 cx.spawn(move |this, mut cx| async move {
2576 let tasks: Vec<_> = buffers
2577 .iter()
2578 .filter_map(|buffer| {
2579 let buffer = buffer.upgrade()?;
2580 buffer
2581 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2582 .ok()
2583 .flatten()
2584 })
2585 .collect();
2586
2587 futures::future::join_all(tasks).await;
2588
2589 this.update(&mut cx, |this, cx| {
2590 if !this.buffers_needing_diff.is_empty() {
2591 this.recalculate_buffer_diffs(cx).detach();
2592 } else {
2593 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2594 for buffer in buffers {
2595 if let Some(buffer) = buffer.upgrade() {
2596 buffer.update(cx, |_, cx| cx.notify());
2597 }
2598 }
2599 }
2600 })
2601 .ok();
2602 })
2603 }
2604
2605 fn language_servers_for_worktree(
2606 &self,
2607 worktree_id: WorktreeId,
2608 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2609 self.language_server_ids
2610 .iter()
2611 .filter_map(move |((language_server_worktree_id, _), id)| {
2612 if *language_server_worktree_id == worktree_id {
2613 if let Some(LanguageServerState::Running {
2614 adapter,
2615 language,
2616 server,
2617 ..
2618 }) = self.language_servers.get(id)
2619 {
2620 return Some((adapter, language, server));
2621 }
2622 }
2623 None
2624 })
2625 }
2626
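// Keeps open buffers' languages up to date as the language registry changes,
// re-detecting languages for plain-text buffers and reparsing unknown injections.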
2627 fn maintain_buffer_languages(
2628 languages: Arc<LanguageRegistry>,
2629 cx: &mut ModelContext<Project>,
2630 ) -> Task<()> {
2631 let mut subscription = languages.subscribe();
2632 let mut prev_reload_count = languages.reload_count();
2633 cx.spawn(move |project, mut cx| async move {
2634 while let Some(()) = subscription.next().await {
2635 if let Some(project) = project.upgrade() {
2636 // If the language registry has been reloaded, then remove and
2637 // re-assign the languages on all open buffers.
2638 let reload_count = languages.reload_count();
2639 if reload_count > prev_reload_count {
2640 prev_reload_count = reload_count;
2641 project
2642 .update(&mut cx, |this, cx| {
2643 let buffers = this
2644 .opened_buffers
2645 .values()
2646 .filter_map(|b| b.upgrade())
2647 .collect::<Vec<_>>();
2648 for buffer in buffers {
2649 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2650 {
2651 this.unregister_buffer_from_language_servers(
2652 &buffer, &f, cx,
2653 );
2654 buffer
2655 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2656 }
2657 }
2658 })
2659 .ok();
2660 }
2661
2662 project
2663 .update(&mut cx, |project, cx| {
2664 let mut plain_text_buffers = Vec::new();
2665 let mut buffers_with_unknown_injections = Vec::new();
2666 for buffer in project.opened_buffers.values() {
2667 if let Some(handle) = buffer.upgrade() {
2668 let buffer = &handle.read(cx);
2669 if buffer.language().is_none()
2670 || buffer.language() == Some(&*language::PLAIN_TEXT)
2671 {
2672 plain_text_buffers.push(handle);
2673 } else if buffer.contains_unknown_injections() {
2674 buffers_with_unknown_injections.push(handle);
2675 }
2676 }
2677 }
2678
2679 for buffer in plain_text_buffers {
2680 project.detect_language_for_buffer(&buffer, cx);
2681 project.register_buffer_with_language_servers(&buffer, cx);
2682 }
2683
2684 for buffer in buffers_with_unknown_injections {
2685 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2686 }
2687 })
2688 .ok();
2689 }
2690 }
2691 })
2692 }
2693
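// Pushes updated workspace configuration to every running language server
// whenever the global settings change.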
2694 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2695 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2696 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2697
2698 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2699 *settings_changed_tx.borrow_mut() = ();
2700 });
2701
2702 cx.spawn(move |this, mut cx| async move {
2703 while let Some(_) = settings_changed_rx.next().await {
2704 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2705 this.language_servers
2706 .values()
2707 .filter_map(|state| match state {
2708 LanguageServerState::Starting(_) => None,
2709 LanguageServerState::Running {
2710 adapter, server, ..
2711 } => Some((adapter.clone(), server.clone())),
2712 })
2713 .collect()
2714 })?;
2715
2716 for (adapter, server) in servers {
2717 let workspace_config = cx
2718 .update(|cx| adapter.workspace_configuration(server.root_path(), cx))?
2719 .await;
2720 server
2721 .notify::<lsp::notification::DidChangeConfiguration>(
2722 lsp::DidChangeConfigurationParams {
2723 settings: workspace_config.clone(),
2724 },
2725 )
2726 .ok();
2727 }
2728 }
2729
2730 drop(settings_observation);
2731 anyhow::Ok(())
2732 })
2733 }
2734
2735 fn detect_language_for_buffer(
2736 &mut self,
2737 buffer_handle: &Model<Buffer>,
2738 cx: &mut ModelContext<Self>,
2739 ) -> Option<()> {
2740 // If the buffer has a language, set it and start the language server if we haven't already.
2741 let buffer = buffer_handle.read(cx);
2742 let full_path = buffer.file()?.full_path(cx);
2743 let content = buffer.as_rope();
2744 let new_language = self
2745 .languages
2746 .language_for_file(&full_path, Some(content))
2747 .now_or_never()?
2748 .ok()?;
2749 self.set_language_for_buffer(buffer_handle, new_language, cx);
2750 None
2751 }
2752
2753 pub fn set_language_for_buffer(
2754 &mut self,
2755 buffer: &Model<Buffer>,
2756 new_language: Arc<Language>,
2757 cx: &mut ModelContext<Self>,
2758 ) {
2759 buffer.update(cx, |buffer, cx| {
2760 if buffer.language().map_or(true, |old_language| {
2761 !Arc::ptr_eq(old_language, &new_language)
2762 }) {
2763 buffer.set_language(Some(new_language.clone()), cx);
2764 }
2765 });
2766
2767 let buffer_file = buffer.read(cx).file().cloned();
2768 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2769 let buffer_file = File::from_dyn(buffer_file.as_ref());
2770 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2771 if let Some(prettier_plugins) =
2772 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2773 {
2774 self.install_default_prettier(worktree, prettier_plugins, cx);
2775 };
2776 if let Some(file) = buffer_file {
2777 let worktree = file.worktree.clone();
2778 if let Some(tree) = worktree.read(cx).as_local() {
2779 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2780 }
2781 }
2782 }
2783
2784 fn start_language_servers(
2785 &mut self,
2786 worktree: &Model<Worktree>,
2787 worktree_path: Arc<Path>,
2788 language: Arc<Language>,
2789 cx: &mut ModelContext<Self>,
2790 ) {
2791 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2792 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2793 if !settings.enable_language_server {
2794 return;
2795 }
2796
2797 let worktree_id = worktree.read(cx).id();
2798 for adapter in language.lsp_adapters() {
2799 self.start_language_server(
2800 worktree_id,
2801 worktree_path.clone(),
2802 adapter.clone(),
2803 language.clone(),
2804 cx,
2805 );
2806 }
2807 }
2808
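// Starts a single language server for the given worktree and adapter, unless one
// is already running or the adapter has exceeded its reinstall attempt limit.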
2809 fn start_language_server(
2810 &mut self,
2811 worktree_id: WorktreeId,
2812 worktree_path: Arc<Path>,
2813 adapter: Arc<CachedLspAdapter>,
2814 language: Arc<Language>,
2815 cx: &mut ModelContext<Self>,
2816 ) {
2817 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2818 return;
2819 }
2820
2821 let key = (worktree_id, adapter.name.clone());
2822 if self.language_server_ids.contains_key(&key) {
2823 return;
2824 }
2825
2826 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2827 let pending_server = match self.languages.create_pending_language_server(
2828 stderr_capture.clone(),
2829 language.clone(),
2830 adapter.clone(),
2831 Arc::clone(&worktree_path),
2832 ProjectLspAdapterDelegate::new(self, cx),
2833 cx,
2834 ) {
2835 Some(pending_server) => pending_server,
2836 None => return,
2837 };
2838
2839 let project_settings = ProjectSettings::get_global(cx);
2840 let lsp = project_settings.lsp.get(&adapter.name.0);
2841 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2842
2843 let server_id = pending_server.server_id;
2844 let container_dir = pending_server.container_dir.clone();
2845 let state = LanguageServerState::Starting({
2846 let adapter = adapter.clone();
2847 let server_name = adapter.name.0.clone();
2848 let language = language.clone();
2849 let key = key.clone();
2850
2851 cx.spawn(move |this, mut cx| async move {
2852 let result = Self::setup_and_insert_language_server(
2853 this.clone(),
2854 &worktree_path,
2855 override_options,
2856 pending_server,
2857 adapter.clone(),
2858 language.clone(),
2859 server_id,
2860 key,
2861 &mut cx,
2862 )
2863 .await;
2864
2865 match result {
2866 Ok(server) => {
2867 stderr_capture.lock().take();
2868 server
2869 }
2870
2871 Err(err) => {
2872 log::error!("failed to start language server {server_name:?}: {err}");
2873 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2874
2875 let this = this.upgrade()?;
2876 let container_dir = container_dir?;
2877
2878 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2879 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2880 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2881 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2882 return None;
2883 }
2884
2885 let installation_test_binary = adapter
2886 .installation_test_binary(container_dir.to_path_buf())
2887 .await;
2888
2889 this.update(&mut cx, |_, cx| {
2890 Self::check_errored_server(
2891 language,
2892 adapter,
2893 server_id,
2894 installation_test_binary,
2895 cx,
2896 )
2897 })
2898 .ok();
2899
2900 None
2901 }
2902 }
2903 })
2904 });
2905
2906 self.language_servers.insert(server_id, state);
2907 self.language_server_ids.insert(key, server_id);
2908 }
2909
2910 fn reinstall_language_server(
2911 &mut self,
2912 language: Arc<Language>,
2913 adapter: Arc<CachedLspAdapter>,
2914 server_id: LanguageServerId,
2915 cx: &mut ModelContext<Self>,
2916 ) -> Option<Task<()>> {
2917 log::info!("beginning to reinstall server");
2918
2919 let existing_server = match self.language_servers.remove(&server_id) {
2920 Some(LanguageServerState::Running { server, .. }) => Some(server),
2921 _ => None,
2922 };
2923
2924 for worktree in &self.worktrees {
2925 if let Some(worktree) = worktree.upgrade() {
2926 let key = (worktree.read(cx).id(), adapter.name.clone());
2927 self.language_server_ids.remove(&key);
2928 }
2929 }
2930
2931 Some(cx.spawn(move |this, mut cx| async move {
2932 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2933 log::info!("shutting down existing server");
2934 task.await;
2935 }
2936
2937 // TODO: This is race-safe with regard to preventing new instances from
2938 // starting while deleting, but existing instances in other projects will be
2939 // left in a broken state.
2940 let Some(task) = this
2941 .update(&mut cx, |this, cx| {
2942 this.languages.delete_server_container(adapter.clone(), cx)
2943 })
2944 .log_err()
2945 else {
2946 return;
2947 };
2948 task.await;
2949
2950 this.update(&mut cx, |this, cx| {
2951 let worktrees = this.worktrees.clone();
2952 for worktree in worktrees {
2953 let worktree = match worktree.upgrade() {
2954 Some(worktree) => worktree.read(cx),
2955 None => continue,
2956 };
2957 let worktree_id = worktree.id();
2958 let root_path = worktree.abs_path();
2959
2960 this.start_language_server(
2961 worktree_id,
2962 root_path,
2963 adapter.clone(),
2964 language.clone(),
2965 cx,
2966 );
2967 }
2968 })
2969 .ok();
2970 }))
2971 }
2972
2973 async fn setup_and_insert_language_server(
2974 this: WeakModel<Self>,
2975 worktree_path: &Path,
2976 override_initialization_options: Option<serde_json::Value>,
2977 pending_server: PendingLanguageServer,
2978 adapter: Arc<CachedLspAdapter>,
2979 language: Arc<Language>,
2980 server_id: LanguageServerId,
2981 key: (WorktreeId, LanguageServerName),
2982 cx: &mut AsyncAppContext,
2983 ) -> Result<Option<Arc<LanguageServer>>> {
2984 let language_server = Self::setup_pending_language_server(
2985 this.clone(),
2986 override_initialization_options,
2987 pending_server,
2988 worktree_path,
2989 adapter.clone(),
2990 server_id,
2991 cx,
2992 )
2993 .await?;
2994
2995 let this = match this.upgrade() {
2996 Some(this) => this,
2997 None => return Err(anyhow!("failed to upgrade project handle")),
2998 };
2999
3000 this.update(cx, |this, cx| {
3001 this.insert_newly_running_language_server(
3002 language,
3003 adapter,
3004 language_server.clone(),
3005 server_id,
3006 key,
3007 cx,
3008 )
3009 })??;
3010
3011 Ok(Some(language_server))
3012 }
3013
3014 async fn setup_pending_language_server(
3015 this: WeakModel<Self>,
3016 override_options: Option<serde_json::Value>,
3017 pending_server: PendingLanguageServer,
3018 worktree_path: &Path,
3019 adapter: Arc<CachedLspAdapter>,
3020 server_id: LanguageServerId,
3021 cx: &mut AsyncAppContext,
3022 ) -> Result<Arc<LanguageServer>> {
3023 let workspace_config = cx
3024 .update(|cx| adapter.workspace_configuration(worktree_path, cx))?
3025 .await;
3026 let language_server = pending_server.task.await?;
3027
3028 language_server
3029 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3030 let adapter = adapter.clone();
3031 let this = this.clone();
3032 move |mut params, mut cx| {
3033 let adapter = adapter.clone();
3034 if let Some(this) = this.upgrade() {
3035 adapter.process_diagnostics(&mut params);
3036 this.update(&mut cx, |this, cx| {
3037 this.update_diagnostics(
3038 server_id,
3039 params,
3040 &adapter.disk_based_diagnostic_sources,
3041 cx,
3042 )
3043 .log_err();
3044 })
3045 .ok();
3046 }
3047 }
3048 })
3049 .detach();
3050
3051 language_server
3052 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3053 let adapter = adapter.clone();
3054 let worktree_path = worktree_path.to_path_buf();
3055 move |params, cx| {
3056 let adapter = adapter.clone();
3057 let worktree_path = worktree_path.clone();
3058 async move {
3059 let workspace_config = cx
3060 .update(|cx| adapter.workspace_configuration(&worktree_path, cx))?
3061 .await;
3062 Ok(params
3063 .items
3064 .into_iter()
3065 .map(|item| {
3066 if let Some(section) = &item.section {
3067 workspace_config
3068 .get(section)
3069 .cloned()
3070 .unwrap_or(serde_json::Value::Null)
3071 } else {
3072 workspace_config.clone()
3073 }
3074 })
3075 .collect())
3076 }
3077 }
3078 })
3079 .detach();
3080
3081 // Even though we don't have handling for these requests, respond to them to
3082 // avoid stalling any language server like `gopls` which waits for a response
3083 // to these requests when initializing.
3084 language_server
3085 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3086 let this = this.clone();
3087 move |params, mut cx| {
3088 let this = this.clone();
3089 async move {
3090 this.update(&mut cx, |this, _| {
3091 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3092 {
3093 if let lsp::NumberOrString::String(token) = params.token {
3094 status.progress_tokens.insert(token);
3095 }
3096 }
3097 })?;
3098
3099 Ok(())
3100 }
3101 }
3102 })
3103 .detach();
3104
3105 language_server
3106 .on_request::<lsp::request::RegisterCapability, _, _>({
3107 let this = this.clone();
3108 move |params, mut cx| {
3109 let this = this.clone();
3110 async move {
3111 for reg in params.registrations {
3112 if reg.method == "workspace/didChangeWatchedFiles" {
3113 if let Some(options) = reg.register_options {
3114 let options = serde_json::from_value(options)?;
3115 this.update(&mut cx, |this, cx| {
3116 this.on_lsp_did_change_watched_files(
3117 server_id, options, cx,
3118 );
3119 })?;
3120 }
3121 }
3122 }
3123 Ok(())
3124 }
3125 }
3126 })
3127 .detach();
3128
3129 language_server
3130 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3131 let adapter = adapter.clone();
3132 let this = this.clone();
3133 move |params, cx| {
3134 Self::on_lsp_workspace_edit(
3135 this.clone(),
3136 params,
3137 server_id,
3138 adapter.clone(),
3139 cx,
3140 )
3141 }
3142 })
3143 .detach();
3144
3145 language_server
3146 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3147 let this = this.clone();
3148 move |(), mut cx| {
3149 let this = this.clone();
3150 async move {
3151 this.update(&mut cx, |project, cx| {
3152 cx.emit(Event::RefreshInlayHints);
3153 project.remote_id().map(|project_id| {
3154 project.client.send(proto::RefreshInlayHints { project_id })
3155 })
3156 })?
3157 .transpose()?;
3158 Ok(())
3159 }
3160 }
3161 })
3162 .detach();
3163
3164 let disk_based_diagnostics_progress_token =
3165 adapter.disk_based_diagnostics_progress_token.clone();
3166
3167 language_server
3168 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3169 if let Some(this) = this.upgrade() {
3170 this.update(&mut cx, |this, cx| {
3171 this.on_lsp_progress(
3172 params,
3173 server_id,
3174 disk_based_diagnostics_progress_token.clone(),
3175 cx,
3176 );
3177 })
3178 .ok();
3179 }
3180 })
3181 .detach();
3182 let mut initialization_options = adapter.adapter.initialization_options().await;
3183 match (&mut initialization_options, override_options) {
3184 (Some(initialization_options), Some(override_options)) => {
3185 merge_json_value_into(override_options, initialization_options);
3186 }
3187 (None, override_options) => initialization_options = override_options,
3188 _ => {}
3189 }
3190 let language_server = language_server.initialize(initialization_options).await?;
3191
3192 language_server
3193 .notify::<lsp::notification::DidChangeConfiguration>(
3194 lsp::DidChangeConfigurationParams {
3195 settings: workspace_config,
3196 },
3197 )
3198 .ok();
3199
3200 Ok(language_server)
3201 }
3202
3203 fn insert_newly_running_language_server(
3204 &mut self,
3205 language: Arc<Language>,
3206 adapter: Arc<CachedLspAdapter>,
3207 language_server: Arc<LanguageServer>,
3208 server_id: LanguageServerId,
3209 key: (WorktreeId, LanguageServerName),
3210 cx: &mut ModelContext<Self>,
3211 ) -> Result<()> {
3212 // If the language server for this key doesn't match the server id, don't store the
3213 // server; dropping it here kills the process.
3214 if self
3215 .language_server_ids
3216 .get(&key)
3217 .map(|id| id != &server_id)
3218 .unwrap_or(false)
3219 {
3220 return Ok(());
3221 }
3222
3223 // Update the language_servers collection with the Running variant of LanguageServerState,
3224 // indicating that the server is up and running.
3225 self.language_servers.insert(
3226 server_id,
3227 LanguageServerState::Running {
3228 adapter: adapter.clone(),
3229 language: language.clone(),
3230 watched_paths: Default::default(),
3231 server: language_server.clone(),
3232 simulate_disk_based_diagnostics_completion: None,
3233 },
3234 );
3235
3236 self.language_server_statuses.insert(
3237 server_id,
3238 LanguageServerStatus {
3239 name: language_server.name().to_string(),
3240 pending_work: Default::default(),
3241 has_pending_diagnostic_updates: false,
3242 progress_tokens: Default::default(),
3243 },
3244 );
3245
3246 cx.emit(Event::LanguageServerAdded(server_id));
3247
3248 if let Some(project_id) = self.remote_id() {
3249 self.client.send(proto::StartLanguageServer {
3250 project_id,
3251 server: Some(proto::LanguageServer {
3252 id: server_id.0 as u64,
3253 name: language_server.name().to_string(),
3254 }),
3255 })?;
3256 }
3257
3258 // Tell the language server about every open buffer in the worktree that matches the language.
3259 for buffer in self.opened_buffers.values() {
3260 if let Some(buffer_handle) = buffer.upgrade() {
3261 let buffer = buffer_handle.read(cx);
3262 let file = match File::from_dyn(buffer.file()) {
3263 Some(file) => file,
3264 None => continue,
3265 };
3266 let language = match buffer.language() {
3267 Some(language) => language,
3268 None => continue,
3269 };
3270
3271 if file.worktree.read(cx).id() != key.0
3272 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3273 {
3274 continue;
3275 }
3276
3277 let file = match file.as_local() {
3278 Some(file) => file,
3279 None => continue,
3280 };
3281
3282 let versions = self
3283 .buffer_snapshots
3284 .entry(buffer.remote_id())
3285 .or_default()
3286 .entry(server_id)
3287 .or_insert_with(|| {
3288 vec![LspBufferSnapshot {
3289 version: 0,
3290 snapshot: buffer.text_snapshot(),
3291 }]
3292 });
3293
3294 let snapshot = versions.last().unwrap();
3295 let version = snapshot.version;
3296 let initial_snapshot = &snapshot.snapshot;
3297 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3298 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3299 lsp::DidOpenTextDocumentParams {
3300 text_document: lsp::TextDocumentItem::new(
3301 uri,
3302 adapter
3303 .language_ids
3304 .get(language.name().as_ref())
3305 .cloned()
3306 .unwrap_or_default(),
3307 version,
3308 initial_snapshot.text(),
3309 ),
3310 },
3311 )?;
3312
3313 buffer_handle.update(cx, |buffer, cx| {
3314 buffer.set_completion_triggers(
3315 language_server
3316 .capabilities()
3317 .completion_provider
3318 .as_ref()
3319 .and_then(|provider| provider.trigger_characters.clone())
3320 .unwrap_or_default(),
3321 cx,
3322 )
3323 });
3324 }
3325 }
3326
3327 cx.notify();
3328 Ok(())
3329 }
3330
3331 // Returns the root path of the stopped server (if any) along with a list of all of the
3332 // worktrees that no longer have a language server for the stopped adapter.
3333 fn stop_language_server(
3334 &mut self,
3335 worktree_id: WorktreeId,
3336 adapter_name: LanguageServerName,
3337 cx: &mut ModelContext<Self>,
3338 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
3339 let key = (worktree_id, adapter_name);
3340 if let Some(server_id) = self.language_server_ids.remove(&key) {
3341 log::info!("stopping language server {}", key.1 .0);
3342
3343 // Remove other entries for this language server as well
3344 let mut orphaned_worktrees = vec![worktree_id];
3345 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3346 for other_key in other_keys {
3347 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3348 self.language_server_ids.remove(&other_key);
3349 orphaned_worktrees.push(other_key.0);
3350 }
3351 }
3352
3353 for buffer in self.opened_buffers.values() {
3354 if let Some(buffer) = buffer.upgrade() {
3355 buffer.update(cx, |buffer, cx| {
3356 buffer.update_diagnostics(server_id, Default::default(), cx);
3357 });
3358 }
3359 }
3360 for worktree in &self.worktrees {
3361 if let Some(worktree) = worktree.upgrade() {
3362 worktree.update(cx, |worktree, cx| {
3363 if let Some(worktree) = worktree.as_local_mut() {
3364 worktree.clear_diagnostics_for_language_server(server_id, cx);
3365 }
3366 });
3367 }
3368 }
3369
3370 self.language_server_statuses.remove(&server_id);
3371 cx.notify();
3372
3373 let server_state = self.language_servers.remove(&server_id);
3374 cx.emit(Event::LanguageServerRemoved(server_id));
3375 cx.spawn(move |this, mut cx| async move {
3376 let mut root_path = None;
3377
3378 let server = match server_state {
3379 Some(LanguageServerState::Starting(task)) => task.await,
3380 Some(LanguageServerState::Running { server, .. }) => Some(server),
3381 None => None,
3382 };
3383
3384 if let Some(server) = server {
3385 root_path = Some(server.root_path().clone());
3386 if let Some(shutdown) = server.shutdown() {
3387 shutdown.await;
3388 }
3389 }
3390
3391 if let Some(this) = this.upgrade() {
3392 this.update(&mut cx, |this, cx| {
3393 this.language_server_statuses.remove(&server_id);
3394 cx.notify();
3395 })
3396 .ok();
3397 }
3398
3399 (root_path, orphaned_worktrees)
3400 })
3401 } else {
3402 Task::ready((None, Vec::new()))
3403 }
3404 }
3405
3406 pub fn restart_language_servers_for_buffers(
3407 &mut self,
3408 buffers: impl IntoIterator<Item = Model<Buffer>>,
3409 cx: &mut ModelContext<Self>,
3410 ) -> Option<()> {
3411 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3412 .into_iter()
3413 .filter_map(|buffer| {
3414 let buffer = buffer.read(cx);
3415 let file = File::from_dyn(buffer.file())?;
3416 let full_path = file.full_path(cx);
3417 let language = self
3418 .languages
3419 .language_for_file(&full_path, Some(buffer.as_rope()))
3420 .now_or_never()?
3421 .ok()?;
3422 Some((file.worktree.clone(), language))
3423 })
3424 .collect();
3425 for (worktree, language) in language_server_lookup_info {
3426 self.restart_language_servers(worktree, language, cx);
3427 }
3428
3429 None
3430 }
3431
3432 // TODO This will break in the case where the adapter's root paths and worktrees are not equal
3433 fn restart_language_servers(
3434 &mut self,
3435 worktree: Model<Worktree>,
3436 language: Arc<Language>,
3437 cx: &mut ModelContext<Self>,
3438 ) {
3439 let worktree_id = worktree.read(cx).id();
3440 let fallback_path = worktree.read(cx).abs_path();
3441
3442 let mut stops = Vec::new();
3443 for adapter in language.lsp_adapters() {
3444 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
3445 }
3446
3447 if stops.is_empty() {
3448 return;
3449 }
3450 let mut stops = stops.into_iter();
3451
3452 cx.spawn(move |this, mut cx| async move {
3453 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
3454 for stop in stops {
3455 let (_, worktrees) = stop.await;
3456 orphaned_worktrees.extend_from_slice(&worktrees);
3457 }
3458
3459 let this = match this.upgrade() {
3460 Some(this) => this,
3461 None => return,
3462 };
3463
3464 this.update(&mut cx, |this, cx| {
3465 // Attempt to restart using the original server path. Fall back to the passed-in
3466 // path if we could not retrieve the root path.
3467 let root_path = original_root_path
3468 .map(|path_buf| Arc::from(path_buf.as_path()))
3469 .unwrap_or(fallback_path);
3470
3471 this.start_language_servers(&worktree, root_path, language.clone(), cx);
3472
3473 // Lookup new server ids and set them for each of the orphaned worktrees
3474 for adapter in language.lsp_adapters() {
3475 if let Some(new_server_id) = this
3476 .language_server_ids
3477 .get(&(worktree_id, adapter.name.clone()))
3478 .cloned()
3479 {
3480 for &orphaned_worktree in &orphaned_worktrees {
3481 this.language_server_ids
3482 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
3483 }
3484 }
3485 }
3486 })
3487 .ok();
3488 })
3489 .detach();
3490 }
3491
3492 fn check_errored_server(
3493 language: Arc<Language>,
3494 adapter: Arc<CachedLspAdapter>,
3495 server_id: LanguageServerId,
3496 installation_test_binary: Option<LanguageServerBinary>,
3497 cx: &mut ModelContext<Self>,
3498 ) {
3499 if !adapter.can_be_reinstalled() {
3500 log::info!(
3501 "Validation check requested for {:?} but it cannot be reinstalled",
3502 adapter.name.0
3503 );
3504 return;
3505 }
3506
3507 cx.spawn(move |this, mut cx| async move {
3508 log::info!("About to spawn test binary");
3509
3510 // A lack of test binary counts as a failure
3511 let process = installation_test_binary.and_then(|binary| {
3512 smol::process::Command::new(&binary.path)
3513 .current_dir(&binary.path)
3514 .args(binary.arguments)
3515 .stdin(Stdio::piped())
3516 .stdout(Stdio::piped())
3517 .stderr(Stdio::inherit())
3518 .kill_on_drop(true)
3519 .spawn()
3520 .ok()
3521 });
3522
3523 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3524 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3525
3526 let mut errored = false;
3527 if let Some(mut process) = process {
3528 futures::select! {
3529 status = process.status().fuse() => match status {
3530 Ok(status) => errored = !status.success(),
3531 Err(_) => errored = true,
3532 },
3533
3534 _ = timeout => {
3535 log::info!("test binary timed out; this counts as a success");
3536 _ = process.kill();
3537 }
3538 }
3539 } else {
3540 log::warn!("test binary failed to launch");
3541 errored = true;
3542 }
3543
3544 if errored {
3545 log::warn!("test binary check failed");
3546 let task = this
3547 .update(&mut cx, move |this, cx| {
3548 this.reinstall_language_server(language, adapter, server_id, cx)
3549 })
3550 .ok()
3551 .flatten();
3552
3553 if let Some(task) = task {
3554 task.await;
3555 }
3556 }
3557 })
3558 .detach();
3559 }
3560
3561 fn on_lsp_progress(
3562 &mut self,
3563 progress: lsp::ProgressParams,
3564 language_server_id: LanguageServerId,
3565 disk_based_diagnostics_progress_token: Option<String>,
3566 cx: &mut ModelContext<Self>,
3567 ) {
3568 let token = match progress.token {
3569 lsp::NumberOrString::String(token) => token,
3570 lsp::NumberOrString::Number(token) => {
3571 log::info!("skipping numeric progress token {}", token);
3572 return;
3573 }
3574 };
3575 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3576 let language_server_status =
3577 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3578 status
3579 } else {
3580 return;
3581 };
3582
3583 if !language_server_status.progress_tokens.contains(&token) {
3584 return;
3585 }
3586
3587 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3588 .as_ref()
3589 .map_or(false, |disk_based_token| {
3590 token.starts_with(disk_based_token)
3591 });
3592
3593 match progress {
3594 lsp::WorkDoneProgress::Begin(report) => {
3595 if is_disk_based_diagnostics_progress {
3596 language_server_status.has_pending_diagnostic_updates = true;
3597 self.disk_based_diagnostics_started(language_server_id, cx);
3598 self.buffer_ordered_messages_tx
3599 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3600 language_server_id,
3601 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3602 })
3603 .ok();
3604 } else {
3605 self.on_lsp_work_start(
3606 language_server_id,
3607 token.clone(),
3608 LanguageServerProgress {
3609 message: report.message.clone(),
3610 percentage: report.percentage.map(|p| p as usize),
3611 last_update_at: Instant::now(),
3612 },
3613 cx,
3614 );
3615 self.buffer_ordered_messages_tx
3616 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3617 language_server_id,
3618 message: proto::update_language_server::Variant::WorkStart(
3619 proto::LspWorkStart {
3620 token,
3621 message: report.message,
3622 percentage: report.percentage.map(|p| p as u32),
3623 },
3624 ),
3625 })
3626 .ok();
3627 }
3628 }
3629 lsp::WorkDoneProgress::Report(report) => {
3630 if !is_disk_based_diagnostics_progress {
3631 self.on_lsp_work_progress(
3632 language_server_id,
3633 token.clone(),
3634 LanguageServerProgress {
3635 message: report.message.clone(),
3636 percentage: report.percentage.map(|p| p as usize),
3637 last_update_at: Instant::now(),
3638 },
3639 cx,
3640 );
3641 self.buffer_ordered_messages_tx
3642 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3643 language_server_id,
3644 message: proto::update_language_server::Variant::WorkProgress(
3645 proto::LspWorkProgress {
3646 token,
3647 message: report.message,
3648 percentage: report.percentage.map(|p| p as u32),
3649 },
3650 ),
3651 })
3652 .ok();
3653 }
3654 }
3655 lsp::WorkDoneProgress::End(_) => {
3656 language_server_status.progress_tokens.remove(&token);
3657
3658 if is_disk_based_diagnostics_progress {
3659 language_server_status.has_pending_diagnostic_updates = false;
3660 self.disk_based_diagnostics_finished(language_server_id, cx);
3661 self.buffer_ordered_messages_tx
3662 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3663 language_server_id,
3664 message:
3665 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3666 Default::default(),
3667 ),
3668 })
3669 .ok();
3670 } else {
3671 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3672 self.buffer_ordered_messages_tx
3673 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3674 language_server_id,
3675 message: proto::update_language_server::Variant::WorkEnd(
3676 proto::LspWorkEnd { token },
3677 ),
3678 })
3679 .ok();
3680 }
3681 }
3682 }
3683 }
3684
3685 fn on_lsp_work_start(
3686 &mut self,
3687 language_server_id: LanguageServerId,
3688 token: String,
3689 progress: LanguageServerProgress,
3690 cx: &mut ModelContext<Self>,
3691 ) {
3692 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3693 status.pending_work.insert(token, progress);
3694 cx.notify();
3695 }
3696 }
3697
3698 fn on_lsp_work_progress(
3699 &mut self,
3700 language_server_id: LanguageServerId,
3701 token: String,
3702 progress: LanguageServerProgress,
3703 cx: &mut ModelContext<Self>,
3704 ) {
3705 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3706 let entry = status
3707 .pending_work
3708 .entry(token)
3709 .or_insert(LanguageServerProgress {
3710 message: Default::default(),
3711 percentage: Default::default(),
3712 last_update_at: progress.last_update_at,
3713 });
3714 if progress.message.is_some() {
3715 entry.message = progress.message;
3716 }
3717 if progress.percentage.is_some() {
3718 entry.percentage = progress.percentage;
3719 }
3720 entry.last_update_at = progress.last_update_at;
3721 cx.notify();
3722 }
3723 }
3724
3725 fn on_lsp_work_end(
3726 &mut self,
3727 language_server_id: LanguageServerId,
3728 token: String,
3729 cx: &mut ModelContext<Self>,
3730 ) {
3731 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3732 cx.emit(Event::RefreshInlayHints);
3733 status.pending_work.remove(&token);
3734 cx.notify();
3735 }
3736 }
3737
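// Handles the `workspace/didChangeWatchedFiles` registration by translating the
// requested glob patterns into per-worktree glob sets for file watching.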
3738 fn on_lsp_did_change_watched_files(
3739 &mut self,
3740 language_server_id: LanguageServerId,
3741 params: DidChangeWatchedFilesRegistrationOptions,
3742 cx: &mut ModelContext<Self>,
3743 ) {
3744 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3745 self.language_servers.get_mut(&language_server_id)
3746 {
3747 let mut builders = HashMap::default();
3748 for watcher in params.watchers {
3749 for worktree in &self.worktrees {
3750 if let Some(worktree) = worktree.upgrade() {
3751 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3752 if let Some(abs_path) = tree.abs_path().to_str() {
3753 let relative_glob_pattern = match &watcher.glob_pattern {
3754 lsp::GlobPattern::String(s) => s
3755 .strip_prefix(abs_path)
3756 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3757 lsp::GlobPattern::Relative(rp) => {
3758 let base_uri = match &rp.base_uri {
3759 lsp::OneOf::Left(workspace_folder) => {
3760 &workspace_folder.uri
3761 }
3762 lsp::OneOf::Right(base_uri) => base_uri,
3763 };
3764 base_uri.to_file_path().ok().and_then(|file_path| {
3765 (file_path.to_str() == Some(abs_path))
3766 .then_some(rp.pattern.as_str())
3767 })
3768 }
3769 };
3770 if let Some(relative_glob_pattern) = relative_glob_pattern {
3771 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
3772 tree.as_local_mut()
3773 .unwrap()
3774 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3775 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3776 builders
3777 .entry(tree.id())
3778 .or_insert_with(GlobSetBuilder::new)
3779 .add(glob);
3780 }
3781 return true;
3782 }
3783 }
3784 false
3785 });
3786 if glob_is_inside_worktree {
3787 break;
3788 }
3789 }
3790 }
3791 }
3792
3793 watched_paths.clear();
3794 for (worktree_id, builder) in builders {
3795 if let Ok(globset) = builder.build() {
3796 watched_paths.insert(worktree_id, globset);
3797 }
3798 }
3799
3800 cx.notify();
3801 }
3802 }
3803
3804 async fn on_lsp_workspace_edit(
3805 this: WeakModel<Self>,
3806 params: lsp::ApplyWorkspaceEditParams,
3807 server_id: LanguageServerId,
3808 adapter: Arc<CachedLspAdapter>,
3809 mut cx: AsyncAppContext,
3810 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3811 let this = this
3812 .upgrade()
3813 .ok_or_else(|| anyhow!("project closed"))?;
3814 let language_server = this
3815 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3816 .ok_or_else(|| anyhow!("language server not found"))?;
3817 let transaction = Self::deserialize_workspace_edit(
3818 this.clone(),
3819 params.edit,
3820 true,
3821 adapter.clone(),
3822 language_server.clone(),
3823 &mut cx,
3824 )
3825 .await
3826 .log_err();
3827 this.update(&mut cx, |this, _| {
3828 if let Some(transaction) = transaction {
3829 this.last_workspace_edits_by_language_server
3830 .insert(server_id, transaction);
3831 }
3832 })?;
3833 Ok(lsp::ApplyWorkspaceEditResponse {
3834 applied: true,
3835 failed_change: None,
3836 failure_reason: None,
3837 })
3838 }
3839
3840 pub fn language_server_statuses(
3841 &self,
3842 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3843 self.language_server_statuses.values()
3844 }
3845
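    // Converts a `textDocument/publishDiagnostics` notification into grouped
    // diagnostic entries. Diagnostics are sorted by severity first so the most
    // severe entry of each group becomes the primary; entries whose related
    // information points at an already-recorded primary are treated as supporting
    // diagnostics, and every other entry starts a new group whose related
    // locations within the same file are added as non-primary members.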
3846 pub fn update_diagnostics(
3847 &mut self,
3848 language_server_id: LanguageServerId,
3849 mut params: lsp::PublishDiagnosticsParams,
3850 disk_based_sources: &[String],
3851 cx: &mut ModelContext<Self>,
3852 ) -> Result<()> {
3853 let abs_path = params
3854 .uri
3855 .to_file_path()
3856 .map_err(|_| anyhow!("URI is not a file"))?;
3857 let mut diagnostics = Vec::default();
3858 let mut primary_diagnostic_group_ids = HashMap::default();
3859 let mut sources_by_group_id = HashMap::default();
3860 let mut supporting_diagnostics = HashMap::default();
3861
3862 // Ensure that primary diagnostics are always the most severe
3863 params.diagnostics.sort_by_key(|item| item.severity);
3864
        for diagnostic in &params.diagnostics {
3866 let source = diagnostic.source.as_ref();
3867 let code = diagnostic.code.as_ref().map(|code| match code {
3868 lsp::NumberOrString::Number(code) => code.to_string(),
3869 lsp::NumberOrString::String(code) => code.clone(),
3870 });
3871 let range = range_from_lsp(diagnostic.range);
3872 let is_supporting = diagnostic
3873 .related_information
3874 .as_ref()
3875 .map_or(false, |infos| {
3876 infos.iter().any(|info| {
3877 primary_diagnostic_group_ids.contains_key(&(
3878 source,
3879 code.clone(),
3880 range_from_lsp(info.location.range),
3881 ))
3882 })
3883 });
3884
3885 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3886 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3887 });
3888
3889 if is_supporting {
3890 supporting_diagnostics.insert(
3891 (source, code.clone(), range),
3892 (diagnostic.severity, is_unnecessary),
3893 );
3894 } else {
3895 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3896 let is_disk_based =
3897 source.map_or(false, |source| disk_based_sources.contains(source));
3898
3899 sources_by_group_id.insert(group_id, source);
3900 primary_diagnostic_group_ids
3901 .insert((source, code.clone(), range.clone()), group_id);
3902
3903 diagnostics.push(DiagnosticEntry {
3904 range,
3905 diagnostic: Diagnostic {
3906 source: diagnostic.source.clone(),
3907 code: code.clone(),
3908 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3909 message: diagnostic.message.clone(),
3910 group_id,
3911 is_primary: true,
3912 is_disk_based,
3913 is_unnecessary,
3914 },
3915 });
3916 if let Some(infos) = &diagnostic.related_information {
3917 for info in infos {
3918 if info.location.uri == params.uri && !info.message.is_empty() {
3919 let range = range_from_lsp(info.location.range);
3920 diagnostics.push(DiagnosticEntry {
3921 range,
3922 diagnostic: Diagnostic {
3923 source: diagnostic.source.clone(),
3924 code: code.clone(),
3925 severity: DiagnosticSeverity::INFORMATION,
3926 message: info.message.clone(),
3927 group_id,
3928 is_primary: false,
3929 is_disk_based,
3930 is_unnecessary: false,
3931 },
3932 });
3933 }
3934 }
3935 }
3936 }
3937 }
3938
3939 for entry in &mut diagnostics {
3940 let diagnostic = &mut entry.diagnostic;
3941 if !diagnostic.is_primary {
3942 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3943 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3944 source,
3945 diagnostic.code.clone(),
3946 entry.range.clone(),
3947 )) {
3948 if let Some(severity) = severity {
3949 diagnostic.severity = severity;
3950 }
3951 diagnostic.is_unnecessary = is_unnecessary;
3952 }
3953 }
3954 }
3955
3956 self.update_diagnostic_entries(
3957 language_server_id,
3958 abs_path,
3959 params.version,
3960 diagnostics,
3961 cx,
3962 )?;
3963 Ok(())
3964 }
3965
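    // Routes a set of diagnostics to the worktree containing `abs_path`, updating
    // any open buffer for that path first, and emits `Event::DiagnosticsUpdated`
    // when the worktree's diagnostics actually changed.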
3966 pub fn update_diagnostic_entries(
3967 &mut self,
3968 server_id: LanguageServerId,
3969 abs_path: PathBuf,
3970 version: Option<i32>,
3971 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3972 cx: &mut ModelContext<Project>,
3973 ) -> Result<(), anyhow::Error> {
3974 let (worktree, relative_path) = self
3975 .find_local_worktree(&abs_path, cx)
3976 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3977
3978 let project_path = ProjectPath {
3979 worktree_id: worktree.read(cx).id(),
3980 path: relative_path.into(),
3981 };
3982
3983 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3984 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3985 }
3986
3987 let updated = worktree.update(cx, |worktree, cx| {
3988 worktree
3989 .as_local_mut()
3990 .ok_or_else(|| anyhow!("not a local worktree"))?
3991 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3992 })?;
3993 if updated {
3994 cx.emit(Event::DiagnosticsUpdated {
3995 language_server_id: server_id,
3996 path: project_path,
3997 });
3998 }
3999 Ok(())
4000 }
4001
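    // Resolves diagnostics against the buffer snapshot that matches the reported
    // LSP version: disk-based diagnostics are shifted through the edits made since
    // the last save, all ranges are clipped to valid positions, and empty ranges
    // are widened by one codepoint so they remain visible.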
4002 fn update_buffer_diagnostics(
4003 &mut self,
4004 buffer: &Model<Buffer>,
4005 server_id: LanguageServerId,
4006 version: Option<i32>,
4007 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4008 cx: &mut ModelContext<Self>,
4009 ) -> Result<()> {
4010 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4011 Ordering::Equal
4012 .then_with(|| b.is_primary.cmp(&a.is_primary))
4013 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4014 .then_with(|| a.severity.cmp(&b.severity))
4015 .then_with(|| a.message.cmp(&b.message))
4016 }
4017
4018 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4019
4020 diagnostics.sort_unstable_by(|a, b| {
4021 Ordering::Equal
4022 .then_with(|| a.range.start.cmp(&b.range.start))
4023 .then_with(|| b.range.end.cmp(&a.range.end))
4024 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4025 });
4026
4027 let mut sanitized_diagnostics = Vec::new();
4028 let edits_since_save = Patch::new(
4029 snapshot
4030 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4031 .collect(),
4032 );
4033 for entry in diagnostics {
4034 let start;
4035 let end;
4036 if entry.diagnostic.is_disk_based {
4037 // Some diagnostics are based on files on disk instead of buffers'
4038 // current contents. Adjust these diagnostics' ranges to reflect
4039 // any unsaved edits.
4040 start = edits_since_save.old_to_new(entry.range.start);
4041 end = edits_since_save.old_to_new(entry.range.end);
4042 } else {
4043 start = entry.range.start;
4044 end = entry.range.end;
4045 }
4046
4047 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4048 ..snapshot.clip_point_utf16(end, Bias::Right);
4049
4050 // Expand empty ranges by one codepoint
4051 if range.start == range.end {
                // This will move to the next boundary when clipped
4053 range.end.column += 1;
4054 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4055 if range.start == range.end && range.end.column > 0 {
4056 range.start.column -= 1;
4057 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
4058 }
4059 }
4060
4061 sanitized_diagnostics.push(DiagnosticEntry {
4062 range,
4063 diagnostic: entry.diagnostic,
4064 });
4065 }
4066 drop(edits_since_save);
4067
4068 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4069 buffer.update(cx, |buffer, cx| {
4070 buffer.update_diagnostics(server_id, set, cx)
4071 });
4072 Ok(())
4073 }
4074
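    // Reloads the given buffers from disk, replacing buffer contents with the
    // on-disk version. Only dirty buffers are considered: local ones are reloaded
    // directly, while buffers belonging to a remote project are reloaded via a
    // `ReloadBuffers` request to the host. All resulting undo transactions are
    // collected into a single `ProjectTransaction`.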
4075 pub fn reload_buffers(
4076 &self,
4077 buffers: HashSet<Model<Buffer>>,
4078 push_to_history: bool,
4079 cx: &mut ModelContext<Self>,
4080 ) -> Task<Result<ProjectTransaction>> {
4081 let mut local_buffers = Vec::new();
4082 let mut remote_buffers = None;
4083 for buffer_handle in buffers {
4084 let buffer = buffer_handle.read(cx);
4085 if buffer.is_dirty() {
4086 if let Some(file) = File::from_dyn(buffer.file()) {
4087 if file.is_local() {
4088 local_buffers.push(buffer_handle);
4089 } else {
4090 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4091 }
4092 }
4093 }
4094 }
4095
4096 let remote_buffers = self.remote_id().zip(remote_buffers);
4097 let client = self.client.clone();
4098
4099 cx.spawn(move |this, mut cx| async move {
4100 let mut project_transaction = ProjectTransaction::default();
4101
4102 if let Some((project_id, remote_buffers)) = remote_buffers {
4103 let response = client
4104 .request(proto::ReloadBuffers {
4105 project_id,
4106 buffer_ids: remote_buffers
4107 .iter()
4108 .filter_map(|buffer| {
4109 buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
4110 })
4111 .collect(),
4112 })
4113 .await?
4114 .transaction
4115 .ok_or_else(|| anyhow!("missing transaction"))?;
4116 project_transaction = this
4117 .update(&mut cx, |this, cx| {
4118 this.deserialize_project_transaction(response, push_to_history, cx)
4119 })?
4120 .await?;
4121 }
4122
4123 for buffer in local_buffers {
4124 let transaction = buffer
4125 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4126 .await?;
4127 buffer.update(&mut cx, |buffer, cx| {
4128 if let Some(transaction) = transaction {
4129 if !push_to_history {
4130 buffer.forget_transaction(transaction.id);
4131 }
4132 project_transaction.0.insert(cx.handle(), transaction);
4133 }
4134 })?;
4135 }
4136
4137 Ok(project_transaction)
4138 })
4139 }
4140
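    // Formats the given buffers. Locally, each buffer is processed in two steps
    // that end up grouped as one undo transaction: first whitespace fixups
    // (trimming trailing whitespace and ensuring a final newline, per settings),
    // then language-specific formatting via the primary language server, an
    // external command, or prettier, depending on the `formatter` and
    // `format_on_save` settings. If the buffer is edited while the
    // language-specific formatting is still being computed, that result is
    // discarded rather than applied out of order. Remote projects forward the
    // request to the host via `FormatBuffers`.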
4141 pub fn format(
4142 &mut self,
4143 buffers: HashSet<Model<Buffer>>,
4144 push_to_history: bool,
4145 trigger: FormatTrigger,
4146 cx: &mut ModelContext<Project>,
4147 ) -> Task<anyhow::Result<ProjectTransaction>> {
4148 if self.is_local() {
4149 let mut buffers_with_paths_and_servers = buffers
4150 .into_iter()
4151 .filter_map(|buffer_handle| {
4152 let buffer = buffer_handle.read(cx);
4153 let file = File::from_dyn(buffer.file())?;
4154 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4155 let server = self
4156 .primary_language_server_for_buffer(buffer, cx)
4157 .map(|s| s.1.clone());
4158 Some((buffer_handle, buffer_abs_path, server))
4159 })
4160 .collect::<Vec<_>>();
4161
4162 cx.spawn(move |project, mut cx| async move {
4163 // Do not allow multiple concurrent formatting requests for the
4164 // same buffer.
4165 project.update(&mut cx, |this, cx| {
4166 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4167 this.buffers_being_formatted
4168 .insert(buffer.read(cx).remote_id())
4169 });
4170 })?;
4171
4172 let _cleanup = defer({
4173 let this = project.clone();
4174 let mut cx = cx.clone();
4175 let buffers = &buffers_with_paths_and_servers;
4176 move || {
4177 this.update(&mut cx, |this, cx| {
4178 for (buffer, _, _) in buffers {
4179 this.buffers_being_formatted
4180 .remove(&buffer.read(cx).remote_id());
4181 }
4182 })
4183 .ok();
4184 }
4185 });
4186
4187 let mut project_transaction = ProjectTransaction::default();
4188 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4189 let settings = buffer.update(&mut cx, |buffer, cx| {
4190 language_settings(buffer.language(), buffer.file(), cx).clone()
4191 })?;
4192
4193 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4194 let ensure_final_newline = settings.ensure_final_newline_on_save;
4195 let tab_size = settings.tab_size;
4196
4197 // First, format buffer's whitespace according to the settings.
4198 let trailing_whitespace_diff = if remove_trailing_whitespace {
4199 Some(
4200 buffer
4201 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4202 .await,
4203 )
4204 } else {
4205 None
4206 };
4207 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4208 buffer.finalize_last_transaction();
4209 buffer.start_transaction();
4210 if let Some(diff) = trailing_whitespace_diff {
4211 buffer.apply_diff(diff, cx);
4212 }
4213 if ensure_final_newline {
4214 buffer.ensure_final_newline(cx);
4215 }
4216 buffer.end_transaction(cx)
4217 })?;
4218
4219 // Apply language-specific formatting using either a language server
4220 // or external command.
4221 let mut format_operation = None;
4222 match (&settings.formatter, &settings.format_on_save) {
4223 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4224
4225 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4226 | (_, FormatOnSave::LanguageServer) => {
4227 if let Some((language_server, buffer_abs_path)) =
4228 language_server.as_ref().zip(buffer_abs_path.as_ref())
4229 {
4230 format_operation = Some(FormatOperation::Lsp(
4231 Self::format_via_lsp(
4232 &project,
4233 buffer,
4234 buffer_abs_path,
4235 language_server,
4236 tab_size,
4237 &mut cx,
4238 )
4239 .await
4240 .context("failed to format via language server")?,
4241 ));
4242 }
4243 }
4244
4245 (
4246 Formatter::External { command, arguments },
4247 FormatOnSave::On | FormatOnSave::Off,
4248 )
4249 | (_, FormatOnSave::External { command, arguments }) => {
4250 if let Some(buffer_abs_path) = buffer_abs_path {
4251 format_operation = Self::format_via_external_command(
4252 buffer,
4253 buffer_abs_path,
4254 command,
4255 arguments,
4256 &mut cx,
4257 )
4258 .await
4259 .context(format!(
4260 "failed to format via external command {:?}",
4261 command
4262 ))?
4263 .map(FormatOperation::External);
4264 }
4265 }
4266 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4267 if let Some(new_operation) =
4268 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4269 .await
4270 {
4271 format_operation = Some(new_operation);
4272 } else if let Some((language_server, buffer_abs_path)) =
4273 language_server.as_ref().zip(buffer_abs_path.as_ref())
4274 {
4275 format_operation = Some(FormatOperation::Lsp(
4276 Self::format_via_lsp(
4277 &project,
4278 buffer,
4279 buffer_abs_path,
4280 language_server,
4281 tab_size,
4282 &mut cx,
4283 )
4284 .await
4285 .context("failed to format via language server")?,
4286 ));
4287 }
4288 }
4289 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4290 if let Some(new_operation) =
4291 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4292 .await
4293 {
4294 format_operation = Some(new_operation);
4295 }
4296 }
4297 };
4298
4299 buffer.update(&mut cx, |b, cx| {
4300 // If the buffer had its whitespace formatted and was edited while the language-specific
4301 // formatting was being computed, avoid applying the language-specific formatting, because
4302 // it can't be grouped with the whitespace formatting in the undo history.
4303 if let Some(transaction_id) = whitespace_transaction_id {
4304 if b.peek_undo_stack()
4305 .map_or(true, |e| e.transaction_id() != transaction_id)
4306 {
4307 format_operation.take();
4308 }
4309 }
4310
4311 // Apply any language-specific formatting, and group the two formatting operations
4312 // in the buffer's undo history.
4313 if let Some(operation) = format_operation {
4314 match operation {
4315 FormatOperation::Lsp(edits) => {
4316 b.edit(edits, None, cx);
4317 }
4318 FormatOperation::External(diff) => {
4319 b.apply_diff(diff, cx);
4320 }
4321 FormatOperation::Prettier(diff) => {
4322 b.apply_diff(diff, cx);
4323 }
4324 }
4325
4326 if let Some(transaction_id) = whitespace_transaction_id {
4327 b.group_until_transaction(transaction_id);
4328 }
4329 }
4330
4331 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4332 if !push_to_history {
4333 b.forget_transaction(transaction.id);
4334 }
4335 project_transaction.0.insert(buffer.clone(), transaction);
4336 }
4337 })?;
4338 }
4339
4340 Ok(project_transaction)
4341 })
4342 } else {
4343 let remote_id = self.remote_id();
4344 let client = self.client.clone();
4345 cx.spawn(move |this, mut cx| async move {
4346 let mut project_transaction = ProjectTransaction::default();
4347 if let Some(project_id) = remote_id {
4348 let response = client
4349 .request(proto::FormatBuffers {
4350 project_id,
4351 trigger: trigger as i32,
4352 buffer_ids: buffers
4353 .iter()
4354 .map(|buffer| {
4355 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4356 })
4357 .collect::<Result<_>>()?,
4358 })
4359 .await?
4360 .transaction
4361 .ok_or_else(|| anyhow!("missing transaction"))?;
4362 project_transaction = this
4363 .update(&mut cx, |this, cx| {
4364 this.deserialize_project_transaction(response, push_to_history, cx)
4365 })?
4366 .await?;
4367 }
4368 Ok(project_transaction)
4369 })
4370 }
4371 }
4372
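    // Requests formatting edits from a language server, preferring
    // `textDocument/formatting` and falling back to `textDocument/rangeFormatting`
    // over the entire buffer when only range formatting is advertised. The LSP
    // edits are converted into anchored buffer edits via `edits_from_lsp`.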
4373 async fn format_via_lsp(
4374 this: &WeakModel<Self>,
4375 buffer: &Model<Buffer>,
4376 abs_path: &Path,
4377 language_server: &Arc<LanguageServer>,
4378 tab_size: NonZeroU32,
4379 cx: &mut AsyncAppContext,
4380 ) -> Result<Vec<(Range<Anchor>, String)>> {
4381 let uri = lsp::Url::from_file_path(abs_path)
4382 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4383 let text_document = lsp::TextDocumentIdentifier::new(uri);
4384 let capabilities = &language_server.capabilities();
4385
4386 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4387 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4388
4389 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4390 language_server
4391 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4392 text_document,
4393 options: lsp_command::lsp_formatting_options(tab_size.get()),
4394 work_done_progress_params: Default::default(),
4395 })
4396 .await?
4397 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4398 let buffer_start = lsp::Position::new(0, 0);
4399 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4400
4401 language_server
4402 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4403 text_document,
4404 range: lsp::Range::new(buffer_start, buffer_end),
4405 options: lsp_command::lsp_formatting_options(tab_size.get()),
4406 work_done_progress_params: Default::default(),
4407 })
4408 .await?
4409 } else {
4410 None
4411 };
4412
4413 if let Some(lsp_edits) = lsp_edits {
4414 this.update(cx, |this, cx| {
4415 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4416 })?
4417 .await
4418 } else {
4419 Ok(Vec::new())
4420 }
4421 }
4422
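    // Runs an external formatter from the worktree root (or its parent directory
    // when the worktree root is a single file), substituting "{buffer_path}" in
    // the configured arguments, streaming the buffer contents to the command's
    // stdin, and diffing its stdout against the buffer. A hypothetical settings
    // example: `"formatter": {"external": {"command": "prettier",
    // "arguments": ["--stdin-filepath", "{buffer_path}"]}}`.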
4423 async fn format_via_external_command(
4424 buffer: &Model<Buffer>,
4425 buffer_abs_path: &Path,
4426 command: &str,
4427 arguments: &[String],
4428 cx: &mut AsyncAppContext,
4429 ) -> Result<Option<Diff>> {
4430 let working_dir_path = buffer.update(cx, |buffer, cx| {
4431 let file = File::from_dyn(buffer.file())?;
4432 let worktree = file.worktree.read(cx).as_local()?;
4433 let mut worktree_path = worktree.abs_path().to_path_buf();
4434 if worktree.root_entry()?.is_file() {
4435 worktree_path.pop();
4436 }
4437 Some(worktree_path)
4438 })?;
4439
4440 if let Some(working_dir_path) = working_dir_path {
4441 let mut child =
4442 smol::process::Command::new(command)
4443 .args(arguments.iter().map(|arg| {
4444 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4445 }))
4446 .current_dir(&working_dir_path)
4447 .stdin(smol::process::Stdio::piped())
4448 .stdout(smol::process::Stdio::piped())
4449 .stderr(smol::process::Stdio::piped())
4450 .spawn()?;
4451 let stdin = child
4452 .stdin
4453 .as_mut()
4454 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4455 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4456 for chunk in text.chunks() {
4457 stdin.write_all(chunk.as_bytes()).await?;
4458 }
4459 stdin.flush().await?;
4460
4461 let output = child.output().await?;
4462 if !output.status.success() {
4463 return Err(anyhow!(
4464 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4465 output.status.code(),
4466 String::from_utf8_lossy(&output.stdout),
4467 String::from_utf8_lossy(&output.stderr),
4468 ));
4469 }
4470
4471 let stdout = String::from_utf8(output.stdout)?;
4472 Ok(Some(
4473 buffer
4474 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4475 .await,
4476 ))
4477 } else {
4478 Ok(None)
4479 }
4480 }
4481
4482 pub fn definition<T: ToPointUtf16>(
4483 &self,
4484 buffer: &Model<Buffer>,
4485 position: T,
4486 cx: &mut ModelContext<Self>,
4487 ) -> Task<Result<Vec<LocationLink>>> {
4488 let position = position.to_point_utf16(buffer.read(cx));
4489 self.request_lsp(
4490 buffer.clone(),
4491 LanguageServerToQuery::Primary,
4492 GetDefinition { position },
4493 cx,
4494 )
4495 }
4496
4497 pub fn type_definition<T: ToPointUtf16>(
4498 &self,
4499 buffer: &Model<Buffer>,
4500 position: T,
4501 cx: &mut ModelContext<Self>,
4502 ) -> Task<Result<Vec<LocationLink>>> {
4503 let position = position.to_point_utf16(buffer.read(cx));
4504 self.request_lsp(
4505 buffer.clone(),
4506 LanguageServerToQuery::Primary,
4507 GetTypeDefinition { position },
4508 cx,
4509 )
4510 }
4511
4512 pub fn references<T: ToPointUtf16>(
4513 &self,
4514 buffer: &Model<Buffer>,
4515 position: T,
4516 cx: &mut ModelContext<Self>,
4517 ) -> Task<Result<Vec<Location>>> {
4518 let position = position.to_point_utf16(buffer.read(cx));
4519 self.request_lsp(
4520 buffer.clone(),
4521 LanguageServerToQuery::Primary,
4522 GetReferences { position },
4523 cx,
4524 )
4525 }
4526
4527 pub fn document_highlights<T: ToPointUtf16>(
4528 &self,
4529 buffer: &Model<Buffer>,
4530 position: T,
4531 cx: &mut ModelContext<Self>,
4532 ) -> Task<Result<Vec<DocumentHighlight>>> {
4533 let position = position.to_point_utf16(buffer.read(cx));
4534 self.request_lsp(
4535 buffer.clone(),
4536 LanguageServerToQuery::Primary,
4537 GetDocumentHighlights { position },
4538 cx,
4539 )
4540 }
4541
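    // Fans a workspace symbol query out to every running language server (one per
    // worktree/language pair), flattens both the flat and nested response formats,
    // and maps each result's file URI back to a project path - either inside an
    // existing worktree or relative to the worktree the server was started for.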
4542 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4543 if self.is_local() {
4544 let mut requests = Vec::new();
4545 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4546 let worktree_id = *worktree_id;
4547 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4548 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4549 Some(worktree) => worktree,
4550 None => continue,
4551 };
4552 let worktree_abs_path = worktree.abs_path().clone();
4553
4554 let (adapter, language, server) = match self.language_servers.get(server_id) {
4555 Some(LanguageServerState::Running {
4556 adapter,
4557 language,
4558 server,
4559 ..
4560 }) => (adapter.clone(), language.clone(), server),
4561
4562 _ => continue,
4563 };
4564
4565 requests.push(
4566 server
4567 .request::<lsp::request::WorkspaceSymbolRequest>(
4568 lsp::WorkspaceSymbolParams {
4569 query: query.to_string(),
4570 ..Default::default()
4571 },
4572 )
4573 .log_err()
4574 .map(move |response| {
4575 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4576 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4577 flat_responses.into_iter().map(|lsp_symbol| {
4578 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4579 }).collect::<Vec<_>>()
4580 }
4581 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4582 nested_responses.into_iter().filter_map(|lsp_symbol| {
4583 let location = match lsp_symbol.location {
4584 OneOf::Left(location) => location,
4585 OneOf::Right(_) => {
4586 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4587 return None
4588 }
4589 };
4590 Some((lsp_symbol.name, lsp_symbol.kind, location))
4591 }).collect::<Vec<_>>()
4592 }
4593 }).unwrap_or_default();
4594
4595 (
4596 adapter,
4597 language,
4598 worktree_id,
4599 worktree_abs_path,
4600 lsp_symbols,
4601 )
4602 }),
4603 );
4604 }
4605
4606 cx.spawn(move |this, mut cx| async move {
4607 let responses = futures::future::join_all(requests).await;
4608 let this = match this.upgrade() {
4609 Some(this) => this,
4610 None => return Ok(Vec::new()),
4611 };
4612
4613 let symbols = this.update(&mut cx, |this, cx| {
4614 let mut symbols = Vec::new();
4615 for (
4616 adapter,
4617 adapter_language,
4618 source_worktree_id,
4619 worktree_abs_path,
4620 lsp_symbols,
4621 ) in responses
4622 {
4623 symbols.extend(lsp_symbols.into_iter().filter_map(
4624 |(symbol_name, symbol_kind, symbol_location)| {
4625 let abs_path = symbol_location.uri.to_file_path().ok()?;
4626 let mut worktree_id = source_worktree_id;
4627 let path;
4628 if let Some((worktree, rel_path)) =
4629 this.find_local_worktree(&abs_path, cx)
4630 {
4631 worktree_id = worktree.read(cx).id();
4632 path = rel_path;
4633 } else {
4634 path = relativize_path(&worktree_abs_path, &abs_path);
4635 }
4636
4637 let project_path = ProjectPath {
4638 worktree_id,
4639 path: path.into(),
4640 };
4641 let signature = this.symbol_signature(&project_path);
4642 let adapter_language = adapter_language.clone();
4643 let language = this
4644 .languages
4645 .language_for_file(&project_path.path, None)
4646 .unwrap_or_else(move |_| adapter_language);
4647 let language_server_name = adapter.name.clone();
4648 Some(async move {
4649 let language = language.await;
4650 let label =
4651 language.label_for_symbol(&symbol_name, symbol_kind).await;
4652
4653 Symbol {
4654 language_server_name,
4655 source_worktree_id,
4656 path: project_path,
4657 label: label.unwrap_or_else(|| {
4658 CodeLabel::plain(symbol_name.clone(), None)
4659 }),
4660 kind: symbol_kind,
4661 name: symbol_name,
4662 range: range_from_lsp(symbol_location.range),
4663 signature,
4664 }
4665 })
4666 },
4667 ));
4668 }
4669
4670 symbols
4671 })?;
4672
4673 Ok(futures::future::join_all(symbols).await)
4674 })
4675 } else if let Some(project_id) = self.remote_id() {
4676 let request = self.client.request(proto::GetProjectSymbols {
4677 project_id,
4678 query: query.to_string(),
4679 });
4680 cx.spawn(move |this, mut cx| async move {
4681 let response = request.await?;
4682 let mut symbols = Vec::new();
4683 if let Some(this) = this.upgrade() {
4684 let new_symbols = this.update(&mut cx, |this, _| {
4685 response
4686 .symbols
4687 .into_iter()
4688 .map(|symbol| this.deserialize_symbol(symbol))
4689 .collect::<Vec<_>>()
4690 })?;
4691 symbols = futures::future::join_all(new_symbols)
4692 .await
4693 .into_iter()
4694 .filter_map(|symbol| symbol.log_err())
4695 .collect::<Vec<_>>();
4696 }
4697 Ok(symbols)
4698 })
4699 } else {
4700 Task::ready(Ok(Default::default()))
4701 }
4702 }
4703
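    // Opens the buffer containing the given symbol: the symbol's path is resolved
    // against its worktree's absolute path and opened through
    // `open_local_buffer_via_lsp`, associated with the language server that
    // produced it. Remote projects forward the request to the host.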
4704 pub fn open_buffer_for_symbol(
4705 &mut self,
4706 symbol: &Symbol,
4707 cx: &mut ModelContext<Self>,
4708 ) -> Task<Result<Model<Buffer>>> {
4709 if self.is_local() {
4710 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4711 symbol.source_worktree_id,
4712 symbol.language_server_name.clone(),
4713 )) {
4714 *id
4715 } else {
4716 return Task::ready(Err(anyhow!(
4717 "language server for worktree and language not found"
4718 )));
4719 };
4720
4721 let worktree_abs_path = if let Some(worktree_abs_path) = self
4722 .worktree_for_id(symbol.path.worktree_id, cx)
4723 .and_then(|worktree| worktree.read(cx).as_local())
4724 .map(|local_worktree| local_worktree.abs_path())
4725 {
4726 worktree_abs_path
4727 } else {
4728 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4729 };
4730
4731 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
4732 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4733 uri
4734 } else {
4735 return Task::ready(Err(anyhow!("invalid symbol path")));
4736 };
4737
4738 self.open_local_buffer_via_lsp(
4739 symbol_uri,
4740 language_server_id,
4741 symbol.language_server_name.clone(),
4742 cx,
4743 )
4744 } else if let Some(project_id) = self.remote_id() {
4745 let request = self.client.request(proto::OpenBufferForSymbol {
4746 project_id,
4747 symbol: Some(serialize_symbol(symbol)),
4748 });
4749 cx.spawn(move |this, mut cx| async move {
4750 let response = request.await?;
4751 this.update(&mut cx, |this, cx| {
4752 this.wait_for_remote_buffer(response.buffer_id, cx)
4753 })?
4754 .await
4755 })
4756 } else {
4757 Task::ready(Err(anyhow!("project does not have a remote id")))
4758 }
4759 }
4760
4761 pub fn hover<T: ToPointUtf16>(
4762 &self,
4763 buffer: &Model<Buffer>,
4764 position: T,
4765 cx: &mut ModelContext<Self>,
4766 ) -> Task<Result<Option<Hover>>> {
4767 let position = position.to_point_utf16(buffer.read(cx));
4768 self.request_lsp(
4769 buffer.clone(),
4770 LanguageServerToQuery::Primary,
4771 GetHover { position },
4772 cx,
4773 )
4774 }
4775
4776 pub fn completions<T: ToOffset + ToPointUtf16>(
4777 &self,
4778 buffer: &Model<Buffer>,
4779 position: T,
4780 cx: &mut ModelContext<Self>,
4781 ) -> Task<Result<Vec<Completion>>> {
4782 let position = position.to_point_utf16(buffer.read(cx));
4783 if self.is_local() {
4784 let snapshot = buffer.read(cx).snapshot();
4785 let offset = position.to_offset(&snapshot);
4786 let scope = snapshot.language_scope_at(offset);
4787
4788 let server_ids: Vec<_> = self
4789 .language_servers_for_buffer(buffer.read(cx), cx)
4790 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4791 .filter(|(adapter, _)| {
4792 scope
4793 .as_ref()
4794 .map(|scope| scope.language_allowed(&adapter.name))
4795 .unwrap_or(true)
4796 })
4797 .map(|(_, server)| server.server_id())
4798 .collect();
4799
4800 let buffer = buffer.clone();
4801 cx.spawn(move |this, mut cx| async move {
4802 let mut tasks = Vec::with_capacity(server_ids.len());
4803 this.update(&mut cx, |this, cx| {
4804 for server_id in server_ids {
4805 tasks.push(this.request_lsp(
4806 buffer.clone(),
4807 LanguageServerToQuery::Other(server_id),
4808 GetCompletions { position },
4809 cx,
4810 ));
4811 }
4812 })?;
4813
4814 let mut completions = Vec::new();
4815 for task in tasks {
4816 if let Ok(new_completions) = task.await {
4817 completions.extend_from_slice(&new_completions);
4818 }
4819 }
4820
4821 Ok(completions)
4822 })
4823 } else if let Some(project_id) = self.remote_id() {
4824 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4825 } else {
4826 Task::ready(Ok(Default::default()))
4827 }
4828 }
4829
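    // Lazily resolves documentation for the completions at the given indices,
    // skipping items that are already documented. Remote projects ask the host via
    // `ResolveCompletionDocumentation`; local projects call
    // `completionItem/resolve` on the originating server when it advertises
    // resolve support. Returns whether any documentation was actually resolved.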
4830 pub fn resolve_completions(
4831 &self,
4832 completion_indices: Vec<usize>,
4833 completions: Arc<RwLock<Box<[Completion]>>>,
4834 cx: &mut ModelContext<Self>,
4835 ) -> Task<Result<bool>> {
4836 let client = self.client();
4837 let language_registry = self.languages().clone();
4838
4839 let is_remote = self.is_remote();
4840 let project_id = self.remote_id();
4841
4842 cx.spawn(move |this, mut cx| async move {
4843 let mut did_resolve = false;
4844 if is_remote {
4845 let project_id =
4846 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
4847
4848 for completion_index in completion_indices {
4849 let completions_guard = completions.read();
4850 let completion = &completions_guard[completion_index];
4851 if completion.documentation.is_some() {
4852 continue;
4853 }
4854
4855 did_resolve = true;
4856 let server_id = completion.server_id;
4857 let completion = completion.lsp_completion.clone();
4858 drop(completions_guard);
4859
4860 Self::resolve_completion_documentation_remote(
4861 project_id,
4862 server_id,
4863 completions.clone(),
4864 completion_index,
4865 completion,
4866 client.clone(),
4867 language_registry.clone(),
4868 )
4869 .await;
4870 }
4871 } else {
4872 for completion_index in completion_indices {
4873 let completions_guard = completions.read();
4874 let completion = &completions_guard[completion_index];
4875 if completion.documentation.is_some() {
4876 continue;
4877 }
4878
4879 let server_id = completion.server_id;
4880 let completion = completion.lsp_completion.clone();
4881 drop(completions_guard);
4882
4883 let server = this
4884 .read_with(&mut cx, |project, _| {
4885 project.language_server_for_id(server_id)
4886 })
4887 .ok()
4888 .flatten();
4889 let Some(server) = server else {
4890 continue;
4891 };
4892
4893 did_resolve = true;
4894 Self::resolve_completion_documentation_local(
4895 server,
4896 completions.clone(),
4897 completion_index,
4898 completion,
4899 language_registry.clone(),
4900 )
4901 .await;
4902 }
4903 }
4904
4905 Ok(did_resolve)
4906 })
4907 }
4908
4909 async fn resolve_completion_documentation_local(
4910 server: Arc<lsp::LanguageServer>,
4911 completions: Arc<RwLock<Box<[Completion]>>>,
4912 completion_index: usize,
4913 completion: lsp::CompletionItem,
4914 language_registry: Arc<LanguageRegistry>,
4915 ) {
4916 let can_resolve = server
4917 .capabilities()
4918 .completion_provider
4919 .as_ref()
4920 .and_then(|options| options.resolve_provider)
4921 .unwrap_or(false);
4922 if !can_resolve {
4923 return;
4924 }
4925
4926 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
4927 let Some(completion_item) = request.await.log_err() else {
4928 return;
4929 };
4930
4931 if let Some(lsp_documentation) = completion_item.documentation {
4932 let documentation = language::prepare_completion_documentation(
4933 &lsp_documentation,
4934 &language_registry,
4935 None, // TODO: Try to reasonably work out which language the completion is for
4936 )
4937 .await;
4938
4939 let mut completions = completions.write();
4940 let completion = &mut completions[completion_index];
4941 completion.documentation = Some(documentation);
4942 } else {
4943 let mut completions = completions.write();
4944 let completion = &mut completions[completion_index];
4945 completion.documentation = Some(Documentation::Undocumented);
4946 }
4947 }
4948
4949 async fn resolve_completion_documentation_remote(
4950 project_id: u64,
4951 server_id: LanguageServerId,
4952 completions: Arc<RwLock<Box<[Completion]>>>,
4953 completion_index: usize,
4954 completion: lsp::CompletionItem,
4955 client: Arc<Client>,
4956 language_registry: Arc<LanguageRegistry>,
4957 ) {
4958 let request = proto::ResolveCompletionDocumentation {
4959 project_id,
4960 language_server_id: server_id.0 as u64,
4961 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
4962 };
4963
4964 let Some(response) = client
4965 .request(request)
4966 .await
4967 .context("completion documentation resolve proto request")
4968 .log_err()
4969 else {
4970 return;
4971 };
4972
        if response.text.is_empty() {
            let mut completions = completions.write();
            let completion = &mut completions[completion_index];
            completion.documentation = Some(Documentation::Undocumented);
            // Nothing further to parse for an empty documentation payload.
            return;
        }
4978
4979 let documentation = if response.is_markdown {
4980 Documentation::MultiLineMarkdown(
4981 markdown::parse_markdown(&response.text, &language_registry, None).await,
4982 )
4983 } else if response.text.lines().count() <= 1 {
4984 Documentation::SingleLine(response.text)
4985 } else {
4986 Documentation::MultiLinePlainText(response.text)
4987 };
4988
4989 let mut completions = completions.write();
4990 let completion = &mut completions[completion_index];
4991 completion.documentation = Some(documentation);
4992 }
4993
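    // Applies the additional text edits that accompany a confirmed completion
    // (e.g. auto-inserted imports). The completion item is re-resolved first when
    // the server supports `completionItem/resolve`; edits that overlap the primary
    // completion range are skipped, and the remainder is grouped into a single
    // undo transaction.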
4994 pub fn apply_additional_edits_for_completion(
4995 &self,
4996 buffer_handle: Model<Buffer>,
4997 completion: Completion,
4998 push_to_history: bool,
4999 cx: &mut ModelContext<Self>,
5000 ) -> Task<Result<Option<Transaction>>> {
5001 let buffer = buffer_handle.read(cx);
5002 let buffer_id = buffer.remote_id();
5003
5004 if self.is_local() {
5005 let server_id = completion.server_id;
5006 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
5007 Some((_, server)) => server.clone(),
5008 _ => return Task::ready(Ok(Default::default())),
5009 };
5010
5011 cx.spawn(move |this, mut cx| async move {
5012 let can_resolve = lang_server
5013 .capabilities()
5014 .completion_provider
5015 .as_ref()
5016 .and_then(|options| options.resolve_provider)
5017 .unwrap_or(false);
5018 let additional_text_edits = if can_resolve {
5019 lang_server
5020 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
5021 .await?
5022 .additional_text_edits
5023 } else {
5024 completion.lsp_completion.additional_text_edits
5025 };
5026 if let Some(edits) = additional_text_edits {
5027 let edits = this
5028 .update(&mut cx, |this, cx| {
5029 this.edits_from_lsp(
5030 &buffer_handle,
5031 edits,
5032 lang_server.server_id(),
5033 None,
5034 cx,
5035 )
5036 })?
5037 .await?;
5038
5039 buffer_handle.update(&mut cx, |buffer, cx| {
5040 buffer.finalize_last_transaction();
5041 buffer.start_transaction();
5042
5043 for (range, text) in edits {
5044 let primary = &completion.old_range;
5045 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5046 && primary.end.cmp(&range.start, buffer).is_ge();
5047 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5048 && range.end.cmp(&primary.end, buffer).is_ge();
5049
                        // Skip additional edits which overlap with the primary completion edit
                        // https://github.com/zed-industries/zed/pull/1871
5052 if !start_within && !end_within {
5053 buffer.edit([(range, text)], None, cx);
5054 }
5055 }
5056
5057 let transaction = if buffer.end_transaction(cx).is_some() {
5058 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5059 if !push_to_history {
5060 buffer.forget_transaction(transaction.id);
5061 }
5062 Some(transaction)
5063 } else {
5064 None
5065 };
5066 Ok(transaction)
5067 })?
5068 } else {
5069 Ok(None)
5070 }
5071 })
5072 } else if let Some(project_id) = self.remote_id() {
5073 let client = self.client.clone();
5074 cx.spawn(move |_, mut cx| async move {
5075 let response = client
5076 .request(proto::ApplyCompletionAdditionalEdits {
5077 project_id,
5078 buffer_id,
5079 completion: Some(language::proto::serialize_completion(&completion)),
5080 })
5081 .await?;
5082
5083 if let Some(transaction) = response.transaction {
5084 let transaction = language::proto::deserialize_transaction(transaction)?;
5085 buffer_handle
5086 .update(&mut cx, |buffer, _| {
5087 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5088 })?
5089 .await?;
5090 if push_to_history {
5091 buffer_handle.update(&mut cx, |buffer, _| {
5092 buffer.push_transaction(transaction.clone(), Instant::now());
5093 })?;
5094 }
5095 Ok(Some(transaction))
5096 } else {
5097 Ok(None)
5098 }
5099 })
5100 } else {
5101 Task::ready(Err(anyhow!("project does not have a remote id")))
5102 }
5103 }
5104
5105 pub fn code_actions<T: Clone + ToOffset>(
5106 &self,
5107 buffer_handle: &Model<Buffer>,
5108 range: Range<T>,
5109 cx: &mut ModelContext<Self>,
5110 ) -> Task<Result<Vec<CodeAction>>> {
5111 let buffer = buffer_handle.read(cx);
5112 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5113 self.request_lsp(
5114 buffer_handle.clone(),
5115 LanguageServerToQuery::Primary,
5116 GetCodeActions { range },
5117 cx,
5118 )
5119 }
5120
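    // Applies a code action. If the action carries resolve data, its range is
    // refreshed and the action is resolved via `codeAction/resolve`; otherwise the
    // code actions for the range are re-requested and matched by title. The
    // resolved action's workspace edit is applied directly, or, when the action
    // only carries a command, the command is executed and any workspace edits the
    // server sends back are returned as the resulting `ProjectTransaction`.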
5121 pub fn apply_code_action(
5122 &self,
5123 buffer_handle: Model<Buffer>,
5124 mut action: CodeAction,
5125 push_to_history: bool,
5126 cx: &mut ModelContext<Self>,
5127 ) -> Task<Result<ProjectTransaction>> {
5128 if self.is_local() {
5129 let buffer = buffer_handle.read(cx);
5130 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5131 self.language_server_for_buffer(buffer, action.server_id, cx)
5132 {
5133 (adapter.clone(), server.clone())
5134 } else {
5135 return Task::ready(Ok(Default::default()));
5136 };
5137 let range = action.range.to_point_utf16(buffer);
5138
5139 cx.spawn(move |this, mut cx| async move {
5140 if let Some(lsp_range) = action
5141 .lsp_action
5142 .data
5143 .as_mut()
5144 .and_then(|d| d.get_mut("codeActionParams"))
5145 .and_then(|d| d.get_mut("range"))
5146 {
5147 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
5148 action.lsp_action = lang_server
5149 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
5150 .await?;
5151 } else {
5152 let actions = this
5153 .update(&mut cx, |this, cx| {
5154 this.code_actions(&buffer_handle, action.range, cx)
5155 })?
5156 .await?;
5157 action.lsp_action = actions
5158 .into_iter()
5159 .find(|a| a.lsp_action.title == action.lsp_action.title)
5160 .ok_or_else(|| anyhow!("code action is outdated"))?
5161 .lsp_action;
5162 }
5163
5164 if let Some(edit) = action.lsp_action.edit {
5165 if edit.changes.is_some() || edit.document_changes.is_some() {
5166 return Self::deserialize_workspace_edit(
5167 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5168 edit,
5169 push_to_history,
5170 lsp_adapter.clone(),
5171 lang_server.clone(),
5172 &mut cx,
5173 )
5174 .await;
5175 }
5176 }
5177
5178 if let Some(command) = action.lsp_action.command {
5179 this.update(&mut cx, |this, _| {
5180 this.last_workspace_edits_by_language_server
5181 .remove(&lang_server.server_id());
5182 })?;
5183
5184 let result = lang_server
5185 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5186 command: command.command,
5187 arguments: command.arguments.unwrap_or_default(),
5188 ..Default::default()
5189 })
5190 .await;
5191
5192 if let Err(err) = result {
5193 // TODO: LSP ERROR
5194 return Err(err);
5195 }
5196
5197 return Ok(this.update(&mut cx, |this, _| {
5198 this.last_workspace_edits_by_language_server
5199 .remove(&lang_server.server_id())
5200 .unwrap_or_default()
5201 })?);
5202 }
5203
5204 Ok(ProjectTransaction::default())
5205 })
5206 } else if let Some(project_id) = self.remote_id() {
5207 let client = self.client.clone();
5208 let request = proto::ApplyCodeAction {
5209 project_id,
5210 buffer_id: buffer_handle.read(cx).remote_id(),
5211 action: Some(language::proto::serialize_code_action(&action)),
5212 };
5213 cx.spawn(move |this, mut cx| async move {
5214 let response = client
5215 .request(request)
5216 .await?
5217 .transaction
5218 .ok_or_else(|| anyhow!("missing transaction"))?;
5219 this.update(&mut cx, |this, cx| {
5220 this.deserialize_project_transaction(response, push_to_history, cx)
5221 })?
5222 .await
5223 })
5224 } else {
5225 Task::ready(Err(anyhow!("project does not have a remote id")))
5226 }
5227 }
5228
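    // Applies on-type formatting for a buffer after the given trigger character.
    // The request waits for the edit that produced `position` to arrive, avoids
    // concurrent formatting of the same buffer, and on remote projects is
    // forwarded to the host as an `OnTypeFormatting` request.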
5229 fn apply_on_type_formatting(
5230 &self,
5231 buffer: Model<Buffer>,
5232 position: Anchor,
5233 trigger: String,
5234 cx: &mut ModelContext<Self>,
5235 ) -> Task<Result<Option<Transaction>>> {
5236 if self.is_local() {
5237 cx.spawn(move |this, mut cx| async move {
5238 // Do not allow multiple concurrent formatting requests for the
5239 // same buffer.
5240 this.update(&mut cx, |this, cx| {
5241 this.buffers_being_formatted
5242 .insert(buffer.read(cx).remote_id())
5243 })?;
5244
5245 let _cleanup = defer({
5246 let this = this.clone();
5247 let mut cx = cx.clone();
5248 let closure_buffer = buffer.clone();
5249 move || {
5250 this.update(&mut cx, |this, cx| {
5251 this.buffers_being_formatted
5252 .remove(&closure_buffer.read(cx).remote_id());
5253 })
5254 .ok();
5255 }
5256 });
5257
5258 buffer
5259 .update(&mut cx, |buffer, _| {
5260 buffer.wait_for_edits(Some(position.timestamp))
5261 })?
5262 .await?;
5263 this.update(&mut cx, |this, cx| {
5264 let position = position.to_point_utf16(buffer.read(cx));
5265 this.on_type_format(buffer, position, trigger, false, cx)
5266 })?
5267 .await
5268 })
5269 } else if let Some(project_id) = self.remote_id() {
5270 let client = self.client.clone();
5271 let request = proto::OnTypeFormatting {
5272 project_id,
5273 buffer_id: buffer.read(cx).remote_id(),
5274 position: Some(serialize_anchor(&position)),
5275 trigger,
5276 version: serialize_version(&buffer.read(cx).version()),
5277 };
5278 cx.spawn(move |_, _| async move {
5279 client
5280 .request(request)
5281 .await?
5282 .transaction
5283 .map(language::proto::deserialize_transaction)
5284 .transpose()
5285 })
5286 } else {
5287 Task::ready(Err(anyhow!("project does not have a remote id")))
5288 }
5289 }
5290
5291 async fn deserialize_edits(
5292 this: Model<Self>,
5293 buffer_to_edit: Model<Buffer>,
5294 edits: Vec<lsp::TextEdit>,
5295 push_to_history: bool,
5296 _: Arc<CachedLspAdapter>,
5297 language_server: Arc<LanguageServer>,
5298 cx: &mut AsyncAppContext,
5299 ) -> Result<Option<Transaction>> {
5300 let edits = this
5301 .update(cx, |this, cx| {
5302 this.edits_from_lsp(
5303 &buffer_to_edit,
5304 edits,
5305 language_server.server_id(),
5306 None,
5307 cx,
5308 )
5309 })?
5310 .await?;
5311
5312 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5313 buffer.finalize_last_transaction();
5314 buffer.start_transaction();
5315 for (range, text) in edits {
5316 buffer.edit([(range, text)], None, cx);
5317 }
5318
5319 if buffer.end_transaction(cx).is_some() {
5320 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5321 if !push_to_history {
5322 buffer.forget_transaction(transaction.id);
5323 }
5324 Some(transaction)
5325 } else {
5326 None
5327 }
5328 })?;
5329
5330 Ok(transaction)
5331 }
5332
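    // Translates an LSP `WorkspaceEdit` into project operations: resource
    // operations (create/rename/delete) are performed on the filesystem, while
    // text document edits are applied to buffers opened via their URIs, with each
    // buffer's changes captured as a transaction in the returned
    // `ProjectTransaction`.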
5333 async fn deserialize_workspace_edit(
5334 this: Model<Self>,
5335 edit: lsp::WorkspaceEdit,
5336 push_to_history: bool,
5337 lsp_adapter: Arc<CachedLspAdapter>,
5338 language_server: Arc<LanguageServer>,
5339 cx: &mut AsyncAppContext,
5340 ) -> Result<ProjectTransaction> {
5341 let fs = this.update(cx, |this, _| this.fs.clone())?;
5342 let mut operations = Vec::new();
5343 if let Some(document_changes) = edit.document_changes {
5344 match document_changes {
5345 lsp::DocumentChanges::Edits(edits) => {
5346 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5347 }
5348 lsp::DocumentChanges::Operations(ops) => operations = ops,
5349 }
5350 } else if let Some(changes) = edit.changes {
5351 operations.extend(changes.into_iter().map(|(uri, edits)| {
5352 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5353 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5354 uri,
5355 version: None,
5356 },
5357 edits: edits.into_iter().map(OneOf::Left).collect(),
5358 })
5359 }));
5360 }
5361
5362 let mut project_transaction = ProjectTransaction::default();
5363 for operation in operations {
5364 match operation {
5365 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5366 let abs_path = op
5367 .uri
5368 .to_file_path()
5369 .map_err(|_| anyhow!("can't convert URI to path"))?;
5370
5371 if let Some(parent_path) = abs_path.parent() {
5372 fs.create_dir(parent_path).await?;
5373 }
5374 if abs_path.ends_with("/") {
5375 fs.create_dir(&abs_path).await?;
5376 } else {
5377 fs.create_file(
5378 &abs_path,
5379 op.options
5380 .map(|options| fs::CreateOptions {
5381 overwrite: options.overwrite.unwrap_or(false),
5382 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5383 })
5384 .unwrap_or_default(),
5385 )
5386 .await?;
5387 }
5388 }
5389
5390 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5391 let source_abs_path = op
5392 .old_uri
5393 .to_file_path()
5394 .map_err(|_| anyhow!("can't convert URI to path"))?;
5395 let target_abs_path = op
5396 .new_uri
5397 .to_file_path()
5398 .map_err(|_| anyhow!("can't convert URI to path"))?;
5399 fs.rename(
5400 &source_abs_path,
5401 &target_abs_path,
5402 op.options
5403 .map(|options| fs::RenameOptions {
5404 overwrite: options.overwrite.unwrap_or(false),
5405 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5406 })
5407 .unwrap_or_default(),
5408 )
5409 .await?;
5410 }
5411
5412 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5413 let abs_path = op
5414 .uri
5415 .to_file_path()
5416 .map_err(|_| anyhow!("can't convert URI to path"))?;
5417 let options = op
5418 .options
5419 .map(|options| fs::RemoveOptions {
5420 recursive: options.recursive.unwrap_or(false),
5421 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5422 })
5423 .unwrap_or_default();
5424 if abs_path.ends_with("/") {
5425 fs.remove_dir(&abs_path, options).await?;
5426 } else {
5427 fs.remove_file(&abs_path, options).await?;
5428 }
5429 }
5430
5431 lsp::DocumentChangeOperation::Edit(op) => {
5432 let buffer_to_edit = this
5433 .update(cx, |this, cx| {
5434 this.open_local_buffer_via_lsp(
5435 op.text_document.uri,
5436 language_server.server_id(),
5437 lsp_adapter.name.clone(),
5438 cx,
5439 )
5440 })?
5441 .await?;
5442
5443 let edits = this
5444 .update(cx, |this, cx| {
5445 let edits = op.edits.into_iter().map(|edit| match edit {
5446 OneOf::Left(edit) => edit,
5447 OneOf::Right(edit) => edit.text_edit,
5448 });
5449 this.edits_from_lsp(
5450 &buffer_to_edit,
5451 edits,
5452 language_server.server_id(),
5453 op.text_document.version,
5454 cx,
5455 )
5456 })?
5457 .await?;
5458
5459 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5460 buffer.finalize_last_transaction();
5461 buffer.start_transaction();
5462 for (range, text) in edits {
5463 buffer.edit([(range, text)], None, cx);
5464 }
5465 let transaction = if buffer.end_transaction(cx).is_some() {
5466 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5467 if !push_to_history {
5468 buffer.forget_transaction(transaction.id);
5469 }
5470 Some(transaction)
5471 } else {
5472 None
5473 };
5474
5475 transaction
5476 })?;
5477 if let Some(transaction) = transaction {
5478 project_transaction.0.insert(buffer_to_edit, transaction);
5479 }
5480 }
5481 }
5482 }
5483
5484 Ok(project_transaction)
5485 }
5486
5487 pub fn prepare_rename<T: ToPointUtf16>(
5488 &self,
5489 buffer: Model<Buffer>,
5490 position: T,
5491 cx: &mut ModelContext<Self>,
5492 ) -> Task<Result<Option<Range<Anchor>>>> {
5493 let position = position.to_point_utf16(buffer.read(cx));
5494 self.request_lsp(
5495 buffer,
5496 LanguageServerToQuery::Primary,
5497 PrepareRename { position },
5498 cx,
5499 )
5500 }
5501
5502 pub fn perform_rename<T: ToPointUtf16>(
5503 &self,
5504 buffer: Model<Buffer>,
5505 position: T,
5506 new_name: String,
5507 push_to_history: bool,
5508 cx: &mut ModelContext<Self>,
5509 ) -> Task<Result<ProjectTransaction>> {
5510 let position = position.to_point_utf16(buffer.read(cx));
5511 self.request_lsp(
5512 buffer,
5513 LanguageServerToQuery::Primary,
5514 PerformRename {
5515 position,
5516 new_name,
5517 push_to_history,
5518 },
5519 cx,
5520 )
5521 }
5522
5523 pub fn on_type_format<T: ToPointUtf16>(
5524 &self,
5525 buffer: Model<Buffer>,
5526 position: T,
5527 trigger: String,
5528 push_to_history: bool,
5529 cx: &mut ModelContext<Self>,
5530 ) -> Task<Result<Option<Transaction>>> {
5531 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5532 let position = position.to_point_utf16(buffer);
5533 (
5534 position,
5535 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5536 .tab_size,
5537 )
5538 });
5539 self.request_lsp(
5540 buffer.clone(),
5541 LanguageServerToQuery::Primary,
5542 OnTypeFormatting {
5543 position,
5544 trigger,
5545 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5546 push_to_history,
5547 },
5548 cx,
5549 )
5550 }
5551
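    // Requests inlay hints for the anchored range. Locally, the request waits for
    // the edits that created the range's anchors to be applied before querying the
    // primary language server; remote projects send a proto `InlayHints` request
    // and convert the response back through `LspCommand::response_from_proto`.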
5552 pub fn inlay_hints<T: ToOffset>(
5553 &self,
5554 buffer_handle: Model<Buffer>,
5555 range: Range<T>,
5556 cx: &mut ModelContext<Self>,
5557 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5558 let buffer = buffer_handle.read(cx);
5559 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5560 let range_start = range.start;
5561 let range_end = range.end;
5562 let buffer_id = buffer.remote_id();
5563 let buffer_version = buffer.version().clone();
5564 let lsp_request = InlayHints { range };
5565
5566 if self.is_local() {
5567 let lsp_request_task = self.request_lsp(
5568 buffer_handle.clone(),
5569 LanguageServerToQuery::Primary,
5570 lsp_request,
5571 cx,
5572 );
5573 cx.spawn(move |_, mut cx| async move {
5574 buffer_handle
5575 .update(&mut cx, |buffer, _| {
5576 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5577 })?
5578 .await
5579 .context("waiting for inlay hint request range edits")?;
5580 lsp_request_task.await.context("inlay hints LSP request")
5581 })
5582 } else if let Some(project_id) = self.remote_id() {
5583 let client = self.client.clone();
5584 let request = proto::InlayHints {
5585 project_id,
5586 buffer_id,
5587 start: Some(serialize_anchor(&range_start)),
5588 end: Some(serialize_anchor(&range_end)),
5589 version: serialize_version(&buffer_version),
5590 };
5591 cx.spawn(move |project, cx| async move {
5592 let response = client
5593 .request(request)
5594 .await
5595 .context("inlay hints proto request")?;
5596 let hints_request_result = LspCommand::response_from_proto(
5597 lsp_request,
5598 response,
5599 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5600 buffer_handle.clone(),
5601 cx,
5602 )
5603 .await;
5604
5605 hints_request_result.context("inlay hints proto response conversion")
5606 })
5607 } else {
5608 Task::ready(Err(anyhow!("project does not have a remote id")))
5609 }
5610 }
5611
5612 pub fn resolve_inlay_hint(
5613 &self,
5614 hint: InlayHint,
5615 buffer_handle: Model<Buffer>,
5616 server_id: LanguageServerId,
5617 cx: &mut ModelContext<Self>,
5618 ) -> Task<anyhow::Result<InlayHint>> {
5619 if self.is_local() {
5620 let buffer = buffer_handle.read(cx);
5621 let (_, lang_server) = if let Some((adapter, server)) =
5622 self.language_server_for_buffer(buffer, server_id, cx)
5623 {
5624 (adapter.clone(), server.clone())
5625 } else {
5626 return Task::ready(Ok(hint));
5627 };
5628 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5629 return Task::ready(Ok(hint));
5630 }
5631
5632 let buffer_snapshot = buffer.snapshot();
5633 cx.spawn(move |_, mut cx| async move {
5634 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5635 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5636 );
5637 let resolved_hint = resolve_task
5638 .await
5639 .context("inlay hint resolve LSP request")?;
5640 let resolved_hint = InlayHints::lsp_to_project_hint(
5641 resolved_hint,
5642 &buffer_handle,
5643 server_id,
5644 ResolveState::Resolved,
5645 false,
5646 &mut cx,
5647 )
5648 .await?;
5649 Ok(resolved_hint)
5650 })
5651 } else if let Some(project_id) = self.remote_id() {
5652 let client = self.client.clone();
5653 let request = proto::ResolveInlayHint {
5654 project_id,
5655 buffer_id: buffer_handle.read(cx).remote_id(),
5656 language_server_id: server_id.0 as u64,
5657 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5658 };
5659 cx.spawn(move |_, _| async move {
5660 let response = client
5661 .request(request)
5662 .await
5663 .context("inlay hints proto request")?;
5664 match response.hint {
5665 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5666 .context("inlay hints proto resolve response conversion"),
5667 None => Ok(hint),
5668 }
5669 })
5670 } else {
5671 Task::ready(Err(anyhow!("project does not have a remote id")))
5672 }
5673 }
5674
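    // Searches the project. Local projects use the multi-phase scan implemented in
    // `search_local`; remote projects send the query to the host and stream the
    // deserialized buffer ranges back through the returned channel.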
5675 #[allow(clippy::type_complexity)]
5676 pub fn search(
5677 &self,
5678 query: SearchQuery,
5679 cx: &mut ModelContext<Self>,
5680 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5681 if self.is_local() {
5682 self.search_local(query, cx)
5683 } else if let Some(project_id) = self.remote_id() {
5684 let (tx, rx) = smol::channel::unbounded();
5685 let request = self.client.request(query.to_proto(project_id));
5686 cx.spawn(move |this, mut cx| async move {
5687 let response = request.await?;
5688 let mut result = HashMap::default();
5689 for location in response.locations {
5690 let target_buffer = this
5691 .update(&mut cx, |this, cx| {
5692 this.wait_for_remote_buffer(location.buffer_id, cx)
5693 })?
5694 .await?;
5695 let start = location
5696 .start
5697 .and_then(deserialize_anchor)
5698 .ok_or_else(|| anyhow!("missing target start"))?;
5699 let end = location
5700 .end
5701 .and_then(deserialize_anchor)
5702 .ok_or_else(|| anyhow!("missing target end"))?;
5703 result
5704 .entry(target_buffer)
5705 .or_insert(Vec::new())
5706 .push(start..end)
5707 }
5708 for (buffer, ranges) in result {
5709 let _ = tx.send((buffer, ranges)).await;
5710 }
5711 Result::<(), anyhow::Error>::Ok(())
5712 })
5713 .detach_and_log_err(cx);
5714 rx
5715 } else {
5716 unimplemented!();
5717 }
5718 }
5719
5720 pub fn search_local(
5721 &self,
5722 query: SearchQuery,
5723 cx: &mut ModelContext<Self>,
5724 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
        // Local search is split into several phases.
        // In short, we do two passes: an initial pass to pick the files that contain at least one match,
        // and a second pass that finds the positions of all matches within those candidate files.
        // The Receiver returned from this function reports matches sorted by buffer path; files without a buffer path are reported first.
        //
        // It gets a bit hairy though, because we must account for files that have no persistent representation
        // on the FS: untitled buffers and buffers with unsaved changes need to be scanned too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (i.e. unsaved files / untitled buffers).
        //    Then we go through each worktree and check for files that match the query's predicate. If a file has an opened version,
        //    we skip the scan of its FS version altogether - after all, what we have in memory is more up-to-date than what's on the FS.
        // 2. At this point, we have a list of all potentially matching buffers/files.
        //    We sort that list by buffer path and retain it for later use.
        //    We also ensure that all candidate buffers are opened and available in the project.
        // 3. We scan all the candidate buffers on multiple background threads.
        //    We cannot assume that every candidate will have a match - while at least one match
        //    is guaranteed for files obtained from the FS, the buffers we got from memory (unsaved files / unnamed buffers) might not match at all.
        //    An auxiliary background thread gathers the results: it accepts match notifications
        //    (buffer has / doesn't have matches) in arbitrary order, but reports them in the sorted order established in step 2.
        //    As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
        //    entry - which might already be available thanks to out-of-order processing.
        //
        // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // However, that would force the project search (the main user of this function) to do the sorting itself, on the fly.
        // That isn't as straightforward as running an insertion sort, sadly, and it would also have to maintain match indices
        // in the face of a constantly updating list of sorted matches.
        // This implementation, by contrast, offers index stability, since matches are already reported in sorted order.
5753 let snapshots = self
5754 .visible_worktrees(cx)
5755 .filter_map(|tree| {
5756 let tree = tree.read(cx).as_local()?;
5757 Some(tree.snapshot())
5758 })
5759 .collect::<Vec<_>>();
5760
5761 let background = cx.background_executor().clone();
5762 let path_count: usize = snapshots
5763 .iter()
5764 .map(|s| {
5765 if query.include_ignored() {
5766 s.file_count()
5767 } else {
5768 s.visible_file_count()
5769 }
5770 })
5771 .sum();
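        // Nothing to scan: return a receiver whose sender is dropped immediately, so it
        // yields no matches.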
5772 if path_count == 0 {
5773 let (_, rx) = smol::channel::bounded(1024);
5774 return rx;
5775 }
5776 let workers = background.num_cpus().min(path_count);
5777 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5778 let mut unnamed_files = vec![];
5779 let opened_buffers = self
5780 .opened_buffers
5781 .iter()
5782 .filter_map(|(_, b)| {
5783 let buffer = b.upgrade()?;
5784 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5785 let is_ignored = buffer
5786 .project_path(cx)
5787 .and_then(|path| self.entry_for_path(&path, cx))
5788 .map_or(false, |entry| entry.is_ignored);
5789 (is_ignored, buffer.snapshot())
5790 });
5791 if is_ignored && !query.include_ignored() {
5792 return None;
5793 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5794 Some((path.clone(), (buffer, snapshot)))
5795 } else {
5796 unnamed_files.push(buffer);
5797 None
5798 }
5799 })
5800 .collect();
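        // Phase 1: seed the candidate queue with the already-open buffers and scan the
        // worktrees for other files that might contain a match.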
5801 cx.background_executor()
5802 .spawn(Self::background_search(
5803 unnamed_files,
5804 opened_buffers,
5805 cx.background_executor().clone(),
5806 self.fs.clone(),
5807 workers,
5808 query.clone(),
5809 path_count,
5810 snapshots,
5811 matching_paths_tx,
5812 ))
5813 .detach();
5814
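        // Phase 2: sort the candidates by path and open a buffer for each of them.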
5815 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5816 let background = cx.background_executor().clone();
5817 let (result_tx, result_rx) = smol::channel::bounded(1024);
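        // Phase 3: scan the candidate buffers on background threads and report matches in
        // sorted order.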
5818 cx.background_executor()
5819 .spawn(async move {
5820 let Ok(buffers) = buffers.await else {
5821 return;
5822 };
5823
5824 let buffers_len = buffers.len();
5825 if buffers_len == 0 {
5826 return;
5827 }
5828 let query = &query;
5829 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5830 background
5831 .scoped(|scope| {
5832 #[derive(Clone)]
5833 struct FinishedStatus {
5834 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
5835 buffer_index: SearchMatchCandidateIndex,
5836 }
5837
5838 for _ in 0..workers {
5839 let finished_tx = finished_tx.clone();
5840 let mut buffers_rx = buffers_rx.clone();
5841 scope.spawn(async move {
5842 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5843 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5844 {
5845 if query.file_matches(
5846 snapshot.file().map(|file| file.path().as_ref()),
5847 ) {
5848 query
5849 .search(snapshot, None)
5850 .await
5851 .iter()
5852 .map(|range| {
5853 snapshot.anchor_before(range.start)
5854 ..snapshot.anchor_after(range.end)
5855 })
5856 .collect()
5857 } else {
5858 Vec::new()
5859 }
5860 } else {
5861 Vec::new()
5862 };
5863
5864 let status = if !buffer_matches.is_empty() {
5865 let entry = if let Some((buffer, _)) = entry.as_ref() {
5866 Some((buffer.clone(), buffer_matches))
5867 } else {
5868 None
5869 };
5870 FinishedStatus {
5871 entry,
5872 buffer_index,
5873 }
5874 } else {
5875 FinishedStatus {
5876 entry: None,
5877 buffer_index,
5878 }
5879 };
5880 if finished_tx.send(status).await.is_err() {
5881 break;
5882 }
5883 }
5884 });
5885 }
5886 // Report sorted matches
5887 scope.spawn(async move {
5888 let mut current_index = 0;
5889 let mut scratch = vec![None; buffers_len];
5890 while let Some(status) = finished_rx.next().await {
5891 debug_assert!(
5892 scratch[status.buffer_index].is_none(),
5893 "Got match status of position {} twice",
5894 status.buffer_index
5895 );
5896 let index = status.buffer_index;
5897 scratch[index] = Some(status);
5898 while current_index < buffers_len {
5899 let Some(current_entry) = scratch[current_index].take() else {
5900 // We intentionally **do not** increment `current_index` here. When next element arrives
5901 // from `finished_rx`, we will inspect the same position again, hoping for it to be Some(_)
5902 // this time.
5903 break;
5904 };
5905 if let Some(entry) = current_entry.entry {
5906 result_tx.send(entry).await.log_err();
5907 }
5908 current_index += 1;
5909 }
5910 if current_index == buffers_len {
5911 break;
5912 }
5913 }
5914 });
5915 })
5916 .await;
5917 })
5918 .detach();
5919 result_rx
5920 }
5921
    /// Pick the paths that might contain a match for the given search query.
5923 async fn background_search(
5924 unnamed_buffers: Vec<Model<Buffer>>,
5925 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
5926 executor: BackgroundExecutor,
5927 fs: Arc<dyn Fs>,
5928 workers: usize,
5929 query: SearchQuery,
5930 path_count: usize,
5931 snapshots: Vec<LocalSnapshot>,
5932 matching_paths_tx: Sender<SearchMatchCandidate>,
5933 ) {
5934 let fs = &fs;
5935 let query = &query;
5936 let matching_paths_tx = &matching_paths_tx;
5937 let snapshots = &snapshots;
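        // Split the candidate paths evenly between the workers (rounding up).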
5938 let paths_per_worker = (path_count + workers - 1) / workers;
5939 for buffer in unnamed_buffers {
5940 matching_paths_tx
5941 .send(SearchMatchCandidate::OpenBuffer {
5942 buffer: buffer.clone(),
5943 path: None,
5944 })
5945 .await
5946 .log_err();
5947 }
5948 for (path, (buffer, _)) in opened_buffers.iter() {
5949 matching_paths_tx
5950 .send(SearchMatchCandidate::OpenBuffer {
5951 buffer: buffer.clone(),
5952 path: Some(path.clone()),
5953 })
5954 .await
5955 .log_err();
5956 }
5957 executor
5958 .scoped(|scope| {
5959 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5960
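                // Each worker scans a contiguous slice of the concatenated file lists of
                // all worktree snapshots.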
5961 for worker_ix in 0..workers {
5962 let worker_start_ix = worker_ix * paths_per_worker;
5963 let worker_end_ix = worker_start_ix + paths_per_worker;
                    let opened_buffers = opened_buffers.clone();
5965 let limiter = Arc::clone(&max_concurrent_workers);
5966 scope.spawn(async move {
5967 let _guard = limiter.acquire().await;
5968 let mut snapshot_start_ix = 0;
5969 let mut abs_path = PathBuf::new();
5970 for snapshot in snapshots {
5971 let snapshot_end_ix = snapshot_start_ix
5972 + if query.include_ignored() {
5973 snapshot.file_count()
5974 } else {
5975 snapshot.visible_file_count()
5976 };
5977 if worker_end_ix <= snapshot_start_ix {
5978 break;
5979 } else if worker_start_ix > snapshot_end_ix {
5980 snapshot_start_ix = snapshot_end_ix;
5981 continue;
5982 } else {
5983 let start_in_snapshot =
5984 worker_start_ix.saturating_sub(snapshot_start_ix);
5985 let end_in_snapshot =
5986 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5987
5988 for entry in snapshot
5989 .files(query.include_ignored(), start_in_snapshot)
5990 .take(end_in_snapshot - start_in_snapshot)
5991 {
5992 if matching_paths_tx.is_closed() {
5993 break;
5994 }
                                    if opened_buffers.contains_key(&entry.path) {
5996 continue;
5997 }
5998 let matches = if query.file_matches(Some(&entry.path)) {
5999 abs_path.clear();
6000 abs_path.push(&snapshot.abs_path());
6001 abs_path.push(&entry.path);
6002 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
6003 {
6004 query.detect(file).unwrap_or(false)
6005 } else {
6006 false
6007 }
6008 } else {
6009 false
6010 };
6011
6012 if matches {
6013 let project_path = SearchMatchCandidate::Path {
6014 worktree_id: snapshot.id(),
6015 path: entry.path.clone(),
6016 is_ignored: entry.is_ignored,
6017 };
6018 if matching_paths_tx.send(project_path).await.is_err() {
6019 break;
6020 }
6021 }
6022 }
6023
6024 snapshot_start_ix = snapshot_end_ix;
6025 }
6026 }
6027 });
6028 }
6029
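                // Ignored directories are not fully indexed in the worktree snapshots, so
                // when ignored files are requested we walk them on the file system directly.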
6030 if query.include_ignored() {
6031 for snapshot in snapshots {
6032 for ignored_entry in snapshot
6033 .entries(query.include_ignored())
6034 .filter(|e| e.is_ignored)
6035 {
6036 let limiter = Arc::clone(&max_concurrent_workers);
6037 scope.spawn(async move {
6038 let _guard = limiter.acquire().await;
6039 let mut ignored_paths_to_process =
6040 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
6041 while let Some(ignored_abs_path) =
6042 ignored_paths_to_process.pop_front()
6043 {
6044 if let Some(fs_metadata) = fs
6045 .metadata(&ignored_abs_path)
6046 .await
6047 .with_context(|| {
6048 format!("fetching fs metadata for {ignored_abs_path:?}")
6049 })
6050 .log_err()
6051 .flatten()
6052 {
6053 if fs_metadata.is_dir {
6054 if let Some(mut subfiles) = fs
6055 .read_dir(&ignored_abs_path)
6056 .await
6057 .with_context(|| {
6058 format!(
6059 "listing ignored path {ignored_abs_path:?}"
6060 )
6061 })
6062 .log_err()
6063 {
6064 while let Some(subfile) = subfiles.next().await {
6065 if let Some(subfile) = subfile.log_err() {
6066 ignored_paths_to_process.push_back(subfile);
6067 }
6068 }
6069 }
6070 } else if !fs_metadata.is_symlink {
6071 if !query.file_matches(Some(&ignored_abs_path))
6072 || snapshot.is_path_excluded(
6073 ignored_entry.path.to_path_buf(),
6074 )
6075 {
6076 continue;
6077 }
6078 let matches = if let Some(file) = fs
6079 .open_sync(&ignored_abs_path)
6080 .await
6081 .with_context(|| {
6082 format!(
6083 "Opening ignored path {ignored_abs_path:?}"
6084 )
6085 })
6086 .log_err()
6087 {
6088 query.detect(file).unwrap_or(false)
6089 } else {
6090 false
6091 };
6092 if matches {
6093 let project_path = SearchMatchCandidate::Path {
6094 worktree_id: snapshot.id(),
6095 path: Arc::from(
6096 ignored_abs_path
6097 .strip_prefix(snapshot.abs_path())
6098 .expect(
6099 "scanning worktree-related files",
6100 ),
6101 ),
6102 is_ignored: true,
6103 };
6104 if matching_paths_tx
6105 .send(project_path)
6106 .await
6107 .is_err()
6108 {
6109 return;
6110 }
6111 }
6112 }
6113 }
6114 }
6115 });
6116 }
6117 }
6118 }
6119 })
6120 .await;
6121 }
6122
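    /// Sends an LSP request to a language server for the given buffer: directly for local
    /// projects, or via the host for remote ones. Returns a default response when no
    /// matching server is available or it lacks the required capability.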
6123 pub fn request_lsp<R: LspCommand>(
6124 &self,
6125 buffer_handle: Model<Buffer>,
6126 server: LanguageServerToQuery,
6127 request: R,
6128 cx: &mut ModelContext<Self>,
6129 ) -> Task<Result<R::Response>>
6130 where
6131 <R::LspRequest as lsp::request::Request>::Result: Send,
6132 <R::LspRequest as lsp::request::Request>::Params: Send,
6133 {
6134 let buffer = buffer_handle.read(cx);
6135 if self.is_local() {
6136 let language_server = match server {
6137 LanguageServerToQuery::Primary => {
6138 match self.primary_language_server_for_buffer(buffer, cx) {
6139 Some((_, server)) => Some(Arc::clone(server)),
6140 None => return Task::ready(Ok(Default::default())),
6141 }
6142 }
6143 LanguageServerToQuery::Other(id) => self
6144 .language_server_for_buffer(buffer, id, cx)
6145 .map(|(_, server)| Arc::clone(server)),
6146 };
6147 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6148 if let (Some(file), Some(language_server)) = (file, language_server) {
6149 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6150 return cx.spawn(move |this, cx| async move {
6151 if !request.check_capabilities(language_server.capabilities()) {
6152 return Ok(Default::default());
6153 }
6154
6155 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6156 let response = match result {
6157 Ok(response) => response,
6158
6159 Err(err) => {
6160 log::warn!(
6161 "Generic lsp request to {} failed: {}",
6162 language_server.name(),
6163 err
6164 );
6165 return Err(err);
6166 }
6167 };
6168
6169 request
6170 .response_from_lsp(
6171 response,
6172 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6173 buffer_handle,
6174 language_server.server_id(),
6175 cx,
6176 )
6177 .await
6178 });
6179 }
6180 } else if let Some(project_id) = self.remote_id() {
6181 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6182 }
6183
6184 Task::ready(Ok(Default::default()))
6185 }
6186
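    /// Forwards an LSP-style request to the remote project host over RPC and converts the
    /// response back into the command's native type.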
6187 fn send_lsp_proto_request<R: LspCommand>(
6188 &self,
6189 buffer: Model<Buffer>,
6190 project_id: u64,
6191 request: R,
6192 cx: &mut ModelContext<'_, Project>,
6193 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6194 let rpc = self.client.clone();
6195 let message = request.to_proto(project_id, buffer.read(cx));
6196 cx.spawn(move |this, mut cx| async move {
6197 // Ensure the project is still alive by the time the task
6198 // is scheduled.
6199 this.upgrade().context("project dropped")?;
6200 let response = rpc.request(message).await?;
6201 let this = this.upgrade().context("project dropped")?;
6202 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6203 Err(anyhow!("disconnected before completing request"))
6204 } else {
6205 request
6206 .response_from_proto(response, this, buffer, cx)
6207 .await
6208 }
6209 })
6210 }
6211
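    /// Sorts the match candidates by path (ignored entries last) and opens a buffer for each
    /// of them. Returns the sorted candidate list along with a channel that yields each opened
    /// buffer and its snapshot, tagged with its index into that list.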
6212 fn sort_candidates_and_open_buffers(
6213 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6214 cx: &mut ModelContext<Self>,
6215 ) -> (
6216 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6217 Receiver<(
6218 Option<(Model<Buffer>, BufferSnapshot)>,
6219 SearchMatchCandidateIndex,
6220 )>,
6221 ) {
6222 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6223 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6224 cx.spawn(move |this, cx| async move {
6225 let mut buffers = Vec::new();
6226 let mut ignored_buffers = Vec::new();
6227 while let Some(entry) = matching_paths_rx.next().await {
6228 if matches!(
6229 entry,
6230 SearchMatchCandidate::Path {
6231 is_ignored: true,
6232 ..
6233 }
6234 ) {
6235 ignored_buffers.push(entry);
6236 } else {
6237 buffers.push(entry);
6238 }
6239 }
6240 buffers.sort_by_key(|candidate| candidate.path());
6241 ignored_buffers.sort_by_key(|candidate| candidate.path());
6242 buffers.extend(ignored_buffers);
6243 let matching_paths = buffers.clone();
6244 let _ = sorted_buffers_tx.send(buffers);
6245 for (index, candidate) in matching_paths.into_iter().enumerate() {
6246 if buffers_tx.is_closed() {
6247 break;
6248 }
6249 let this = this.clone();
6250 let buffers_tx = buffers_tx.clone();
6251 cx.spawn(move |mut cx| async move {
6252 let buffer = match candidate {
6253 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6254 SearchMatchCandidate::Path {
6255 worktree_id, path, ..
6256 } => this
6257 .update(&mut cx, |this, cx| {
6258 this.open_buffer((worktree_id, path), cx)
6259 })?
6260 .await
6261 .log_err(),
6262 };
6263 if let Some(buffer) = buffer {
6264 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6265 buffers_tx
6266 .send((Some((buffer, snapshot)), index))
6267 .await
6268 .log_err();
6269 } else {
6270 buffers_tx.send((None, index)).await.log_err();
6271 }
6272
6273 Ok::<_, anyhow::Error>(())
6274 })
6275 .detach();
6276 }
6277 })
6278 .detach();
6279 (sorted_buffers_rx, buffers_rx)
6280 }
6281
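    /// Returns the local worktree containing `abs_path` together with the path relative to
    /// that worktree's root, creating a new worktree if no existing one contains it.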
6282 pub fn find_or_create_local_worktree(
6283 &mut self,
6284 abs_path: impl AsRef<Path>,
6285 visible: bool,
6286 cx: &mut ModelContext<Self>,
6287 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6288 let abs_path = abs_path.as_ref();
6289 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6290 Task::ready(Ok((tree, relative_path)))
6291 } else {
6292 let worktree = self.create_local_worktree(abs_path, visible, cx);
6293 cx.background_executor()
6294 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6295 }
6296 }
6297
6298 pub fn find_local_worktree(
6299 &self,
6300 abs_path: &Path,
6301 cx: &AppContext,
6302 ) -> Option<(Model<Worktree>, PathBuf)> {
6303 for tree in &self.worktrees {
6304 if let Some(tree) = tree.upgrade() {
6305 if let Some(relative_path) = tree
6306 .read(cx)
6307 .as_local()
6308 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6309 {
6310 return Some((tree.clone(), relative_path.into()));
6311 }
6312 }
6313 }
6314 None
6315 }
6316
6317 pub fn is_shared(&self) -> bool {
6318 match &self.client_state {
6319 ProjectClientState::Shared { .. } => true,
6320 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6321 }
6322 }
6323
6324 fn create_local_worktree(
6325 &mut self,
6326 abs_path: impl AsRef<Path>,
6327 visible: bool,
6328 cx: &mut ModelContext<Self>,
6329 ) -> Task<Result<Model<Worktree>>> {
6330 let fs = self.fs.clone();
6331 let client = self.client.clone();
6332 let next_entry_id = self.next_entry_id.clone();
6333 let path: Arc<Path> = abs_path.as_ref().into();
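        // Concurrent requests for the same path share a single loading task.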
6334 let task = self
6335 .loading_local_worktrees
6336 .entry(path.clone())
6337 .or_insert_with(|| {
6338 cx.spawn(move |project, mut cx| {
6339 async move {
6340 let worktree = Worktree::local(
6341 client.clone(),
6342 path.clone(),
6343 visible,
6344 fs,
6345 next_entry_id,
6346 &mut cx,
6347 )
6348 .await;
6349
6350 project.update(&mut cx, |project, _| {
6351 project.loading_local_worktrees.remove(&path);
6352 })?;
6353
6354 let worktree = worktree?;
6355 project
6356 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6357 Ok(worktree)
6358 }
6359 .map_err(Arc::new)
6360 })
6361 .shared()
6362 })
6363 .clone();
6364 cx.background_executor().spawn(async move {
6365 match task.await {
6366 Ok(worktree) => Ok(worktree),
6367 Err(err) => Err(anyhow!("{}", err)),
6368 }
6369 })
6370 }
6371
6372 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6373 self.worktrees.retain(|worktree| {
6374 if let Some(worktree) = worktree.upgrade() {
6375 let id = worktree.read(cx).id();
6376 if id == id_to_remove {
6377 cx.emit(Event::WorktreeRemoved(id));
6378 false
6379 } else {
6380 true
6381 }
6382 } else {
6383 false
6384 }
6385 });
6386 self.metadata_changed(cx);
6387 }
6388
6389 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6390 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6391 if worktree.read(cx).is_local() {
6392 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6393 worktree::Event::UpdatedEntries(changes) => {
6394 this.update_local_worktree_buffers(&worktree, changes, cx);
6395 this.update_local_worktree_language_servers(&worktree, changes, cx);
6396 this.update_local_worktree_settings(&worktree, changes, cx);
6397 this.update_prettier_settings(&worktree, changes, cx);
6398 cx.emit(Event::WorktreeUpdatedEntries(
6399 worktree.read(cx).id(),
6400 changes.clone(),
6401 ));
6402 }
6403 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6404 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6405 }
6406 })
6407 .detach();
6408 }
6409
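        // Hold visible, remote, or shared worktrees strongly; otherwise keep only a weak
        // handle so the worktree can be dropped once it is no longer used elsewhere.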
6410 let push_strong_handle = {
6411 let worktree = worktree.read(cx);
6412 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6413 };
6414 if push_strong_handle {
6415 self.worktrees
6416 .push(WorktreeHandle::Strong(worktree.clone()));
6417 } else {
6418 self.worktrees
6419 .push(WorktreeHandle::Weak(worktree.downgrade()));
6420 }
6421
6422 let handle_id = worktree.entity_id();
6423 cx.observe_release(worktree, move |this, worktree, cx| {
6424 let _ = this.remove_worktree(worktree.id(), cx);
6425 cx.update_global::<SettingsStore, _>(|store, cx| {
6426 store
6427 .clear_local_settings(handle_id.as_u64() as usize, cx)
6428 .log_err()
6429 });
6430 })
6431 .detach();
6432
6433 cx.emit(Event::WorktreeAdded);
6434 self.metadata_changed(cx);
6435 }
6436
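    /// Updates the `File` associated with each open buffer in the given worktree after its
    /// entries change on disk, and re-registers renamed buffers with their language servers.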
6437 fn update_local_worktree_buffers(
6438 &mut self,
6439 worktree_handle: &Model<Worktree>,
6440 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6441 cx: &mut ModelContext<Self>,
6442 ) {
6443 let snapshot = worktree_handle.read(cx).snapshot();
6444
6445 let mut renamed_buffers = Vec::new();
6446 for (path, entry_id, _) in changes {
6447 let worktree_id = worktree_handle.read(cx).id();
6448 let project_path = ProjectPath {
6449 worktree_id,
6450 path: path.clone(),
6451 };
6452
6453 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6454 Some(&buffer_id) => buffer_id,
6455 None => match self.local_buffer_ids_by_path.get(&project_path) {
6456 Some(&buffer_id) => buffer_id,
6457 None => {
6458 continue;
6459 }
6460 },
6461 };
6462
6463 let open_buffer = self.opened_buffers.get(&buffer_id);
6464 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6465 buffer
6466 } else {
6467 self.opened_buffers.remove(&buffer_id);
6468 self.local_buffer_ids_by_path.remove(&project_path);
6469 self.local_buffer_ids_by_entry_id.remove(entry_id);
6470 continue;
6471 };
6472
6473 buffer.update(cx, |buffer, cx| {
6474 if let Some(old_file) = File::from_dyn(buffer.file()) {
6475 if old_file.worktree != *worktree_handle {
6476 return;
6477 }
6478
6479 let new_file = if let Some(entry) = old_file
6480 .entry_id
6481 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6482 {
6483 File {
6484 is_local: true,
6485 entry_id: Some(entry.id),
6486 mtime: entry.mtime,
6487 path: entry.path.clone(),
6488 worktree: worktree_handle.clone(),
6489 is_deleted: false,
6490 }
6491 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6492 File {
6493 is_local: true,
6494 entry_id: Some(entry.id),
6495 mtime: entry.mtime,
6496 path: entry.path.clone(),
6497 worktree: worktree_handle.clone(),
6498 is_deleted: false,
6499 }
6500 } else {
6501 File {
6502 is_local: true,
6503 entry_id: old_file.entry_id,
6504 path: old_file.path().clone(),
6505 mtime: old_file.mtime(),
6506 worktree: worktree_handle.clone(),
6507 is_deleted: true,
6508 }
6509 };
6510
6511 let old_path = old_file.abs_path(cx);
6512 if new_file.abs_path(cx) != old_path {
6513 renamed_buffers.push((cx.handle(), old_file.clone()));
6514 self.local_buffer_ids_by_path.remove(&project_path);
6515 self.local_buffer_ids_by_path.insert(
6516 ProjectPath {
6517 worktree_id,
6518 path: path.clone(),
6519 },
6520 buffer_id,
6521 );
6522 }
6523
6524 if new_file.entry_id != Some(*entry_id) {
6525 self.local_buffer_ids_by_entry_id.remove(entry_id);
6526 if let Some(entry_id) = new_file.entry_id {
6527 self.local_buffer_ids_by_entry_id
6528 .insert(entry_id, buffer_id);
6529 }
6530 }
6531
6532 if new_file != *old_file {
6533 if let Some(project_id) = self.remote_id() {
6534 self.client
6535 .send(proto::UpdateBufferFile {
6536 project_id,
6537 buffer_id: buffer_id as u64,
6538 file: Some(new_file.to_proto()),
6539 })
6540 .log_err();
6541 }
6542
6543 buffer.file_updated(Arc::new(new_file), cx);
6544 }
6545 }
6546 });
6547 }
6548
6549 for (buffer, old_file) in renamed_buffers {
6550 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6551 self.detect_language_for_buffer(&buffer, cx);
6552 self.register_buffer_with_language_servers(&buffer, cx);
6553 }
6554 }
6555
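    /// Notifies the language servers running for this worktree about changed files that
    /// match their registered file watchers.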
6556 fn update_local_worktree_language_servers(
6557 &mut self,
6558 worktree_handle: &Model<Worktree>,
6559 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6560 cx: &mut ModelContext<Self>,
6561 ) {
6562 if changes.is_empty() {
6563 return;
6564 }
6565
6566 let worktree_id = worktree_handle.read(cx).id();
6567 let mut language_server_ids = self
6568 .language_server_ids
6569 .iter()
6570 .filter_map(|((server_worktree_id, _), server_id)| {
6571 (*server_worktree_id == worktree_id).then_some(*server_id)
6572 })
6573 .collect::<Vec<_>>();
6574 language_server_ids.sort();
6575 language_server_ids.dedup();
6576
6577 let abs_path = worktree_handle.read(cx).abs_path();
6578 for server_id in &language_server_ids {
6579 if let Some(LanguageServerState::Running {
6580 server,
6581 watched_paths,
6582 ..
6583 }) = self.language_servers.get(server_id)
6584 {
6585 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
6586 let params = lsp::DidChangeWatchedFilesParams {
6587 changes: changes
6588 .iter()
6589 .filter_map(|(path, _, change)| {
6590 if !watched_paths.is_match(&path) {
6591 return None;
6592 }
6593 let typ = match change {
6594 PathChange::Loaded => return None,
6595 PathChange::Added => lsp::FileChangeType::CREATED,
6596 PathChange::Removed => lsp::FileChangeType::DELETED,
6597 PathChange::Updated => lsp::FileChangeType::CHANGED,
6598 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6599 };
6600 Some(lsp::FileEvent {
6601 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6602 typ,
6603 })
6604 })
6605 .collect(),
6606 };
6607
6608 if !params.changes.is_empty() {
6609 server
6610 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6611 .log_err();
6612 }
6613 }
6614 }
6615 }
6616 }
6617
6618 fn update_local_worktree_buffers_git_repos(
6619 &mut self,
6620 worktree_handle: Model<Worktree>,
6621 changed_repos: &UpdatedGitRepositoriesSet,
6622 cx: &mut ModelContext<Self>,
6623 ) {
6624 debug_assert!(worktree_handle.read(cx).is_local());
6625
        // Identify the loading buffers whose containing repository has changed.
6627 let future_buffers = self
6628 .loading_buffers_by_path
6629 .iter()
6630 .filter_map(|(project_path, receiver)| {
6631 if project_path.worktree_id != worktree_handle.read(cx).id() {
6632 return None;
6633 }
6634 let path = &project_path.path;
6635 changed_repos
6636 .iter()
6637 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6638 let receiver = receiver.clone();
6639 let path = path.clone();
6640 Some(async move {
6641 wait_for_loading_buffer(receiver)
6642 .await
6643 .ok()
6644 .map(|buffer| (buffer, path))
6645 })
6646 })
6647 .collect::<FuturesUnordered<_>>();
6648
6649 // Identify the current buffers whose containing repository has changed.
6650 let current_buffers = self
6651 .opened_buffers
6652 .values()
6653 .filter_map(|buffer| {
6654 let buffer = buffer.upgrade()?;
6655 let file = File::from_dyn(buffer.read(cx).file())?;
6656 if file.worktree != worktree_handle {
6657 return None;
6658 }
6659 let path = file.path();
6660 changed_repos
6661 .iter()
6662 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6663 Some((buffer, path.clone()))
6664 })
6665 .collect::<Vec<_>>();
6666
6667 if future_buffers.len() + current_buffers.len() == 0 {
6668 return;
6669 }
6670
6671 let remote_id = self.remote_id();
6672 let client = self.client.clone();
6673 cx.spawn(move |_, mut cx| async move {
6674 // Wait for all of the buffers to load.
6675 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6676
6677 // Reload the diff base for every buffer whose containing git repository has changed.
6678 let snapshot =
6679 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
6680 let diff_bases_by_buffer = cx
6681 .background_executor()
6682 .spawn(async move {
6683 future_buffers
6684 .into_iter()
6685 .filter_map(|e| e)
6686 .chain(current_buffers)
6687 .filter_map(|(buffer, path)| {
6688 let (work_directory, repo) =
6689 snapshot.repository_and_work_directory_for_path(&path)?;
6690 let repo = snapshot.get_local_repo(&repo)?;
6691 let relative_path = path.strip_prefix(&work_directory).ok()?;
6692 let base_text = repo.repo_ptr.lock().load_index_text(relative_path);
6693 Some((buffer, base_text))
6694 })
6695 .collect::<Vec<_>>()
6696 })
6697 .await;
6698
            // Assign the new diff bases to all of the buffers.
6700 for (buffer, diff_base) in diff_bases_by_buffer {
6701 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6702 buffer.set_diff_base(diff_base.clone(), cx);
6703 buffer.remote_id()
6704 })?;
6705 if let Some(project_id) = remote_id {
6706 client
6707 .send(proto::UpdateDiffBase {
6708 project_id,
6709 buffer_id,
6710 diff_base,
6711 })
6712 .log_err();
6713 }
6714 }
6715
6716 anyhow::Ok(())
6717 })
6718 .detach();
6719 }
6720
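    /// Reloads any local settings files that changed within the worktree, updates the global
    /// settings store, and forwards the new contents to collaborators when the project is shared.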
6721 fn update_local_worktree_settings(
6722 &mut self,
6723 worktree: &Model<Worktree>,
6724 changes: &UpdatedEntriesSet,
6725 cx: &mut ModelContext<Self>,
6726 ) {
6727 let project_id = self.remote_id();
6728 let worktree_id = worktree.entity_id();
6729 let worktree = worktree.read(cx).as_local().unwrap();
6730 let remote_worktree_id = worktree.id();
6731
6732 let mut settings_contents = Vec::new();
6733 for (path, _, change) in changes.iter() {
6734 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6735 let settings_dir = Arc::from(
6736 path.ancestors()
6737 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6738 .unwrap(),
6739 );
6740 let fs = self.fs.clone();
6741 let removed = *change == PathChange::Removed;
6742 let abs_path = worktree.absolutize(path);
6743 settings_contents.push(async move {
6744 (
6745 settings_dir,
6746 if removed {
6747 None
6748 } else {
6749 Some(async move { fs.load(&abs_path?).await }.await)
6750 },
6751 )
6752 });
6753 }
6754 }
6755
6756 if settings_contents.is_empty() {
6757 return;
6758 }
6759
6760 let client = self.client.clone();
6761 cx.spawn(move |_, cx| async move {
6762 let settings_contents: Vec<(Arc<Path>, _)> =
6763 futures::future::join_all(settings_contents).await;
6764 cx.update(|cx| {
6765 cx.update_global::<SettingsStore, _>(|store, cx| {
6766 for (directory, file_content) in settings_contents {
6767 let file_content = file_content.and_then(|content| content.log_err());
6768 store
6769 .set_local_settings(
6770 worktree_id.as_u64() as usize,
6771 directory.clone(),
6772 file_content.as_ref().map(String::as_str),
6773 cx,
6774 )
6775 .log_err();
6776 if let Some(remote_id) = project_id {
6777 client
6778 .send(proto::UpdateWorktreeSettings {
6779 project_id: remote_id,
6780 worktree_id: remote_worktree_id.to_proto(),
6781 path: directory.to_string_lossy().into_owned(),
6782 content: file_content,
6783 })
6784 .log_err();
6785 }
6786 }
6787 });
6788 })
6789 .ok();
6790 })
6791 .detach();
6792 }
6793
6794 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6795 let new_active_entry = entry.and_then(|project_path| {
6796 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6797 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6798 Some(entry.id)
6799 });
6800 if new_active_entry != self.active_entry {
6801 self.active_entry = new_active_entry;
6802 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6803 }
6804 }
6805
6806 pub fn language_servers_running_disk_based_diagnostics(
6807 &self,
6808 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6809 self.language_server_statuses
6810 .iter()
6811 .filter_map(|(id, status)| {
6812 if status.has_pending_diagnostic_updates {
6813 Some(*id)
6814 } else {
6815 None
6816 }
6817 })
6818 }
6819
6820 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
6821 let mut summary = DiagnosticSummary::default();
6822 for (_, _, path_summary) in
6823 self.diagnostic_summaries(include_ignored, cx)
6824 .filter(|(path, _, _)| {
                    let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                    include_ignored || is_ignored == Some(false)
6827 })
6828 {
6829 summary.error_count += path_summary.error_count;
6830 summary.warning_count += path_summary.warning_count;
6831 }
6832 summary
6833 }
6834
6835 pub fn diagnostic_summaries<'a>(
6836 &'a self,
6837 include_ignored: bool,
6838 cx: &'a AppContext,
6839 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6840 self.visible_worktrees(cx)
6841 .flat_map(move |worktree| {
6842 let worktree = worktree.read(cx);
6843 let worktree_id = worktree.id();
6844 worktree
6845 .diagnostic_summaries()
6846 .map(move |(path, server_id, summary)| {
6847 (ProjectPath { worktree_id, path }, server_id, summary)
6848 })
6849 })
6850 .filter(move |(path, _, _)| {
                let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                include_ignored || is_ignored == Some(false)
6853 })
6854 }
6855
6856 pub fn disk_based_diagnostics_started(
6857 &mut self,
6858 language_server_id: LanguageServerId,
6859 cx: &mut ModelContext<Self>,
6860 ) {
6861 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6862 }
6863
6864 pub fn disk_based_diagnostics_finished(
6865 &mut self,
6866 language_server_id: LanguageServerId,
6867 cx: &mut ModelContext<Self>,
6868 ) {
6869 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6870 }
6871
6872 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6873 self.active_entry
6874 }
6875
6876 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6877 self.worktree_for_id(path.worktree_id, cx)?
6878 .read(cx)
6879 .entry_for_path(&path.path)
6880 .cloned()
6881 }
6882
6883 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6884 let worktree = self.worktree_for_entry(entry_id, cx)?;
6885 let worktree = worktree.read(cx);
6886 let worktree_id = worktree.id();
6887 let path = worktree.entry_for_id(entry_id)?.path.clone();
6888 Some(ProjectPath { worktree_id, path })
6889 }
6890
6891 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6892 let workspace_root = self
6893 .worktree_for_id(project_path.worktree_id, cx)?
6894 .read(cx)
6895 .abs_path();
6896 let project_path = project_path.path.as_ref();
6897
6898 Some(if project_path == Path::new("") {
6899 workspace_root.to_path_buf()
6900 } else {
6901 workspace_root.join(project_path)
6902 })
6903 }
6904
6905 // RPC message handlers
6906
6907 async fn handle_unshare_project(
6908 this: Model<Self>,
6909 _: TypedEnvelope<proto::UnshareProject>,
6910 _: Arc<Client>,
6911 mut cx: AsyncAppContext,
6912 ) -> Result<()> {
6913 this.update(&mut cx, |this, cx| {
6914 if this.is_local() {
6915 this.unshare(cx)?;
6916 } else {
6917 this.disconnected_from_host(cx);
6918 }
6919 Ok(())
6920 })?
6921 }
6922
6923 async fn handle_add_collaborator(
6924 this: Model<Self>,
6925 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6926 _: Arc<Client>,
6927 mut cx: AsyncAppContext,
6928 ) -> Result<()> {
6929 let collaborator = envelope
6930 .payload
6931 .collaborator
6932 .take()
6933 .ok_or_else(|| anyhow!("empty collaborator"))?;
6934
6935 let collaborator = Collaborator::from_proto(collaborator)?;
6936 this.update(&mut cx, |this, cx| {
6937 this.shared_buffers.remove(&collaborator.peer_id);
6938 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6939 this.collaborators
6940 .insert(collaborator.peer_id, collaborator);
6941 cx.notify();
6942 })?;
6943
6944 Ok(())
6945 }
6946
6947 async fn handle_update_project_collaborator(
6948 this: Model<Self>,
6949 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6950 _: Arc<Client>,
6951 mut cx: AsyncAppContext,
6952 ) -> Result<()> {
6953 let old_peer_id = envelope
6954 .payload
6955 .old_peer_id
6956 .ok_or_else(|| anyhow!("missing old peer id"))?;
6957 let new_peer_id = envelope
6958 .payload
6959 .new_peer_id
6960 .ok_or_else(|| anyhow!("missing new peer id"))?;
6961 this.update(&mut cx, |this, cx| {
6962 let collaborator = this
6963 .collaborators
6964 .remove(&old_peer_id)
6965 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6966 let is_host = collaborator.replica_id == 0;
6967 this.collaborators.insert(new_peer_id, collaborator);
6968
6969 let buffers = this.shared_buffers.remove(&old_peer_id);
6970 log::info!(
6971 "peer {} became {}. moving buffers {:?}",
6972 old_peer_id,
6973 new_peer_id,
6974 &buffers
6975 );
6976 if let Some(buffers) = buffers {
6977 this.shared_buffers.insert(new_peer_id, buffers);
6978 }
6979
6980 if is_host {
6981 this.opened_buffers
6982 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
6983 this.buffer_ordered_messages_tx
6984 .unbounded_send(BufferOrderedMessage::Resync)
6985 .unwrap();
6986 }
6987
6988 cx.emit(Event::CollaboratorUpdated {
6989 old_peer_id,
6990 new_peer_id,
6991 });
6992 cx.notify();
6993 Ok(())
6994 })?
6995 }
6996
6997 async fn handle_remove_collaborator(
6998 this: Model<Self>,
6999 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7000 _: Arc<Client>,
7001 mut cx: AsyncAppContext,
7002 ) -> Result<()> {
7003 this.update(&mut cx, |this, cx| {
7004 let peer_id = envelope
7005 .payload
7006 .peer_id
7007 .ok_or_else(|| anyhow!("invalid peer id"))?;
7008 let replica_id = this
7009 .collaborators
7010 .remove(&peer_id)
7011 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7012 .replica_id;
7013 for buffer in this.opened_buffers.values() {
7014 if let Some(buffer) = buffer.upgrade() {
7015 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7016 }
7017 }
7018 this.shared_buffers.remove(&peer_id);
7019
7020 cx.emit(Event::CollaboratorLeft(peer_id));
7021 cx.notify();
7022 Ok(())
7023 })?
7024 }
7025
7026 async fn handle_update_project(
7027 this: Model<Self>,
7028 envelope: TypedEnvelope<proto::UpdateProject>,
7029 _: Arc<Client>,
7030 mut cx: AsyncAppContext,
7031 ) -> Result<()> {
7032 this.update(&mut cx, |this, cx| {
            // Ignore messages that were sent before the response to our request to join the project.
7034 if envelope.message_id > this.join_project_response_message_id {
7035 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7036 }
7037 Ok(())
7038 })?
7039 }
7040
7041 async fn handle_update_worktree(
7042 this: Model<Self>,
7043 envelope: TypedEnvelope<proto::UpdateWorktree>,
7044 _: Arc<Client>,
7045 mut cx: AsyncAppContext,
7046 ) -> Result<()> {
7047 this.update(&mut cx, |this, cx| {
7048 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7049 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7050 worktree.update(cx, |worktree, _| {
7051 let worktree = worktree.as_remote_mut().unwrap();
7052 worktree.update_from_remote(envelope.payload);
7053 });
7054 }
7055 Ok(())
7056 })?
7057 }
7058
7059 async fn handle_update_worktree_settings(
7060 this: Model<Self>,
7061 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7062 _: Arc<Client>,
7063 mut cx: AsyncAppContext,
7064 ) -> Result<()> {
7065 this.update(&mut cx, |this, cx| {
7066 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7067 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7068 cx.update_global::<SettingsStore, _>(|store, cx| {
7069 store
7070 .set_local_settings(
7071 worktree.entity_id().as_u64() as usize,
7072 PathBuf::from(&envelope.payload.path).into(),
7073 envelope.payload.content.as_ref().map(String::as_str),
7074 cx,
7075 )
7076 .log_err();
7077 });
7078 }
7079 Ok(())
7080 })?
7081 }
7082
7083 async fn handle_create_project_entry(
7084 this: Model<Self>,
7085 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7086 _: Arc<Client>,
7087 mut cx: AsyncAppContext,
7088 ) -> Result<proto::ProjectEntryResponse> {
7089 let worktree = this.update(&mut cx, |this, cx| {
7090 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7091 this.worktree_for_id(worktree_id, cx)
7092 .ok_or_else(|| anyhow!("worktree not found"))
7093 })??;
7094 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7095 let entry = worktree
7096 .update(&mut cx, |worktree, cx| {
7097 let worktree = worktree.as_local_mut().unwrap();
7098 let path = PathBuf::from(envelope.payload.path);
7099 worktree.create_entry(path, envelope.payload.is_directory, cx)
7100 })?
7101 .await?;
7102 Ok(proto::ProjectEntryResponse {
7103 entry: entry.as_ref().map(|e| e.into()),
7104 worktree_scan_id: worktree_scan_id as u64,
7105 })
7106 }
7107
7108 async fn handle_rename_project_entry(
7109 this: Model<Self>,
7110 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7111 _: Arc<Client>,
7112 mut cx: AsyncAppContext,
7113 ) -> Result<proto::ProjectEntryResponse> {
7114 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7115 let worktree = this.update(&mut cx, |this, cx| {
7116 this.worktree_for_entry(entry_id, cx)
7117 .ok_or_else(|| anyhow!("worktree not found"))
7118 })??;
7119 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7120 let entry = worktree
7121 .update(&mut cx, |worktree, cx| {
7122 let new_path = PathBuf::from(envelope.payload.new_path);
7123 worktree
7124 .as_local_mut()
7125 .unwrap()
7126 .rename_entry(entry_id, new_path, cx)
7127 })?
7128 .await?;
7129 Ok(proto::ProjectEntryResponse {
7130 entry: entry.as_ref().map(|e| e.into()),
7131 worktree_scan_id: worktree_scan_id as u64,
7132 })
7133 }
7134
7135 async fn handle_copy_project_entry(
7136 this: Model<Self>,
7137 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7138 _: Arc<Client>,
7139 mut cx: AsyncAppContext,
7140 ) -> Result<proto::ProjectEntryResponse> {
7141 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7142 let worktree = this.update(&mut cx, |this, cx| {
7143 this.worktree_for_entry(entry_id, cx)
7144 .ok_or_else(|| anyhow!("worktree not found"))
7145 })??;
7146 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7147 let entry = worktree
7148 .update(&mut cx, |worktree, cx| {
7149 let new_path = PathBuf::from(envelope.payload.new_path);
7150 worktree
7151 .as_local_mut()
7152 .unwrap()
7153 .copy_entry(entry_id, new_path, cx)
7154 })?
7155 .await?;
7156 Ok(proto::ProjectEntryResponse {
7157 entry: entry.as_ref().map(|e| e.into()),
7158 worktree_scan_id: worktree_scan_id as u64,
7159 })
7160 }
7161
7162 async fn handle_delete_project_entry(
7163 this: Model<Self>,
7164 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7165 _: Arc<Client>,
7166 mut cx: AsyncAppContext,
7167 ) -> Result<proto::ProjectEntryResponse> {
7168 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7169
7170 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7171
7172 let worktree = this.update(&mut cx, |this, cx| {
7173 this.worktree_for_entry(entry_id, cx)
7174 .ok_or_else(|| anyhow!("worktree not found"))
7175 })??;
7176 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7177 worktree
7178 .update(&mut cx, |worktree, cx| {
7179 worktree
7180 .as_local_mut()
7181 .unwrap()
7182 .delete_entry(entry_id, cx)
7183 .ok_or_else(|| anyhow!("invalid entry"))
7184 })??
7185 .await?;
7186 Ok(proto::ProjectEntryResponse {
7187 entry: None,
7188 worktree_scan_id: worktree_scan_id as u64,
7189 })
7190 }
7191
7192 async fn handle_expand_project_entry(
7193 this: Model<Self>,
7194 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7195 _: Arc<Client>,
7196 mut cx: AsyncAppContext,
7197 ) -> Result<proto::ExpandProjectEntryResponse> {
7198 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7199 let worktree = this
7200 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7201 .ok_or_else(|| anyhow!("invalid request"))?;
7202 worktree
7203 .update(&mut cx, |worktree, cx| {
7204 worktree
7205 .as_local_mut()
7206 .unwrap()
7207 .expand_entry(entry_id, cx)
7208 .ok_or_else(|| anyhow!("invalid entry"))
7209 })??
7210 .await?;
7211 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7212 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7213 }
7214
7215 async fn handle_update_diagnostic_summary(
7216 this: Model<Self>,
7217 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7218 _: Arc<Client>,
7219 mut cx: AsyncAppContext,
7220 ) -> Result<()> {
7221 this.update(&mut cx, |this, cx| {
7222 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7223 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7224 if let Some(summary) = envelope.payload.summary {
7225 let project_path = ProjectPath {
7226 worktree_id,
7227 path: Path::new(&summary.path).into(),
7228 };
7229 worktree.update(cx, |worktree, _| {
7230 worktree
7231 .as_remote_mut()
7232 .unwrap()
7233 .update_diagnostic_summary(project_path.path.clone(), &summary);
7234 });
7235 cx.emit(Event::DiagnosticsUpdated {
7236 language_server_id: LanguageServerId(summary.language_server_id as usize),
7237 path: project_path,
7238 });
7239 }
7240 }
7241 Ok(())
7242 })?
7243 }
7244
7245 async fn handle_start_language_server(
7246 this: Model<Self>,
7247 envelope: TypedEnvelope<proto::StartLanguageServer>,
7248 _: Arc<Client>,
7249 mut cx: AsyncAppContext,
7250 ) -> Result<()> {
7251 let server = envelope
7252 .payload
7253 .server
7254 .ok_or_else(|| anyhow!("invalid server"))?;
7255 this.update(&mut cx, |this, cx| {
7256 this.language_server_statuses.insert(
7257 LanguageServerId(server.id as usize),
7258 LanguageServerStatus {
7259 name: server.name,
7260 pending_work: Default::default(),
7261 has_pending_diagnostic_updates: false,
7262 progress_tokens: Default::default(),
7263 },
7264 );
7265 cx.notify();
7266 })?;
7267 Ok(())
7268 }
7269
7270 async fn handle_update_language_server(
7271 this: Model<Self>,
7272 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7273 _: Arc<Client>,
7274 mut cx: AsyncAppContext,
7275 ) -> Result<()> {
7276 this.update(&mut cx, |this, cx| {
7277 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7278
7279 match envelope
7280 .payload
7281 .variant
7282 .ok_or_else(|| anyhow!("invalid variant"))?
7283 {
7284 proto::update_language_server::Variant::WorkStart(payload) => {
7285 this.on_lsp_work_start(
7286 language_server_id,
7287 payload.token,
7288 LanguageServerProgress {
7289 message: payload.message,
7290 percentage: payload.percentage.map(|p| p as usize),
7291 last_update_at: Instant::now(),
7292 },
7293 cx,
7294 );
7295 }
7296
7297 proto::update_language_server::Variant::WorkProgress(payload) => {
7298 this.on_lsp_work_progress(
7299 language_server_id,
7300 payload.token,
7301 LanguageServerProgress {
7302 message: payload.message,
7303 percentage: payload.percentage.map(|p| p as usize),
7304 last_update_at: Instant::now(),
7305 },
7306 cx,
7307 );
7308 }
7309
7310 proto::update_language_server::Variant::WorkEnd(payload) => {
7311 this.on_lsp_work_end(language_server_id, payload.token, cx);
7312 }
7313
7314 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7315 this.disk_based_diagnostics_started(language_server_id, cx);
7316 }
7317
7318 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7319 this.disk_based_diagnostics_finished(language_server_id, cx)
7320 }
7321 }
7322
7323 Ok(())
7324 })?
7325 }
7326
7327 async fn handle_update_buffer(
7328 this: Model<Self>,
7329 envelope: TypedEnvelope<proto::UpdateBuffer>,
7330 _: Arc<Client>,
7331 mut cx: AsyncAppContext,
7332 ) -> Result<proto::Ack> {
7333 this.update(&mut cx, |this, cx| {
7334 let payload = envelope.payload.clone();
7335 let buffer_id = payload.buffer_id;
7336 let ops = payload
7337 .operations
7338 .into_iter()
7339 .map(language::proto::deserialize_operation)
7340 .collect::<Result<Vec<_>, _>>()?;
7341 let is_remote = this.is_remote();
7342 match this.opened_buffers.entry(buffer_id) {
7343 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7344 OpenBuffer::Strong(buffer) => {
7345 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7346 }
7347 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7348 OpenBuffer::Weak(_) => {}
7349 },
7350 hash_map::Entry::Vacant(e) => {
7351 assert!(
7352 is_remote,
7353 "received buffer update from {:?}",
7354 envelope.original_sender_id
7355 );
7356 e.insert(OpenBuffer::Operations(ops));
7357 }
7358 }
7359 Ok(proto::Ack {})
7360 })?
7361 }
7362
7363 async fn handle_create_buffer_for_peer(
7364 this: Model<Self>,
7365 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7366 _: Arc<Client>,
7367 mut cx: AsyncAppContext,
7368 ) -> Result<()> {
7369 this.update(&mut cx, |this, cx| {
7370 match envelope
7371 .payload
7372 .variant
7373 .ok_or_else(|| anyhow!("missing variant"))?
7374 {
7375 proto::create_buffer_for_peer::Variant::State(mut state) => {
7376 let mut buffer_file = None;
7377 if let Some(file) = state.file.take() {
7378 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7379 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7380 anyhow!("no worktree found for id {}", file.worktree_id)
7381 })?;
7382 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7383 as Arc<dyn language::File>);
7384 }
7385
7386 let buffer_id = state.id;
7387 let buffer = cx.new_model(|_| {
7388 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
7389 .unwrap()
7390 });
7391 this.incomplete_remote_buffers
7392 .insert(buffer_id, Some(buffer));
7393 }
7394 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7395 let buffer = this
7396 .incomplete_remote_buffers
7397 .get(&chunk.buffer_id)
7398 .cloned()
7399 .flatten()
7400 .ok_or_else(|| {
7401 anyhow!(
7402 "received chunk for buffer {} without initial state",
7403 chunk.buffer_id
7404 )
7405 })?;
7406 let operations = chunk
7407 .operations
7408 .into_iter()
7409 .map(language::proto::deserialize_operation)
7410 .collect::<Result<Vec<_>>>()?;
7411 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7412
7413 if chunk.is_last {
7414 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7415 this.register_buffer(&buffer, cx)?;
7416 }
7417 }
7418 }
7419
7420 Ok(())
7421 })?
7422 }
7423
7424 async fn handle_update_diff_base(
7425 this: Model<Self>,
7426 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7427 _: Arc<Client>,
7428 mut cx: AsyncAppContext,
7429 ) -> Result<()> {
7430 this.update(&mut cx, |this, cx| {
7431 let buffer_id = envelope.payload.buffer_id;
7432 let diff_base = envelope.payload.diff_base;
7433 if let Some(buffer) = this
7434 .opened_buffers
7435 .get_mut(&buffer_id)
7436 .and_then(|b| b.upgrade())
7437 .or_else(|| {
7438 this.incomplete_remote_buffers
7439 .get(&buffer_id)
7440 .cloned()
7441 .flatten()
7442 })
7443 {
7444 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7445 }
7446 Ok(())
7447 })?
7448 }
7449
7450 async fn handle_update_buffer_file(
7451 this: Model<Self>,
7452 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7453 _: Arc<Client>,
7454 mut cx: AsyncAppContext,
7455 ) -> Result<()> {
7456 let buffer_id = envelope.payload.buffer_id;
7457
7458 this.update(&mut cx, |this, cx| {
7459 let payload = envelope.payload.clone();
7460 if let Some(buffer) = this
7461 .opened_buffers
7462 .get(&buffer_id)
7463 .and_then(|b| b.upgrade())
7464 .or_else(|| {
7465 this.incomplete_remote_buffers
7466 .get(&buffer_id)
7467 .cloned()
7468 .flatten()
7469 })
7470 {
7471 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7472 let worktree = this
7473 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7474 .ok_or_else(|| anyhow!("no such worktree"))?;
7475 let file = File::from_proto(file, worktree, cx)?;
7476 buffer.update(cx, |buffer, cx| {
7477 buffer.file_updated(Arc::new(file), cx);
7478 });
7479 this.detect_language_for_buffer(&buffer, cx);
7480 }
7481 Ok(())
7482 })?
7483 }
7484
7485 async fn handle_save_buffer(
7486 this: Model<Self>,
7487 envelope: TypedEnvelope<proto::SaveBuffer>,
7488 _: Arc<Client>,
7489 mut cx: AsyncAppContext,
7490 ) -> Result<proto::BufferSaved> {
7491 let buffer_id = envelope.payload.buffer_id;
7492 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7493 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7494 let buffer = this
7495 .opened_buffers
7496 .get(&buffer_id)
7497 .and_then(|buffer| buffer.upgrade())
7498 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7499 anyhow::Ok((project_id, buffer))
7500 })??;
7501 buffer
7502 .update(&mut cx, |buffer, _| {
7503 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7504 })?
7505 .await?;
7506 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
7507
7508 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
7509 .await?;
7510 Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
7511 project_id,
7512 buffer_id,
7513 version: serialize_version(buffer.saved_version()),
7514 mtime: Some(buffer.saved_mtime().into()),
7515 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7516 })?)
7517 }
7518
7519 async fn handle_reload_buffers(
7520 this: Model<Self>,
7521 envelope: TypedEnvelope<proto::ReloadBuffers>,
7522 _: Arc<Client>,
7523 mut cx: AsyncAppContext,
7524 ) -> Result<proto::ReloadBuffersResponse> {
7525 let sender_id = envelope.original_sender_id()?;
7526 let reload = this.update(&mut cx, |this, cx| {
7527 let mut buffers = HashSet::default();
7528 for buffer_id in &envelope.payload.buffer_ids {
7529 buffers.insert(
7530 this.opened_buffers
7531 .get(buffer_id)
7532 .and_then(|buffer| buffer.upgrade())
7533 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7534 );
7535 }
7536 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7537 })??;
7538
7539 let project_transaction = reload.await?;
7540 let project_transaction = this.update(&mut cx, |this, cx| {
7541 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7542 })?;
7543 Ok(proto::ReloadBuffersResponse {
7544 transaction: Some(project_transaction),
7545 })
7546 }
7547
7548 async fn handle_synchronize_buffers(
7549 this: Model<Self>,
7550 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7551 _: Arc<Client>,
7552 mut cx: AsyncAppContext,
7553 ) -> Result<proto::SynchronizeBuffersResponse> {
7554 let project_id = envelope.payload.project_id;
7555 let mut response = proto::SynchronizeBuffersResponse {
7556 buffers: Default::default(),
7557 };
7558
7559 this.update(&mut cx, |this, cx| {
7560 let Some(guest_id) = envelope.original_sender_id else {
7561 error!("missing original_sender_id on SynchronizeBuffers request");
7562 return;
7563 };
7564
7565 this.shared_buffers.entry(guest_id).or_default().clear();
7566 for buffer in envelope.payload.buffers {
7567 let buffer_id = buffer.id;
7568 let remote_version = language::proto::deserialize_version(&buffer.version);
7569 if let Some(buffer) = this.buffer_for_id(buffer_id) {
7570 this.shared_buffers
7571 .entry(guest_id)
7572 .or_default()
7573 .insert(buffer_id);
7574
7575 let buffer = buffer.read(cx);
7576 response.buffers.push(proto::BufferVersion {
7577 id: buffer_id,
7578 version: language::proto::serialize_version(&buffer.version),
7579 });
7580
7581 let operations = buffer.serialize_ops(Some(remote_version), cx);
7582 let client = this.client.clone();
7583 if let Some(file) = buffer.file() {
7584 client
7585 .send(proto::UpdateBufferFile {
7586 project_id,
7587 buffer_id: buffer_id as u64,
7588 file: Some(file.to_proto()),
7589 })
7590 .log_err();
7591 }
7592
7593 client
7594 .send(proto::UpdateDiffBase {
7595 project_id,
7596 buffer_id: buffer_id as u64,
7597 diff_base: buffer.diff_base().map(Into::into),
7598 })
7599 .log_err();
7600
7601 client
7602 .send(proto::BufferReloaded {
7603 project_id,
7604 buffer_id,
7605 version: language::proto::serialize_version(buffer.saved_version()),
7606 mtime: Some(buffer.saved_mtime().into()),
7607 fingerprint: language::proto::serialize_fingerprint(
7608 buffer.saved_version_fingerprint(),
7609 ),
7610 line_ending: language::proto::serialize_line_ending(
7611 buffer.line_ending(),
7612 ) as i32,
7613 })
7614 .log_err();
7615
7616 cx.background_executor()
7617 .spawn(
7618 async move {
7619 let operations = operations.await;
7620 for chunk in split_operations(operations) {
7621 client
7622 .request(proto::UpdateBuffer {
7623 project_id,
7624 buffer_id,
7625 operations: chunk,
7626 })
7627 .await?;
7628 }
7629 anyhow::Ok(())
7630 }
7631 .log_err(),
7632 )
7633 .detach();
7634 }
7635 }
7636 })?;
7637
7638 Ok(response)
7639 }
7640
7641 async fn handle_format_buffers(
7642 this: Model<Self>,
7643 envelope: TypedEnvelope<proto::FormatBuffers>,
7644 _: Arc<Client>,
7645 mut cx: AsyncAppContext,
7646 ) -> Result<proto::FormatBuffersResponse> {
7647 let sender_id = envelope.original_sender_id()?;
7648 let format = this.update(&mut cx, |this, cx| {
7649 let mut buffers = HashSet::default();
7650 for buffer_id in &envelope.payload.buffer_ids {
7651 buffers.insert(
7652 this.opened_buffers
7653 .get(buffer_id)
7654 .and_then(|buffer| buffer.upgrade())
7655 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7656 );
7657 }
7658 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7659 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7660 })??;
7661
7662 let project_transaction = format.await?;
7663 let project_transaction = this.update(&mut cx, |this, cx| {
7664 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7665 })?;
7666 Ok(proto::FormatBuffersResponse {
7667 transaction: Some(project_transaction),
7668 })
7669 }
7670
7671 async fn handle_apply_additional_edits_for_completion(
7672 this: Model<Self>,
7673 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7674 _: Arc<Client>,
7675 mut cx: AsyncAppContext,
7676 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7677 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7678 let buffer = this
7679 .opened_buffers
7680 .get(&envelope.payload.buffer_id)
7681 .and_then(|buffer| buffer.upgrade())
7682 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7683 let language = buffer.read(cx).language();
7684 let completion = language::proto::deserialize_completion(
7685 envelope
7686 .payload
7687 .completion
7688 .ok_or_else(|| anyhow!("invalid completion"))?,
7689 language.cloned(),
7690 );
7691 Ok::<_, anyhow::Error>((buffer, completion))
7692 })??;
7693
7694 let completion = completion.await?;
7695
7696 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7697 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7698 })?;
7699
7700 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7701 transaction: apply_additional_edits
7702 .await?
7703 .as_ref()
7704 .map(language::proto::serialize_transaction),
7705 })
7706 }
7707
7708 async fn handle_apply_code_action(
7709 this: Model<Self>,
7710 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7711 _: Arc<Client>,
7712 mut cx: AsyncAppContext,
7713 ) -> Result<proto::ApplyCodeActionResponse> {
7714 let sender_id = envelope.original_sender_id()?;
7715 let action = language::proto::deserialize_code_action(
7716 envelope
7717 .payload
7718 .action
7719 .ok_or_else(|| anyhow!("invalid action"))?,
7720 )?;
7721 let apply_code_action = this.update(&mut cx, |this, cx| {
7722 let buffer = this
7723 .opened_buffers
7724 .get(&envelope.payload.buffer_id)
7725 .and_then(|buffer| buffer.upgrade())
7726 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7727 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7728 })??;
7729
7730 let project_transaction = apply_code_action.await?;
7731 let project_transaction = this.update(&mut cx, |this, cx| {
7732 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7733 })?;
7734 Ok(proto::ApplyCodeActionResponse {
7735 transaction: Some(project_transaction),
7736 })
7737 }
7738
7739 async fn handle_on_type_formatting(
7740 this: Model<Self>,
7741 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7742 _: Arc<Client>,
7743 mut cx: AsyncAppContext,
7744 ) -> Result<proto::OnTypeFormattingResponse> {
7745 let on_type_formatting = this.update(&mut cx, |this, cx| {
7746 let buffer = this
7747 .opened_buffers
7748 .get(&envelope.payload.buffer_id)
7749 .and_then(|buffer| buffer.upgrade())
7750 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7751 let position = envelope
7752 .payload
7753 .position
7754 .and_then(deserialize_anchor)
7755 .ok_or_else(|| anyhow!("invalid position"))?;
7756 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7757 buffer,
7758 position,
7759 envelope.payload.trigger.clone(),
7760 cx,
7761 ))
7762 })??;
7763
7764 let transaction = on_type_formatting
7765 .await?
7766 .as_ref()
7767 .map(language::proto::serialize_transaction);
7768 Ok(proto::OnTypeFormattingResponse { transaction })
7769 }
7770
7771 async fn handle_inlay_hints(
7772 this: Model<Self>,
7773 envelope: TypedEnvelope<proto::InlayHints>,
7774 _: Arc<Client>,
7775 mut cx: AsyncAppContext,
7776 ) -> Result<proto::InlayHintsResponse> {
7777 let sender_id = envelope.original_sender_id()?;
7778 let buffer = this.update(&mut cx, |this, _| {
7779 this.opened_buffers
7780 .get(&envelope.payload.buffer_id)
7781 .and_then(|buffer| buffer.upgrade())
7782 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7783 })??;
7784 let buffer_version = deserialize_version(&envelope.payload.version);
7785
7786 buffer
7787 .update(&mut cx, |buffer, _| {
7788 buffer.wait_for_version(buffer_version.clone())
7789 })?
7790 .await
7791 .with_context(|| {
7792 format!(
7793 "waiting for version {:?} for buffer {}",
7794 buffer_version,
7795 buffer.entity_id()
7796 )
7797 })?;
7798
7799 let start = envelope
7800 .payload
7801 .start
7802 .and_then(deserialize_anchor)
7803 .context("missing range start")?;
7804 let end = envelope
7805 .payload
7806 .end
7807 .and_then(deserialize_anchor)
7808 .context("missing range end")?;
7809 let buffer_hints = this
7810 .update(&mut cx, |project, cx| {
7811 project.inlay_hints(buffer, start..end, cx)
7812 })?
7813 .await
7814 .context("inlay hints fetch")?;
7815
7816 Ok(this.update(&mut cx, |project, cx| {
7817 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7818 })?)
7819 }
7820
7821 async fn handle_resolve_inlay_hint(
7822 this: Model<Self>,
7823 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7824 _: Arc<Client>,
7825 mut cx: AsyncAppContext,
7826 ) -> Result<proto::ResolveInlayHintResponse> {
        let proto_hint = envelope.payload.hint.ok_or_else(|| {
            anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint")
        })?;
7831 let hint = InlayHints::proto_to_project_hint(proto_hint)
7832 .context("resolved proto inlay hint conversion")?;
7833 let buffer = this.update(&mut cx, |this, _cx| {
7834 this.opened_buffers
7835 .get(&envelope.payload.buffer_id)
7836 .and_then(|buffer| buffer.upgrade())
7837 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7838 })??;
7839 let response_hint = this
7840 .update(&mut cx, |project, cx| {
7841 project.resolve_inlay_hint(
7842 hint,
7843 buffer,
7844 LanguageServerId(envelope.payload.language_server_id as usize),
7845 cx,
7846 )
7847 })?
7848 .await
7849 .context("inlay hints fetch")?;
7850 Ok(proto::ResolveInlayHintResponse {
7851 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7852 })
7853 }
7854
7855 async fn handle_refresh_inlay_hints(
7856 this: Model<Self>,
7857 _: TypedEnvelope<proto::RefreshInlayHints>,
7858 _: Arc<Client>,
7859 mut cx: AsyncAppContext,
7860 ) -> Result<proto::Ack> {
7861 this.update(&mut cx, |_, cx| {
7862 cx.emit(Event::RefreshInlayHints);
7863 })?;
7864 Ok(proto::Ack {})
7865 }
7866
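    /// Generic handler for LSP-backed requests: looks up the buffer referenced by the proto
    /// message, deserializes the payload into the `LspCommand` type `T`, runs it against the
    /// buffer's primary language server, and serializes the response for the original sender.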
7867 async fn handle_lsp_command<T: LspCommand>(
7868 this: Model<Self>,
7869 envelope: TypedEnvelope<T::ProtoRequest>,
7870 _: Arc<Client>,
7871 mut cx: AsyncAppContext,
7872 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7873 where
7874 <T::LspRequest as lsp::request::Request>::Params: Send,
7875 <T::LspRequest as lsp::request::Request>::Result: Send,
7876 {
7877 let sender_id = envelope.original_sender_id()?;
7878 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7879 let buffer_handle = this.update(&mut cx, |this, _cx| {
7880 this.opened_buffers
7881 .get(&buffer_id)
7882 .and_then(|buffer| buffer.upgrade())
7883 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7884 })??;
7885 let request = T::from_proto(
7886 envelope.payload,
7887 this.clone(),
7888 buffer_handle.clone(),
7889 cx.clone(),
7890 )
7891 .await?;
7892 let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
7893 let response = this
7894 .update(&mut cx, |this, cx| {
7895 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7896 })?
7897 .await?;
7898 this.update(&mut cx, |this, cx| {
7899 Ok(T::response_to_proto(
7900 response,
7901 this,
7902 sender_id,
7903 &buffer_version,
7904 cx,
7905 ))
7906 })?
7907 }
7908
7909 async fn handle_get_project_symbols(
7910 this: Model<Self>,
7911 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7912 _: Arc<Client>,
7913 mut cx: AsyncAppContext,
7914 ) -> Result<proto::GetProjectSymbolsResponse> {
7915 let symbols = this
7916 .update(&mut cx, |this, cx| {
7917 this.symbols(&envelope.payload.query, cx)
7918 })?
7919 .await?;
7920
7921 Ok(proto::GetProjectSymbolsResponse {
7922 symbols: symbols.iter().map(serialize_symbol).collect(),
7923 })
7924 }
7925
7926 async fn handle_search_project(
7927 this: Model<Self>,
7928 envelope: TypedEnvelope<proto::SearchProject>,
7929 _: Arc<Client>,
7930 mut cx: AsyncAppContext,
7931 ) -> Result<proto::SearchProjectResponse> {
7932 let peer_id = envelope.original_sender_id()?;
7933 let query = SearchQuery::from_proto(envelope.payload)?;
7934 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7935
7936 cx.spawn(move |mut cx| async move {
7937 let mut locations = Vec::new();
7938 while let Some((buffer, ranges)) = result.next().await {
7939 for range in ranges {
7940 let start = serialize_anchor(&range.start);
7941 let end = serialize_anchor(&range.end);
7942 let buffer_id = this.update(&mut cx, |this, cx| {
7943 this.create_buffer_for_peer(&buffer, peer_id, cx)
7944 })?;
7945 locations.push(proto::Location {
7946 buffer_id,
7947 start: Some(start),
7948 end: Some(end),
7949 });
7950 }
7951 }
7952 Ok(proto::SearchProjectResponse { locations })
7953 })
7954 .await
7955 }
7956
7957 async fn handle_open_buffer_for_symbol(
7958 this: Model<Self>,
7959 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7960 _: Arc<Client>,
7961 mut cx: AsyncAppContext,
7962 ) -> Result<proto::OpenBufferForSymbolResponse> {
7963 let peer_id = envelope.original_sender_id()?;
7964 let symbol = envelope
7965 .payload
7966 .symbol
7967 .ok_or_else(|| anyhow!("invalid symbol"))?;
7968 let symbol = this
7969 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
7970 .await?;
7971 let symbol = this.update(&mut cx, |this, _| {
7972 let signature = this.symbol_signature(&symbol.path);
7973 if signature == symbol.signature {
7974 Ok(symbol)
7975 } else {
7976 Err(anyhow!("invalid symbol signature"))
7977 }
7978 })??;
7979 let buffer = this
7980 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
7981 .await?;
7982
7983 Ok(proto::OpenBufferForSymbolResponse {
7984 buffer_id: this.update(&mut cx, |this, cx| {
7985 this.create_buffer_for_peer(&buffer, peer_id, cx)
7986 })?,
7987 })
7988 }
7989
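    /// Hashes a project path together with this project's nonce. The digest is attached to
    /// symbols sent to peers so that `handle_open_buffer_for_symbol` can later verify that a
    /// symbol a peer asks to open really originated from this project.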
7990 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
7991 let mut hasher = Sha256::new();
7992 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
7993 hasher.update(project_path.path.to_string_lossy().as_bytes());
7994 hasher.update(self.nonce.to_be_bytes());
7995 hasher.finalize().as_slice().try_into().unwrap()
7996 }
7997
7998 async fn handle_open_buffer_by_id(
7999 this: Model<Self>,
8000 envelope: TypedEnvelope<proto::OpenBufferById>,
8001 _: Arc<Client>,
8002 mut cx: AsyncAppContext,
8003 ) -> Result<proto::OpenBufferResponse> {
8004 let peer_id = envelope.original_sender_id()?;
8005 let buffer = this
8006 .update(&mut cx, |this, cx| {
8007 this.open_buffer_by_id(envelope.payload.id, cx)
8008 })?
8009 .await?;
8010 this.update(&mut cx, |this, cx| {
8011 Ok(proto::OpenBufferResponse {
8012 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
8013 })
8014 })?
8015 }
8016
8017 async fn handle_open_buffer_by_path(
8018 this: Model<Self>,
8019 envelope: TypedEnvelope<proto::OpenBufferByPath>,
8020 _: Arc<Client>,
8021 mut cx: AsyncAppContext,
8022 ) -> Result<proto::OpenBufferResponse> {
8023 let peer_id = envelope.original_sender_id()?;
8024 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8025 let open_buffer = this.update(&mut cx, |this, cx| {
8026 this.open_buffer(
8027 ProjectPath {
8028 worktree_id,
8029 path: PathBuf::from(envelope.payload.path).into(),
8030 },
8031 cx,
8032 )
8033 })?;
8034
8035 let buffer = open_buffer.await?;
8036 this.update(&mut cx, |this, cx| {
8037 Ok(proto::OpenBufferResponse {
8038 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
8039 })
8040 })?
8041 }
8042
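    /// Converts a `ProjectTransaction` into its proto representation, registering each
    /// affected buffer with `create_buffer_for_peer` so the peer is guaranteed to have (or
    /// receive) the buffers referenced by the serialized transactions.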
8043 fn serialize_project_transaction_for_peer(
8044 &mut self,
8045 project_transaction: ProjectTransaction,
8046 peer_id: proto::PeerId,
8047 cx: &mut AppContext,
8048 ) -> proto::ProjectTransaction {
8049 let mut serialized_transaction = proto::ProjectTransaction {
8050 buffer_ids: Default::default(),
8051 transactions: Default::default(),
8052 };
8053 for (buffer, transaction) in project_transaction.0 {
8054 serialized_transaction
8055 .buffer_ids
8056 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
8057 serialized_transaction
8058 .transactions
8059 .push(language::proto::serialize_transaction(&transaction));
8060 }
8061 serialized_transaction
8062 }
8063
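    /// Rebuilds a `ProjectTransaction` from its proto representation, waiting for every
    /// referenced remote buffer and all of its edits to arrive before optionally pushing each
    /// transaction onto the corresponding buffer's undo history.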
8064 fn deserialize_project_transaction(
8065 &mut self,
8066 message: proto::ProjectTransaction,
8067 push_to_history: bool,
8068 cx: &mut ModelContext<Self>,
8069 ) -> Task<Result<ProjectTransaction>> {
8070 cx.spawn(move |this, mut cx| async move {
8071 let mut project_transaction = ProjectTransaction::default();
8072 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
8073 {
8074 let buffer = this
8075 .update(&mut cx, |this, cx| {
8076 this.wait_for_remote_buffer(buffer_id, cx)
8077 })?
8078 .await?;
8079 let transaction = language::proto::deserialize_transaction(transaction)?;
8080 project_transaction.0.insert(buffer, transaction);
8081 }
8082
8083 for (buffer, transaction) in &project_transaction.0 {
8084 buffer
8085 .update(&mut cx, |buffer, _| {
8086 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
8087 })?
8088 .await?;
8089
8090 if push_to_history {
8091 buffer.update(&mut cx, |buffer, _| {
8092 buffer.push_transaction(transaction.clone(), Instant::now());
8093 })?;
8094 }
8095 }
8096
8097 Ok(project_transaction)
8098 })
8099 }
8100
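    /// Returns the buffer's remote id and, if this project is currently shared, queues a
    /// `CreateBufferForPeer` update so the buffer's contents are streamed to the given peer.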
8101 fn create_buffer_for_peer(
8102 &mut self,
8103 buffer: &Model<Buffer>,
8104 peer_id: proto::PeerId,
8105 cx: &mut AppContext,
8106 ) -> u64 {
8107 let buffer_id = buffer.read(cx).remote_id();
8108 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
8109 updates_tx
8110 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
8111 .ok();
8112 }
8113 buffer_id
8114 }
8115
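    /// Waits until a buffer with the given remote id appears in `opened_buffers`, marking it
    /// as incomplete in the meantime and failing if the project is dropped or the connection
    /// to the host is lost first.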
8116 fn wait_for_remote_buffer(
8117 &mut self,
8118 id: u64,
8119 cx: &mut ModelContext<Self>,
8120 ) -> Task<Result<Model<Buffer>>> {
8121 let mut opened_buffer_rx = self.opened_buffer.1.clone();
8122
8123 cx.spawn(move |this, mut cx| async move {
8124 let buffer = loop {
8125 let Some(this) = this.upgrade() else {
8126 return Err(anyhow!("project dropped"));
8127 };
8128
8129 let buffer = this.update(&mut cx, |this, _cx| {
8130 this.opened_buffers
8131 .get(&id)
8132 .and_then(|buffer| buffer.upgrade())
8133 })?;
8134
8135 if let Some(buffer) = buffer {
8136 break buffer;
8137 } else if this.update(&mut cx, |this, _| this.is_disconnected())? {
8138 return Err(anyhow!("disconnected before buffer {} could be opened", id));
8139 }
8140
8141 this.update(&mut cx, |this, _| {
8142 this.incomplete_remote_buffers.entry(id).or_default();
8143 })?;
8144 drop(this);
8145
8146 opened_buffer_rx
8147 .next()
8148 .await
8149 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
8150 };
8151
8152 Ok(buffer)
8153 })
8154 }
8155
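    /// Re-synchronizes buffer state with the host of a remote project (e.g. after
    /// reconnecting): sends the host the versions of all open buffers, streams back any local
    /// operations the host's reply shows it is missing, and asks the host to re-create any
    /// buffers that never finished loading.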
8156 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8157 let project_id = match self.client_state {
8158 ProjectClientState::Remote {
8159 sharing_has_stopped,
8160 remote_id,
8161 ..
8162 } => {
8163 if sharing_has_stopped {
8164 return Task::ready(Err(anyhow!(
8165 "can't synchronize remote buffers on a readonly project"
8166 )));
8167 } else {
8168 remote_id
8169 }
8170 }
8171 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8172 return Task::ready(Err(anyhow!(
8173 "can't synchronize remote buffers on a local project"
8174 )))
8175 }
8176 };
8177
8178 let client = self.client.clone();
8179 cx.spawn(move |this, mut cx| async move {
8180 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8181 let buffers = this
8182 .opened_buffers
8183 .iter()
8184 .filter_map(|(id, buffer)| {
8185 let buffer = buffer.upgrade()?;
8186 Some(proto::BufferVersion {
8187 id: *id,
8188 version: language::proto::serialize_version(&buffer.read(cx).version),
8189 })
8190 })
8191 .collect();
8192 let incomplete_buffer_ids = this
8193 .incomplete_remote_buffers
8194 .keys()
8195 .copied()
8196 .collect::<Vec<_>>();
8197
8198 (buffers, incomplete_buffer_ids)
8199 })?;
8200 let response = client
8201 .request(proto::SynchronizeBuffers {
8202 project_id,
8203 buffers,
8204 })
8205 .await?;
8206
8207 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8208 response
8209 .buffers
8210 .into_iter()
8211 .map(|buffer| {
8212 let client = client.clone();
8213 let buffer_id = buffer.id;
8214 let remote_version = language::proto::deserialize_version(&buffer.version);
8215 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8216 let operations =
8217 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8218 cx.background_executor().spawn(async move {
8219 let operations = operations.await;
8220 for chunk in split_operations(operations) {
8221 client
8222 .request(proto::UpdateBuffer {
8223 project_id,
8224 buffer_id,
8225 operations: chunk,
8226 })
8227 .await?;
8228 }
8229 anyhow::Ok(())
8230 })
8231 } else {
8232 Task::ready(Ok(()))
8233 }
8234 })
8235 .collect::<Vec<_>>()
8236 })?;
8237
            // Any incomplete buffers have open requests waiting. Ask the host to create
            // these buffers for us again so that any futures waiting on them are unblocked.
8240 for id in incomplete_buffer_ids {
8241 cx.background_executor()
8242 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8243 .detach();
8244 }
8245
8246 futures::future::join_all(send_updates_for_buffers)
8247 .await
8248 .into_iter()
8249 .collect()
8250 })
8251 }
8252
8253 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8254 self.worktrees()
8255 .map(|worktree| {
8256 let worktree = worktree.read(cx);
8257 proto::WorktreeMetadata {
8258 id: worktree.id().to_proto(),
8259 root_name: worktree.root_name().into(),
8260 visible: worktree.is_visible(),
8261 abs_path: worktree.abs_path().to_string_lossy().into(),
8262 }
8263 })
8264 .collect()
8265 }
8266
8267 fn set_worktrees_from_proto(
8268 &mut self,
8269 worktrees: Vec<proto::WorktreeMetadata>,
8270 cx: &mut ModelContext<Project>,
8271 ) -> Result<()> {
8272 let replica_id = self.replica_id();
8273 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8274
8275 let mut old_worktrees_by_id = self
8276 .worktrees
8277 .drain(..)
8278 .filter_map(|worktree| {
8279 let worktree = worktree.upgrade()?;
8280 Some((worktree.read(cx).id(), worktree))
8281 })
8282 .collect::<HashMap<_, _>>();
8283
8284 for worktree in worktrees {
8285 if let Some(old_worktree) =
8286 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8287 {
8288 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8289 } else {
8290 let worktree =
8291 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8292 let _ = self.add_worktree(&worktree, cx);
8293 }
8294 }
8295
8296 self.metadata_changed(cx);
8297 for id in old_worktrees_by_id.keys() {
8298 cx.emit(Event::WorktreeRemoved(*id));
8299 }
8300
8301 Ok(())
8302 }
8303
8304 fn set_collaborators_from_proto(
8305 &mut self,
8306 messages: Vec<proto::Collaborator>,
8307 cx: &mut ModelContext<Self>,
8308 ) -> Result<()> {
8309 let mut collaborators = HashMap::default();
8310 for message in messages {
8311 let collaborator = Collaborator::from_proto(message)?;
8312 collaborators.insert(collaborator.peer_id, collaborator);
8313 }
8314 for old_peer_id in self.collaborators.keys() {
8315 if !collaborators.contains_key(old_peer_id) {
8316 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8317 }
8318 }
8319 self.collaborators = collaborators;
8320 Ok(())
8321 }
8322
8323 fn deserialize_symbol(
8324 &self,
8325 serialized_symbol: proto::Symbol,
8326 ) -> impl Future<Output = Result<Symbol>> {
8327 let languages = self.languages.clone();
8328 async move {
8329 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8330 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8331 let start = serialized_symbol
8332 .start
8333 .ok_or_else(|| anyhow!("invalid start"))?;
8334 let end = serialized_symbol
8335 .end
8336 .ok_or_else(|| anyhow!("invalid end"))?;
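            // The wire format carries the symbol kind as a raw integer; converting it back
            // relies on the kind type sharing that integer's representation.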
8337 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8338 let path = ProjectPath {
8339 worktree_id,
8340 path: PathBuf::from(serialized_symbol.path).into(),
8341 };
8342 let language = languages
8343 .language_for_file(&path.path, None)
8344 .await
8345 .log_err();
8346 Ok(Symbol {
8347 language_server_name: LanguageServerName(
8348 serialized_symbol.language_server_name.into(),
8349 ),
8350 source_worktree_id,
8351 path,
8352 label: {
8353 match language {
8354 Some(language) => {
8355 language
8356 .label_for_symbol(&serialized_symbol.name, kind)
8357 .await
8358 }
8359 None => None,
8360 }
8361 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8362 },
8363
8364 name: serialized_symbol.name,
8365 range: Unclipped(PointUtf16::new(start.row, start.column))
8366 ..Unclipped(PointUtf16::new(end.row, end.column)),
8367 kind,
8368 signature: serialized_symbol
8369 .signature
8370 .try_into()
8371 .map_err(|_| anyhow!("invalid signature"))?,
8372 })
8373 }
8374 }
8375
8376 async fn handle_buffer_saved(
8377 this: Model<Self>,
8378 envelope: TypedEnvelope<proto::BufferSaved>,
8379 _: Arc<Client>,
8380 mut cx: AsyncAppContext,
8381 ) -> Result<()> {
8382 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8383 let version = deserialize_version(&envelope.payload.version);
8384 let mtime = envelope
8385 .payload
8386 .mtime
8387 .ok_or_else(|| anyhow!("missing mtime"))?
8388 .into();
8389
8390 this.update(&mut cx, |this, cx| {
8391 let buffer = this
8392 .opened_buffers
8393 .get(&envelope.payload.buffer_id)
8394 .and_then(|buffer| buffer.upgrade())
8395 .or_else(|| {
8396 this.incomplete_remote_buffers
8397 .get(&envelope.payload.buffer_id)
8398 .and_then(|b| b.clone())
8399 });
8400 if let Some(buffer) = buffer {
8401 buffer.update(cx, |buffer, cx| {
8402 buffer.did_save(version, fingerprint, mtime, cx);
8403 });
8404 }
8405 Ok(())
8406 })?
8407 }
8408
8409 async fn handle_buffer_reloaded(
8410 this: Model<Self>,
8411 envelope: TypedEnvelope<proto::BufferReloaded>,
8412 _: Arc<Client>,
8413 mut cx: AsyncAppContext,
8414 ) -> Result<()> {
8415 let payload = envelope.payload;
8416 let version = deserialize_version(&payload.version);
8417 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8418 let line_ending = deserialize_line_ending(
8419 proto::LineEnding::from_i32(payload.line_ending)
8420 .ok_or_else(|| anyhow!("missing line ending"))?,
8421 );
8422 let mtime = payload
8423 .mtime
8424 .ok_or_else(|| anyhow!("missing mtime"))?
8425 .into();
8426 this.update(&mut cx, |this, cx| {
8427 let buffer = this
8428 .opened_buffers
8429 .get(&payload.buffer_id)
8430 .and_then(|buffer| buffer.upgrade())
8431 .or_else(|| {
8432 this.incomplete_remote_buffers
8433 .get(&payload.buffer_id)
8434 .cloned()
8435 .flatten()
8436 });
8437 if let Some(buffer) = buffer {
8438 buffer.update(cx, |buffer, cx| {
8439 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8440 });
8441 }
8442 Ok(())
8443 })?
8444 }
8445
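    /// Converts a batch of LSP `TextEdit`s (expressed against the document version the server
    /// saw) into anchored edits on the current buffer: ranges are clipped, adjacent or
    /// newline-separated edits are merged, and multi-line replacements are diffed so that
    /// anchors in unchanged regions stay put.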
8446 #[allow(clippy::type_complexity)]
8447 fn edits_from_lsp(
8448 &mut self,
8449 buffer: &Model<Buffer>,
8450 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8451 server_id: LanguageServerId,
8452 version: Option<i32>,
8453 cx: &mut ModelContext<Self>,
8454 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8455 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8456 cx.background_executor().spawn(async move {
8457 let snapshot = snapshot?;
8458 let mut lsp_edits = lsp_edits
8459 .into_iter()
8460 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8461 .collect::<Vec<_>>();
8462 lsp_edits.sort_by_key(|(range, _)| range.start);
8463
8464 let mut lsp_edits = lsp_edits.into_iter().peekable();
8465 let mut edits = Vec::new();
8466 while let Some((range, mut new_text)) = lsp_edits.next() {
8467 // Clip invalid ranges provided by the language server.
8468 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8469 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8470
8471 // Combine any LSP edits that are adjacent.
8472 //
8473 // Also, combine LSP edits that are separated from each other by only
8474 // a newline. This is important because for some code actions,
8475 // Rust-analyzer rewrites the entire buffer via a series of edits that
8476 // are separated by unchanged newline characters.
8477 //
8478 // In order for the diffing logic below to work properly, any edits that
8479 // cancel each other out must be combined into one.
8480 while let Some((next_range, next_text)) = lsp_edits.peek() {
8481 if next_range.start.0 > range.end {
8482 if next_range.start.0.row > range.end.row + 1
8483 || next_range.start.0.column > 0
8484 || snapshot.clip_point_utf16(
8485 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8486 Bias::Left,
8487 ) > range.end
8488 {
8489 break;
8490 }
8491 new_text.push('\n');
8492 }
8493 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8494 new_text.push_str(next_text);
8495 lsp_edits.next();
8496 }
8497
8498 // For multiline edits, perform a diff of the old and new text so that
8499 // we can identify the changes more precisely, preserving the locations
8500 // of any anchors positioned in the unchanged regions.
8501 if range.end.row > range.start.row {
8502 let mut offset = range.start.to_offset(&snapshot);
8503 let old_text = snapshot.text_for_range(range).collect::<String>();
8504
8505 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8506 let mut moved_since_edit = true;
8507 for change in diff.iter_all_changes() {
8508 let tag = change.tag();
8509 let value = change.value();
8510 match tag {
8511 ChangeTag::Equal => {
8512 offset += value.len();
8513 moved_since_edit = true;
8514 }
8515 ChangeTag::Delete => {
8516 let start = snapshot.anchor_after(offset);
8517 let end = snapshot.anchor_before(offset + value.len());
8518 if moved_since_edit {
8519 edits.push((start..end, String::new()));
8520 } else {
8521 edits.last_mut().unwrap().0.end = end;
8522 }
8523 offset += value.len();
8524 moved_since_edit = false;
8525 }
8526 ChangeTag::Insert => {
8527 if moved_since_edit {
8528 let anchor = snapshot.anchor_after(offset);
8529 edits.push((anchor..anchor, value.to_string()));
8530 } else {
8531 edits.last_mut().unwrap().1.push_str(value);
8532 }
8533 moved_since_edit = false;
8534 }
8535 }
8536 }
8537 } else if range.end == range.start {
8538 let anchor = snapshot.anchor_after(range.start);
8539 edits.push((anchor..anchor, new_text));
8540 } else {
8541 let edit_start = snapshot.anchor_after(range.start);
8542 let edit_end = snapshot.anchor_before(range.end);
8543 edits.push((edit_start..edit_end, new_text));
8544 }
8545 }
8546
8547 Ok(edits)
8548 })
8549 }
8550
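    /// Returns the buffer snapshot that corresponds to the given LSP document version for the
    /// given server (or the current text when no version is supplied), pruning snapshots more
    /// than `OLD_VERSIONS_TO_RETAIN` versions old.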
8551 fn buffer_snapshot_for_lsp_version(
8552 &mut self,
8553 buffer: &Model<Buffer>,
8554 server_id: LanguageServerId,
8555 version: Option<i32>,
8556 cx: &AppContext,
8557 ) -> Result<TextBufferSnapshot> {
8558 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8559
8560 if let Some(version) = version {
8561 let buffer_id = buffer.read(cx).remote_id();
8562 let snapshots = self
8563 .buffer_snapshots
8564 .get_mut(&buffer_id)
8565 .and_then(|m| m.get_mut(&server_id))
8566 .ok_or_else(|| {
8567 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8568 })?;
8569
8570 let found_snapshot = snapshots
8571 .binary_search_by_key(&version, |e| e.version)
8572 .map(|ix| snapshots[ix].snapshot.clone())
8573 .map_err(|_| {
8574 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8575 })?;
8576
8577 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8578 Ok(found_snapshot)
8579 } else {
8580 Ok((buffer.read(cx)).text_snapshot())
8581 }
8582 }
8583
8584 pub fn language_servers(
8585 &self,
8586 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8587 self.language_server_ids
8588 .iter()
8589 .map(|((worktree_id, server_name), server_id)| {
8590 (*server_id, server_name.clone(), *worktree_id)
8591 })
8592 }
8593
8594 pub fn supplementary_language_servers(
8595 &self,
8596 ) -> impl '_
8597 + Iterator<
8598 Item = (
8599 &LanguageServerId,
8600 &(LanguageServerName, Arc<LanguageServer>),
8601 ),
8602 > {
8603 self.supplementary_language_servers.iter()
8604 }
8605
8606 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8607 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8608 Some(server.clone())
8609 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8610 Some(Arc::clone(server))
8611 } else {
8612 None
8613 }
8614 }
8615
8616 pub fn language_servers_for_buffer(
8617 &self,
8618 buffer: &Buffer,
8619 cx: &AppContext,
8620 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8621 self.language_server_ids_for_buffer(buffer, cx)
8622 .into_iter()
8623 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8624 LanguageServerState::Running {
8625 adapter, server, ..
8626 } => Some((adapter, server)),
8627 _ => None,
8628 })
8629 }
8630
8631 fn primary_language_server_for_buffer(
8632 &self,
8633 buffer: &Buffer,
8634 cx: &AppContext,
8635 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8636 self.language_servers_for_buffer(buffer, cx).next()
8637 }
8638
8639 pub fn language_server_for_buffer(
8640 &self,
8641 buffer: &Buffer,
8642 server_id: LanguageServerId,
8643 cx: &AppContext,
8644 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8645 self.language_servers_for_buffer(buffer, cx)
8646 .find(|(_, s)| s.server_id() == server_id)
8647 }
8648
8649 fn language_server_ids_for_buffer(
8650 &self,
8651 buffer: &Buffer,
8652 cx: &AppContext,
8653 ) -> Vec<LanguageServerId> {
8654 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8655 let worktree_id = file.worktree_id(cx);
8656 language
8657 .lsp_adapters()
8658 .iter()
8659 .flat_map(|adapter| {
8660 let key = (worktree_id, adapter.name.clone());
8661 self.language_server_ids.get(&key).copied()
8662 })
8663 .collect()
8664 } else {
8665 Vec::new()
8666 }
8667 }
8668}
8669
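/// Subscribes to Copilot events so that, once its language server starts, the server is
/// registered as a supplementary language server and its `LogMessage` notifications are
/// forwarded as `LanguageServerLog` events.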
8670fn subscribe_for_copilot_events(
8671 copilot: &Model<Copilot>,
8672 cx: &mut ModelContext<'_, Project>,
8673) -> gpui::Subscription {
8674 cx.subscribe(
8675 copilot,
8676 |project, copilot, copilot_event, cx| match copilot_event {
8677 copilot::Event::CopilotLanguageServerStarted => {
8678 match copilot.read(cx).language_server() {
8679 Some((name, copilot_server)) => {
                        // Another event may try to re-add a server that was already added
                        // and subscribed to; avoid doing it again.
8681 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8682 let new_server_id = copilot_server.server_id();
8683 let weak_project = cx.weak_model();
8684 let copilot_log_subscription = copilot_server
8685 .on_notification::<copilot::request::LogMessage, _>(
8686 move |params, mut cx| {
8687 weak_project.update(&mut cx, |_, cx| {
8688 cx.emit(Event::LanguageServerLog(
8689 new_server_id,
8690 params.message,
8691 ));
8692 }).ok();
8693 },
8694 );
8695 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8696 project.copilot_log_subscription = Some(copilot_log_subscription);
8697 cx.emit(Event::LanguageServerAdded(new_server_id));
8698 }
8699 }
8700 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8701 }
8702 }
8703 },
8704 )
8705}
8706
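/// Returns the longest leading run of path components that contain no glob metacharacters
/// (`*`, `?`, `{`, `}`). For example, with `/` as the path separator,
/// `glob_literal_prefix("node_modules/**/*.js")` is `"node_modules"`.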
8707fn glob_literal_prefix<'a>(glob: &'a str) -> &'a str {
8708 let mut literal_end = 0;
8709 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8710 if part.contains(&['*', '?', '{', '}']) {
8711 break;
8712 } else {
8713 if i > 0 {
8714 // Account for separator prior to this part
8715 literal_end += path::MAIN_SEPARATOR.len_utf8();
8716 }
8717 literal_end += part.len();
8718 }
8719 }
8720 &glob[..literal_end]
8721}
8722
8723impl WorktreeHandle {
8724 pub fn upgrade(&self) -> Option<Model<Worktree>> {
8725 match self {
8726 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8727 WorktreeHandle::Weak(handle) => handle.upgrade(),
8728 }
8729 }
8730
8731 pub fn handle_id(&self) -> usize {
8732 match self {
8733 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8734 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8735 }
8736 }
8737}
8738
8739impl OpenBuffer {
8740 pub fn upgrade(&self) -> Option<Model<Buffer>> {
8741 match self {
8742 OpenBuffer::Strong(handle) => Some(handle.clone()),
8743 OpenBuffer::Weak(handle) => handle.upgrade(),
8744 OpenBuffer::Operations(_) => None,
8745 }
8746 }
8747}
8748
8749pub struct PathMatchCandidateSet {
8750 pub snapshot: Snapshot,
8751 pub include_ignored: bool,
8752 pub include_root_name: bool,
8753}
8754
8755impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8756 type Candidates = PathMatchCandidateSetIter<'a>;
8757
8758 fn id(&self) -> usize {
8759 self.snapshot.id().to_usize()
8760 }
8761
8762 fn len(&self) -> usize {
8763 if self.include_ignored {
8764 self.snapshot.file_count()
8765 } else {
8766 self.snapshot.visible_file_count()
8767 }
8768 }
8769
8770 fn prefix(&self) -> Arc<str> {
8771 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8772 self.snapshot.root_name().into()
8773 } else if self.include_root_name {
8774 format!("{}/", self.snapshot.root_name()).into()
8775 } else {
8776 "".into()
8777 }
8778 }
8779
8780 fn candidates(&'a self, start: usize) -> Self::Candidates {
8781 PathMatchCandidateSetIter {
8782 traversal: self.snapshot.files(self.include_ignored, start),
8783 }
8784 }
8785}
8786
8787pub struct PathMatchCandidateSetIter<'a> {
8788 traversal: Traversal<'a>,
8789}
8790
8791impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8792 type Item = fuzzy::PathMatchCandidate<'a>;
8793
8794 fn next(&mut self) -> Option<Self::Item> {
8795 self.traversal.next().map(|entry| {
8796 if let EntryKind::File(char_bag) = entry.kind {
8797 fuzzy::PathMatchCandidate {
8798 path: &entry.path,
8799 char_bag,
8800 }
8801 } else {
8802 unreachable!()
8803 }
8804 })
8805 }
8806}
8807
8808impl EventEmitter<Event> for Project {}
8809
8810impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8811 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8812 Self {
8813 worktree_id,
8814 path: path.as_ref().into(),
8815 }
8816 }
8817}
8818
8819impl ProjectLspAdapterDelegate {
8820 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8821 Arc::new(Self {
8822 project: cx.handle(),
8823 http_client: project.client.http_client(),
8824 })
8825 }
8826}
8827
8828impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8829 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8830 self.project
8831 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8832 }
8833
8834 fn http_client(&self) -> Arc<dyn HttpClient> {
8835 self.http_client.clone()
8836 }
8837}
8838
8839fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8840 proto::Symbol {
8841 language_server_name: symbol.language_server_name.0.to_string(),
8842 source_worktree_id: symbol.source_worktree_id.to_proto(),
8843 worktree_id: symbol.path.worktree_id.to_proto(),
8844 path: symbol.path.path.to_string_lossy().to_string(),
8845 name: symbol.name.clone(),
8846 kind: unsafe { mem::transmute(symbol.kind) },
8847 start: Some(proto::PointUtf16 {
8848 row: symbol.range.start.0.row,
8849 column: symbol.range.start.0.column,
8850 }),
8851 end: Some(proto::PointUtf16 {
8852 row: symbol.range.end.0.row,
8853 column: symbol.range.end.0.column,
8854 }),
8855 signature: symbol.signature.to_vec(),
8856 }
8857}
8858
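/// Computes `path` relative to `base`, inserting `..` components where necessary.
/// For example, relativizing `/a/c/d` against `/a/b` yields `../c/d`.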
8859fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8860 let mut path_components = path.components();
8861 let mut base_components = base.components();
8862 let mut components: Vec<Component> = Vec::new();
8863 loop {
8864 match (path_components.next(), base_components.next()) {
8865 (None, None) => break,
8866 (Some(a), None) => {
8867 components.push(a);
8868 components.extend(path_components.by_ref());
8869 break;
8870 }
8871 (None, _) => components.push(Component::ParentDir),
8872 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8873 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8874 (Some(a), Some(_)) => {
8875 components.push(Component::ParentDir);
8876 for _ in base_components {
8877 components.push(Component::ParentDir);
8878 }
8879 components.push(a);
8880 components.extend(path_components.by_ref());
8881 break;
8882 }
8883 }
8884 }
8885 components.iter().map(|c| c.as_os_str()).collect()
8886}
8887
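/// Resolves `path` against `base` purely lexically, folding `.` and `..` components without
/// consulting the filesystem. For example, resolving `../c` against `/a/b` yields `/a/c`.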
8888fn resolve_path(base: &Path, path: &Path) -> PathBuf {
8889 let mut result = base.to_path_buf();
8890 for component in path.components() {
8891 match component {
8892 Component::ParentDir => {
8893 result.pop();
8894 }
8895 Component::CurDir => (),
8896 _ => result.push(component),
8897 }
8898 }
8899 result
8900}
8901
8902impl Item for Buffer {
8903 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8904 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8905 }
8906
8907 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8908 File::from_dyn(self.file()).map(|file| ProjectPath {
8909 worktree_id: file.worktree_id(cx),
8910 path: file.path().clone(),
8911 })
8912 }
8913}
8914
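/// Polls the watch channel until the shared "loading buffer" slot has been filled in, then
/// returns a clone of the stored result.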
8915async fn wait_for_loading_buffer(
8916 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
8917) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
8918 loop {
8919 if let Some(result) = receiver.borrow().as_ref() {
8920 match result {
8921 Ok(buffer) => return Ok(buffer.to_owned()),
8922 Err(e) => return Err(e.to_owned()),
8923 }
8924 }
8925 receiver.next().await;
8926 }
8927}
8928
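/// Inspects the server's advertised text-document-sync capabilities and returns true only
/// when it explicitly asks for the full document text to be included in `didSave`
/// notifications.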
8929fn include_text(server: &lsp::LanguageServer) -> bool {
8930 server
8931 .capabilities()
8932 .text_document_sync
8933 .as_ref()
8934 .and_then(|sync| match sync {
8935 lsp::TextDocumentSyncCapability::Kind(_) => None,
8936 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8937 })
8938 .and_then(|save_options| match save_options {
8939 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
8940 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8941 })
8942 .unwrap_or(false)
8943}