mod ignore;
pub mod lsp_command;
pub mod lsp_ext_command;
mod prettier_support;
pub mod project_settings;
pub mod search;
pub mod terminals;
pub mod worktree;

#[cfg(test)]
mod project_tests;
#[cfg(test)]
mod worktree_tests;

use anyhow::{anyhow, Context as _, Result};
use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
use copilot::Copilot;
use futures::{
    channel::{
        mpsc::{self, UnboundedReceiver},
        oneshot,
    },
    future::{try_join_all, Shared},
    stream::FuturesUnordered,
    AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
};
use globset::{Glob, GlobSet, GlobSetBuilder};
use gpui::{
    AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
    Model, ModelContext, Task, WeakModel,
};
use itertools::Itertools;
use language::{
    language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
    markdown, point_to_lsp,
    proto::{
        deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
        serialize_anchor, serialize_version, split_operations,
    },
    range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability,
    CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
    Documentation, Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName,
    LocalFile, LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer,
    PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
    DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
};
use lsp_command::*;
use node_runtime::NodeRuntime;
use parking_lot::{Mutex, RwLock};
use postage::watch;
use prettier_support::{DefaultPrettier, PrettierInstance};
use project_settings::{LspSettings, ProjectSettings};
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::{Settings, SettingsStore};
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use smol::channel::{Receiver, Sender};
use smol::lock::Semaphore;
use std::{
    cmp::{self, Ordering},
    convert::TryInto,
    hash::Hash,
    mem,
    num::NonZeroU32,
    ops::Range,
    path::{self, Component, Path, PathBuf},
    process::Stdio,
    str,
    sync::{
        atomic::{AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::{Duration, Instant},
};
use terminals::Terminals;
use text::Anchor;
use util::{
    debug_panic, defer, http::HttpClient, merge_json_value_into,
    paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
};

pub use fs::*;
#[cfg(any(test, feature = "test-support"))]
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::*;

const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;

pub trait Item {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
}

pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
    languages: Arc<LanguageRegistry>,
    supplementary_language_servers:
        HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
    language_servers: HashMap<LanguageServerId, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
    language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
    last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    join_project_response_message_id: u32,
    next_diagnostic_group_id: usize,
    user_store: Model<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<proto::PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    next_buffer_id: u64,
    opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
    shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
    #[allow(clippy::type_complexity)]
    loading_buffers_by_path: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
    >,
    #[allow(clippy::type_complexity)]
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// A mapping from a buffer ID to None means that we've started waiting for an ID but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
    incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>,
    buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
    buffers_being_formatted: HashSet<u64>,
    buffers_needing_diff: HashSet<WeakModel<Buffer>>,
    git_diff_debouncer: DelayedDebounced,
    nonce: u128,
    _maintain_buffer_languages: Task<()>,
    _maintain_workspace_config: Task<Result<()>>,
    terminals: Terminals,
    copilot_lsp_subscription: Option<gpui::Subscription>,
    copilot_log_subscription: Option<lsp::Subscription>,
    current_lsp_settings: HashMap<Arc<str>, LspSettings>,
    node: Option<Arc<dyn NodeRuntime>>,
    default_prettier: DefaultPrettier,
    prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
    prettier_instances: HashMap<PathBuf, PrettierInstance>,
}
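
/// Debounces a unit of work: each call to `fire_new` cancels any previously
/// scheduled invocation and restarts the timer, so only the most recent
/// request within the delay window actually runs.
///
/// A minimal usage sketch (illustrative only; the closure body here is a
/// placeholder for the real debounced work):
///
/// ```ignore
/// self.git_diff_debouncer
///     .fire_new(Duration::from_millis(50), cx, |_project, _cx| {
///         // start the debounced work and return its task
///         Task::ready(())
///     });
/// ```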
struct DelayedDebounced {
    task: Option<Task<()>>,
    cancel_channel: Option<oneshot::Sender<()>>,
}
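
/// Identifies which language server a buffer request should be routed to:
/// the buffer's primary server, or a specific server by ID.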
pub enum LanguageServerToQuery {
    Primary,
    Other(LanguageServerId),
}

impl DelayedDebounced {
    fn new() -> DelayedDebounced {
        DelayedDebounced {
            task: None,
            cancel_channel: None,
        }
    }

    fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
    where
        F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
    {
        if let Some(channel) = self.cancel_channel.take() {
            _ = channel.send(());
        }

        let (sender, mut receiver) = oneshot::channel::<()>();
        self.cancel_channel = Some(sender);

        let previous_task = self.task.take();
        self.task = Some(cx.spawn(move |project, mut cx| async move {
            let mut timer = cx.background_executor().timer(delay).fuse();
            if let Some(previous_task) = previous_task {
                previous_task.await;
            }

            futures::select_biased! {
                _ = receiver => return,
                _ = timer => {}
            }

            if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
                task.await;
            }
        }));
    }
}
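
/// A text snapshot paired with the document version that was reported to a
/// language server, kept so positions can be resolved against the exact
/// content that server has seen.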
struct LspBufferSnapshot {
    version: i32,
    snapshot: TextBufferSnapshot,
}

/// Message ordered with respect to buffer operations
enum BufferOrderedMessage {
    Operation {
        buffer_id: u64,
        operation: proto::Operation,
    },
    LanguageServerUpdate {
        language_server_id: LanguageServerId,
        message: proto::update_language_server::Variant,
    },
    Resync,
}

enum LocalProjectUpdate {
    WorktreesChanged,
    CreateBufferForPeer {
        peer_id: proto::PeerId,
        buffer_id: u64,
    },
}
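
/// How the project holds an open buffer: strongly (the buffer must stay
/// alive, e.g. while the project is shared), weakly (the buffer may be
/// dropped when nothing else references it), or as a backlog of operations
/// received before the buffer itself exists.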
enum OpenBuffer {
    Strong(Model<Buffer>),
    Weak(WeakModel<Buffer>),
    Operations(Vec<Operation>),
}
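
/// Analogous to [`OpenBuffer`]: worktrees are held strongly while the project
/// is shared and may be downgraded to weak handles when it is not.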
#[derive(Clone)]
enum WorktreeHandle {
    Strong(Model<Worktree>),
    Weak(WeakModel<Worktree>),
}
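
/// Whether this project is purely local, shared by the local user as a host,
/// or a remote project joined as a guest.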
#[derive(Debug)]
enum ProjectClientState {
    Local,
    Shared {
        remote_id: u64,
        updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
        _send_updates: Task<Result<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        capability: Capability,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    LanguageServerAdded(LanguageServerId),
    LanguageServerRemoved(LanguageServerId),
    LanguageServerLog(LanguageServerId, String),
    Notification(String),
    ActiveEntryChanged(Option<ProjectEntryId>),
    ActivateProjectPanel,
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
    DiskBasedDiagnosticsStarted {
        language_server_id: LanguageServerId,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: LanguageServerId,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: LanguageServerId,
    },
    RemoteIdChanged(Option<u64>),
    DisconnectedFromHost,
    Closed,
    DeletedEntry(ProjectEntryId),
    CollaboratorUpdated {
        old_peer_id: proto::PeerId,
        new_peer_id: proto::PeerId,
    },
    CollaboratorJoined(proto::PeerId),
    CollaboratorLeft(proto::PeerId),
    RefreshInlayHints,
    RevealInProjectPanel(ProjectEntryId),
}

pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),

    Running {
        language: Arc<Language>,
        adapter: Arc<CachedLspAdapter>,
        server: Arc<LanguageServer>,
        watched_paths: HashMap<WorktreeId, GlobSet>,
        simulate_disk_based_diagnostics_completion: Option<Task<()>>,
    },
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Location {
    pub buffer: Model<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHint {
    pub position: language::Anchor,
    pub label: InlayHintLabel,
    pub kind: Option<InlayHintKind>,
    pub padding_left: bool,
    pub padding_right: bool,
    pub tooltip: Option<InlayHintTooltip>,
    pub resolve_state: ResolveState,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ResolveState {
    Resolved,
    CanResolve(LanguageServerId, Option<lsp::LSPAny>),
    Resolving,
}

impl InlayHint {
    pub fn text(&self) -> String {
        match &self.label {
            InlayHintLabel::String(s) => s.to_owned(),
            InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
        }
    }
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabel {
    String(String),
    LabelParts(Vec<InlayHintLabelPart>),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlayHintLabelPart {
    pub value: String,
    pub tooltip: Option<InlayHintLabelPartTooltip>,
    pub location: Option<(LanguageServerId, lsp::Location)>,
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum InlayHintLabelPartTooltip {
    String(String),
    MarkupContent(MarkupContent),
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MarkupContent {
    pub kind: HoverBlockKind,
    pub value: String,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub language_server_name: LanguageServerName,
    pub source_worktree_id: WorktreeId,
    pub path: ProjectPath,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<Unclipped<PointUtf16>>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub kind: HoverBlockKind,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum HoverBlockKind {
    PlainText,
    Markdown,
    Code { language: String },
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
    pub language: Option<Arc<Language>>,
}

impl Hover {
    pub fn is_empty(&self) -> bool {
        self.contents.iter().all(|block| block.text.is_empty())
    }
}
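
/// The buffer edits produced by a single logical operation (for example a
/// workspace edit or a formatting pass), keyed by the buffer they apply to.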
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(
        &self,
        language_server_id: LanguageServerId,
        path: &Path,
    ) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: language_server_id.0 as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}
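
/// A unique identifier for a file-system entry within a project, allocated
/// from a shared atomic counter and convertible to and from its wire (`u64`)
/// representation.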
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatTrigger {
    Save,
    Manual,
}

struct ProjectLspAdapterDelegate {
    project: Model<Project>,
    http_client: Arc<dyn HttpClient>,
}

// Currently, formatting operations are represented differently depending on
// whether they come from a language server or an external command.
enum FormatOperation {
    Lsp(Vec<(Range<Anchor>, String)>),
    External(Diff),
    Prettier(Diff),
}

impl FormatTrigger {
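    /// Decodes the trigger from its protobuf representation; unknown values
    /// conservatively fall back to `FormatTrigger::Save`.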
    fn from_proto(value: i32) -> FormatTrigger {
        match value {
            0 => FormatTrigger::Save,
            1 => FormatTrigger::Manual,
            _ => FormatTrigger::Save,
        }
    }
}

#[derive(Clone, Debug, PartialEq)]
enum SearchMatchCandidate {
    OpenBuffer {
        buffer: Model<Buffer>,
        // This might be an unnamed file without a representation on the filesystem
        path: Option<Arc<Path>>,
    },
    Path {
        worktree_id: WorktreeId,
        is_ignored: bool,
        path: Arc<Path>,
    },
}

type SearchMatchCandidateIndex = usize;
impl SearchMatchCandidate {
    fn path(&self) -> Option<Arc<Path>> {
        match self {
            SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
            SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
        }
    }
}

impl Project {
    pub fn init_settings(cx: &mut AppContext) {
        ProjectSettings::register(cx);
    }

    pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
        Self::init_settings(cx);

        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_update_project_collaborator);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unshare_project);
        client.add_model_message_handler(Self::handle_create_buffer_for_peer);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_request_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_message_handler(Self::handle_update_worktree_settings);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_expand_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_on_type_formatting);
        client.add_model_request_handler(Self::handle_inlay_hints);
        client.add_model_request_handler(Self::handle_resolve_inlay_hint);
        client.add_model_request_handler(Self::handle_refresh_inlay_hints);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_synchronize_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
        client.add_model_message_handler(Self::handle_update_diff_base);
        client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
    }
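
    /// Creates a purely local project backed by the given language registry,
    /// file system, and Node runtime. No collaboration state exists until
    /// [`Project::shared`] is called.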
    pub fn local(
        client: Arc<Client>,
        node: Arc<dyn NodeRuntime>,
        user_store: Model<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AppContext,
    ) -> Model<Self> {
        cx.new_model(|cx: &mut ModelContext<Self>| {
            let (tx, rx) = mpsc::unbounded();
            cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            Self {
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                collaborators: Default::default(),
                next_buffer_id: 0,
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_buffers_by_path: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                buffer_snapshots: Default::default(),
                join_project_response_message_id: 0,
                client_state: ProjectClientState::Local,
                opened_buffer: watch::channel(),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![
                    cx.observe_global::<SettingsStore>(Self::on_settings_changed),
                    cx.on_release(Self::release),
                    cx.on_app_quit(Self::shutdown_language_servers),
                ],
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: HashMap::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
                node: Some(node),
                default_prettier: DefaultPrettier::default(),
                prettiers_per_worktree: HashMap::default(),
                prettier_instances: HashMap::default(),
            }
        })
    }
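
    /// Joins an existing remote project as a guest: authenticates, sends a
    /// `JoinProject` request, and builds the project state (worktrees,
    /// collaborators, language server statuses) from the host's response.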
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: Model<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        role: proto::ChannelRole,
        mut cx: AsyncAppContext,
    ) -> Result<Model<Self>> {
        client.authenticate_and_connect(true, &cx).await?;

        let subscription = client.subscribe_to_entity(remote_id)?;
        let response = client
            .request_envelope(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;
        let this = cx.new_model(|cx| {
            let replica_id = response.payload.replica_id as ReplicaId;

            let mut worktrees = Vec::new();
            for worktree in response.payload.worktrees {
                let worktree =
                    Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
                worktrees.push(worktree);
            }

            let (tx, rx) = mpsc::unbounded();
            cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
                .detach();
            let copilot_lsp_subscription =
                Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
            let mut this = Self {
                worktrees: Vec::new(),
                buffer_ordered_messages_tx: tx,
                loading_buffers_by_path: Default::default(),
                next_buffer_id: 0,
                opened_buffer: watch::channel(),
                shared_buffers: Default::default(),
                incomplete_remote_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                join_project_response_message_id: response.message_id,
                _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
                _maintain_workspace_config: Self::maintain_workspace_config(cx),
                languages,
                user_store: user_store.clone(),
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: Default::default(),
                _subscriptions: vec![
                    cx.on_release(Self::release),
                    cx.on_app_quit(Self::shutdown_language_servers),
                ],
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    capability: Capability::ReadWrite,
                    remote_id,
                    replica_id,
                },
                supplementary_language_servers: HashMap::default(),
                language_servers: Default::default(),
                language_server_ids: HashMap::default(),
                language_server_statuses: response
                    .payload
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            LanguageServerId(server.id as usize),
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                opened_buffers: Default::default(),
                buffers_being_formatted: Default::default(),
                buffers_needing_diff: Default::default(),
                git_diff_debouncer: DelayedDebounced::new(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                terminals: Terminals {
                    local_handles: Vec::new(),
                },
                copilot_lsp_subscription,
                copilot_log_subscription: None,
                current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
                node: None,
                default_prettier: DefaultPrettier::default(),
                prettiers_per_worktree: HashMap::default(),
                prettier_instances: HashMap::default(),
            };
            this.set_role(role, cx);
            for worktree in worktrees {
                let _ = this.add_worktree(&worktree, cx);
            }
            this
        })?;
        let subscription = subscription.set_model(&this, &mut cx);

        let user_ids = response
            .payload
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
            .await?;

        this.update(&mut cx, |this, cx| {
            this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
            this.client_subscriptions.push(subscription);
            anyhow::Ok(())
        })??;

        Ok(this)
    }

    fn release(&mut self, cx: &mut AppContext) {
        match &self.client_state {
            ProjectClientState::Local => {}
            ProjectClientState::Shared { .. } => {
                let _ = self.unshare_internal(cx);
            }
            ProjectClientState::Remote { remote_id, .. } => {
                let _ = self.client.send(proto::LeaveProject {
                    project_id: *remote_id,
                });
                self.disconnected_from_host_internal(cx);
            }
        }
    }

    fn shutdown_language_servers(
        &mut self,
        _cx: &mut ModelContext<Self>,
    ) -> impl Future<Output = ()> {
        let shutdown_futures = self
            .language_servers
            .drain()
            .map(|(_, server_state)| async {
                use LanguageServerState::*;
                match server_state {
                    Running { server, .. } => server.shutdown()?.await,
                    Starting(task) => task.await?.shutdown()?.await,
                }
            })
            .collect::<Vec<_>>();

        async move {
            futures::future::join_all(shutdown_futures).await;
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> Model<Project> {
        let mut languages = LanguageRegistry::test();
        languages.set_executor(cx.executor());
        let http_client = util::http::FakeHttpClient::with_404_response();
        let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
        let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
        let project = cx.update(|cx| {
            Project::local(
                client,
                node_runtime::FakeNodeRuntime::new(),
                user_store,
                Arc::new(languages),
                fs,
                cx,
            )
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }
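
    /// Reacts to a settings change: starts language servers that became
    /// enabled, stops ones that were disabled, restarts servers whose LSP
    /// settings changed, re-checks prettier plugins, and subscribes to
    /// Copilot events if a subscription doesn't exist yet.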
    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let mut language_servers_to_start = Vec::new();
        let mut language_formatters_to_check = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade() {
                let buffer = buffer.read(cx);
                let buffer_file = File::from_dyn(buffer.file());
                let buffer_language = buffer.language();
                let settings = language_settings(buffer_language, buffer.file(), cx);
                if let Some(language) = buffer_language {
                    if settings.enable_language_server {
                        if let Some(file) = buffer_file {
                            language_servers_to_start
                                .push((file.worktree.clone(), Arc::clone(language)));
                        }
                    }
                    language_formatters_to_check.push((
                        buffer_file.map(|f| f.worktree_id(cx)),
                        Arc::clone(language),
                        settings.clone(),
                    ));
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        let mut language_servers_to_restart = Vec::new();
        let languages = self.languages.to_vec();

        let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
        let current_lsp_settings = &self.current_lsp_settings;
        for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
            let language = languages.iter().find_map(|l| {
                let adapter = l
                    .lsp_adapters()
                    .iter()
                    .find(|adapter| &adapter.name == started_lsp_name)?;
                Some((l, adapter))
            });
            if let Some((language, adapter)) = language {
                let worktree = self.worktree_for_id(*worktree_id, cx);
                let file = worktree.as_ref().and_then(|tree| {
                    tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
                });
                if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
                    language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                } else if let Some(worktree) = worktree {
                    let server_name = &adapter.name.0;
                    match (
                        current_lsp_settings.get(server_name),
                        new_lsp_settings.get(server_name),
                    ) {
                        (None, None) => {}
                        (Some(_), None) | (None, Some(_)) => {
                            language_servers_to_restart.push((worktree, Arc::clone(language)));
                        }
                        (Some(current_lsp_settings), Some(new_lsp_settings)) => {
                            if current_lsp_settings != new_lsp_settings {
                                language_servers_to_restart.push((worktree, Arc::clone(language)));
                            }
                        }
                    }
                }
            }
        }
        self.current_lsp_settings = new_lsp_settings;

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        let mut prettier_plugins_by_worktree = HashMap::default();
        for (worktree, language, settings) in language_formatters_to_check {
            if let Some(plugins) =
                prettier_support::prettier_plugins_for_language(&language, &settings)
            {
                prettier_plugins_by_worktree
                    .entry(worktree)
                    .or_insert_with(|| HashSet::default())
                    .extend(plugins);
            }
        }
        for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
            self.install_default_prettier(worktree, prettier_plugins, cx);
        }

        // Start all the newly-enabled language servers.
        for (worktree, language) in language_servers_to_start {
            self.start_language_servers(&worktree, language, cx);
        }

        // Restart all language servers with changed initialization options.
        for (worktree, language) in language_servers_to_restart {
            self.restart_language_servers(worktree, language, cx);
        }

        if self.copilot_lsp_subscription.is_none() {
            if let Some(copilot) = Copilot::global(cx) {
                for buffer in self.opened_buffers.values() {
                    if let Some(buffer) = buffer.upgrade() {
                        self.register_buffer_with_copilot(&buffer, cx);
                    }
                }
                self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
            }
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade())
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> Model<UserStore> {
        self.user_store.clone()
    }

    pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
        self.opened_buffers
            .values()
            .filter_map(|b| b.upgrade())
            .collect()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade() {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn remote_id(&self) -> Option<u64> {
        match self.client_state {
            ProjectClientState::Local => None,
            ProjectClientState::Shared { remote_id, .. }
            | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match self.client_state {
            ProjectClientState::Remote { replica_id, .. } => replica_id,
            _ => 0,
        }
    }

    fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
            updates_tx
                .unbounded_send(LocalProjectUpdate::WorktreesChanged)
                .ok();
        }
        cx.notify();
    }

    pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn host(&self) -> Option<&Collaborator> {
        self.collaborators.values().find(|c| c.replica_id == 0)
    }

    /// Collect all worktrees, including ones that don't appear in the project panel
    pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade())
    }

    /// Collect all user-visible worktrees, the ones that appear in the project panel
    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade().and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<Model<Worktree>> {
        self.worktrees()
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees() {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Entry>>> {
        let project_path = project_path.into();
        let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
            return Task::ready(Ok(None));
        };
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.to_string_lossy().into(),
                        is_directory,
                    })
                    .await?;
                match response.entry {
                    Some(entry) => worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_remote_mut().unwrap().insert_entry(
                                entry,
                                response.worktree_scan_id as usize,
                                cx,
                            )
                        })?
                        .await
                        .map(Some),
                    None => Ok(None),
                }
            })
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Entry>>> {
        let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
            return Task::ready(Ok(None));
        };
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                match response.entry {
                    Some(entry) => worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_remote_mut().unwrap().insert_entry(
                                entry,
                                response.worktree_scan_id as usize,
                                cx,
                            )
                        })?
                        .await
                        .map(Some),
                    None => Ok(None),
                }
            })
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Entry>>> {
        let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
            return Task::ready(Ok(None));
        };
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.to_string_lossy().into(),
                    })
                    .await?;
                match response.entry {
                    Some(entry) => worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_remote_mut().unwrap().insert_entry(
                                entry,
                                response.worktree_scan_id as usize,
                                cx,
                            )
                        })?
                        .await
                        .map(Some),
                    None => Ok(None),
                }
            })
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;

        cx.emit(Event::DeletedEntry(entry_id));

        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn(move |_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote_mut().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })?
                    .await
            }))
        }
    }

    pub fn expand_entry(
        &mut self,
        worktree_id: WorktreeId,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_id(worktree_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
            })
        } else {
            let worktree = worktree.downgrade();
            let request = self.client.request(proto::ExpandProjectEntry {
                project_id: self.remote_id().unwrap(),
                entry_id: entry_id.to_proto(),
            });
            Some(cx.spawn(move |_, mut cx| async move {
                let response = request.await?;
                if let Some(worktree) = worktree.upgrade() {
                    worktree
                        .update(&mut cx, |worktree, _| {
                            worktree
                                .as_remote_mut()
                                .unwrap()
                                .wait_for_snapshot(response.worktree_scan_id as usize)
                        })?
                        .await?;
                }
                Ok(())
            }))
        }
    }
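
    /// Shares this local project under the given collaboration `project_id`:
    /// upgrades buffer and worktree handles to strong references, announces
    /// running language servers and local settings to the server, and spawns
    /// the task that streams worktree and buffer updates to peers.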
    pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
        if !matches!(self.client_state, ProjectClientState::Local) {
            return Err(anyhow!("project was already shared"));
        }
        self.client_subscriptions.push(
            self.client
                .subscribe_to_entity(project_id)?
                .set_model(&cx.handle(), &mut cx.to_async()),
        );

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade() {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Operations(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade() {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: server_id.0 as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        let store = cx.global::<SettingsStore>();
        for worktree in self.worktrees() {
            let worktree_id = worktree.read(cx).id().to_proto();
            for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
                self.client
                    .send(proto::UpdateWorktreeSettings {
                        project_id,
                        worktree_id,
                        path: path.to_string_lossy().into(),
                        content: Some(content),
                    })
                    .log_err();
            }
        }

        let (updates_tx, mut updates_rx) = mpsc::unbounded();
        let client = self.client.clone();
        self.client_state = ProjectClientState::Shared {
            remote_id: project_id,
            updates_tx,
            _send_updates: cx.spawn(move |this, mut cx| async move {
                while let Some(update) = updates_rx.next().await {
                    match update {
                        LocalProjectUpdate::WorktreesChanged => {
                            let worktrees = this.update(&mut cx, |this, _cx| {
                                this.worktrees().collect::<Vec<_>>()
                            })?;
                            let update_project = this
                                .update(&mut cx, |this, cx| {
                                    this.client.request(proto::UpdateProject {
                                        project_id,
                                        worktrees: this.worktree_metadata_protos(cx),
                                    })
                                })?
                                .await;
                            if update_project.is_ok() {
                                for worktree in worktrees {
                                    worktree.update(&mut cx, |worktree, cx| {
                                        let worktree = worktree.as_local_mut().unwrap();
                                        worktree.share(project_id, cx).detach_and_log_err(cx)
                                    })?;
                                }
                            }
                        }
                        LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
                            let buffer = this.update(&mut cx, |this, _| {
                                let buffer = this.opened_buffers.get(&buffer_id).unwrap();
                                let shared_buffers =
                                    this.shared_buffers.entry(peer_id).or_default();
                                if shared_buffers.insert(buffer_id) {
                                    if let OpenBuffer::Strong(buffer) = buffer {
                                        Some(buffer.clone())
                                    } else {
                                        None
                                    }
                                } else {
                                    None
                                }
                            })?;

                            let Some(buffer) = buffer else { continue };
                            let operations =
                                buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
                            let operations = operations.await;
                            let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;

                            let initial_state = proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
                            };
                            if client.send(initial_state).log_err().is_some() {
                                let client = client.clone();
                                cx.background_executor()
                                    .spawn(async move {
                                        let mut chunks = split_operations(operations).peekable();
                                        while let Some(chunk) = chunks.next() {
                                            let is_last = chunks.peek().is_none();
                                            client.send(proto::CreateBufferForPeer {
                                                project_id,
                                                peer_id: Some(peer_id),
                                                variant: Some(
                                                    proto::create_buffer_for_peer::Variant::Chunk(
                                                        proto::BufferChunk {
                                                            buffer_id,
                                                            operations: chunk,
                                                            is_last,
                                                        },
                                                    ),
                                                ),
                                            })?;
                                        }
                                        anyhow::Ok(())
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                    }
                }
                Ok(())
            }),
        };

        self.metadata_changed(cx);
        cx.emit(Event::RemoteIdChanged(Some(project_id)));
        cx.notify();
        Ok(())
    }

    pub fn reshared(
        &mut self,
        message: proto::ResharedProject,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        self.shared_buffers.clear();
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.metadata_changed(cx);
        Ok(())
    }

    pub fn rejoined(
        &mut self,
        message: proto::RejoinedProject,
        message_id: u32,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            for worktree in &self.worktrees {
                store
                    .clear_local_settings(worktree.handle_id(), cx)
                    .log_err();
            }
        });

        self.join_project_response_message_id = message_id;
        self.set_worktrees_from_proto(message.worktrees, cx)?;
        self.set_collaborators_from_proto(message.collaborators, cx)?;
        self.language_server_statuses = message
            .language_servers
            .into_iter()
            .map(|server| {
                (
                    LanguageServerId(server.id as usize),
                    LanguageServerStatus {
                        name: server.name,
                        pending_work: Default::default(),
                        has_pending_diagnostic_updates: false,
                        progress_tokens: Default::default(),
                    },
                )
            })
            .collect();
        self.buffer_ordered_messages_tx
            .unbounded_send(BufferOrderedMessage::Resync)
            .unwrap();
        cx.notify();
        Ok(())
    }

    pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
        self.unshare_internal(cx)?;
        self.metadata_changed(cx);
        cx.notify();
        Ok(())
    }

    fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
        if self.is_remote() {
            return Err(anyhow!("attempted to unshare a remote project"));
        }

        if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
            self.client_state = ProjectClientState::Local;
            self.collaborators.clear();
            self.shared_buffers.clear();
            self.client_subscriptions.clear();

            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade() {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            self.client.send(proto::UnshareProject {
                project_id: remote_id,
            })?;

            Ok(())
        } else {
            Err(anyhow!("attempted to unshare an unshared project"))
        }
    }

    pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
        self.disconnected_from_host_internal(cx);
        cx.emit(Event::DisconnectedFromHost);
        cx.notify();
    }

    pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
        let new_capability =
            if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
                Capability::ReadWrite
            } else {
                Capability::ReadOnly
            };
        if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
            if *capability == new_capability {
                return;
            }

            *capability = new_capability;
            for buffer in self.opened_buffers() {
                buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
            }
        }
    }

    fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;

            self.collaborators.clear();

            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade() {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                // Wake up any tasks waiting for peers' edits to this buffer.
                if let Some(buffer) = open_buffer.upgrade() {
                    buffer.update(cx, |buffer, _| buffer.give_up_waiting());
                }

                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }

            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            *self.opened_buffer.0.borrow_mut() = ();
        }
    }

    pub fn close(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::Closed);
    }

    pub fn is_disconnected(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
            _ => false,
        }
    }

    pub fn capability(&self) -> Capability {
        match &self.client_state {
            ProjectClientState::Remote { capability, .. } => *capability,
            ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
        }
    }

    pub fn is_read_only(&self) -> bool {
        self.is_disconnected() || self.capability() == Capability::ReadOnly
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<Model<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }
        let id = post_inc(&mut self.next_buffer_id);
        let buffer = cx.new_model(|cx| {
            Buffer::new(self.replica_id(), id, text)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
        let task = self.open_buffer(path.clone(), cx);
        cx.spawn(move |_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
                File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
            })?;

            let buffer: &AnyModel = &buffer;
            Ok((project_entry_id, buffer.clone()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Model<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }
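
    /// Opens (or returns an already-open) buffer for the given project path,
    /// deduplicating concurrent loads of the same path via
    /// `loading_buffers_by_path`.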
1753 pub fn open_buffer(
1754 &mut self,
1755 path: impl Into<ProjectPath>,
1756 cx: &mut ModelContext<Self>,
1757 ) -> Task<Result<Model<Buffer>>> {
1758 let project_path = path.into();
1759 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1760 worktree
1761 } else {
1762 return Task::ready(Err(anyhow!("no such worktree")));
1763 };
1764
1765 // If there is already a buffer for the given path, then return it.
1766 let existing_buffer = self.get_open_buffer(&project_path, cx);
1767 if let Some(existing_buffer) = existing_buffer {
1768 return Task::ready(Ok(existing_buffer));
1769 }
1770
1771 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1772 // If the given path is already being loaded, then wait for that existing
1773 // task to complete and return the same buffer.
1774 hash_map::Entry::Occupied(e) => e.get().clone(),
1775
1776 // Otherwise, record the fact that this path is now being loaded.
1777 hash_map::Entry::Vacant(entry) => {
1778 let (mut tx, rx) = postage::watch::channel();
1779 entry.insert(rx.clone());
1780
1781 let load_buffer = if worktree.read(cx).is_local() {
1782 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1783 } else {
1784 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1785 };
1786
1787 let project_path = project_path.clone();
1788 cx.spawn(move |this, mut cx| async move {
1789 let load_result = load_buffer.await;
1790 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1791 // Record the fact that the buffer is no longer loading.
1792 this.loading_buffers_by_path.remove(&project_path);
1793 let buffer = load_result.map_err(Arc::new)?;
1794 Ok(buffer)
1795 })?);
1796 anyhow::Ok(())
1797 })
1798 .detach();
1799 rx
1800 }
1801 };
1802
1803 cx.background_executor().spawn(async move {
1804 wait_for_loading_buffer(loading_watch)
1805 .await
1806 .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
1807 })
1808 }
1809
1810 fn open_local_buffer_internal(
1811 &mut self,
1812 path: &Arc<Path>,
1813 worktree: &Model<Worktree>,
1814 cx: &mut ModelContext<Self>,
1815 ) -> Task<Result<Model<Buffer>>> {
1816 let buffer_id = post_inc(&mut self.next_buffer_id);
1817 let load_buffer = worktree.update(cx, |worktree, cx| {
1818 let worktree = worktree.as_local_mut().unwrap();
1819 worktree.load_buffer(buffer_id, path, cx)
1820 });
1821 cx.spawn(move |this, mut cx| async move {
1822 let buffer = load_buffer.await?;
1823 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1824 Ok(buffer)
1825 })
1826 }
1827
1828 fn open_remote_buffer_internal(
1829 &mut self,
1830 path: &Arc<Path>,
1831 worktree: &Model<Worktree>,
1832 cx: &mut ModelContext<Self>,
1833 ) -> Task<Result<Model<Buffer>>> {
1834 let rpc = self.client.clone();
1835 let project_id = self.remote_id().unwrap();
1836 let remote_worktree_id = worktree.read(cx).id();
1837 let path = path.clone();
1838 let path_string = path.to_string_lossy().to_string();
1839 cx.spawn(move |this, mut cx| async move {
1840 let response = rpc
1841 .request(proto::OpenBufferByPath {
1842 project_id,
1843 worktree_id: remote_worktree_id.to_proto(),
1844 path: path_string,
1845 })
1846 .await?;
1847 this.update(&mut cx, |this, cx| {
1848 this.wait_for_remote_buffer(response.buffer_id, cx)
1849 })?
1850 .await
1851 })
1852 }
1853
    /// The `LanguageServerName` is taken by value because it is inserted into a map.
1855 pub fn open_local_buffer_via_lsp(
1856 &mut self,
1857 abs_path: lsp::Url,
1858 language_server_id: LanguageServerId,
1859 language_server_name: LanguageServerName,
1860 cx: &mut ModelContext<Self>,
1861 ) -> Task<Result<Model<Buffer>>> {
1862 cx.spawn(move |this, mut cx| async move {
1863 let abs_path = abs_path
1864 .to_file_path()
1865 .map_err(|_| anyhow!("can't convert URI to path"))?;
1866 let (worktree, relative_path) = if let Some(result) =
1867 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1868 {
1869 result
1870 } else {
1871 let worktree = this
1872 .update(&mut cx, |this, cx| {
1873 this.create_local_worktree(&abs_path, false, cx)
1874 })?
1875 .await?;
1876 this.update(&mut cx, |this, cx| {
1877 this.language_server_ids.insert(
1878 (worktree.read(cx).id(), language_server_name),
1879 language_server_id,
1880 );
1881 })
1882 .ok();
1883 (worktree, PathBuf::new())
1884 };
1885
1886 let project_path = ProjectPath {
1887 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1888 path: relative_path.into(),
1889 };
1890 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1891 .await
1892 })
1893 }
1894
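    /// Returns the open buffer with the given remote id, requesting it from the host when
    /// the project is remote.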
1895 pub fn open_buffer_by_id(
1896 &mut self,
1897 id: u64,
1898 cx: &mut ModelContext<Self>,
1899 ) -> Task<Result<Model<Buffer>>> {
1900 if let Some(buffer) = self.buffer_for_id(id) {
1901 Task::ready(Ok(buffer))
1902 } else if self.is_local() {
1903 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1904 } else if let Some(project_id) = self.remote_id() {
1905 let request = self
1906 .client
1907 .request(proto::OpenBufferById { project_id, id });
1908 cx.spawn(move |this, mut cx| async move {
1909 let buffer_id = request.await?.buffer_id;
1910 this.update(&mut cx, |this, cx| {
1911 this.wait_for_remote_buffer(buffer_id, cx)
1912 })?
1913 .await
1914 })
1915 } else {
1916 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1917 }
1918 }
1919
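    /// Saves all of the given buffers concurrently, failing if any individual save fails.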
1920 pub fn save_buffers(
1921 &self,
1922 buffers: HashSet<Model<Buffer>>,
1923 cx: &mut ModelContext<Self>,
1924 ) -> Task<Result<()>> {
1925 cx.spawn(move |this, mut cx| async move {
1926 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1927 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1928 .ok()
1929 });
1930 try_join_all(save_tasks).await?;
1931 Ok(())
1932 })
1933 }
1934
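    /// Saves the buffer to the file it was loaded from, via its local or remote worktree.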
1935 pub fn save_buffer(
1936 &self,
1937 buffer: Model<Buffer>,
1938 cx: &mut ModelContext<Self>,
1939 ) -> Task<Result<()>> {
1940 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1941 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1942 };
1943 let worktree = file.worktree.clone();
1944 let path = file.path.clone();
1945 worktree.update(cx, |worktree, cx| match worktree {
1946 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1947 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1948 })
1949 }
1950
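    /// Saves the buffer under a new path, unregistering it from the language servers for its
    /// old file and re-registering it once the save completes.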
1951 pub fn save_buffer_as(
1952 &mut self,
1953 buffer: Model<Buffer>,
1954 abs_path: PathBuf,
1955 cx: &mut ModelContext<Self>,
1956 ) -> Task<Result<()>> {
1957 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1958 let old_file = File::from_dyn(buffer.read(cx).file())
1959 .filter(|f| f.is_local())
1960 .cloned();
1961 cx.spawn(move |this, mut cx| async move {
1962 if let Some(old_file) = &old_file {
1963 this.update(&mut cx, |this, cx| {
1964 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1965 })?;
1966 }
1967 let (worktree, path) = worktree_task.await?;
1968 worktree
1969 .update(&mut cx, |worktree, cx| match worktree {
1970 Worktree::Local(worktree) => {
1971 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1972 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1974 })?
1975 .await?;
1976
1977 this.update(&mut cx, |this, cx| {
1978 this.detect_language_for_buffer(&buffer, cx);
1979 this.register_buffer_with_language_servers(&buffer, cx);
1980 })?;
1981 Ok(())
1982 })
1983 }
1984
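    /// Returns the already-open buffer for the given project path, if any.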
1985 pub fn get_open_buffer(
1986 &mut self,
1987 path: &ProjectPath,
1988 cx: &mut ModelContext<Self>,
1989 ) -> Option<Model<Buffer>> {
1990 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1991 self.opened_buffers.values().find_map(|buffer| {
1992 let buffer = buffer.upgrade()?;
1993 let file = File::from_dyn(buffer.read(cx).file())?;
1994 if file.worktree == worktree && file.path() == &path.path {
1995 Some(buffer)
1996 } else {
1997 None
1998 }
1999 })
2000 }
2001
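    // Records a newly opened buffer and hooks it up to diff recalculation, language
    // detection, language servers, and Copilot.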
2002 fn register_buffer(
2003 &mut self,
2004 buffer: &Model<Buffer>,
2005 cx: &mut ModelContext<Self>,
2006 ) -> Result<()> {
2007 self.request_buffer_diff_recalculation(buffer, cx);
2008 buffer.update(cx, |buffer, _| {
2009 buffer.set_language_registry(self.languages.clone())
2010 });
2011
2012 let remote_id = buffer.read(cx).remote_id();
2013 let is_remote = self.is_remote();
2014 let open_buffer = if is_remote || self.is_shared() {
2015 OpenBuffer::Strong(buffer.clone())
2016 } else {
2017 OpenBuffer::Weak(buffer.downgrade())
2018 };
2019
2020 match self.opened_buffers.entry(remote_id) {
2021 hash_map::Entry::Vacant(entry) => {
2022 entry.insert(open_buffer);
2023 }
2024 hash_map::Entry::Occupied(mut entry) => {
2025 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2026 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2027 } else if entry.get().upgrade().is_some() {
2028 if is_remote {
2029 return Ok(());
2030 } else {
2031 debug_panic!("buffer {} was already registered", remote_id);
2032 Err(anyhow!("buffer {} was already registered", remote_id))?;
2033 }
2034 }
2035 entry.insert(open_buffer);
2036 }
2037 }
2038 cx.subscribe(buffer, |this, buffer, event, cx| {
2039 this.on_buffer_event(buffer, event, cx);
2040 })
2041 .detach();
2042
2043 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2044 if file.is_local {
2045 self.local_buffer_ids_by_path.insert(
2046 ProjectPath {
2047 worktree_id: file.worktree_id(cx),
2048 path: file.path.clone(),
2049 },
2050 remote_id,
2051 );
2052
2053 if let Some(entry_id) = file.entry_id {
2054 self.local_buffer_ids_by_entry_id
2055 .insert(entry_id, remote_id);
2056 }
2057 }
2058 }
2059
2060 self.detect_language_for_buffer(buffer, cx);
2061 self.register_buffer_with_language_servers(buffer, cx);
2062 self.register_buffer_with_copilot(buffer, cx);
2063 cx.observe_release(buffer, |this, buffer, cx| {
2064 if let Some(file) = File::from_dyn(buffer.file()) {
2065 if file.is_local() {
2066 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2067 for server in this.language_servers_for_buffer(buffer, cx) {
2068 server
2069 .1
2070 .notify::<lsp::notification::DidCloseTextDocument>(
2071 lsp::DidCloseTextDocumentParams {
2072 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2073 },
2074 )
2075 .log_err();
2076 }
2077 }
2078 }
2079 })
2080 .detach();
2081
2082 *self.opened_buffer.0.borrow_mut() = ();
2083 Ok(())
2084 }
2085
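    // Notifies every matching language server that the buffer was opened, restores any
    // diagnostics previously stored on the worktree, and records an initial snapshot of
    // the buffer per server.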
2086 fn register_buffer_with_language_servers(
2087 &mut self,
2088 buffer_handle: &Model<Buffer>,
2089 cx: &mut ModelContext<Self>,
2090 ) {
2091 let buffer = buffer_handle.read(cx);
2092 let buffer_id = buffer.remote_id();
2093
2094 if let Some(file) = File::from_dyn(buffer.file()) {
2095 if !file.is_local() {
2096 return;
2097 }
2098
2099 let abs_path = file.abs_path(cx);
2100 let uri = lsp::Url::from_file_path(&abs_path)
2101 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2102 let initial_snapshot = buffer.text_snapshot();
2103 let language = buffer.language().cloned();
2104 let worktree_id = file.worktree_id(cx);
2105
2106 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2107 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2108 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2109 .log_err();
2110 }
2111 }
2112
2113 if let Some(language) = language {
2114 for adapter in language.lsp_adapters() {
2115 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2116 let server = self
2117 .language_server_ids
2118 .get(&(worktree_id, adapter.name.clone()))
2119 .and_then(|id| self.language_servers.get(id))
2120 .and_then(|server_state| {
2121 if let LanguageServerState::Running { server, .. } = server_state {
2122 Some(server.clone())
2123 } else {
2124 None
2125 }
2126 });
2127 let server = match server {
2128 Some(server) => server,
2129 None => continue,
2130 };
2131
2132 server
2133 .notify::<lsp::notification::DidOpenTextDocument>(
2134 lsp::DidOpenTextDocumentParams {
2135 text_document: lsp::TextDocumentItem::new(
2136 uri.clone(),
2137 language_id.unwrap_or_default(),
2138 0,
2139 initial_snapshot.text(),
2140 ),
2141 },
2142 )
2143 .log_err();
2144
2145 buffer_handle.update(cx, |buffer, cx| {
2146 buffer.set_completion_triggers(
2147 server
2148 .capabilities()
2149 .completion_provider
2150 .as_ref()
2151 .and_then(|provider| provider.trigger_characters.clone())
2152 .unwrap_or_default(),
2153 cx,
2154 );
2155 });
2156
2157 let snapshot = LspBufferSnapshot {
2158 version: 0,
2159 snapshot: initial_snapshot.clone(),
2160 };
2161 self.buffer_snapshots
2162 .entry(buffer_id)
2163 .or_default()
2164 .insert(server.server_id(), vec![snapshot]);
2165 }
2166 }
2167 }
2168 }
2169
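    // Clears the buffer's diagnostics and notifies its language servers that the old file
    // was closed.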
2170 fn unregister_buffer_from_language_servers(
2171 &mut self,
2172 buffer: &Model<Buffer>,
2173 old_file: &File,
2174 cx: &mut ModelContext<Self>,
2175 ) {
2176 let old_path = match old_file.as_local() {
2177 Some(local) => local.abs_path(cx),
2178 None => return,
2179 };
2180
2181 buffer.update(cx, |buffer, cx| {
2182 let worktree_id = old_file.worktree_id(cx);
2183 let ids = &self.language_server_ids;
2184
2185 let language = buffer.language().cloned();
2186 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2187 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2188 buffer.update_diagnostics(server_id, Default::default(), cx);
2189 }
2190
2191 self.buffer_snapshots.remove(&buffer.remote_id());
2192 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2193 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2194 language_server
2195 .notify::<lsp::notification::DidCloseTextDocument>(
2196 lsp::DidCloseTextDocumentParams {
2197 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2198 },
2199 )
2200 .log_err();
2201 }
2202 });
2203 }
2204
2205 fn register_buffer_with_copilot(
2206 &self,
2207 buffer_handle: &Model<Buffer>,
2208 cx: &mut ModelContext<Self>,
2209 ) {
2210 if let Some(copilot) = Copilot::global(cx) {
2211 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2212 }
2213 }
2214
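    // Forwards buffer operations and language server updates to collaborators in the order
    // they were produced, batching operations and skipping them after a failed send until a
    // resync with the host succeeds.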
2215 async fn send_buffer_ordered_messages(
2216 this: WeakModel<Self>,
2217 rx: UnboundedReceiver<BufferOrderedMessage>,
2218 mut cx: AsyncAppContext,
2219 ) -> Result<()> {
2220 const MAX_BATCH_SIZE: usize = 128;
2221
2222 let mut operations_by_buffer_id = HashMap::default();
2223 async fn flush_operations(
2224 this: &WeakModel<Project>,
2225 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2226 needs_resync_with_host: &mut bool,
2227 is_local: bool,
2228 cx: &mut AsyncAppContext,
2229 ) -> Result<()> {
2230 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2231 let request = this.update(cx, |this, _| {
2232 let project_id = this.remote_id()?;
2233 Some(this.client.request(proto::UpdateBuffer {
2234 buffer_id,
2235 project_id,
2236 operations,
2237 }))
2238 })?;
2239 if let Some(request) = request {
2240 if request.await.is_err() && !is_local {
2241 *needs_resync_with_host = true;
2242 break;
2243 }
2244 }
2245 }
2246 Ok(())
2247 }
2248
2249 let mut needs_resync_with_host = false;
2250 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2251
2252 while let Some(changes) = changes.next().await {
2253 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2254
2255 for change in changes {
2256 match change {
2257 BufferOrderedMessage::Operation {
2258 buffer_id,
2259 operation,
2260 } => {
2261 if needs_resync_with_host {
2262 continue;
2263 }
2264
2265 operations_by_buffer_id
2266 .entry(buffer_id)
2267 .or_insert(Vec::new())
2268 .push(operation);
2269 }
2270
2271 BufferOrderedMessage::Resync => {
2272 operations_by_buffer_id.clear();
2273 if this
2274 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2275 .await
2276 .is_ok()
2277 {
2278 needs_resync_with_host = false;
2279 }
2280 }
2281
2282 BufferOrderedMessage::LanguageServerUpdate {
2283 language_server_id,
2284 message,
2285 } => {
2286 flush_operations(
2287 &this,
2288 &mut operations_by_buffer_id,
2289 &mut needs_resync_with_host,
2290 is_local,
2291 &mut cx,
2292 )
2293 .await?;
2294
2295 this.update(&mut cx, |this, _| {
2296 if let Some(project_id) = this.remote_id() {
2297 this.client
2298 .send(proto::UpdateLanguageServer {
2299 project_id,
2300 language_server_id: language_server_id.0 as u64,
2301 variant: Some(message),
2302 })
2303 .log_err();
2304 }
2305 })?;
2306 }
2307 }
2308 }
2309
2310 flush_operations(
2311 &this,
2312 &mut operations_by_buffer_id,
2313 &mut needs_resync_with_host,
2314 is_local,
2315 &mut cx,
2316 )
2317 .await?;
2318 }
2319
2320 Ok(())
2321 }
2322
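    // Reacts to buffer events: broadcasts operations to collaborators, sends document change
    // and save notifications to language servers, and keeps the path and entry indices for
    // local buffers up to date.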
2323 fn on_buffer_event(
2324 &mut self,
2325 buffer: Model<Buffer>,
2326 event: &BufferEvent,
2327 cx: &mut ModelContext<Self>,
2328 ) -> Option<()> {
2329 if matches!(
2330 event,
2331 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2332 ) {
2333 self.request_buffer_diff_recalculation(&buffer, cx);
2334 }
2335
2336 match event {
2337 BufferEvent::Operation(operation) => {
2338 self.buffer_ordered_messages_tx
2339 .unbounded_send(BufferOrderedMessage::Operation {
2340 buffer_id: buffer.read(cx).remote_id(),
2341 operation: language::proto::serialize_operation(operation),
2342 })
2343 .ok();
2344 }
2345
2346 BufferEvent::Edited { .. } => {
2347 let buffer = buffer.read(cx);
2348 let file = File::from_dyn(buffer.file())?;
2349 let abs_path = file.as_local()?.abs_path(cx);
2350 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2351 let next_snapshot = buffer.text_snapshot();
2352
2353 let language_servers: Vec<_> = self
2354 .language_servers_for_buffer(buffer, cx)
2355 .map(|i| i.1.clone())
2356 .collect();
2357
2358 for language_server in language_servers {
2359 let language_server = language_server.clone();
2360
2361 let buffer_snapshots = self
2362 .buffer_snapshots
2363 .get_mut(&buffer.remote_id())
2364 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2365 let previous_snapshot = buffer_snapshots.last()?;
2366
2367 let build_incremental_change = || {
2368 buffer
2369 .edits_since::<(PointUtf16, usize)>(
2370 previous_snapshot.snapshot.version(),
2371 )
2372 .map(|edit| {
2373 let edit_start = edit.new.start.0;
2374 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2375 let new_text = next_snapshot
2376 .text_for_range(edit.new.start.1..edit.new.end.1)
2377 .collect();
2378 lsp::TextDocumentContentChangeEvent {
2379 range: Some(lsp::Range::new(
2380 point_to_lsp(edit_start),
2381 point_to_lsp(edit_end),
2382 )),
2383 range_length: None,
2384 text: new_text,
2385 }
2386 })
2387 .collect()
2388 };
2389
2390 let document_sync_kind = language_server
2391 .capabilities()
2392 .text_document_sync
2393 .as_ref()
2394 .and_then(|sync| match sync {
2395 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2396 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2397 });
2398
2399 let content_changes: Vec<_> = match document_sync_kind {
2400 Some(lsp::TextDocumentSyncKind::FULL) => {
2401 vec![lsp::TextDocumentContentChangeEvent {
2402 range: None,
2403 range_length: None,
2404 text: next_snapshot.text(),
2405 }]
2406 }
2407 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2408 _ => {
2409 #[cfg(any(test, feature = "test-support"))]
2410 {
2411 build_incremental_change()
2412 }
2413
2414 #[cfg(not(any(test, feature = "test-support")))]
2415 {
2416 continue;
2417 }
2418 }
2419 };
2420
2421 let next_version = previous_snapshot.version + 1;
2422
2423 buffer_snapshots.push(LspBufferSnapshot {
2424 version: next_version,
2425 snapshot: next_snapshot.clone(),
2426 });
2427
2428 language_server
2429 .notify::<lsp::notification::DidChangeTextDocument>(
2430 lsp::DidChangeTextDocumentParams {
2431 text_document: lsp::VersionedTextDocumentIdentifier::new(
2432 uri.clone(),
2433 next_version,
2434 ),
2435 content_changes,
2436 },
2437 )
2438 .log_err();
2439 }
2440 }
2441
2442 BufferEvent::Saved => {
2443 let file = File::from_dyn(buffer.read(cx).file())?;
2444 let worktree_id = file.worktree_id(cx);
2445 let abs_path = file.as_local()?.abs_path(cx);
2446 let text_document = lsp::TextDocumentIdentifier {
2447 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2448 };
2449
2450 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2451 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2452
2453 server
2454 .notify::<lsp::notification::DidSaveTextDocument>(
2455 lsp::DidSaveTextDocumentParams {
2456 text_document: text_document.clone(),
2457 text,
2458 },
2459 )
2460 .log_err();
2461 }
2462
2463 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2464 for language_server_id in language_server_ids {
2465 if let Some(LanguageServerState::Running {
2466 adapter,
2467 simulate_disk_based_diagnostics_completion,
2468 ..
2469 }) = self.language_servers.get_mut(&language_server_id)
2470 {
2471 // After saving a buffer using a language server that doesn't provide
2472 // a disk-based progress token, kick off a timer that will reset every
2473 // time the buffer is saved. If the timer eventually fires, simulate
2474 // disk-based diagnostics being finished so that other pieces of UI
2475 // (e.g., project diagnostics view, diagnostic status bar) can update.
2476 // We don't emit an event right away because the language server might take
2477 // some time to publish diagnostics.
2478 if adapter.disk_based_diagnostics_progress_token.is_none() {
2479 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2480 Duration::from_secs(1);
2481
2482 let task = cx.spawn(move |this, mut cx| async move {
2483 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2484 if let Some(this) = this.upgrade() {
2485 this.update(&mut cx, |this, cx| {
2486 this.disk_based_diagnostics_finished(
2487 language_server_id,
2488 cx,
2489 );
2490 this.buffer_ordered_messages_tx
2491 .unbounded_send(
2492 BufferOrderedMessage::LanguageServerUpdate {
2493 language_server_id,
                                                    message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2495 },
2496 )
2497 .ok();
2498 }).ok();
2499 }
2500 });
2501 *simulate_disk_based_diagnostics_completion = Some(task);
2502 }
2503 }
2504 }
2505 }
2506 BufferEvent::FileHandleChanged => {
2507 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2508 return None;
2509 };
2510
2511 let remote_id = buffer.read(cx).remote_id();
2512 if let Some(entry_id) = file.entry_id {
2513 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2514 Some(_) => {
2515 return None;
2516 }
2517 None => {
2518 self.local_buffer_ids_by_entry_id
2519 .insert(entry_id, remote_id);
2520 }
2521 }
2522 };
2523 self.local_buffer_ids_by_path.insert(
2524 ProjectPath {
2525 worktree_id: file.worktree_id(cx),
2526 path: file.path.clone(),
2527 },
2528 remote_id,
2529 );
2530 }
2531 _ => {}
2532 }
2533
2534 None
2535 }
2536
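    // Schedules a git diff recalculation for the buffer, either on the next turn of the event
    // loop or debounced according to the `git.gutter_debounce` setting.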
2537 fn request_buffer_diff_recalculation(
2538 &mut self,
2539 buffer: &Model<Buffer>,
2540 cx: &mut ModelContext<Self>,
2541 ) {
2542 self.buffers_needing_diff.insert(buffer.downgrade());
2543 let first_insertion = self.buffers_needing_diff.len() == 1;
2544
2545 let settings = ProjectSettings::get_global(cx);
        let Some(delay) = settings.git.gutter_debounce else {
            if first_insertion {
                let this = cx.weak_model();
                cx.defer(move |cx| {
                    if let Some(this) = this.upgrade() {
                        this.update(cx, |this, cx| {
                            this.recalculate_buffer_diffs(cx).detach();
                        });
                    }
                });
            }
            return;
        };
2561
2562 const MIN_DELAY: u64 = 50;
2563 let delay = delay.max(MIN_DELAY);
2564 let duration = Duration::from_millis(delay);
2565
2566 self.git_diff_debouncer
2567 .fire_new(duration, cx, move |this, cx| {
2568 this.recalculate_buffer_diffs(cx)
2569 });
2570 }
2571
2572 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2573 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2574 cx.spawn(move |this, mut cx| async move {
2575 let tasks: Vec<_> = buffers
2576 .iter()
2577 .filter_map(|buffer| {
2578 let buffer = buffer.upgrade()?;
2579 buffer
2580 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2581 .ok()
2582 .flatten()
2583 })
2584 .collect();
2585
2586 futures::future::join_all(tasks).await;
2587
2588 this.update(&mut cx, |this, cx| {
2589 if !this.buffers_needing_diff.is_empty() {
2590 this.recalculate_buffer_diffs(cx).detach();
2591 } else {
2592 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2593 for buffer in buffers {
2594 if let Some(buffer) = buffer.upgrade() {
2595 buffer.update(cx, |_, cx| cx.notify());
2596 }
2597 }
2598 }
2599 })
2600 .ok();
2601 })
2602 }
2603
2604 fn language_servers_for_worktree(
2605 &self,
2606 worktree_id: WorktreeId,
2607 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2608 self.language_server_ids
2609 .iter()
2610 .filter_map(move |((language_server_worktree_id, _), id)| {
2611 if *language_server_worktree_id == worktree_id {
2612 if let Some(LanguageServerState::Running {
2613 adapter,
2614 language,
2615 server,
2616 ..
2617 }) = self.language_servers.get(id)
2618 {
2619 return Some((adapter, language, server));
2620 }
2621 }
2622 None
2623 })
2624 }
2625
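    // Watches the language registry and re-assigns languages (and language servers) to open
    // buffers whenever new languages become available or the registry is reloaded.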
2626 fn maintain_buffer_languages(
2627 languages: Arc<LanguageRegistry>,
2628 cx: &mut ModelContext<Project>,
2629 ) -> Task<()> {
2630 let mut subscription = languages.subscribe();
2631 let mut prev_reload_count = languages.reload_count();
2632 cx.spawn(move |project, mut cx| async move {
2633 while let Some(()) = subscription.next().await {
2634 if let Some(project) = project.upgrade() {
2635 // If the language registry has been reloaded, then remove and
2636 // re-assign the languages on all open buffers.
2637 let reload_count = languages.reload_count();
2638 if reload_count > prev_reload_count {
2639 prev_reload_count = reload_count;
2640 project
2641 .update(&mut cx, |this, cx| {
2642 let buffers = this
2643 .opened_buffers
2644 .values()
2645 .filter_map(|b| b.upgrade())
2646 .collect::<Vec<_>>();
2647 for buffer in buffers {
2648 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2649 {
2650 this.unregister_buffer_from_language_servers(
2651 &buffer, &f, cx,
2652 );
2653 buffer
2654 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2655 }
2656 }
2657 })
2658 .ok();
2659 }
2660
2661 project
2662 .update(&mut cx, |project, cx| {
2663 let mut plain_text_buffers = Vec::new();
2664 let mut buffers_with_unknown_injections = Vec::new();
2665 for buffer in project.opened_buffers.values() {
2666 if let Some(handle) = buffer.upgrade() {
2667 let buffer = &handle.read(cx);
2668 if buffer.language().is_none()
2669 || buffer.language() == Some(&*language::PLAIN_TEXT)
2670 {
2671 plain_text_buffers.push(handle);
2672 } else if buffer.contains_unknown_injections() {
2673 buffers_with_unknown_injections.push(handle);
2674 }
2675 }
2676 }
2677
2678 for buffer in plain_text_buffers {
2679 project.detect_language_for_buffer(&buffer, cx);
2680 project.register_buffer_with_language_servers(&buffer, cx);
2681 }
2682
2683 for buffer in buffers_with_unknown_injections {
2684 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2685 }
2686 })
2687 .ok();
2688 }
2689 }
2690 })
2691 }
2692
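    // Pushes updated workspace configuration to every running language server whenever the
    // settings change.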
2693 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2694 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2695 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2696
2697 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2698 *settings_changed_tx.borrow_mut() = ();
2699 });
2700
2701 cx.spawn(move |this, mut cx| async move {
2702 while let Some(()) = settings_changed_rx.next().await {
2703 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2704 this.language_servers
2705 .values()
2706 .filter_map(|state| match state {
2707 LanguageServerState::Starting(_) => None,
2708 LanguageServerState::Running {
2709 adapter, server, ..
2710 } => Some((adapter.clone(), server.clone())),
2711 })
2712 .collect()
2713 })?;
2714
2715 for (adapter, server) in servers {
2716 let workspace_config =
2717 cx.update(|cx| adapter.workspace_configuration(server.root_path(), cx))?;
2718 server
2719 .notify::<lsp::notification::DidChangeConfiguration>(
2720 lsp::DidChangeConfigurationParams {
2721 settings: workspace_config.clone(),
2722 },
2723 )
2724 .ok();
2725 }
2726 }
2727
2728 drop(settings_observation);
2729 anyhow::Ok(())
2730 })
2731 }
2732
2733 fn detect_language_for_buffer(
2734 &mut self,
2735 buffer_handle: &Model<Buffer>,
2736 cx: &mut ModelContext<Self>,
2737 ) -> Option<()> {
2738 // If the buffer has a language, set it and start the language server if we haven't already.
2739 let buffer = buffer_handle.read(cx);
2740 let full_path = buffer.file()?.full_path(cx);
2741 let content = buffer.as_rope();
2742 let new_language = self
2743 .languages
2744 .language_for_file(&full_path, Some(content))
2745 .now_or_never()?
2746 .ok()?;
2747 self.set_language_for_buffer(buffer_handle, new_language, cx);
2748 None
2749 }
2750
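    /// Assigns the given language to the buffer, installing the default prettier and starting
    /// language servers for it as needed.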
2751 pub fn set_language_for_buffer(
2752 &mut self,
2753 buffer: &Model<Buffer>,
2754 new_language: Arc<Language>,
2755 cx: &mut ModelContext<Self>,
2756 ) {
2757 buffer.update(cx, |buffer, cx| {
2758 if buffer.language().map_or(true, |old_language| {
2759 !Arc::ptr_eq(old_language, &new_language)
2760 }) {
2761 buffer.set_language(Some(new_language.clone()), cx);
2762 }
2763 });
2764
2765 let buffer_file = buffer.read(cx).file().cloned();
2766 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2767 let buffer_file = File::from_dyn(buffer_file.as_ref());
2768 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2769 if let Some(prettier_plugins) =
2770 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2771 {
2772 self.install_default_prettier(worktree, prettier_plugins, cx);
2773 };
2774 if let Some(file) = buffer_file {
2775 let worktree = file.worktree.clone();
2776 if worktree.read(cx).is_local() {
2777 self.start_language_servers(&worktree, new_language, cx);
2778 }
2779 }
2780 }
2781
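    // Starts a language server for each of the language's LSP adapters, if language servers
    // are enabled in the settings for this language.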
2782 fn start_language_servers(
2783 &mut self,
2784 worktree: &Model<Worktree>,
2785 language: Arc<Language>,
2786 cx: &mut ModelContext<Self>,
2787 ) {
2788 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2789 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2790 if !settings.enable_language_server {
2791 return;
2792 }
2793
2794 for adapter in language.lsp_adapters() {
2795 self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
2796 }
2797 }
2798
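    // Starts a single language server for the worktree and adapter, unless one is already
    // running or the adapter has exceeded its reinstall attempt limit.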
2799 fn start_language_server(
2800 &mut self,
2801 worktree: &Model<Worktree>,
2802 adapter: Arc<CachedLspAdapter>,
2803 language: Arc<Language>,
2804 cx: &mut ModelContext<Self>,
2805 ) {
2806 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2807 return;
2808 }
2809
2810 let worktree = worktree.read(cx);
2811 let worktree_id = worktree.id();
2812 let worktree_path = worktree.abs_path();
2813 let key = (worktree_id, adapter.name.clone());
2814 if self.language_server_ids.contains_key(&key) {
2815 return;
2816 }
2817
2818 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2819 let pending_server = match self.languages.create_pending_language_server(
2820 stderr_capture.clone(),
2821 language.clone(),
2822 adapter.clone(),
2823 Arc::clone(&worktree_path),
2824 ProjectLspAdapterDelegate::new(self, cx),
2825 cx,
2826 ) {
2827 Some(pending_server) => pending_server,
2828 None => return,
2829 };
2830
2831 let project_settings = ProjectSettings::get_global(cx);
2832 let lsp = project_settings.lsp.get(&adapter.name.0);
        let override_options = lsp.and_then(|s| s.initialization_options.clone());
2834
2835 let server_id = pending_server.server_id;
2836 let container_dir = pending_server.container_dir.clone();
2837 let state = LanguageServerState::Starting({
2838 let adapter = adapter.clone();
2839 let server_name = adapter.name.0.clone();
2840 let language = language.clone();
2841 let key = key.clone();
2842
2843 cx.spawn(move |this, mut cx| async move {
2844 let result = Self::setup_and_insert_language_server(
2845 this.clone(),
2846 &worktree_path,
2847 override_options,
2848 pending_server,
2849 adapter.clone(),
2850 language.clone(),
2851 server_id,
2852 key,
2853 &mut cx,
2854 )
2855 .await;
2856
2857 match result {
2858 Ok(server) => {
2859 stderr_capture.lock().take();
2860 server
2861 }
2862
2863 Err(err) => {
2864 log::error!("failed to start language server {server_name:?}: {err}");
2865 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2866
2867 let this = this.upgrade()?;
2868 let container_dir = container_dir?;
2869
2870 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2871 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2872 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2873 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2874 return None;
2875 }
2876
2877 let installation_test_binary = adapter
2878 .installation_test_binary(container_dir.to_path_buf())
2879 .await;
2880
2881 this.update(&mut cx, |_, cx| {
2882 Self::check_errored_server(
2883 language,
2884 adapter,
2885 server_id,
2886 installation_test_binary,
2887 cx,
2888 )
2889 })
2890 .ok();
2891
2892 None
2893 }
2894 }
2895 })
2896 });
2897
2898 self.language_servers.insert(server_id, state);
2899 self.language_server_ids.insert(key, server_id);
2900 }
2901
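    // Shuts down the errored server, deletes its container directory, and then starts a
    // fresh instance for every worktree in the project.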
2902 fn reinstall_language_server(
2903 &mut self,
2904 language: Arc<Language>,
2905 adapter: Arc<CachedLspAdapter>,
2906 server_id: LanguageServerId,
2907 cx: &mut ModelContext<Self>,
2908 ) -> Option<Task<()>> {
2909 log::info!("beginning to reinstall server");
2910
2911 let existing_server = match self.language_servers.remove(&server_id) {
2912 Some(LanguageServerState::Running { server, .. }) => Some(server),
2913 _ => None,
2914 };
2915
2916 for worktree in &self.worktrees {
2917 if let Some(worktree) = worktree.upgrade() {
2918 let key = (worktree.read(cx).id(), adapter.name.clone());
2919 self.language_server_ids.remove(&key);
2920 }
2921 }
2922
2923 Some(cx.spawn(move |this, mut cx| async move {
2924 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2925 log::info!("shutting down existing server");
2926 task.await;
2927 }
2928
            // TODO: This is race-safe with regard to preventing new instances from
            // starting while the container is being deleted, but existing instances in
            // other projects will be left in a broken state.
2932 let Some(task) = this
2933 .update(&mut cx, |this, cx| {
2934 this.languages.delete_server_container(adapter.clone(), cx)
2935 })
2936 .log_err()
2937 else {
2938 return;
2939 };
2940 task.await;
2941
2942 this.update(&mut cx, |this, cx| {
2943 let worktrees = this.worktrees.clone();
2944 for worktree in worktrees {
2945 if let Some(worktree) = worktree.upgrade() {
2946 this.start_language_server(
2947 &worktree,
2948 adapter.clone(),
2949 language.clone(),
2950 cx,
2951 );
2952 }
2953 }
2954 })
2955 .ok();
2956 }))
2957 }
2958
2959 async fn setup_and_insert_language_server(
2960 this: WeakModel<Self>,
2961 worktree_path: &Path,
2962 override_initialization_options: Option<serde_json::Value>,
2963 pending_server: PendingLanguageServer,
2964 adapter: Arc<CachedLspAdapter>,
2965 language: Arc<Language>,
2966 server_id: LanguageServerId,
2967 key: (WorktreeId, LanguageServerName),
2968 cx: &mut AsyncAppContext,
2969 ) -> Result<Option<Arc<LanguageServer>>> {
2970 let language_server = Self::setup_pending_language_server(
2971 this.clone(),
2972 override_initialization_options,
2973 pending_server,
2974 worktree_path,
2975 adapter.clone(),
2976 server_id,
2977 cx,
2978 )
2979 .await?;
2980
2981 let this = match this.upgrade() {
2982 Some(this) => this,
2983 None => return Err(anyhow!("failed to upgrade project handle")),
2984 };
2985
2986 this.update(cx, |this, cx| {
2987 this.insert_newly_running_language_server(
2988 language,
2989 adapter,
2990 language_server.clone(),
2991 server_id,
2992 key,
2993 cx,
2994 )
2995 })??;
2996
2997 Ok(Some(language_server))
2998 }
2999
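    // Waits for the pending server to finish launching, wires up its notification and
    // request handlers, and initializes it with the merged initialization options.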
3000 async fn setup_pending_language_server(
3001 this: WeakModel<Self>,
3002 override_options: Option<serde_json::Value>,
3003 pending_server: PendingLanguageServer,
3004 worktree_path: &Path,
3005 adapter: Arc<CachedLspAdapter>,
3006 server_id: LanguageServerId,
3007 cx: &mut AsyncAppContext,
3008 ) -> Result<Arc<LanguageServer>> {
3009 let workspace_config =
3010 cx.update(|cx| adapter.workspace_configuration(worktree_path, cx))?;
3011 let language_server = pending_server.task.await?;
3012
3013 language_server
3014 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3015 let adapter = adapter.clone();
3016 let this = this.clone();
3017 move |mut params, mut cx| {
3018 let adapter = adapter.clone();
3019 if let Some(this) = this.upgrade() {
3020 adapter.process_diagnostics(&mut params);
3021 this.update(&mut cx, |this, cx| {
3022 this.update_diagnostics(
3023 server_id,
3024 params,
3025 &adapter.disk_based_diagnostic_sources,
3026 cx,
3027 )
3028 .log_err();
3029 })
3030 .ok();
3031 }
3032 }
3033 })
3034 .detach();
3035
3036 language_server
3037 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3038 let adapter = adapter.clone();
3039 let worktree_path = worktree_path.to_path_buf();
3040 move |params, cx| {
3041 let adapter = adapter.clone();
3042 let worktree_path = worktree_path.clone();
3043 async move {
3044 let workspace_config =
3045 cx.update(|cx| adapter.workspace_configuration(&worktree_path, cx))?;
3046 Ok(params
3047 .items
3048 .into_iter()
3049 .map(|item| {
3050 if let Some(section) = &item.section {
3051 workspace_config
3052 .get(section)
3053 .cloned()
3054 .unwrap_or(serde_json::Value::Null)
3055 } else {
3056 workspace_config.clone()
3057 }
3058 })
3059 .collect())
3060 }
3061 }
3062 })
3063 .detach();
3064
        // Even though we don't have handling for these requests, respond to them to
        // avoid stalling language servers like `gopls`, which wait for a response to
        // these requests when initializing.
3068 language_server
3069 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3070 let this = this.clone();
3071 move |params, mut cx| {
3072 let this = this.clone();
3073 async move {
3074 this.update(&mut cx, |this, _| {
3075 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3076 {
3077 if let lsp::NumberOrString::String(token) = params.token {
3078 status.progress_tokens.insert(token);
3079 }
3080 }
3081 })?;
3082
3083 Ok(())
3084 }
3085 }
3086 })
3087 .detach();
3088
3089 language_server
3090 .on_request::<lsp::request::RegisterCapability, _, _>({
3091 let this = this.clone();
3092 move |params, mut cx| {
3093 let this = this.clone();
3094 async move {
3095 for reg in params.registrations {
3096 if reg.method == "workspace/didChangeWatchedFiles" {
3097 if let Some(options) = reg.register_options {
3098 let options = serde_json::from_value(options)?;
3099 this.update(&mut cx, |this, cx| {
3100 this.on_lsp_did_change_watched_files(
3101 server_id, options, cx,
3102 );
3103 })?;
3104 }
3105 }
3106 }
3107 Ok(())
3108 }
3109 }
3110 })
3111 .detach();
3112
3113 language_server
3114 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3115 let adapter = adapter.clone();
3116 let this = this.clone();
3117 move |params, cx| {
3118 Self::on_lsp_workspace_edit(
3119 this.clone(),
3120 params,
3121 server_id,
3122 adapter.clone(),
3123 cx,
3124 )
3125 }
3126 })
3127 .detach();
3128
3129 language_server
3130 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3131 let this = this.clone();
3132 move |(), mut cx| {
3133 let this = this.clone();
3134 async move {
3135 this.update(&mut cx, |project, cx| {
3136 cx.emit(Event::RefreshInlayHints);
3137 project.remote_id().map(|project_id| {
3138 project.client.send(proto::RefreshInlayHints { project_id })
3139 })
3140 })?
3141 .transpose()?;
3142 Ok(())
3143 }
3144 }
3145 })
3146 .detach();
3147
3148 let disk_based_diagnostics_progress_token =
3149 adapter.disk_based_diagnostics_progress_token.clone();
3150
3151 language_server
3152 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3153 if let Some(this) = this.upgrade() {
3154 this.update(&mut cx, |this, cx| {
3155 this.on_lsp_progress(
3156 params,
3157 server_id,
3158 disk_based_diagnostics_progress_token.clone(),
3159 cx,
3160 );
3161 })
3162 .ok();
3163 }
3164 })
3165 .detach();
3166 let mut initialization_options = adapter.adapter.initialization_options();
3167 match (&mut initialization_options, override_options) {
3168 (Some(initialization_options), Some(override_options)) => {
3169 merge_json_value_into(override_options, initialization_options);
3170 }
3171 (None, override_options) => initialization_options = override_options,
3172 _ => {}
3173 }
3174 let language_server = language_server.initialize(initialization_options).await?;
3175
3176 language_server
3177 .notify::<lsp::notification::DidChangeConfiguration>(
3178 lsp::DidChangeConfigurationParams {
3179 settings: workspace_config,
3180 },
3181 )
3182 .ok();
3183
3184 Ok(language_server)
3185 }
3186
3187 fn insert_newly_running_language_server(
3188 &mut self,
3189 language: Arc<Language>,
3190 adapter: Arc<CachedLspAdapter>,
3191 language_server: Arc<LanguageServer>,
3192 server_id: LanguageServerId,
3193 key: (WorktreeId, LanguageServerName),
3194 cx: &mut ModelContext<Self>,
3195 ) -> Result<()> {
        // If the language server for this key doesn't match the server id, don't store the
        // server. This causes it to be dropped, killing the process.
3198 if self
3199 .language_server_ids
3200 .get(&key)
3201 .map(|id| id != &server_id)
3202 .unwrap_or(false)
3203 {
3204 return Ok(());
3205 }
3206
        // Update the language_servers collection with the Running variant of
        // LanguageServerState, indicating that the server is up and ready.
3209 self.language_servers.insert(
3210 server_id,
3211 LanguageServerState::Running {
3212 adapter: adapter.clone(),
3213 language: language.clone(),
3214 watched_paths: Default::default(),
3215 server: language_server.clone(),
3216 simulate_disk_based_diagnostics_completion: None,
3217 },
3218 );
3219
3220 self.language_server_statuses.insert(
3221 server_id,
3222 LanguageServerStatus {
3223 name: language_server.name().to_string(),
3224 pending_work: Default::default(),
3225 has_pending_diagnostic_updates: false,
3226 progress_tokens: Default::default(),
3227 },
3228 );
3229
3230 cx.emit(Event::LanguageServerAdded(server_id));
3231
3232 if let Some(project_id) = self.remote_id() {
3233 self.client.send(proto::StartLanguageServer {
3234 project_id,
3235 server: Some(proto::LanguageServer {
3236 id: server_id.0 as u64,
3237 name: language_server.name().to_string(),
3238 }),
3239 })?;
3240 }
3241
3242 // Tell the language server about every open buffer in the worktree that matches the language.
3243 for buffer in self.opened_buffers.values() {
3244 if let Some(buffer_handle) = buffer.upgrade() {
3245 let buffer = buffer_handle.read(cx);
3246 let file = match File::from_dyn(buffer.file()) {
3247 Some(file) => file,
3248 None => continue,
3249 };
3250 let language = match buffer.language() {
3251 Some(language) => language,
3252 None => continue,
3253 };
3254
3255 if file.worktree.read(cx).id() != key.0
3256 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3257 {
3258 continue;
3259 }
3260
3261 let file = match file.as_local() {
3262 Some(file) => file,
3263 None => continue,
3264 };
3265
3266 let versions = self
3267 .buffer_snapshots
3268 .entry(buffer.remote_id())
3269 .or_default()
3270 .entry(server_id)
3271 .or_insert_with(|| {
3272 vec![LspBufferSnapshot {
3273 version: 0,
3274 snapshot: buffer.text_snapshot(),
3275 }]
3276 });
3277
3278 let snapshot = versions.last().unwrap();
3279 let version = snapshot.version;
3280 let initial_snapshot = &snapshot.snapshot;
3281 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3282 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3283 lsp::DidOpenTextDocumentParams {
3284 text_document: lsp::TextDocumentItem::new(
3285 uri,
3286 adapter
3287 .language_ids
3288 .get(language.name().as_ref())
3289 .cloned()
3290 .unwrap_or_default(),
3291 version,
3292 initial_snapshot.text(),
3293 ),
3294 },
3295 )?;
3296
3297 buffer_handle.update(cx, |buffer, cx| {
3298 buffer.set_completion_triggers(
3299 language_server
3300 .capabilities()
3301 .completion_provider
3302 .as_ref()
3303 .and_then(|provider| provider.trigger_characters.clone())
3304 .unwrap_or_default(),
3305 cx,
3306 )
3307 });
3308 }
3309 }
3310
3311 cx.notify();
3312 Ok(())
3313 }
3314
    // Stops the language server that was started for the given worktree and adapter, and
    // returns the ids of all worktrees that were served by that language server and are
    // now orphaned.
3317 fn stop_language_server(
3318 &mut self,
3319 worktree_id: WorktreeId,
3320 adapter_name: LanguageServerName,
3321 cx: &mut ModelContext<Self>,
3322 ) -> Task<Vec<WorktreeId>> {
3323 let key = (worktree_id, adapter_name);
3324 if let Some(server_id) = self.language_server_ids.remove(&key) {
3325 log::info!("stopping language server {}", key.1 .0);
3326
3327 // Remove other entries for this language server as well
3328 let mut orphaned_worktrees = vec![worktree_id];
3329 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3330 for other_key in other_keys {
3331 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3332 self.language_server_ids.remove(&other_key);
3333 orphaned_worktrees.push(other_key.0);
3334 }
3335 }
3336
3337 for buffer in self.opened_buffers.values() {
3338 if let Some(buffer) = buffer.upgrade() {
3339 buffer.update(cx, |buffer, cx| {
3340 buffer.update_diagnostics(server_id, Default::default(), cx);
3341 });
3342 }
3343 }
3344 for worktree in &self.worktrees {
3345 if let Some(worktree) = worktree.upgrade() {
3346 worktree.update(cx, |worktree, cx| {
3347 if let Some(worktree) = worktree.as_local_mut() {
3348 worktree.clear_diagnostics_for_language_server(server_id, cx);
3349 }
3350 });
3351 }
3352 }
3353
3354 self.language_server_statuses.remove(&server_id);
3355 cx.notify();
3356
3357 let server_state = self.language_servers.remove(&server_id);
3358 cx.emit(Event::LanguageServerRemoved(server_id));
3359 cx.spawn(move |this, mut cx| async move {
3360 let server = match server_state {
3361 Some(LanguageServerState::Starting(task)) => task.await,
3362 Some(LanguageServerState::Running { server, .. }) => Some(server),
3363 None => None,
3364 };
3365
3366 if let Some(server) = server {
3367 if let Some(shutdown) = server.shutdown() {
3368 shutdown.await;
3369 }
3370 }
3371
3372 if let Some(this) = this.upgrade() {
3373 this.update(&mut cx, |this, cx| {
3374 this.language_server_statuses.remove(&server_id);
3375 cx.notify();
3376 })
3377 .ok();
3378 }
3379
3380 orphaned_worktrees
3381 })
3382 } else {
3383 Task::ready(Vec::new())
3384 }
3385 }
3386
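    /// Restarts the language servers associated with the given buffers' worktrees and
    /// languages.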
3387 pub fn restart_language_servers_for_buffers(
3388 &mut self,
3389 buffers: impl IntoIterator<Item = Model<Buffer>>,
3390 cx: &mut ModelContext<Self>,
3391 ) -> Option<()> {
3392 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3393 .into_iter()
3394 .filter_map(|buffer| {
3395 let buffer = buffer.read(cx);
3396 let file = File::from_dyn(buffer.file())?;
3397 let full_path = file.full_path(cx);
3398 let language = self
3399 .languages
3400 .language_for_file(&full_path, Some(buffer.as_rope()))
3401 .now_or_never()?
3402 .ok()?;
3403 Some((file.worktree.clone(), language))
3404 })
3405 .collect();
3406 for (worktree, language) in language_server_lookup_info {
3407 self.restart_language_servers(worktree, language, cx);
3408 }
3409
3410 None
3411 }
3412
3413 fn restart_language_servers(
3414 &mut self,
3415 worktree: Model<Worktree>,
3416 language: Arc<Language>,
3417 cx: &mut ModelContext<Self>,
3418 ) {
3419 let worktree_id = worktree.read(cx).id();
3420
3421 let stop_tasks = language
3422 .lsp_adapters()
3423 .iter()
3424 .map(|adapter| {
3425 let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
3426 (stop_task, adapter.name.clone())
3427 })
3428 .collect::<Vec<_>>();
3429 if stop_tasks.is_empty() {
3430 return;
3431 }
3432
3433 cx.spawn(move |this, mut cx| async move {
3434 // For each stopped language server, record all of the worktrees with which
3435 // it was associated.
3436 let mut affected_worktrees = Vec::new();
3437 for (stop_task, language_server_name) in stop_tasks {
3438 for affected_worktree_id in stop_task.await {
3439 affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
3440 }
3441 }
3442
3443 this.update(&mut cx, |this, cx| {
3444 // Restart the language server for the given worktree.
3445 this.start_language_servers(&worktree, language.clone(), cx);
3446
                // Look up the new server ids and set them for each of the orphaned worktrees.
3448 for (affected_worktree_id, language_server_name) in affected_worktrees {
3449 if let Some(new_server_id) = this
3450 .language_server_ids
3451 .get(&(worktree_id, language_server_name.clone()))
3452 .cloned()
3453 {
3454 this.language_server_ids
3455 .insert((affected_worktree_id, language_server_name), new_server_id);
3456 }
3457 }
3458 })
3459 .ok();
3460 })
3461 .detach();
3462 }
3463
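    // Runs the adapter's installation test binary to check whether the errored server's
    // installation is broken, reinstalling the server if the check fails.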
3464 fn check_errored_server(
3465 language: Arc<Language>,
3466 adapter: Arc<CachedLspAdapter>,
3467 server_id: LanguageServerId,
3468 installation_test_binary: Option<LanguageServerBinary>,
3469 cx: &mut ModelContext<Self>,
3470 ) {
3471 if !adapter.can_be_reinstalled() {
3472 log::info!(
3473 "Validation check requested for {:?} but it cannot be reinstalled",
3474 adapter.name.0
3475 );
3476 return;
3477 }
3478
3479 cx.spawn(move |this, mut cx| async move {
3480 log::info!("About to spawn test binary");
3481
            // A missing test binary counts as a failure.
3483 let process = installation_test_binary.and_then(|binary| {
3484 smol::process::Command::new(&binary.path)
3485 .current_dir(&binary.path)
3486 .args(binary.arguments)
3487 .stdin(Stdio::piped())
3488 .stdout(Stdio::piped())
3489 .stderr(Stdio::inherit())
3490 .kill_on_drop(true)
3491 .spawn()
3492 .ok()
3493 });
3494
3495 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3496 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3497
3498 let mut errored = false;
3499 if let Some(mut process) = process {
3500 futures::select! {
3501 status = process.status().fuse() => match status {
3502 Ok(status) => errored = !status.success(),
3503 Err(_) => errored = true,
3504 },
3505
3506 _ = timeout => {
                        log::info!("test binary timed out, this counts as a success");
3508 _ = process.kill();
3509 }
3510 }
3511 } else {
3512 log::warn!("test binary failed to launch");
3513 errored = true;
3514 }
3515
3516 if errored {
3517 log::warn!("test binary check failed");
3518 let task = this
3519 .update(&mut cx, move |this, cx| {
3520 this.reinstall_language_server(language, adapter, server_id, cx)
3521 })
3522 .ok()
3523 .flatten();
3524
3525 if let Some(task) = task {
3526 task.await;
3527 }
3528 }
3529 })
3530 .detach();
3531 }
3532
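    // Handles `$/progress` notifications from a language server, distinguishing disk-based
    // diagnostic progress from ordinary work-done progress and forwarding both to
    // collaborators.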
3533 fn on_lsp_progress(
3534 &mut self,
3535 progress: lsp::ProgressParams,
3536 language_server_id: LanguageServerId,
3537 disk_based_diagnostics_progress_token: Option<String>,
3538 cx: &mut ModelContext<Self>,
3539 ) {
3540 let token = match progress.token {
3541 lsp::NumberOrString::String(token) => token,
3542 lsp::NumberOrString::Number(token) => {
3543 log::info!("skipping numeric progress token {}", token);
3544 return;
3545 }
3546 };
3547 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3548 let language_server_status =
3549 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3550 status
3551 } else {
3552 return;
3553 };
3554
3555 if !language_server_status.progress_tokens.contains(&token) {
3556 return;
3557 }
3558
3559 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3560 .as_ref()
3561 .map_or(false, |disk_based_token| {
3562 token.starts_with(disk_based_token)
3563 });
3564
3565 match progress {
3566 lsp::WorkDoneProgress::Begin(report) => {
3567 if is_disk_based_diagnostics_progress {
3568 language_server_status.has_pending_diagnostic_updates = true;
3569 self.disk_based_diagnostics_started(language_server_id, cx);
3570 self.buffer_ordered_messages_tx
3571 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3572 language_server_id,
3573 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3574 })
3575 .ok();
3576 } else {
3577 self.on_lsp_work_start(
3578 language_server_id,
3579 token.clone(),
3580 LanguageServerProgress {
3581 message: report.message.clone(),
3582 percentage: report.percentage.map(|p| p as usize),
3583 last_update_at: Instant::now(),
3584 },
3585 cx,
3586 );
3587 self.buffer_ordered_messages_tx
3588 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3589 language_server_id,
3590 message: proto::update_language_server::Variant::WorkStart(
3591 proto::LspWorkStart {
3592 token,
3593 message: report.message,
3594 percentage: report.percentage.map(|p| p as u32),
3595 },
3596 ),
3597 })
3598 .ok();
3599 }
3600 }
3601 lsp::WorkDoneProgress::Report(report) => {
3602 if !is_disk_based_diagnostics_progress {
3603 self.on_lsp_work_progress(
3604 language_server_id,
3605 token.clone(),
3606 LanguageServerProgress {
3607 message: report.message.clone(),
3608 percentage: report.percentage.map(|p| p as usize),
3609 last_update_at: Instant::now(),
3610 },
3611 cx,
3612 );
3613 self.buffer_ordered_messages_tx
3614 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3615 language_server_id,
3616 message: proto::update_language_server::Variant::WorkProgress(
3617 proto::LspWorkProgress {
3618 token,
3619 message: report.message,
3620 percentage: report.percentage.map(|p| p as u32),
3621 },
3622 ),
3623 })
3624 .ok();
3625 }
3626 }
3627 lsp::WorkDoneProgress::End(_) => {
3628 language_server_status.progress_tokens.remove(&token);
3629
3630 if is_disk_based_diagnostics_progress {
3631 language_server_status.has_pending_diagnostic_updates = false;
3632 self.disk_based_diagnostics_finished(language_server_id, cx);
3633 self.buffer_ordered_messages_tx
3634 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3635 language_server_id,
3636 message:
3637 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3638 Default::default(),
3639 ),
3640 })
3641 .ok();
3642 } else {
3643 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3644 self.buffer_ordered_messages_tx
3645 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3646 language_server_id,
3647 message: proto::update_language_server::Variant::WorkEnd(
3648 proto::LspWorkEnd { token },
3649 ),
3650 })
3651 .ok();
3652 }
3653 }
3654 }
3655 }
3656
3657 fn on_lsp_work_start(
3658 &mut self,
3659 language_server_id: LanguageServerId,
3660 token: String,
3661 progress: LanguageServerProgress,
3662 cx: &mut ModelContext<Self>,
3663 ) {
3664 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3665 status.pending_work.insert(token, progress);
3666 cx.notify();
3667 }
3668 }
3669
3670 fn on_lsp_work_progress(
3671 &mut self,
3672 language_server_id: LanguageServerId,
3673 token: String,
3674 progress: LanguageServerProgress,
3675 cx: &mut ModelContext<Self>,
3676 ) {
3677 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3678 let entry = status
3679 .pending_work
3680 .entry(token)
3681 .or_insert(LanguageServerProgress {
3682 message: Default::default(),
3683 percentage: Default::default(),
3684 last_update_at: progress.last_update_at,
3685 });
3686 if progress.message.is_some() {
3687 entry.message = progress.message;
3688 }
3689 if progress.percentage.is_some() {
3690 entry.percentage = progress.percentage;
3691 }
3692 entry.last_update_at = progress.last_update_at;
3693 cx.notify();
3694 }
3695 }
3696
3697 fn on_lsp_work_end(
3698 &mut self,
3699 language_server_id: LanguageServerId,
3700 token: String,
3701 cx: &mut ModelContext<Self>,
3702 ) {
3703 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3704 cx.emit(Event::RefreshInlayHints);
3705 status.pending_work.remove(&token);
3706 cx.notify();
3707 }
3708 }
3709
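    // Converts the server's file-watcher registrations into per-worktree glob sets and
    // stores them on the running server's state.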
3710 fn on_lsp_did_change_watched_files(
3711 &mut self,
3712 language_server_id: LanguageServerId,
3713 params: DidChangeWatchedFilesRegistrationOptions,
3714 cx: &mut ModelContext<Self>,
3715 ) {
3716 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3717 self.language_servers.get_mut(&language_server_id)
3718 {
3719 let mut builders = HashMap::default();
3720 for watcher in params.watchers {
3721 for worktree in &self.worktrees {
3722 if let Some(worktree) = worktree.upgrade() {
3723 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3724 if let Some(abs_path) = tree.abs_path().to_str() {
3725 let relative_glob_pattern = match &watcher.glob_pattern {
3726 lsp::GlobPattern::String(s) => s
3727 .strip_prefix(abs_path)
3728 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3729 lsp::GlobPattern::Relative(rp) => {
3730 let base_uri = match &rp.base_uri {
3731 lsp::OneOf::Left(workspace_folder) => {
3732 &workspace_folder.uri
3733 }
3734 lsp::OneOf::Right(base_uri) => base_uri,
3735 };
3736 base_uri.to_file_path().ok().and_then(|file_path| {
3737 (file_path.to_str() == Some(abs_path))
3738 .then_some(rp.pattern.as_str())
3739 })
3740 }
3741 };
3742 if let Some(relative_glob_pattern) = relative_glob_pattern {
3743 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
3744 tree.as_local_mut()
3745 .unwrap()
3746 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3747 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3748 builders
3749 .entry(tree.id())
                                        .or_insert_with(GlobSetBuilder::new)
3751 .add(glob);
3752 }
3753 return true;
3754 }
3755 }
3756 false
3757 });
3758 if glob_is_inside_worktree {
3759 break;
3760 }
3761 }
3762 }
3763 }
3764
3765 watched_paths.clear();
3766 for (worktree_id, builder) in builders {
3767 if let Ok(globset) = builder.build() {
3768 watched_paths.insert(worktree_id, globset);
3769 }
3770 }
3771
3772 cx.notify();
3773 }
3774 }
3775
3776 async fn on_lsp_workspace_edit(
3777 this: WeakModel<Self>,
3778 params: lsp::ApplyWorkspaceEditParams,
3779 server_id: LanguageServerId,
3780 adapter: Arc<CachedLspAdapter>,
3781 mut cx: AsyncAppContext,
3782 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3783 let this = this
3784 .upgrade()
            .ok_or_else(|| anyhow!("project closed"))?;
3786 let language_server = this
3787 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3788 .ok_or_else(|| anyhow!("language server not found"))?;
3789 let transaction = Self::deserialize_workspace_edit(
3790 this.clone(),
3791 params.edit,
3792 true,
3793 adapter.clone(),
3794 language_server.clone(),
3795 &mut cx,
3796 )
3797 .await
3798 .log_err();
3799 this.update(&mut cx, |this, _| {
3800 if let Some(transaction) = transaction {
3801 this.last_workspace_edits_by_language_server
3802 .insert(server_id, transaction);
3803 }
3804 })?;
3805 Ok(lsp::ApplyWorkspaceEditResponse {
3806 applied: true,
3807 failed_change: None,
3808 failure_reason: None,
3809 })
3810 }
3811
3812 pub fn language_server_statuses(
3813 &self,
3814 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3815 self.language_server_statuses.values()
3816 }
3817
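    /// Translates an LSP `publishDiagnostics` notification into diagnostic entries, grouping
    /// related information with its primary diagnostic, and stores them for the file.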
3818 pub fn update_diagnostics(
3819 &mut self,
3820 language_server_id: LanguageServerId,
3821 mut params: lsp::PublishDiagnosticsParams,
3822 disk_based_sources: &[String],
3823 cx: &mut ModelContext<Self>,
3824 ) -> Result<()> {
3825 let abs_path = params
3826 .uri
3827 .to_file_path()
3828 .map_err(|_| anyhow!("URI is not a file"))?;
3829 let mut diagnostics = Vec::default();
3830 let mut primary_diagnostic_group_ids = HashMap::default();
3831 let mut sources_by_group_id = HashMap::default();
3832 let mut supporting_diagnostics = HashMap::default();
3833
3834 // Ensure that primary diagnostics are always the most severe
3835 params.diagnostics.sort_by_key(|item| item.severity);
3836
        for diagnostic in &params.diagnostics {
3838 let source = diagnostic.source.as_ref();
3839 let code = diagnostic.code.as_ref().map(|code| match code {
3840 lsp::NumberOrString::Number(code) => code.to_string(),
3841 lsp::NumberOrString::String(code) => code.clone(),
3842 });
3843 let range = range_from_lsp(diagnostic.range);
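            // A diagnostic counts as "supporting" if any of its related locations refers to a primary
            // diagnostic that has already been recorded above.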
3844 let is_supporting = diagnostic
3845 .related_information
3846 .as_ref()
3847 .map_or(false, |infos| {
3848 infos.iter().any(|info| {
3849 primary_diagnostic_group_ids.contains_key(&(
3850 source,
3851 code.clone(),
3852 range_from_lsp(info.location.range),
3853 ))
3854 })
3855 });
3856
3857 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3858 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3859 });
3860
3861 if is_supporting {
3862 supporting_diagnostics.insert(
3863 (source, code.clone(), range),
3864 (diagnostic.severity, is_unnecessary),
3865 );
3866 } else {
3867 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3868 let is_disk_based =
3869 source.map_or(false, |source| disk_based_sources.contains(source));
3870
3871 sources_by_group_id.insert(group_id, source);
3872 primary_diagnostic_group_ids
3873 .insert((source, code.clone(), range.clone()), group_id);
3874
3875 diagnostics.push(DiagnosticEntry {
3876 range,
3877 diagnostic: Diagnostic {
3878 source: diagnostic.source.clone(),
3879 code: code.clone(),
3880 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3881 message: diagnostic.message.trim().to_string(),
3882 group_id,
3883 is_primary: true,
3884 is_disk_based,
3885 is_unnecessary,
3886 },
3887 });
3888 if let Some(infos) = &diagnostic.related_information {
3889 for info in infos {
3890 if info.location.uri == params.uri && !info.message.is_empty() {
3891 let range = range_from_lsp(info.location.range);
3892 diagnostics.push(DiagnosticEntry {
3893 range,
3894 diagnostic: Diagnostic {
3895 source: diagnostic.source.clone(),
3896 code: code.clone(),
3897 severity: DiagnosticSeverity::INFORMATION,
3898 message: info.message.trim().to_string(),
3899 group_id,
3900 is_primary: false,
3901 is_disk_based,
3902 is_unnecessary: false,
3903 },
3904 });
3905 }
3906 }
3907 }
3908 }
3909 }
3910
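        // For non-primary entries, pull the severity and `unnecessary` flag from any supporting
        // diagnostic the server published for the same source, code, and range.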
3911 for entry in &mut diagnostics {
3912 let diagnostic = &mut entry.diagnostic;
3913 if !diagnostic.is_primary {
3914 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3915 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3916 source,
3917 diagnostic.code.clone(),
3918 entry.range.clone(),
3919 )) {
3920 if let Some(severity) = severity {
3921 diagnostic.severity = severity;
3922 }
3923 diagnostic.is_unnecessary = is_unnecessary;
3924 }
3925 }
3926 }
3927
3928 self.update_diagnostic_entries(
3929 language_server_id,
3930 abs_path,
3931 params.version,
3932 diagnostics,
3933 cx,
3934 )?;
3935 Ok(())
3936 }
3937
3938 pub fn update_diagnostic_entries(
3939 &mut self,
3940 server_id: LanguageServerId,
3941 abs_path: PathBuf,
3942 version: Option<i32>,
3943 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3944 cx: &mut ModelContext<Project>,
3945 ) -> Result<(), anyhow::Error> {
3946 let (worktree, relative_path) = self
3947 .find_local_worktree(&abs_path, cx)
3948 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3949
3950 let project_path = ProjectPath {
3951 worktree_id: worktree.read(cx).id(),
3952 path: relative_path.into(),
3953 };
3954
3955 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3956 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3957 }
3958
3959 let updated = worktree.update(cx, |worktree, cx| {
3960 worktree
3961 .as_local_mut()
3962 .ok_or_else(|| anyhow!("not a local worktree"))?
3963 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3964 })?;
3965 if updated {
3966 cx.emit(Event::DiagnosticsUpdated {
3967 language_server_id: server_id,
3968 path: project_path,
3969 });
3970 }
3971 Ok(())
3972 }
3973
3974 fn update_buffer_diagnostics(
3975 &mut self,
3976 buffer: &Model<Buffer>,
3977 server_id: LanguageServerId,
3978 version: Option<i32>,
3979 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3980 cx: &mut ModelContext<Self>,
3981 ) -> Result<()> {
3982 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3983 Ordering::Equal
3984 .then_with(|| b.is_primary.cmp(&a.is_primary))
3985 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3986 .then_with(|| a.severity.cmp(&b.severity))
3987 .then_with(|| a.message.cmp(&b.message))
3988 }
3989
3990 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3991
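        // Sort diagnostics so that earlier-starting, longer ranges come first, with primary
        // diagnostics ordered ahead of supporting ones at the same position.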
3992 diagnostics.sort_unstable_by(|a, b| {
3993 Ordering::Equal
3994 .then_with(|| a.range.start.cmp(&b.range.start))
3995 .then_with(|| b.range.end.cmp(&a.range.end))
3996 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3997 });
3998
3999 let mut sanitized_diagnostics = Vec::new();
4000 let edits_since_save = Patch::new(
4001 snapshot
4002 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4003 .collect(),
4004 );
4005 for entry in diagnostics {
4006 let start;
4007 let end;
4008 if entry.diagnostic.is_disk_based {
4009 // Some diagnostics are based on files on disk instead of buffers'
4010 // current contents. Adjust these diagnostics' ranges to reflect
4011 // any unsaved edits.
4012 start = edits_since_save.old_to_new(entry.range.start);
4013 end = edits_since_save.old_to_new(entry.range.end);
4014 } else {
4015 start = entry.range.start;
4016 end = entry.range.end;
4017 }
4018
4019 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4020 ..snapshot.clip_point_utf16(end, Bias::Right);
4021
4022 // Expand empty ranges by one codepoint
4023 if range.start == range.end {
                // This will go to the next boundary when being clipped
4025 range.end.column += 1;
4026 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4027 if range.start == range.end && range.end.column > 0 {
4028 range.start.column -= 1;
4029 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
4030 }
4031 }
4032
4033 sanitized_diagnostics.push(DiagnosticEntry {
4034 range,
4035 diagnostic: entry.diagnostic,
4036 });
4037 }
4038 drop(edits_since_save);
4039
4040 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4041 buffer.update(cx, |buffer, cx| {
4042 buffer.update_diagnostics(server_id, set, cx)
4043 });
4044 Ok(())
4045 }
4046
4047 pub fn reload_buffers(
4048 &self,
4049 buffers: HashSet<Model<Buffer>>,
4050 push_to_history: bool,
4051 cx: &mut ModelContext<Self>,
4052 ) -> Task<Result<ProjectTransaction>> {
4053 let mut local_buffers = Vec::new();
4054 let mut remote_buffers = None;
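        // Partition the dirty buffers into local ones, which are reloaded directly, and remote ones,
        // which must be reloaded through the host.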
4055 for buffer_handle in buffers {
4056 let buffer = buffer_handle.read(cx);
4057 if buffer.is_dirty() {
4058 if let Some(file) = File::from_dyn(buffer.file()) {
4059 if file.is_local() {
4060 local_buffers.push(buffer_handle);
4061 } else {
4062 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4063 }
4064 }
4065 }
4066 }
4067
4068 let remote_buffers = self.remote_id().zip(remote_buffers);
4069 let client = self.client.clone();
4070
4071 cx.spawn(move |this, mut cx| async move {
4072 let mut project_transaction = ProjectTransaction::default();
4073
4074 if let Some((project_id, remote_buffers)) = remote_buffers {
4075 let response = client
4076 .request(proto::ReloadBuffers {
4077 project_id,
4078 buffer_ids: remote_buffers
4079 .iter()
4080 .filter_map(|buffer| {
4081 buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
4082 })
4083 .collect(),
4084 })
4085 .await?
4086 .transaction
4087 .ok_or_else(|| anyhow!("missing transaction"))?;
4088 project_transaction = this
4089 .update(&mut cx, |this, cx| {
4090 this.deserialize_project_transaction(response, push_to_history, cx)
4091 })?
4092 .await?;
4093 }
4094
4095 for buffer in local_buffers {
4096 let transaction = buffer
4097 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4098 .await?;
4099 buffer.update(&mut cx, |buffer, cx| {
4100 if let Some(transaction) = transaction {
4101 if !push_to_history {
4102 buffer.forget_transaction(transaction.id);
4103 }
4104 project_transaction.0.insert(cx.handle(), transaction);
4105 }
4106 })?;
4107 }
4108
4109 Ok(project_transaction)
4110 })
4111 }
4112
4113 pub fn format(
4114 &mut self,
4115 buffers: HashSet<Model<Buffer>>,
4116 push_to_history: bool,
4117 trigger: FormatTrigger,
4118 cx: &mut ModelContext<Project>,
4119 ) -> Task<anyhow::Result<ProjectTransaction>> {
4120 if self.is_local() {
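            // Pair each buffer with its absolute path and primary language server while we still have
            // synchronous access to the project's state.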
4121 let mut buffers_with_paths_and_servers = buffers
4122 .into_iter()
4123 .filter_map(|buffer_handle| {
4124 let buffer = buffer_handle.read(cx);
4125 let file = File::from_dyn(buffer.file())?;
4126 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4127 let server = self
4128 .primary_language_server_for_buffer(buffer, cx)
4129 .map(|s| s.1.clone());
4130 Some((buffer_handle, buffer_abs_path, server))
4131 })
4132 .collect::<Vec<_>>();
4133
4134 cx.spawn(move |project, mut cx| async move {
4135 // Do not allow multiple concurrent formatting requests for the
4136 // same buffer.
4137 project.update(&mut cx, |this, cx| {
4138 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4139 this.buffers_being_formatted
4140 .insert(buffer.read(cx).remote_id())
4141 });
4142 })?;
4143
4144 let _cleanup = defer({
4145 let this = project.clone();
4146 let mut cx = cx.clone();
4147 let buffers = &buffers_with_paths_and_servers;
4148 move || {
4149 this.update(&mut cx, |this, cx| {
4150 for (buffer, _, _) in buffers {
4151 this.buffers_being_formatted
4152 .remove(&buffer.read(cx).remote_id());
4153 }
4154 })
4155 .ok();
4156 }
4157 });
4158
4159 let mut project_transaction = ProjectTransaction::default();
4160 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4161 let settings = buffer.update(&mut cx, |buffer, cx| {
4162 language_settings(buffer.language(), buffer.file(), cx).clone()
4163 })?;
4164
4165 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4166 let ensure_final_newline = settings.ensure_final_newline_on_save;
4167 let tab_size = settings.tab_size;
4168
                    // First, format the buffer's whitespace according to the settings.
4170 let trailing_whitespace_diff = if remove_trailing_whitespace {
4171 Some(
4172 buffer
4173 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4174 .await,
4175 )
4176 } else {
4177 None
4178 };
4179 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4180 buffer.finalize_last_transaction();
4181 buffer.start_transaction();
4182 if let Some(diff) = trailing_whitespace_diff {
4183 buffer.apply_diff(diff, cx);
4184 }
4185 if ensure_final_newline {
4186 buffer.ensure_final_newline(cx);
4187 }
4188 buffer.end_transaction(cx)
4189 })?;
4190
4191 // Apply language-specific formatting using either a language server
4192 // or external command.
4193 let mut format_operation = None;
4194 match (&settings.formatter, &settings.format_on_save) {
4195 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4196
4197 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4198 | (_, FormatOnSave::LanguageServer) => {
4199 if let Some((language_server, buffer_abs_path)) =
4200 language_server.as_ref().zip(buffer_abs_path.as_ref())
4201 {
4202 format_operation = Some(FormatOperation::Lsp(
4203 Self::format_via_lsp(
4204 &project,
4205 buffer,
4206 buffer_abs_path,
4207 language_server,
4208 tab_size,
4209 &mut cx,
4210 )
4211 .await
4212 .context("failed to format via language server")?,
4213 ));
4214 }
4215 }
4216
4217 (
4218 Formatter::External { command, arguments },
4219 FormatOnSave::On | FormatOnSave::Off,
4220 )
4221 | (_, FormatOnSave::External { command, arguments }) => {
4222 if let Some(buffer_abs_path) = buffer_abs_path {
4223 format_operation = Self::format_via_external_command(
4224 buffer,
4225 buffer_abs_path,
4226 command,
4227 arguments,
4228 &mut cx,
4229 )
4230 .await
4231 .context(format!(
4232 "failed to format via external command {:?}",
4233 command
4234 ))?
4235 .map(FormatOperation::External);
4236 }
4237 }
4238 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4239 if let Some(new_operation) =
4240 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4241 .await
4242 {
4243 format_operation = Some(new_operation);
4244 } else if let Some((language_server, buffer_abs_path)) =
4245 language_server.as_ref().zip(buffer_abs_path.as_ref())
4246 {
4247 format_operation = Some(FormatOperation::Lsp(
4248 Self::format_via_lsp(
4249 &project,
4250 buffer,
4251 buffer_abs_path,
4252 language_server,
4253 tab_size,
4254 &mut cx,
4255 )
4256 .await
4257 .context("failed to format via language server")?,
4258 ));
4259 }
4260 }
4261 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4262 if let Some(new_operation) =
4263 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4264 .await
4265 {
4266 format_operation = Some(new_operation);
4267 }
4268 }
4269 };
4270
4271 buffer.update(&mut cx, |b, cx| {
4272 // If the buffer had its whitespace formatted and was edited while the language-specific
4273 // formatting was being computed, avoid applying the language-specific formatting, because
4274 // it can't be grouped with the whitespace formatting in the undo history.
4275 if let Some(transaction_id) = whitespace_transaction_id {
4276 if b.peek_undo_stack()
4277 .map_or(true, |e| e.transaction_id() != transaction_id)
4278 {
4279 format_operation.take();
4280 }
4281 }
4282
4283 // Apply any language-specific formatting, and group the two formatting operations
4284 // in the buffer's undo history.
4285 if let Some(operation) = format_operation {
4286 match operation {
4287 FormatOperation::Lsp(edits) => {
4288 b.edit(edits, None, cx);
4289 }
4290 FormatOperation::External(diff) => {
4291 b.apply_diff(diff, cx);
4292 }
4293 FormatOperation::Prettier(diff) => {
4294 b.apply_diff(diff, cx);
4295 }
4296 }
4297
4298 if let Some(transaction_id) = whitespace_transaction_id {
4299 b.group_until_transaction(transaction_id);
4300 }
4301 }
4302
4303 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4304 if !push_to_history {
4305 b.forget_transaction(transaction.id);
4306 }
4307 project_transaction.0.insert(buffer.clone(), transaction);
4308 }
4309 })?;
4310 }
4311
4312 Ok(project_transaction)
4313 })
4314 } else {
4315 let remote_id = self.remote_id();
4316 let client = self.client.clone();
4317 cx.spawn(move |this, mut cx| async move {
4318 let mut project_transaction = ProjectTransaction::default();
4319 if let Some(project_id) = remote_id {
4320 let response = client
4321 .request(proto::FormatBuffers {
4322 project_id,
4323 trigger: trigger as i32,
4324 buffer_ids: buffers
4325 .iter()
4326 .map(|buffer| {
4327 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4328 })
4329 .collect::<Result<_>>()?,
4330 })
4331 .await?
4332 .transaction
4333 .ok_or_else(|| anyhow!("missing transaction"))?;
4334 project_transaction = this
4335 .update(&mut cx, |this, cx| {
4336 this.deserialize_project_transaction(response, push_to_history, cx)
4337 })?
4338 .await?;
4339 }
4340 Ok(project_transaction)
4341 })
4342 }
4343 }
4344
4345 async fn format_via_lsp(
4346 this: &WeakModel<Self>,
4347 buffer: &Model<Buffer>,
4348 abs_path: &Path,
4349 language_server: &Arc<LanguageServer>,
4350 tab_size: NonZeroU32,
4351 cx: &mut AsyncAppContext,
4352 ) -> Result<Vec<(Range<Anchor>, String)>> {
4353 let uri = lsp::Url::from_file_path(abs_path)
4354 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4355 let text_document = lsp::TextDocumentIdentifier::new(uri);
4356 let capabilities = &language_server.capabilities();
4357
4358 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4359 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4360
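        // Prefer whole-document formatting; fall back to formatting the entire buffer as a single
        // range when the server only advertises range formatting.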
4361 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4362 language_server
4363 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4364 text_document,
4365 options: lsp_command::lsp_formatting_options(tab_size.get()),
4366 work_done_progress_params: Default::default(),
4367 })
4368 .await?
4369 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4370 let buffer_start = lsp::Position::new(0, 0);
4371 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4372
4373 language_server
4374 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4375 text_document,
4376 range: lsp::Range::new(buffer_start, buffer_end),
4377 options: lsp_command::lsp_formatting_options(tab_size.get()),
4378 work_done_progress_params: Default::default(),
4379 })
4380 .await?
4381 } else {
4382 None
4383 };
4384
4385 if let Some(lsp_edits) = lsp_edits {
4386 this.update(cx, |this, cx| {
4387 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4388 })?
4389 .await
4390 } else {
4391 Ok(Vec::new())
4392 }
4393 }
4394
4395 async fn format_via_external_command(
4396 buffer: &Model<Buffer>,
4397 buffer_abs_path: &Path,
4398 command: &str,
4399 arguments: &[String],
4400 cx: &mut AsyncAppContext,
4401 ) -> Result<Option<Diff>> {
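        // Run the external command from the worktree root (or its parent, when the worktree is a
        // single file), feed the buffer contents through stdin, and diff the formatted output back in.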
4402 let working_dir_path = buffer.update(cx, |buffer, cx| {
4403 let file = File::from_dyn(buffer.file())?;
4404 let worktree = file.worktree.read(cx).as_local()?;
4405 let mut worktree_path = worktree.abs_path().to_path_buf();
4406 if worktree.root_entry()?.is_file() {
4407 worktree_path.pop();
4408 }
4409 Some(worktree_path)
4410 })?;
4411
4412 if let Some(working_dir_path) = working_dir_path {
4413 let mut child =
4414 smol::process::Command::new(command)
4415 .args(arguments.iter().map(|arg| {
4416 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4417 }))
4418 .current_dir(&working_dir_path)
4419 .stdin(smol::process::Stdio::piped())
4420 .stdout(smol::process::Stdio::piped())
4421 .stderr(smol::process::Stdio::piped())
4422 .spawn()?;
4423 let stdin = child
4424 .stdin
4425 .as_mut()
4426 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4427 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4428 for chunk in text.chunks() {
4429 stdin.write_all(chunk.as_bytes()).await?;
4430 }
4431 stdin.flush().await?;
4432
4433 let output = child.output().await?;
4434 if !output.status.success() {
4435 return Err(anyhow!(
4436 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4437 output.status.code(),
4438 String::from_utf8_lossy(&output.stdout),
4439 String::from_utf8_lossy(&output.stderr),
4440 ));
4441 }
4442
4443 let stdout = String::from_utf8(output.stdout)?;
4444 Ok(Some(
4445 buffer
4446 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4447 .await,
4448 ))
4449 } else {
4450 Ok(None)
4451 }
4452 }
4453
4454 pub fn definition<T: ToPointUtf16>(
4455 &self,
4456 buffer: &Model<Buffer>,
4457 position: T,
4458 cx: &mut ModelContext<Self>,
4459 ) -> Task<Result<Vec<LocationLink>>> {
4460 let position = position.to_point_utf16(buffer.read(cx));
4461 self.request_lsp(
4462 buffer.clone(),
4463 LanguageServerToQuery::Primary,
4464 GetDefinition { position },
4465 cx,
4466 )
4467 }
4468
4469 pub fn type_definition<T: ToPointUtf16>(
4470 &self,
4471 buffer: &Model<Buffer>,
4472 position: T,
4473 cx: &mut ModelContext<Self>,
4474 ) -> Task<Result<Vec<LocationLink>>> {
4475 let position = position.to_point_utf16(buffer.read(cx));
4476 self.request_lsp(
4477 buffer.clone(),
4478 LanguageServerToQuery::Primary,
4479 GetTypeDefinition { position },
4480 cx,
4481 )
4482 }
4483
4484 pub fn references<T: ToPointUtf16>(
4485 &self,
4486 buffer: &Model<Buffer>,
4487 position: T,
4488 cx: &mut ModelContext<Self>,
4489 ) -> Task<Result<Vec<Location>>> {
4490 let position = position.to_point_utf16(buffer.read(cx));
4491 self.request_lsp(
4492 buffer.clone(),
4493 LanguageServerToQuery::Primary,
4494 GetReferences { position },
4495 cx,
4496 )
4497 }
4498
4499 pub fn document_highlights<T: ToPointUtf16>(
4500 &self,
4501 buffer: &Model<Buffer>,
4502 position: T,
4503 cx: &mut ModelContext<Self>,
4504 ) -> Task<Result<Vec<DocumentHighlight>>> {
4505 let position = position.to_point_utf16(buffer.read(cx));
4506 self.request_lsp(
4507 buffer.clone(),
4508 LanguageServerToQuery::Primary,
4509 GetDocumentHighlights { position },
4510 cx,
4511 )
4512 }
4513
4514 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4515 if self.is_local() {
4516 let mut requests = Vec::new();
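            // Issue a workspace/symbol request to every running language server, one per worktree and
            // server pair; the responses are awaited concurrently below.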
4517 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4518 let worktree_id = *worktree_id;
4519 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4520 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4521 Some(worktree) => worktree,
4522 None => continue,
4523 };
4524 let worktree_abs_path = worktree.abs_path().clone();
4525
4526 let (adapter, language, server) = match self.language_servers.get(server_id) {
4527 Some(LanguageServerState::Running {
4528 adapter,
4529 language,
4530 server,
4531 ..
4532 }) => (adapter.clone(), language.clone(), server),
4533
4534 _ => continue,
4535 };
4536
4537 requests.push(
4538 server
4539 .request::<lsp::request::WorkspaceSymbolRequest>(
4540 lsp::WorkspaceSymbolParams {
4541 query: query.to_string(),
4542 ..Default::default()
4543 },
4544 )
4545 .log_err()
4546 .map(move |response| {
4547 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4548 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4549 flat_responses.into_iter().map(|lsp_symbol| {
4550 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4551 }).collect::<Vec<_>>()
4552 }
4553 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4554 nested_responses.into_iter().filter_map(|lsp_symbol| {
4555 let location = match lsp_symbol.location {
4556 OneOf::Left(location) => location,
4557 OneOf::Right(_) => {
4558 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4559 return None
4560 }
4561 };
4562 Some((lsp_symbol.name, lsp_symbol.kind, location))
4563 }).collect::<Vec<_>>()
4564 }
4565 }).unwrap_or_default();
4566
4567 (
4568 adapter,
4569 language,
4570 worktree_id,
4571 worktree_abs_path,
4572 lsp_symbols,
4573 )
4574 }),
4575 );
4576 }
4577
4578 cx.spawn(move |this, mut cx| async move {
4579 let responses = futures::future::join_all(requests).await;
4580 let this = match this.upgrade() {
4581 Some(this) => this,
4582 None => return Ok(Vec::new()),
4583 };
4584
4585 let symbols = this.update(&mut cx, |this, cx| {
4586 let mut symbols = Vec::new();
4587 for (
4588 adapter,
4589 adapter_language,
4590 source_worktree_id,
4591 worktree_abs_path,
4592 lsp_symbols,
4593 ) in responses
4594 {
4595 symbols.extend(lsp_symbols.into_iter().filter_map(
4596 |(symbol_name, symbol_kind, symbol_location)| {
4597 let abs_path = symbol_location.uri.to_file_path().ok()?;
4598 let mut worktree_id = source_worktree_id;
4599 let path;
4600 if let Some((worktree, rel_path)) =
4601 this.find_local_worktree(&abs_path, cx)
4602 {
4603 worktree_id = worktree.read(cx).id();
4604 path = rel_path;
4605 } else {
4606 path = relativize_path(&worktree_abs_path, &abs_path);
4607 }
4608
4609 let project_path = ProjectPath {
4610 worktree_id,
4611 path: path.into(),
4612 };
4613 let signature = this.symbol_signature(&project_path);
4614 let adapter_language = adapter_language.clone();
4615 let language = this
4616 .languages
4617 .language_for_file(&project_path.path, None)
4618 .unwrap_or_else(move |_| adapter_language);
4619 let language_server_name = adapter.name.clone();
4620 Some(async move {
4621 let language = language.await;
4622 let label =
4623 language.label_for_symbol(&symbol_name, symbol_kind).await;
4624
4625 Symbol {
4626 language_server_name,
4627 source_worktree_id,
4628 path: project_path,
4629 label: label.unwrap_or_else(|| {
4630 CodeLabel::plain(symbol_name.clone(), None)
4631 }),
4632 kind: symbol_kind,
4633 name: symbol_name,
4634 range: range_from_lsp(symbol_location.range),
4635 signature,
4636 }
4637 })
4638 },
4639 ));
4640 }
4641
4642 symbols
4643 })?;
4644
4645 Ok(futures::future::join_all(symbols).await)
4646 })
4647 } else if let Some(project_id) = self.remote_id() {
4648 let request = self.client.request(proto::GetProjectSymbols {
4649 project_id,
4650 query: query.to_string(),
4651 });
4652 cx.spawn(move |this, mut cx| async move {
4653 let response = request.await?;
4654 let mut symbols = Vec::new();
4655 if let Some(this) = this.upgrade() {
4656 let new_symbols = this.update(&mut cx, |this, _| {
4657 response
4658 .symbols
4659 .into_iter()
4660 .map(|symbol| this.deserialize_symbol(symbol))
4661 .collect::<Vec<_>>()
4662 })?;
4663 symbols = futures::future::join_all(new_symbols)
4664 .await
4665 .into_iter()
4666 .filter_map(|symbol| symbol.log_err())
4667 .collect::<Vec<_>>();
4668 }
4669 Ok(symbols)
4670 })
4671 } else {
4672 Task::ready(Ok(Default::default()))
4673 }
4674 }
4675
4676 pub fn open_buffer_for_symbol(
4677 &mut self,
4678 symbol: &Symbol,
4679 cx: &mut ModelContext<Self>,
4680 ) -> Task<Result<Model<Buffer>>> {
4681 if self.is_local() {
4682 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4683 symbol.source_worktree_id,
4684 symbol.language_server_name.clone(),
4685 )) {
4686 *id
4687 } else {
4688 return Task::ready(Err(anyhow!(
4689 "language server for worktree and language not found"
4690 )));
4691 };
4692
4693 let worktree_abs_path = if let Some(worktree_abs_path) = self
4694 .worktree_for_id(symbol.path.worktree_id, cx)
4695 .and_then(|worktree| worktree.read(cx).as_local())
4696 .map(|local_worktree| local_worktree.abs_path())
4697 {
4698 worktree_abs_path
4699 } else {
4700 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4701 };
4702
4703 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
4704 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4705 uri
4706 } else {
4707 return Task::ready(Err(anyhow!("invalid symbol path")));
4708 };
4709
4710 self.open_local_buffer_via_lsp(
4711 symbol_uri,
4712 language_server_id,
4713 symbol.language_server_name.clone(),
4714 cx,
4715 )
4716 } else if let Some(project_id) = self.remote_id() {
4717 let request = self.client.request(proto::OpenBufferForSymbol {
4718 project_id,
4719 symbol: Some(serialize_symbol(symbol)),
4720 });
4721 cx.spawn(move |this, mut cx| async move {
4722 let response = request.await?;
4723 this.update(&mut cx, |this, cx| {
4724 this.wait_for_remote_buffer(response.buffer_id, cx)
4725 })?
4726 .await
4727 })
4728 } else {
4729 Task::ready(Err(anyhow!("project does not have a remote id")))
4730 }
4731 }
4732
4733 pub fn hover<T: ToPointUtf16>(
4734 &self,
4735 buffer: &Model<Buffer>,
4736 position: T,
4737 cx: &mut ModelContext<Self>,
4738 ) -> Task<Result<Option<Hover>>> {
4739 let position = position.to_point_utf16(buffer.read(cx));
4740 self.request_lsp(
4741 buffer.clone(),
4742 LanguageServerToQuery::Primary,
4743 GetHover { position },
4744 cx,
4745 )
4746 }
4747
4748 pub fn completions<T: ToOffset + ToPointUtf16>(
4749 &self,
4750 buffer: &Model<Buffer>,
4751 position: T,
4752 cx: &mut ModelContext<Self>,
4753 ) -> Task<Result<Vec<Completion>>> {
4754 let position = position.to_point_utf16(buffer.read(cx));
4755 if self.is_local() {
4756 let snapshot = buffer.read(cx).snapshot();
4757 let offset = position.to_offset(&snapshot);
4758 let scope = snapshot.language_scope_at(offset);
4759
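            // Collect the servers that advertise completion support and are enabled for the language
            // scope at this position.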
4760 let server_ids: Vec<_> = self
4761 .language_servers_for_buffer(buffer.read(cx), cx)
4762 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4763 .filter(|(adapter, _)| {
4764 scope
4765 .as_ref()
4766 .map(|scope| scope.language_allowed(&adapter.name))
4767 .unwrap_or(true)
4768 })
4769 .map(|(_, server)| server.server_id())
4770 .collect();
4771
4772 let buffer = buffer.clone();
4773 cx.spawn(move |this, mut cx| async move {
4774 let mut tasks = Vec::with_capacity(server_ids.len());
4775 this.update(&mut cx, |this, cx| {
4776 for server_id in server_ids {
4777 tasks.push(this.request_lsp(
4778 buffer.clone(),
4779 LanguageServerToQuery::Other(server_id),
4780 GetCompletions { position },
4781 cx,
4782 ));
4783 }
4784 })?;
4785
4786 let mut completions = Vec::new();
4787 for task in tasks {
4788 if let Ok(new_completions) = task.await {
4789 completions.extend_from_slice(&new_completions);
4790 }
4791 }
4792
4793 Ok(completions)
4794 })
4795 } else if let Some(project_id) = self.remote_id() {
4796 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4797 } else {
4798 Task::ready(Ok(Default::default()))
4799 }
4800 }
4801
4802 pub fn resolve_completions(
4803 &self,
4804 completion_indices: Vec<usize>,
4805 completions: Arc<RwLock<Box<[Completion]>>>,
4806 cx: &mut ModelContext<Self>,
4807 ) -> Task<Result<bool>> {
4808 let client = self.client();
4809 let language_registry = self.languages().clone();
4810
4811 let is_remote = self.is_remote();
4812 let project_id = self.remote_id();
4813
4814 cx.spawn(move |this, mut cx| async move {
4815 let mut did_resolve = false;
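            // Only resolve documentation for completions that don't have any yet, going through the
            // host for remote projects and straight to the language server otherwise.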
4816 if is_remote {
4817 let project_id =
4818 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
4819
4820 for completion_index in completion_indices {
4821 let completions_guard = completions.read();
4822 let completion = &completions_guard[completion_index];
4823 if completion.documentation.is_some() {
4824 continue;
4825 }
4826
4827 did_resolve = true;
4828 let server_id = completion.server_id;
4829 let completion = completion.lsp_completion.clone();
4830 drop(completions_guard);
4831
4832 Self::resolve_completion_documentation_remote(
4833 project_id,
4834 server_id,
4835 completions.clone(),
4836 completion_index,
4837 completion,
4838 client.clone(),
4839 language_registry.clone(),
4840 )
4841 .await;
4842 }
4843 } else {
4844 for completion_index in completion_indices {
4845 let completions_guard = completions.read();
4846 let completion = &completions_guard[completion_index];
4847 if completion.documentation.is_some() {
4848 continue;
4849 }
4850
4851 let server_id = completion.server_id;
4852 let completion = completion.lsp_completion.clone();
4853 drop(completions_guard);
4854
4855 let server = this
4856 .read_with(&mut cx, |project, _| {
4857 project.language_server_for_id(server_id)
4858 })
4859 .ok()
4860 .flatten();
4861 let Some(server) = server else {
4862 continue;
4863 };
4864
4865 did_resolve = true;
4866 Self::resolve_completion_documentation_local(
4867 server,
4868 completions.clone(),
4869 completion_index,
4870 completion,
4871 language_registry.clone(),
4872 )
4873 .await;
4874 }
4875 }
4876
4877 Ok(did_resolve)
4878 })
4879 }
4880
4881 async fn resolve_completion_documentation_local(
4882 server: Arc<lsp::LanguageServer>,
4883 completions: Arc<RwLock<Box<[Completion]>>>,
4884 completion_index: usize,
4885 completion: lsp::CompletionItem,
4886 language_registry: Arc<LanguageRegistry>,
4887 ) {
4888 let can_resolve = server
4889 .capabilities()
4890 .completion_provider
4891 .as_ref()
4892 .and_then(|options| options.resolve_provider)
4893 .unwrap_or(false);
4894 if !can_resolve {
4895 return;
4896 }
4897
4898 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
4899 let Some(completion_item) = request.await.log_err() else {
4900 return;
4901 };
4902
4903 if let Some(lsp_documentation) = completion_item.documentation {
4904 let documentation = language::prepare_completion_documentation(
4905 &lsp_documentation,
4906 &language_registry,
4907 None, // TODO: Try to reasonably work out which language the completion is for
4908 )
4909 .await;
4910
4911 let mut completions = completions.write();
4912 let completion = &mut completions[completion_index];
4913 completion.documentation = Some(documentation);
4914 } else {
4915 let mut completions = completions.write();
4916 let completion = &mut completions[completion_index];
4917 completion.documentation = Some(Documentation::Undocumented);
4918 }
4919 }
4920
4921 async fn resolve_completion_documentation_remote(
4922 project_id: u64,
4923 server_id: LanguageServerId,
4924 completions: Arc<RwLock<Box<[Completion]>>>,
4925 completion_index: usize,
4926 completion: lsp::CompletionItem,
4927 client: Arc<Client>,
4928 language_registry: Arc<LanguageRegistry>,
4929 ) {
4930 let request = proto::ResolveCompletionDocumentation {
4931 project_id,
4932 language_server_id: server_id.0 as u64,
4933 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
4934 };
4935
4936 let Some(response) = client
4937 .request(request)
4938 .await
4939 .context("completion documentation resolve proto request")
4940 .log_err()
4941 else {
4942 return;
4943 };
4944
        if response.text.is_empty() {
            let mut completions = completions.write();
            let completion = &mut completions[completion_index];
            completion.documentation = Some(Documentation::Undocumented);
            return;
        }
4950
4951 let documentation = if response.is_markdown {
4952 Documentation::MultiLineMarkdown(
4953 markdown::parse_markdown(&response.text, &language_registry, None).await,
4954 )
4955 } else if response.text.lines().count() <= 1 {
4956 Documentation::SingleLine(response.text)
4957 } else {
4958 Documentation::MultiLinePlainText(response.text)
4959 };
4960
4961 let mut completions = completions.write();
4962 let completion = &mut completions[completion_index];
4963 completion.documentation = Some(documentation);
4964 }
4965
4966 pub fn apply_additional_edits_for_completion(
4967 &self,
4968 buffer_handle: Model<Buffer>,
4969 completion: Completion,
4970 push_to_history: bool,
4971 cx: &mut ModelContext<Self>,
4972 ) -> Task<Result<Option<Transaction>>> {
4973 let buffer = buffer_handle.read(cx);
4974 let buffer_id = buffer.remote_id();
4975
4976 if self.is_local() {
4977 let server_id = completion.server_id;
4978 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4979 Some((_, server)) => server.clone(),
4980 _ => return Task::ready(Ok(Default::default())),
4981 };
4982
4983 cx.spawn(move |this, mut cx| async move {
4984 let can_resolve = lang_server
4985 .capabilities()
4986 .completion_provider
4987 .as_ref()
4988 .and_then(|options| options.resolve_provider)
4989 .unwrap_or(false);
4990 let additional_text_edits = if can_resolve {
4991 lang_server
4992 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4993 .await?
4994 .additional_text_edits
4995 } else {
4996 completion.lsp_completion.additional_text_edits
4997 };
4998 if let Some(edits) = additional_text_edits {
4999 let edits = this
5000 .update(&mut cx, |this, cx| {
5001 this.edits_from_lsp(
5002 &buffer_handle,
5003 edits,
5004 lang_server.server_id(),
5005 None,
5006 cx,
5007 )
5008 })?
5009 .await?;
5010
5011 buffer_handle.update(&mut cx, |buffer, cx| {
5012 buffer.finalize_last_transaction();
5013 buffer.start_transaction();
5014
5015 for (range, text) in edits {
5016 let primary = &completion.old_range;
5017 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5018 && primary.end.cmp(&range.start, buffer).is_ge();
5019 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5020 && range.end.cmp(&primary.end, buffer).is_ge();
5021
                            // Skip additional edits which overlap with the primary completion edit
                            // https://github.com/zed-industries/zed/pull/1871
5024 if !start_within && !end_within {
5025 buffer.edit([(range, text)], None, cx);
5026 }
5027 }
5028
5029 let transaction = if buffer.end_transaction(cx).is_some() {
5030 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5031 if !push_to_history {
5032 buffer.forget_transaction(transaction.id);
5033 }
5034 Some(transaction)
5035 } else {
5036 None
5037 };
5038 Ok(transaction)
5039 })?
5040 } else {
5041 Ok(None)
5042 }
5043 })
5044 } else if let Some(project_id) = self.remote_id() {
5045 let client = self.client.clone();
5046 cx.spawn(move |_, mut cx| async move {
5047 let response = client
5048 .request(proto::ApplyCompletionAdditionalEdits {
5049 project_id,
5050 buffer_id,
5051 completion: Some(language::proto::serialize_completion(&completion)),
5052 })
5053 .await?;
5054
5055 if let Some(transaction) = response.transaction {
5056 let transaction = language::proto::deserialize_transaction(transaction)?;
5057 buffer_handle
5058 .update(&mut cx, |buffer, _| {
5059 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5060 })?
5061 .await?;
5062 if push_to_history {
5063 buffer_handle.update(&mut cx, |buffer, _| {
5064 buffer.push_transaction(transaction.clone(), Instant::now());
5065 })?;
5066 }
5067 Ok(Some(transaction))
5068 } else {
5069 Ok(None)
5070 }
5071 })
5072 } else {
5073 Task::ready(Err(anyhow!("project does not have a remote id")))
5074 }
5075 }
5076
5077 pub fn code_actions<T: Clone + ToOffset>(
5078 &self,
5079 buffer_handle: &Model<Buffer>,
5080 range: Range<T>,
5081 cx: &mut ModelContext<Self>,
5082 ) -> Task<Result<Vec<CodeAction>>> {
5083 let buffer = buffer_handle.read(cx);
5084 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5085 self.request_lsp(
5086 buffer_handle.clone(),
5087 LanguageServerToQuery::Primary,
5088 GetCodeActions { range },
5089 cx,
5090 )
5091 }
5092
5093 pub fn apply_code_action(
5094 &self,
5095 buffer_handle: Model<Buffer>,
5096 mut action: CodeAction,
5097 push_to_history: bool,
5098 cx: &mut ModelContext<Self>,
5099 ) -> Task<Result<ProjectTransaction>> {
5100 if self.is_local() {
5101 let buffer = buffer_handle.read(cx);
5102 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5103 self.language_server_for_buffer(buffer, action.server_id, cx)
5104 {
5105 (adapter.clone(), server.clone())
5106 } else {
5107 return Task::ready(Ok(Default::default()));
5108 };
5109 let range = action.range.to_point_utf16(buffer);
5110
5111 cx.spawn(move |this, mut cx| async move {
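                // If the server attached resolve data, refresh the action via `codeAction/resolve` with
                // an up-to-date range; otherwise re-request code actions and match the stale one by title.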
5112 if let Some(lsp_range) = action
5113 .lsp_action
5114 .data
5115 .as_mut()
5116 .and_then(|d| d.get_mut("codeActionParams"))
5117 .and_then(|d| d.get_mut("range"))
5118 {
5119 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
5120 action.lsp_action = lang_server
5121 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
5122 .await?;
5123 } else {
5124 let actions = this
5125 .update(&mut cx, |this, cx| {
5126 this.code_actions(&buffer_handle, action.range, cx)
5127 })?
5128 .await?;
5129 action.lsp_action = actions
5130 .into_iter()
5131 .find(|a| a.lsp_action.title == action.lsp_action.title)
5132 .ok_or_else(|| anyhow!("code action is outdated"))?
5133 .lsp_action;
5134 }
5135
5136 if let Some(edit) = action.lsp_action.edit {
5137 if edit.changes.is_some() || edit.document_changes.is_some() {
5138 return Self::deserialize_workspace_edit(
5139 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5140 edit,
5141 push_to_history,
5142 lsp_adapter.clone(),
5143 lang_server.clone(),
5144 &mut cx,
5145 )
5146 .await;
5147 }
5148 }
5149
5150 if let Some(command) = action.lsp_action.command {
5151 this.update(&mut cx, |this, _| {
5152 this.last_workspace_edits_by_language_server
5153 .remove(&lang_server.server_id());
5154 })?;
5155
5156 let result = lang_server
5157 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5158 command: command.command,
5159 arguments: command.arguments.unwrap_or_default(),
5160 ..Default::default()
5161 })
5162 .await;
5163
5164 if let Err(err) = result {
5165 // TODO: LSP ERROR
5166 return Err(err);
5167 }
5168
5169 return Ok(this.update(&mut cx, |this, _| {
5170 this.last_workspace_edits_by_language_server
5171 .remove(&lang_server.server_id())
5172 .unwrap_or_default()
5173 })?);
5174 }
5175
5176 Ok(ProjectTransaction::default())
5177 })
5178 } else if let Some(project_id) = self.remote_id() {
5179 let client = self.client.clone();
5180 let request = proto::ApplyCodeAction {
5181 project_id,
5182 buffer_id: buffer_handle.read(cx).remote_id(),
5183 action: Some(language::proto::serialize_code_action(&action)),
5184 };
5185 cx.spawn(move |this, mut cx| async move {
5186 let response = client
5187 .request(request)
5188 .await?
5189 .transaction
5190 .ok_or_else(|| anyhow!("missing transaction"))?;
5191 this.update(&mut cx, |this, cx| {
5192 this.deserialize_project_transaction(response, push_to_history, cx)
5193 })?
5194 .await
5195 })
5196 } else {
5197 Task::ready(Err(anyhow!("project does not have a remote id")))
5198 }
5199 }
5200
5201 fn apply_on_type_formatting(
5202 &self,
5203 buffer: Model<Buffer>,
5204 position: Anchor,
5205 trigger: String,
5206 cx: &mut ModelContext<Self>,
5207 ) -> Task<Result<Option<Transaction>>> {
5208 if self.is_local() {
5209 cx.spawn(move |this, mut cx| async move {
5210 // Do not allow multiple concurrent formatting requests for the
5211 // same buffer.
5212 this.update(&mut cx, |this, cx| {
5213 this.buffers_being_formatted
5214 .insert(buffer.read(cx).remote_id())
5215 })?;
5216
5217 let _cleanup = defer({
5218 let this = this.clone();
5219 let mut cx = cx.clone();
5220 let closure_buffer = buffer.clone();
5221 move || {
5222 this.update(&mut cx, |this, cx| {
5223 this.buffers_being_formatted
5224 .remove(&closure_buffer.read(cx).remote_id());
5225 })
5226 .ok();
5227 }
5228 });
5229
5230 buffer
5231 .update(&mut cx, |buffer, _| {
5232 buffer.wait_for_edits(Some(position.timestamp))
5233 })?
5234 .await?;
5235 this.update(&mut cx, |this, cx| {
5236 let position = position.to_point_utf16(buffer.read(cx));
5237 this.on_type_format(buffer, position, trigger, false, cx)
5238 })?
5239 .await
5240 })
5241 } else if let Some(project_id) = self.remote_id() {
5242 let client = self.client.clone();
5243 let request = proto::OnTypeFormatting {
5244 project_id,
5245 buffer_id: buffer.read(cx).remote_id(),
5246 position: Some(serialize_anchor(&position)),
5247 trigger,
5248 version: serialize_version(&buffer.read(cx).version()),
5249 };
5250 cx.spawn(move |_, _| async move {
5251 client
5252 .request(request)
5253 .await?
5254 .transaction
5255 .map(language::proto::deserialize_transaction)
5256 .transpose()
5257 })
5258 } else {
5259 Task::ready(Err(anyhow!("project does not have a remote id")))
5260 }
5261 }
5262
5263 async fn deserialize_edits(
5264 this: Model<Self>,
5265 buffer_to_edit: Model<Buffer>,
5266 edits: Vec<lsp::TextEdit>,
5267 push_to_history: bool,
5268 _: Arc<CachedLspAdapter>,
5269 language_server: Arc<LanguageServer>,
5270 cx: &mut AsyncAppContext,
5271 ) -> Result<Option<Transaction>> {
5272 let edits = this
5273 .update(cx, |this, cx| {
5274 this.edits_from_lsp(
5275 &buffer_to_edit,
5276 edits,
5277 language_server.server_id(),
5278 None,
5279 cx,
5280 )
5281 })?
5282 .await?;
5283
5284 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5285 buffer.finalize_last_transaction();
5286 buffer.start_transaction();
5287 for (range, text) in edits {
5288 buffer.edit([(range, text)], None, cx);
5289 }
5290
5291 if buffer.end_transaction(cx).is_some() {
5292 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5293 if !push_to_history {
5294 buffer.forget_transaction(transaction.id);
5295 }
5296 Some(transaction)
5297 } else {
5298 None
5299 }
5300 })?;
5301
5302 Ok(transaction)
5303 }
5304
5305 async fn deserialize_workspace_edit(
5306 this: Model<Self>,
5307 edit: lsp::WorkspaceEdit,
5308 push_to_history: bool,
5309 lsp_adapter: Arc<CachedLspAdapter>,
5310 language_server: Arc<LanguageServer>,
5311 cx: &mut AsyncAppContext,
5312 ) -> Result<ProjectTransaction> {
5313 let fs = this.update(cx, |this, _| this.fs.clone())?;
5314 let mut operations = Vec::new();
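        // Normalize the edit into a flat list of document change operations, whether the server sent
        // `document_changes` or the older `changes` map.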
5315 if let Some(document_changes) = edit.document_changes {
5316 match document_changes {
5317 lsp::DocumentChanges::Edits(edits) => {
5318 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5319 }
5320 lsp::DocumentChanges::Operations(ops) => operations = ops,
5321 }
5322 } else if let Some(changes) = edit.changes {
5323 operations.extend(changes.into_iter().map(|(uri, edits)| {
5324 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5325 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5326 uri,
5327 version: None,
5328 },
5329 edits: edits.into_iter().map(OneOf::Left).collect(),
5330 })
5331 }));
5332 }
5333
5334 let mut project_transaction = ProjectTransaction::default();
5335 for operation in operations {
5336 match operation {
5337 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5338 let abs_path = op
5339 .uri
5340 .to_file_path()
5341 .map_err(|_| anyhow!("can't convert URI to path"))?;
5342
5343 if let Some(parent_path) = abs_path.parent() {
5344 fs.create_dir(parent_path).await?;
5345 }
5346 if abs_path.ends_with("/") {
5347 fs.create_dir(&abs_path).await?;
5348 } else {
5349 fs.create_file(
5350 &abs_path,
5351 op.options
5352 .map(|options| fs::CreateOptions {
5353 overwrite: options.overwrite.unwrap_or(false),
5354 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5355 })
5356 .unwrap_or_default(),
5357 )
5358 .await?;
5359 }
5360 }
5361
5362 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5363 let source_abs_path = op
5364 .old_uri
5365 .to_file_path()
5366 .map_err(|_| anyhow!("can't convert URI to path"))?;
5367 let target_abs_path = op
5368 .new_uri
5369 .to_file_path()
5370 .map_err(|_| anyhow!("can't convert URI to path"))?;
5371 fs.rename(
5372 &source_abs_path,
5373 &target_abs_path,
5374 op.options
5375 .map(|options| fs::RenameOptions {
5376 overwrite: options.overwrite.unwrap_or(false),
5377 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5378 })
5379 .unwrap_or_default(),
5380 )
5381 .await?;
5382 }
5383
5384 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5385 let abs_path = op
5386 .uri
5387 .to_file_path()
5388 .map_err(|_| anyhow!("can't convert URI to path"))?;
5389 let options = op
5390 .options
5391 .map(|options| fs::RemoveOptions {
5392 recursive: options.recursive.unwrap_or(false),
5393 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5394 })
5395 .unwrap_or_default();
5396 if abs_path.ends_with("/") {
5397 fs.remove_dir(&abs_path, options).await?;
5398 } else {
5399 fs.remove_file(&abs_path, options).await?;
5400 }
5401 }
5402
5403 lsp::DocumentChangeOperation::Edit(op) => {
5404 let buffer_to_edit = this
5405 .update(cx, |this, cx| {
5406 this.open_local_buffer_via_lsp(
5407 op.text_document.uri,
5408 language_server.server_id(),
5409 lsp_adapter.name.clone(),
5410 cx,
5411 )
5412 })?
5413 .await?;
5414
5415 let edits = this
5416 .update(cx, |this, cx| {
5417 let edits = op.edits.into_iter().map(|edit| match edit {
5418 OneOf::Left(edit) => edit,
5419 OneOf::Right(edit) => edit.text_edit,
5420 });
5421 this.edits_from_lsp(
5422 &buffer_to_edit,
5423 edits,
5424 language_server.server_id(),
5425 op.text_document.version,
5426 cx,
5427 )
5428 })?
5429 .await?;
5430
5431 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5432 buffer.finalize_last_transaction();
5433 buffer.start_transaction();
5434 for (range, text) in edits {
5435 buffer.edit([(range, text)], None, cx);
5436 }
5437 let transaction = if buffer.end_transaction(cx).is_some() {
5438 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5439 if !push_to_history {
5440 buffer.forget_transaction(transaction.id);
5441 }
5442 Some(transaction)
5443 } else {
5444 None
5445 };
5446
5447 transaction
5448 })?;
5449 if let Some(transaction) = transaction {
5450 project_transaction.0.insert(buffer_to_edit, transaction);
5451 }
5452 }
5453 }
5454 }
5455
5456 Ok(project_transaction)
5457 }
5458
5459 pub fn prepare_rename<T: ToPointUtf16>(
5460 &self,
5461 buffer: Model<Buffer>,
5462 position: T,
5463 cx: &mut ModelContext<Self>,
5464 ) -> Task<Result<Option<Range<Anchor>>>> {
5465 let position = position.to_point_utf16(buffer.read(cx));
5466 self.request_lsp(
5467 buffer,
5468 LanguageServerToQuery::Primary,
5469 PrepareRename { position },
5470 cx,
5471 )
5472 }
5473
5474 pub fn perform_rename<T: ToPointUtf16>(
5475 &self,
5476 buffer: Model<Buffer>,
5477 position: T,
5478 new_name: String,
5479 push_to_history: bool,
5480 cx: &mut ModelContext<Self>,
5481 ) -> Task<Result<ProjectTransaction>> {
5482 let position = position.to_point_utf16(buffer.read(cx));
5483 self.request_lsp(
5484 buffer,
5485 LanguageServerToQuery::Primary,
5486 PerformRename {
5487 position,
5488 new_name,
5489 push_to_history,
5490 },
5491 cx,
5492 )
5493 }
5494
5495 pub fn on_type_format<T: ToPointUtf16>(
5496 &self,
5497 buffer: Model<Buffer>,
5498 position: T,
5499 trigger: String,
5500 push_to_history: bool,
5501 cx: &mut ModelContext<Self>,
5502 ) -> Task<Result<Option<Transaction>>> {
5503 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5504 let position = position.to_point_utf16(buffer);
5505 (
5506 position,
5507 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5508 .tab_size,
5509 )
5510 });
5511 self.request_lsp(
5512 buffer.clone(),
5513 LanguageServerToQuery::Primary,
5514 OnTypeFormatting {
5515 position,
5516 trigger,
5517 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5518 push_to_history,
5519 },
5520 cx,
5521 )
5522 }
5523
5524 pub fn inlay_hints<T: ToOffset>(
5525 &self,
5526 buffer_handle: Model<Buffer>,
5527 range: Range<T>,
5528 cx: &mut ModelContext<Self>,
5529 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5530 let buffer = buffer_handle.read(cx);
5531 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5532 let range_start = range.start;
5533 let range_end = range.end;
5534 let buffer_id = buffer.remote_id();
5535 let buffer_version = buffer.version().clone();
5536 let lsp_request = InlayHints { range };
5537
5538 if self.is_local() {
5539 let lsp_request_task = self.request_lsp(
5540 buffer_handle.clone(),
5541 LanguageServerToQuery::Primary,
5542 lsp_request,
5543 cx,
5544 );
5545 cx.spawn(move |_, mut cx| async move {
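                // Wait until the buffer has seen the edits that produced the range anchors before
                // running the LSP request.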
5546 buffer_handle
5547 .update(&mut cx, |buffer, _| {
5548 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5549 })?
5550 .await
5551 .context("waiting for inlay hint request range edits")?;
5552 lsp_request_task.await.context("inlay hints LSP request")
5553 })
5554 } else if let Some(project_id) = self.remote_id() {
5555 let client = self.client.clone();
5556 let request = proto::InlayHints {
5557 project_id,
5558 buffer_id,
5559 start: Some(serialize_anchor(&range_start)),
5560 end: Some(serialize_anchor(&range_end)),
5561 version: serialize_version(&buffer_version),
5562 };
5563 cx.spawn(move |project, cx| async move {
5564 let response = client
5565 .request(request)
5566 .await
5567 .context("inlay hints proto request")?;
5568 let hints_request_result = LspCommand::response_from_proto(
5569 lsp_request,
5570 response,
5571 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5572 buffer_handle.clone(),
5573 cx,
5574 )
5575 .await;
5576
5577 hints_request_result.context("inlay hints proto response conversion")
5578 })
5579 } else {
5580 Task::ready(Err(anyhow!("project does not have a remote id")))
5581 }
5582 }
5583
5584 pub fn resolve_inlay_hint(
5585 &self,
5586 hint: InlayHint,
5587 buffer_handle: Model<Buffer>,
5588 server_id: LanguageServerId,
5589 cx: &mut ModelContext<Self>,
5590 ) -> Task<anyhow::Result<InlayHint>> {
5591 if self.is_local() {
5592 let buffer = buffer_handle.read(cx);
5593 let (_, lang_server) = if let Some((adapter, server)) =
5594 self.language_server_for_buffer(buffer, server_id, cx)
5595 {
5596 (adapter.clone(), server.clone())
5597 } else {
5598 return Task::ready(Ok(hint));
5599 };
5600 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5601 return Task::ready(Ok(hint));
5602 }
5603
5604 let buffer_snapshot = buffer.snapshot();
5605 cx.spawn(move |_, mut cx| async move {
5606 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5607 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5608 );
5609 let resolved_hint = resolve_task
5610 .await
5611 .context("inlay hint resolve LSP request")?;
5612 let resolved_hint = InlayHints::lsp_to_project_hint(
5613 resolved_hint,
5614 &buffer_handle,
5615 server_id,
5616 ResolveState::Resolved,
5617 false,
5618 &mut cx,
5619 )
5620 .await?;
5621 Ok(resolved_hint)
5622 })
5623 } else if let Some(project_id) = self.remote_id() {
5624 let client = self.client.clone();
5625 let request = proto::ResolveInlayHint {
5626 project_id,
5627 buffer_id: buffer_handle.read(cx).remote_id(),
5628 language_server_id: server_id.0 as u64,
5629 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5630 };
5631 cx.spawn(move |_, _| async move {
5632 let response = client
5633 .request(request)
5634 .await
5635 .context("inlay hints proto request")?;
5636 match response.hint {
5637 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5638 .context("inlay hints proto resolve response conversion"),
5639 None => Ok(hint),
5640 }
5641 })
5642 } else {
5643 Task::ready(Err(anyhow!("project does not have a remote id")))
5644 }
5645 }
5646
5647 #[allow(clippy::type_complexity)]
5648 pub fn search(
5649 &self,
5650 query: SearchQuery,
5651 cx: &mut ModelContext<Self>,
5652 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5653 if self.is_local() {
5654 self.search_local(query, cx)
5655 } else if let Some(project_id) = self.remote_id() {
5656 let (tx, rx) = smol::channel::unbounded();
5657 let request = self.client.request(query.to_proto(project_id));
5658 cx.spawn(move |this, mut cx| async move {
5659 let response = request.await?;
5660 let mut result = HashMap::default();
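                // Group the returned ranges by buffer, waiting for each remote buffer to be replicated
                // locally before deserializing its anchors.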
5661 for location in response.locations {
5662 let target_buffer = this
5663 .update(&mut cx, |this, cx| {
5664 this.wait_for_remote_buffer(location.buffer_id, cx)
5665 })?
5666 .await?;
5667 let start = location
5668 .start
5669 .and_then(deserialize_anchor)
5670 .ok_or_else(|| anyhow!("missing target start"))?;
5671 let end = location
5672 .end
5673 .and_then(deserialize_anchor)
5674 .ok_or_else(|| anyhow!("missing target end"))?;
5675 result
5676 .entry(target_buffer)
5677 .or_insert(Vec::new())
5678 .push(start..end)
5679 }
5680 for (buffer, ranges) in result {
5681 let _ = tx.send((buffer, ranges)).await;
5682 }
5683 Result::<(), anyhow::Error>::Ok(())
5684 })
5685 .detach_and_log_err(cx);
5686 rx
5687 } else {
5688 unimplemented!();
5689 }
5690 }
5691
5692 pub fn search_local(
5693 &self,
5694 query: SearchQuery,
5695 cx: &mut ModelContext<Self>,
5696 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5697 // Local search is split into several phases.
        // In short, we make two passes: an initial pass to pick the files that contain at least one
        // match, and a second pass that finds the positions of all matches within those candidate files.
5700 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5701 //
5702 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5703 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5704 //
5705 // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
        // Then we walk the worktrees and check which files match the query. If a file already has an open
        // buffer, we skip scanning its on-disk version altogether; what we have in memory is more up to date than what's on disk.
5708 // 2. At this point, we have a list of all potentially matching buffers/files.
5709 // We sort that list by buffer path - this list is retained for later use.
5710 // We ensure that all buffers are now opened and available in project.
5711 // 3. We run a scan over all the candidate buffers on multiple background threads.
5712 // We cannot assume that there will even be a match - while at least one match
5713 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
5714 // There is also an auxiliary background thread responsible for result gathering.
5715 // This is where the sorted list of buffers comes into play to maintain sorted order; Whenever this background thread receives a notification (buffer has/doesn't have matches),
5716 // it keeps it around. It reports matches in sorted order, though it accepts them in unsorted order as well.
5717 // As soon as the match info on next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
5718 // entry - which might already be available thanks to out-of-order processing.
5719 //
5720 // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
5721 // This however would mean that project search (that is the main user of this function) would have to do the sorting itself, on the go.
5722 // This isn't as straightforward as running an insertion sort sadly, and would also mean that it would have to care about maintaining match index
5723 // in face of constantly updating list of sorted matches.
5724 // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
5725 let snapshots = self
5726 .visible_worktrees(cx)
5727 .filter_map(|tree| {
5728 let tree = tree.read(cx).as_local()?;
5729 Some(tree.snapshot())
5730 })
5731 .collect::<Vec<_>>();
5732
5733 let background = cx.background_executor().clone();
5734 let path_count: usize = snapshots
5735 .iter()
5736 .map(|s| {
5737 if query.include_ignored() {
5738 s.file_count()
5739 } else {
5740 s.visible_file_count()
5741 }
5742 })
5743 .sum();
5744 if path_count == 0 {
5745 let (_, rx) = smol::channel::bounded(1024);
5746 return rx;
5747 }
5748 let workers = background.num_cpus().min(path_count);
5749 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5750 let mut unnamed_files = vec![];
5751 let opened_buffers = self
5752 .opened_buffers
5753 .iter()
5754 .filter_map(|(_, b)| {
5755 let buffer = b.upgrade()?;
5756 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5757 let is_ignored = buffer
5758 .project_path(cx)
5759 .and_then(|path| self.entry_for_path(&path, cx))
5760 .map_or(false, |entry| entry.is_ignored);
5761 (is_ignored, buffer.snapshot())
5762 });
5763 if is_ignored && !query.include_ignored() {
5764 return None;
5765 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5766 Some((path.clone(), (buffer, snapshot)))
5767 } else {
5768 unnamed_files.push(buffer);
5769 None
5770 }
5771 })
5772 .collect();
5773 cx.background_executor()
5774 .spawn(Self::background_search(
5775 unnamed_files,
5776 opened_buffers,
5777 cx.background_executor().clone(),
5778 self.fs.clone(),
5779 workers,
5780 query.clone(),
5781 path_count,
5782 snapshots,
5783 matching_paths_tx,
5784 ))
5785 .detach();
5786
5787 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5788 let background = cx.background_executor().clone();
5789 let (result_tx, result_rx) = smol::channel::bounded(1024);
5790 cx.background_executor()
5791 .spawn(async move {
5792 let Ok(buffers) = buffers.await else {
5793 return;
5794 };
5795
5796 let buffers_len = buffers.len();
5797 if buffers_len == 0 {
5798 return;
5799 }
5800 let query = &query;
5801 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5802 background
5803 .scoped(|scope| {
5804 #[derive(Clone)]
5805 struct FinishedStatus {
5806 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
5807 buffer_index: SearchMatchCandidateIndex,
5808 }
5809
5810 for _ in 0..workers {
5811 let finished_tx = finished_tx.clone();
5812 let mut buffers_rx = buffers_rx.clone();
5813 scope.spawn(async move {
5814 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5815 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5816 {
5817 if query.file_matches(
5818 snapshot.file().map(|file| file.path().as_ref()),
5819 ) {
5820 query
5821 .search(snapshot, None)
5822 .await
5823 .iter()
5824 .map(|range| {
5825 snapshot.anchor_before(range.start)
5826 ..snapshot.anchor_after(range.end)
5827 })
5828 .collect()
5829 } else {
5830 Vec::new()
5831 }
5832 } else {
5833 Vec::new()
5834 };
5835
5836 let status = if !buffer_matches.is_empty() {
5837 let entry = if let Some((buffer, _)) = entry.as_ref() {
5838 Some((buffer.clone(), buffer_matches))
5839 } else {
5840 None
5841 };
5842 FinishedStatus {
5843 entry,
5844 buffer_index,
5845 }
5846 } else {
5847 FinishedStatus {
5848 entry: None,
5849 buffer_index,
5850 }
5851 };
5852 if finished_tx.send(status).await.is_err() {
5853 break;
5854 }
5855 }
5856 });
5857 }
5858 // Report sorted matches
5859 scope.spawn(async move {
5860 let mut current_index = 0;
5861 let mut scratch = vec![None; buffers_len];
5862 while let Some(status) = finished_rx.next().await {
5863 debug_assert!(
5864 scratch[status.buffer_index].is_none(),
5865 "Got match status of position {} twice",
5866 status.buffer_index
5867 );
5868 let index = status.buffer_index;
5869 scratch[index] = Some(status);
5870 while current_index < buffers_len {
5871 let Some(current_entry) = scratch[current_index].take() else {
                                        // We intentionally **do not** increment `current_index` here. When the next
                                        // element arrives from `finished_rx`, we will inspect the same position again,
                                        // hoping that it has become `Some(_)` by then.
5875 break;
5876 };
5877 if let Some(entry) = current_entry.entry {
5878 result_tx.send(entry).await.log_err();
5879 }
5880 current_index += 1;
5881 }
5882 if current_index == buffers_len {
5883 break;
5884 }
5885 }
5886 });
5887 })
5888 .await;
5889 })
5890 .detach();
5891 result_rx
5892 }
5893
    /// Pick the paths that might contain a match for the given search query.
5895 async fn background_search(
5896 unnamed_buffers: Vec<Model<Buffer>>,
5897 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
5898 executor: BackgroundExecutor,
5899 fs: Arc<dyn Fs>,
5900 workers: usize,
5901 query: SearchQuery,
5902 path_count: usize,
5903 snapshots: Vec<LocalSnapshot>,
5904 matching_paths_tx: Sender<SearchMatchCandidate>,
5905 ) {
5906 let fs = &fs;
5907 let query = &query;
5908 let matching_paths_tx = &matching_paths_tx;
5909 let snapshots = &snapshots;
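        // Distribute the candidate paths evenly across the workers (ceiling division).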
5910 let paths_per_worker = (path_count + workers - 1) / workers;
5911 for buffer in unnamed_buffers {
5912 matching_paths_tx
5913 .send(SearchMatchCandidate::OpenBuffer {
5914 buffer: buffer.clone(),
5915 path: None,
5916 })
5917 .await
5918 .log_err();
5919 }
5920 for (path, (buffer, _)) in opened_buffers.iter() {
5921 matching_paths_tx
5922 .send(SearchMatchCandidate::OpenBuffer {
5923 buffer: buffer.clone(),
5924 path: Some(path.clone()),
5925 })
5926 .await
5927 .log_err();
5928 }
5929 executor
5930 .scoped(|scope| {
5931 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5932
5933 for worker_ix in 0..workers {
5934 let worker_start_ix = worker_ix * paths_per_worker;
5935 let worker_end_ix = worker_start_ix + paths_per_worker;
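                    // Give each worker its own copy of the open-buffer map so it can skip
                    // FS entries that are already covered by an in-memory buffer.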
                    let opened_buffers = opened_buffers.clone();
5937 let limiter = Arc::clone(&max_concurrent_workers);
5938 scope.spawn(async move {
5939 let _guard = limiter.acquire().await;
5940 let mut snapshot_start_ix = 0;
5941 let mut abs_path = PathBuf::new();
5942 for snapshot in snapshots {
5943 let snapshot_end_ix = snapshot_start_ix
5944 + if query.include_ignored() {
5945 snapshot.file_count()
5946 } else {
5947 snapshot.visible_file_count()
5948 };
5949 if worker_end_ix <= snapshot_start_ix {
5950 break;
5951 } else if worker_start_ix > snapshot_end_ix {
5952 snapshot_start_ix = snapshot_end_ix;
5953 continue;
5954 } else {
5955 let start_in_snapshot =
5956 worker_start_ix.saturating_sub(snapshot_start_ix);
5957 let end_in_snapshot =
5958 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5959
5960 for entry in snapshot
5961 .files(query.include_ignored(), start_in_snapshot)
5962 .take(end_in_snapshot - start_in_snapshot)
5963 {
5964 if matching_paths_tx.is_closed() {
5965 break;
5966 }
                                    if opened_buffers.contains_key(&entry.path) {
5968 continue;
5969 }
5970 let matches = if query.file_matches(Some(&entry.path)) {
5971 abs_path.clear();
5972 abs_path.push(&snapshot.abs_path());
5973 abs_path.push(&entry.path);
5974 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5975 {
5976 query.detect(file).unwrap_or(false)
5977 } else {
5978 false
5979 }
5980 } else {
5981 false
5982 };
5983
5984 if matches {
5985 let project_path = SearchMatchCandidate::Path {
5986 worktree_id: snapshot.id(),
5987 path: entry.path.clone(),
5988 is_ignored: entry.is_ignored,
5989 };
5990 if matching_paths_tx.send(project_path).await.is_err() {
5991 break;
5992 }
5993 }
5994 }
5995
5996 snapshot_start_ix = snapshot_end_ix;
5997 }
5998 }
5999 });
6000 }
6001
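                // When the query includes ignored files, walk ignored entries on the FS as well,
                // recursing into ignored directories whose contents are not present in the snapshot.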
6002 if query.include_ignored() {
6003 for snapshot in snapshots {
6004 for ignored_entry in snapshot
6005 .entries(query.include_ignored())
6006 .filter(|e| e.is_ignored)
6007 {
6008 let limiter = Arc::clone(&max_concurrent_workers);
6009 scope.spawn(async move {
6010 let _guard = limiter.acquire().await;
6011 let mut ignored_paths_to_process =
6012 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
6013 while let Some(ignored_abs_path) =
6014 ignored_paths_to_process.pop_front()
6015 {
6016 if let Some(fs_metadata) = fs
6017 .metadata(&ignored_abs_path)
6018 .await
6019 .with_context(|| {
6020 format!("fetching fs metadata for {ignored_abs_path:?}")
6021 })
6022 .log_err()
6023 .flatten()
6024 {
6025 if fs_metadata.is_dir {
6026 if let Some(mut subfiles) = fs
6027 .read_dir(&ignored_abs_path)
6028 .await
6029 .with_context(|| {
6030 format!(
6031 "listing ignored path {ignored_abs_path:?}"
6032 )
6033 })
6034 .log_err()
6035 {
6036 while let Some(subfile) = subfiles.next().await {
6037 if let Some(subfile) = subfile.log_err() {
6038 ignored_paths_to_process.push_back(subfile);
6039 }
6040 }
6041 }
6042 } else if !fs_metadata.is_symlink {
6043 if !query.file_matches(Some(&ignored_abs_path))
6044 || snapshot.is_path_excluded(
6045 ignored_entry.path.to_path_buf(),
6046 )
6047 {
6048 continue;
6049 }
6050 let matches = if let Some(file) = fs
6051 .open_sync(&ignored_abs_path)
6052 .await
6053 .with_context(|| {
6054 format!(
                                            "opening ignored path {ignored_abs_path:?}"
6056 )
6057 })
6058 .log_err()
6059 {
6060 query.detect(file).unwrap_or(false)
6061 } else {
6062 false
6063 };
6064 if matches {
6065 let project_path = SearchMatchCandidate::Path {
6066 worktree_id: snapshot.id(),
6067 path: Arc::from(
6068 ignored_abs_path
6069 .strip_prefix(snapshot.abs_path())
6070 .expect(
6071 "scanning worktree-related files",
6072 ),
6073 ),
6074 is_ignored: true,
6075 };
6076 if matching_paths_tx
6077 .send(project_path)
6078 .await
6079 .is_err()
6080 {
6081 return;
6082 }
6083 }
6084 }
6085 }
6086 }
6087 });
6088 }
6089 }
6090 }
6091 })
6092 .await;
6093 }
6094
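    /// Sends an LSP request on behalf of a buffer. For local projects the request goes directly
    /// to a locally running language server; for remote projects it is forwarded to the host over
    /// the proto protocol. Returns a default response when no matching language server or local
    /// file is available, or when the server lacks the required capabilities.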
6095 pub fn request_lsp<R: LspCommand>(
6096 &self,
6097 buffer_handle: Model<Buffer>,
6098 server: LanguageServerToQuery,
6099 request: R,
6100 cx: &mut ModelContext<Self>,
6101 ) -> Task<Result<R::Response>>
6102 where
6103 <R::LspRequest as lsp::request::Request>::Result: Send,
6104 <R::LspRequest as lsp::request::Request>::Params: Send,
6105 {
6106 let buffer = buffer_handle.read(cx);
6107 if self.is_local() {
6108 let language_server = match server {
6109 LanguageServerToQuery::Primary => {
6110 match self.primary_language_server_for_buffer(buffer, cx) {
6111 Some((_, server)) => Some(Arc::clone(server)),
6112 None => return Task::ready(Ok(Default::default())),
6113 }
6114 }
6115 LanguageServerToQuery::Other(id) => self
6116 .language_server_for_buffer(buffer, id, cx)
6117 .map(|(_, server)| Arc::clone(server)),
6118 };
6119 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6120 if let (Some(file), Some(language_server)) = (file, language_server) {
6121 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6122 return cx.spawn(move |this, cx| async move {
6123 if !request.check_capabilities(language_server.capabilities()) {
6124 return Ok(Default::default());
6125 }
6126
6127 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6128 let response = match result {
6129 Ok(response) => response,
6130
6131 Err(err) => {
6132 log::warn!(
6133 "Generic lsp request to {} failed: {}",
6134 language_server.name(),
6135 err
6136 );
6137 return Err(err);
6138 }
6139 };
6140
6141 request
6142 .response_from_lsp(
6143 response,
6144 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6145 buffer_handle,
6146 language_server.server_id(),
6147 cx,
6148 )
6149 .await
6150 });
6151 }
6152 } else if let Some(project_id) = self.remote_id() {
6153 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6154 }
6155
6156 Task::ready(Ok(Default::default()))
6157 }
6158
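    /// Forwards an LSP command to the remote project's host over RPC and converts the proto
    /// response back into the command's native response type, failing if the project has been
    /// dropped or disconnected in the meantime.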
6159 fn send_lsp_proto_request<R: LspCommand>(
6160 &self,
6161 buffer: Model<Buffer>,
6162 project_id: u64,
6163 request: R,
6164 cx: &mut ModelContext<'_, Project>,
6165 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6166 let rpc = self.client.clone();
6167 let message = request.to_proto(project_id, buffer.read(cx));
6168 cx.spawn(move |this, mut cx| async move {
6169 // Ensure the project is still alive by the time the task
6170 // is scheduled.
6171 this.upgrade().context("project dropped")?;
6172 let response = rpc.request(message).await?;
6173 let this = this.upgrade().context("project dropped")?;
6174 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6175 Err(anyhow!("disconnected before completing request"))
6176 } else {
6177 request
6178 .response_from_proto(response, this, buffer, cx)
6179 .await
6180 }
6181 })
6182 }
6183
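    /// Collects all search match candidates, sorts them by path (with ignored entries last),
    /// and ensures a buffer is open for each one. Returns the sorted candidate list as a oneshot
    /// value, along with a channel of (optional buffer snapshot, candidate index) pairs that is
    /// consumed by the scanning workers.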
6184 fn sort_candidates_and_open_buffers(
6185 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6186 cx: &mut ModelContext<Self>,
6187 ) -> (
6188 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6189 Receiver<(
6190 Option<(Model<Buffer>, BufferSnapshot)>,
6191 SearchMatchCandidateIndex,
6192 )>,
6193 ) {
6194 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6195 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6196 cx.spawn(move |this, cx| async move {
6197 let mut buffers = Vec::new();
6198 let mut ignored_buffers = Vec::new();
6199 while let Some(entry) = matching_paths_rx.next().await {
6200 if matches!(
6201 entry,
6202 SearchMatchCandidate::Path {
6203 is_ignored: true,
6204 ..
6205 }
6206 ) {
6207 ignored_buffers.push(entry);
6208 } else {
6209 buffers.push(entry);
6210 }
6211 }
6212 buffers.sort_by_key(|candidate| candidate.path());
6213 ignored_buffers.sort_by_key(|candidate| candidate.path());
6214 buffers.extend(ignored_buffers);
6215 let matching_paths = buffers.clone();
6216 let _ = sorted_buffers_tx.send(buffers);
6217 for (index, candidate) in matching_paths.into_iter().enumerate() {
6218 if buffers_tx.is_closed() {
6219 break;
6220 }
6221 let this = this.clone();
6222 let buffers_tx = buffers_tx.clone();
6223 cx.spawn(move |mut cx| async move {
6224 let buffer = match candidate {
6225 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6226 SearchMatchCandidate::Path {
6227 worktree_id, path, ..
6228 } => this
6229 .update(&mut cx, |this, cx| {
6230 this.open_buffer((worktree_id, path), cx)
6231 })?
6232 .await
6233 .log_err(),
6234 };
6235 if let Some(buffer) = buffer {
6236 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6237 buffers_tx
6238 .send((Some((buffer, snapshot)), index))
6239 .await
6240 .log_err();
6241 } else {
6242 buffers_tx.send((None, index)).await.log_err();
6243 }
6244
6245 Ok::<_, anyhow::Error>(())
6246 })
6247 .detach();
6248 }
6249 })
6250 .detach();
6251 (sorted_buffers_rx, buffers_rx)
6252 }
6253
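    /// Returns the worktree containing the given absolute path and the path relative to that
    /// worktree's root, creating a new local worktree when no existing worktree contains the path.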
6254 pub fn find_or_create_local_worktree(
6255 &mut self,
6256 abs_path: impl AsRef<Path>,
6257 visible: bool,
6258 cx: &mut ModelContext<Self>,
6259 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6260 let abs_path = abs_path.as_ref();
6261 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6262 Task::ready(Ok((tree, relative_path)))
6263 } else {
6264 let worktree = self.create_local_worktree(abs_path, visible, cx);
6265 cx.background_executor()
6266 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6267 }
6268 }
6269
6270 pub fn find_local_worktree(
6271 &self,
6272 abs_path: &Path,
6273 cx: &AppContext,
6274 ) -> Option<(Model<Worktree>, PathBuf)> {
6275 for tree in &self.worktrees {
6276 if let Some(tree) = tree.upgrade() {
6277 if let Some(relative_path) = tree
6278 .read(cx)
6279 .as_local()
6280 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6281 {
6282 return Some((tree.clone(), relative_path.into()));
6283 }
6284 }
6285 }
6286 None
6287 }
6288
6289 pub fn is_shared(&self) -> bool {
6290 match &self.client_state {
6291 ProjectClientState::Shared { .. } => true,
6292 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6293 }
6294 }
6295
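    /// Starts loading a local worktree at the given path. Concurrent requests for the same path
    /// are deduplicated through a shared future stored in `loading_local_worktrees`.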
6296 fn create_local_worktree(
6297 &mut self,
6298 abs_path: impl AsRef<Path>,
6299 visible: bool,
6300 cx: &mut ModelContext<Self>,
6301 ) -> Task<Result<Model<Worktree>>> {
6302 let fs = self.fs.clone();
6303 let client = self.client.clone();
6304 let next_entry_id = self.next_entry_id.clone();
6305 let path: Arc<Path> = abs_path.as_ref().into();
6306 let task = self
6307 .loading_local_worktrees
6308 .entry(path.clone())
6309 .or_insert_with(|| {
6310 cx.spawn(move |project, mut cx| {
6311 async move {
6312 let worktree = Worktree::local(
6313 client.clone(),
6314 path.clone(),
6315 visible,
6316 fs,
6317 next_entry_id,
6318 &mut cx,
6319 )
6320 .await;
6321
6322 project.update(&mut cx, |project, _| {
6323 project.loading_local_worktrees.remove(&path);
6324 })?;
6325
6326 let worktree = worktree?;
6327 project
6328 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6329 Ok(worktree)
6330 }
6331 .map_err(Arc::new)
6332 })
6333 .shared()
6334 })
6335 .clone();
6336 cx.background_executor().spawn(async move {
6337 match task.await {
6338 Ok(worktree) => Ok(worktree),
6339 Err(err) => Err(anyhow!("{}", err)),
6340 }
6341 })
6342 }
6343
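    /// Removes a worktree from the project, dropping any language servers that were used only by
    /// that worktree, along with that worktree's prettier instances.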
6344 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6345 let mut servers_to_remove = HashMap::default();
6346 let mut servers_to_preserve = HashSet::default();
6347 for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
6348 if worktree_id == &id_to_remove {
6349 servers_to_remove.insert(server_id, server_name.clone());
6350 } else {
6351 servers_to_preserve.insert(server_id);
6352 }
6353 }
6354 servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id));
6355 for (server_id_to_remove, server_name) in servers_to_remove {
6356 self.language_server_ids
6357 .remove(&(id_to_remove, server_name));
6358 self.language_server_statuses.remove(&server_id_to_remove);
6359 self.last_workspace_edits_by_language_server
6360 .remove(&server_id_to_remove);
6361 self.language_servers.remove(&server_id_to_remove);
6362 cx.emit(Event::LanguageServerRemoved(server_id_to_remove));
6363 }
6364
6365 let mut prettier_instances_to_clean = FuturesUnordered::new();
6366 if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) {
6367 for path in prettier_paths.iter().flatten() {
6368 if let Some(prettier_instance) = self.prettier_instances.remove(path) {
6369 prettier_instances_to_clean.push(async move {
6370 prettier_instance
6371 .server()
6372 .await
6373 .map(|server| server.server_id())
6374 });
6375 }
6376 }
6377 }
6378 cx.spawn(|project, mut cx| async move {
6379 while let Some(prettier_server_id) = prettier_instances_to_clean.next().await {
6380 if let Some(prettier_server_id) = prettier_server_id {
6381 project
6382 .update(&mut cx, |project, cx| {
6383 project
6384 .supplementary_language_servers
6385 .remove(&prettier_server_id);
6386 cx.emit(Event::LanguageServerRemoved(prettier_server_id));
6387 })
6388 .ok();
6389 }
6390 }
6391 })
6392 .detach();
6393
6394 self.worktrees.retain(|worktree| {
6395 if let Some(worktree) = worktree.upgrade() {
6396 let id = worktree.read(cx).id();
6397 if id == id_to_remove {
6398 cx.emit(Event::WorktreeRemoved(id));
6399 false
6400 } else {
6401 true
6402 }
6403 } else {
6404 false
6405 }
6406 });
6407 self.metadata_changed(cx);
6408 }
6409
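    /// Adds a worktree to the project and subscribes to its events. The worktree is held strongly
    /// only when the project is shared or the worktree is visible or remote; otherwise a weak
    /// handle is retained.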
6410 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6411 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6412 if worktree.read(cx).is_local() {
6413 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6414 worktree::Event::UpdatedEntries(changes) => {
6415 this.update_local_worktree_buffers(&worktree, changes, cx);
6416 this.update_local_worktree_language_servers(&worktree, changes, cx);
6417 this.update_local_worktree_settings(&worktree, changes, cx);
6418 this.update_prettier_settings(&worktree, changes, cx);
6419 cx.emit(Event::WorktreeUpdatedEntries(
6420 worktree.read(cx).id(),
6421 changes.clone(),
6422 ));
6423 }
6424 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6425 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6426 }
6427 })
6428 .detach();
6429 }
6430
6431 let push_strong_handle = {
6432 let worktree = worktree.read(cx);
6433 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6434 };
6435 if push_strong_handle {
6436 self.worktrees
6437 .push(WorktreeHandle::Strong(worktree.clone()));
6438 } else {
6439 self.worktrees
6440 .push(WorktreeHandle::Weak(worktree.downgrade()));
6441 }
6442
6443 let handle_id = worktree.entity_id();
6444 cx.observe_release(worktree, move |this, worktree, cx| {
6445 let _ = this.remove_worktree(worktree.id(), cx);
6446 cx.update_global::<SettingsStore, _>(|store, cx| {
6447 store
6448 .clear_local_settings(handle_id.as_u64() as usize, cx)
6449 .log_err()
6450 });
6451 })
6452 .detach();
6453
6454 cx.emit(Event::WorktreeAdded);
6455 self.metadata_changed(cx);
6456 }
6457
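    /// Updates the `File` metadata of open buffers affected by the given worktree changes,
    /// marking files as deleted when their entries disappear and re-registering renamed buffers
    /// with the appropriate language servers.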
6458 fn update_local_worktree_buffers(
6459 &mut self,
6460 worktree_handle: &Model<Worktree>,
6461 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6462 cx: &mut ModelContext<Self>,
6463 ) {
6464 let snapshot = worktree_handle.read(cx).snapshot();
6465
6466 let mut renamed_buffers = Vec::new();
6467 for (path, entry_id, _) in changes {
6468 let worktree_id = worktree_handle.read(cx).id();
6469 let project_path = ProjectPath {
6470 worktree_id,
6471 path: path.clone(),
6472 };
6473
6474 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6475 Some(&buffer_id) => buffer_id,
6476 None => match self.local_buffer_ids_by_path.get(&project_path) {
6477 Some(&buffer_id) => buffer_id,
6478 None => {
6479 continue;
6480 }
6481 },
6482 };
6483
6484 let open_buffer = self.opened_buffers.get(&buffer_id);
6485 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6486 buffer
6487 } else {
6488 self.opened_buffers.remove(&buffer_id);
6489 self.local_buffer_ids_by_path.remove(&project_path);
6490 self.local_buffer_ids_by_entry_id.remove(entry_id);
6491 continue;
6492 };
6493
6494 buffer.update(cx, |buffer, cx| {
6495 if let Some(old_file) = File::from_dyn(buffer.file()) {
6496 if old_file.worktree != *worktree_handle {
6497 return;
6498 }
6499
6500 let new_file = if let Some(entry) = old_file
6501 .entry_id
6502 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6503 {
6504 File {
6505 is_local: true,
6506 entry_id: Some(entry.id),
6507 mtime: entry.mtime,
6508 path: entry.path.clone(),
6509 worktree: worktree_handle.clone(),
6510 is_deleted: false,
6511 }
6512 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6513 File {
6514 is_local: true,
6515 entry_id: Some(entry.id),
6516 mtime: entry.mtime,
6517 path: entry.path.clone(),
6518 worktree: worktree_handle.clone(),
6519 is_deleted: false,
6520 }
6521 } else {
6522 File {
6523 is_local: true,
6524 entry_id: old_file.entry_id,
6525 path: old_file.path().clone(),
6526 mtime: old_file.mtime(),
6527 worktree: worktree_handle.clone(),
6528 is_deleted: true,
6529 }
6530 };
6531
6532 let old_path = old_file.abs_path(cx);
6533 if new_file.abs_path(cx) != old_path {
6534 renamed_buffers.push((cx.handle(), old_file.clone()));
6535 self.local_buffer_ids_by_path.remove(&project_path);
6536 self.local_buffer_ids_by_path.insert(
6537 ProjectPath {
6538 worktree_id,
6539 path: path.clone(),
6540 },
6541 buffer_id,
6542 );
6543 }
6544
6545 if new_file.entry_id != Some(*entry_id) {
6546 self.local_buffer_ids_by_entry_id.remove(entry_id);
6547 if let Some(entry_id) = new_file.entry_id {
6548 self.local_buffer_ids_by_entry_id
6549 .insert(entry_id, buffer_id);
6550 }
6551 }
6552
6553 if new_file != *old_file {
6554 if let Some(project_id) = self.remote_id() {
6555 self.client
6556 .send(proto::UpdateBufferFile {
6557 project_id,
6558 buffer_id: buffer_id as u64,
6559 file: Some(new_file.to_proto()),
6560 })
6561 .log_err();
6562 }
6563
6564 buffer.file_updated(Arc::new(new_file), cx);
6565 }
6566 }
6567 });
6568 }
6569
6570 for (buffer, old_file) in renamed_buffers {
6571 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6572 self.detect_language_for_buffer(&buffer, cx);
6573 self.register_buffer_with_language_servers(&buffer, cx);
6574 }
6575 }
6576
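    /// Notifies a worktree's language servers about file changes matching the glob patterns they
    /// watch, via `DidChangeWatchedFiles` notifications.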
6577 fn update_local_worktree_language_servers(
6578 &mut self,
6579 worktree_handle: &Model<Worktree>,
6580 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6581 cx: &mut ModelContext<Self>,
6582 ) {
6583 if changes.is_empty() {
6584 return;
6585 }
6586
6587 let worktree_id = worktree_handle.read(cx).id();
6588 let mut language_server_ids = self
6589 .language_server_ids
6590 .iter()
6591 .filter_map(|((server_worktree_id, _), server_id)| {
6592 (*server_worktree_id == worktree_id).then_some(*server_id)
6593 })
6594 .collect::<Vec<_>>();
6595 language_server_ids.sort();
6596 language_server_ids.dedup();
6597
6598 let abs_path = worktree_handle.read(cx).abs_path();
6599 for server_id in &language_server_ids {
6600 if let Some(LanguageServerState::Running {
6601 server,
6602 watched_paths,
6603 ..
6604 }) = self.language_servers.get(server_id)
6605 {
6606 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
6607 let params = lsp::DidChangeWatchedFilesParams {
6608 changes: changes
6609 .iter()
6610 .filter_map(|(path, _, change)| {
6611 if !watched_paths.is_match(&path) {
6612 return None;
6613 }
6614 let typ = match change {
6615 PathChange::Loaded => return None,
6616 PathChange::Added => lsp::FileChangeType::CREATED,
6617 PathChange::Removed => lsp::FileChangeType::DELETED,
6618 PathChange::Updated => lsp::FileChangeType::CHANGED,
6619 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6620 };
6621 Some(lsp::FileEvent {
6622 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6623 typ,
6624 })
6625 })
6626 .collect(),
6627 };
6628
6629 if !params.changes.is_empty() {
6630 server
6631 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6632 .log_err();
6633 }
6634 }
6635 }
6636 }
6637 }
6638
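    /// Recomputes the git diff base for every open or loading buffer whose containing repository
    /// changed and, if the project is shared, forwards the new diff bases to collaborators.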
6639 fn update_local_worktree_buffers_git_repos(
6640 &mut self,
6641 worktree_handle: Model<Worktree>,
6642 changed_repos: &UpdatedGitRepositoriesSet,
6643 cx: &mut ModelContext<Self>,
6644 ) {
6645 debug_assert!(worktree_handle.read(cx).is_local());
6646
        // Identify the loading buffers whose containing repository has changed.
6648 let future_buffers = self
6649 .loading_buffers_by_path
6650 .iter()
6651 .filter_map(|(project_path, receiver)| {
6652 if project_path.worktree_id != worktree_handle.read(cx).id() {
6653 return None;
6654 }
6655 let path = &project_path.path;
6656 changed_repos
6657 .iter()
6658 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6659 let receiver = receiver.clone();
6660 let path = path.clone();
6661 Some(async move {
6662 wait_for_loading_buffer(receiver)
6663 .await
6664 .ok()
6665 .map(|buffer| (buffer, path))
6666 })
6667 })
6668 .collect::<FuturesUnordered<_>>();
6669
6670 // Identify the current buffers whose containing repository has changed.
6671 let current_buffers = self
6672 .opened_buffers
6673 .values()
6674 .filter_map(|buffer| {
6675 let buffer = buffer.upgrade()?;
6676 let file = File::from_dyn(buffer.read(cx).file())?;
6677 if file.worktree != worktree_handle {
6678 return None;
6679 }
6680 let path = file.path();
6681 changed_repos
6682 .iter()
6683 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6684 Some((buffer, path.clone()))
6685 })
6686 .collect::<Vec<_>>();
6687
6688 if future_buffers.len() + current_buffers.len() == 0 {
6689 return;
6690 }
6691
6692 let remote_id = self.remote_id();
6693 let client = self.client.clone();
6694 cx.spawn(move |_, mut cx| async move {
6695 // Wait for all of the buffers to load.
6696 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6697
6698 // Reload the diff base for every buffer whose containing git repository has changed.
6699 let snapshot =
6700 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
6701 let diff_bases_by_buffer = cx
6702 .background_executor()
6703 .spawn(async move {
6704 future_buffers
6705 .into_iter()
                            .flatten()
6707 .chain(current_buffers)
6708 .filter_map(|(buffer, path)| {
6709 let (work_directory, repo) =
6710 snapshot.repository_and_work_directory_for_path(&path)?;
6711 let repo = snapshot.get_local_repo(&repo)?;
6712 let relative_path = path.strip_prefix(&work_directory).ok()?;
6713 let base_text = repo.repo_ptr.lock().load_index_text(relative_path);
6714 Some((buffer, base_text))
6715 })
6716 .collect::<Vec<_>>()
6717 })
6718 .await;
6719
            // Assign the new diff base to each of the buffers.
6721 for (buffer, diff_base) in diff_bases_by_buffer {
6722 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6723 buffer.set_diff_base(diff_base.clone(), cx);
6724 buffer.remote_id()
6725 })?;
6726 if let Some(project_id) = remote_id {
6727 client
6728 .send(proto::UpdateDiffBase {
6729 project_id,
6730 buffer_id,
6731 diff_base,
6732 })
6733 .log_err();
6734 }
6735 }
6736
6737 anyhow::Ok(())
6738 })
6739 .detach();
6740 }
6741
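    /// Reloads any local settings files that changed within the worktree, applying them to the
    /// global `SettingsStore` and, if the project is shared, relaying the new contents to
    /// collaborators.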
6742 fn update_local_worktree_settings(
6743 &mut self,
6744 worktree: &Model<Worktree>,
6745 changes: &UpdatedEntriesSet,
6746 cx: &mut ModelContext<Self>,
6747 ) {
6748 let project_id = self.remote_id();
6749 let worktree_id = worktree.entity_id();
6750 let worktree = worktree.read(cx).as_local().unwrap();
6751 let remote_worktree_id = worktree.id();
6752
6753 let mut settings_contents = Vec::new();
6754 for (path, _, change) in changes.iter() {
6755 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6756 let settings_dir = Arc::from(
6757 path.ancestors()
6758 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6759 .unwrap(),
6760 );
6761 let fs = self.fs.clone();
6762 let removed = *change == PathChange::Removed;
6763 let abs_path = worktree.absolutize(path);
6764 settings_contents.push(async move {
6765 (
6766 settings_dir,
6767 if removed {
6768 None
6769 } else {
6770 Some(async move { fs.load(&abs_path?).await }.await)
6771 },
6772 )
6773 });
6774 }
6775 }
6776
6777 if settings_contents.is_empty() {
6778 return;
6779 }
6780
6781 let client = self.client.clone();
6782 cx.spawn(move |_, cx| async move {
6783 let settings_contents: Vec<(Arc<Path>, _)> =
6784 futures::future::join_all(settings_contents).await;
6785 cx.update(|cx| {
6786 cx.update_global::<SettingsStore, _>(|store, cx| {
6787 for (directory, file_content) in settings_contents {
6788 let file_content = file_content.and_then(|content| content.log_err());
6789 store
6790 .set_local_settings(
6791 worktree_id.as_u64() as usize,
6792 directory.clone(),
6793 file_content.as_ref().map(String::as_str),
6794 cx,
6795 )
6796 .log_err();
6797 if let Some(remote_id) = project_id {
6798 client
6799 .send(proto::UpdateWorktreeSettings {
6800 project_id: remote_id,
6801 worktree_id: remote_worktree_id.to_proto(),
6802 path: directory.to_string_lossy().into_owned(),
6803 content: file_content,
6804 })
6805 .log_err();
6806 }
6807 }
6808 });
6809 })
6810 .ok();
6811 })
6812 .detach();
6813 }
6814
6815 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6816 let new_active_entry = entry.and_then(|project_path| {
6817 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6818 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6819 Some(entry.id)
6820 });
6821 if new_active_entry != self.active_entry {
6822 self.active_entry = new_active_entry;
6823 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6824 }
6825 }
6826
6827 pub fn language_servers_running_disk_based_diagnostics(
6828 &self,
6829 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6830 self.language_server_statuses
6831 .iter()
6832 .filter_map(|(id, status)| {
6833 if status.has_pending_diagnostic_updates {
6834 Some(*id)
6835 } else {
6836 None
6837 }
6838 })
6839 }
6840
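    /// Returns the total number of diagnostic errors and warnings across all visible worktrees,
    /// optionally including entries from git-ignored paths.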
6841 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
6842 let mut summary = DiagnosticSummary::default();
6843 for (_, _, path_summary) in
6844 self.diagnostic_summaries(include_ignored, cx)
6845 .filter(|(path, _, _)| {
6846 let worktree = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
6847 include_ignored || worktree == Some(false)
6848 })
6849 {
6850 summary.error_count += path_summary.error_count;
6851 summary.warning_count += path_summary.warning_count;
6852 }
6853 summary
6854 }
6855
6856 pub fn diagnostic_summaries<'a>(
6857 &'a self,
6858 include_ignored: bool,
6859 cx: &'a AppContext,
6860 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6861 self.visible_worktrees(cx)
6862 .flat_map(move |worktree| {
6863 let worktree = worktree.read(cx);
6864 let worktree_id = worktree.id();
6865 worktree
6866 .diagnostic_summaries()
6867 .map(move |(path, server_id, summary)| {
6868 (ProjectPath { worktree_id, path }, server_id, summary)
6869 })
6870 })
6871 .filter(move |(path, _, _)| {
6872 let worktree = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
6873 include_ignored || worktree == Some(false)
6874 })
6875 }
6876
6877 pub fn disk_based_diagnostics_started(
6878 &mut self,
6879 language_server_id: LanguageServerId,
6880 cx: &mut ModelContext<Self>,
6881 ) {
6882 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6883 }
6884
6885 pub fn disk_based_diagnostics_finished(
6886 &mut self,
6887 language_server_id: LanguageServerId,
6888 cx: &mut ModelContext<Self>,
6889 ) {
6890 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6891 }
6892
6893 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6894 self.active_entry
6895 }
6896
6897 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6898 self.worktree_for_id(path.worktree_id, cx)?
6899 .read(cx)
6900 .entry_for_path(&path.path)
6901 .cloned()
6902 }
6903
6904 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6905 let worktree = self.worktree_for_entry(entry_id, cx)?;
6906 let worktree = worktree.read(cx);
6907 let worktree_id = worktree.id();
6908 let path = worktree.entry_for_id(entry_id)?.path.clone();
6909 Some(ProjectPath { worktree_id, path })
6910 }
6911
6912 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6913 let workspace_root = self
6914 .worktree_for_id(project_path.worktree_id, cx)?
6915 .read(cx)
6916 .abs_path();
6917 let project_path = project_path.path.as_ref();
6918
6919 Some(if project_path == Path::new("") {
6920 workspace_root.to_path_buf()
6921 } else {
6922 workspace_root.join(project_path)
6923 })
6924 }
6925
6926 // RPC message handlers
6927
6928 async fn handle_unshare_project(
6929 this: Model<Self>,
6930 _: TypedEnvelope<proto::UnshareProject>,
6931 _: Arc<Client>,
6932 mut cx: AsyncAppContext,
6933 ) -> Result<()> {
6934 this.update(&mut cx, |this, cx| {
6935 if this.is_local() {
6936 this.unshare(cx)?;
6937 } else {
6938 this.disconnected_from_host(cx);
6939 }
6940 Ok(())
6941 })?
6942 }
6943
6944 async fn handle_add_collaborator(
6945 this: Model<Self>,
6946 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6947 _: Arc<Client>,
6948 mut cx: AsyncAppContext,
6949 ) -> Result<()> {
6950 let collaborator = envelope
6951 .payload
6952 .collaborator
6953 .take()
6954 .ok_or_else(|| anyhow!("empty collaborator"))?;
6955
6956 let collaborator = Collaborator::from_proto(collaborator)?;
6957 this.update(&mut cx, |this, cx| {
6958 this.shared_buffers.remove(&collaborator.peer_id);
6959 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6960 this.collaborators
6961 .insert(collaborator.peer_id, collaborator);
6962 cx.notify();
6963 })?;
6964
6965 Ok(())
6966 }
6967
6968 async fn handle_update_project_collaborator(
6969 this: Model<Self>,
6970 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6971 _: Arc<Client>,
6972 mut cx: AsyncAppContext,
6973 ) -> Result<()> {
6974 let old_peer_id = envelope
6975 .payload
6976 .old_peer_id
6977 .ok_or_else(|| anyhow!("missing old peer id"))?;
6978 let new_peer_id = envelope
6979 .payload
6980 .new_peer_id
6981 .ok_or_else(|| anyhow!("missing new peer id"))?;
6982 this.update(&mut cx, |this, cx| {
6983 let collaborator = this
6984 .collaborators
6985 .remove(&old_peer_id)
6986 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6987 let is_host = collaborator.replica_id == 0;
6988 this.collaborators.insert(new_peer_id, collaborator);
6989
6990 let buffers = this.shared_buffers.remove(&old_peer_id);
6991 log::info!(
6992 "peer {} became {}. moving buffers {:?}",
6993 old_peer_id,
6994 new_peer_id,
6995 &buffers
6996 );
6997 if let Some(buffers) = buffers {
6998 this.shared_buffers.insert(new_peer_id, buffers);
6999 }
7000
7001 if is_host {
7002 this.opened_buffers
7003 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
7004 this.buffer_ordered_messages_tx
7005 .unbounded_send(BufferOrderedMessage::Resync)
7006 .unwrap();
7007 }
7008
7009 cx.emit(Event::CollaboratorUpdated {
7010 old_peer_id,
7011 new_peer_id,
7012 });
7013 cx.notify();
7014 Ok(())
7015 })?
7016 }
7017
7018 async fn handle_remove_collaborator(
7019 this: Model<Self>,
7020 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7021 _: Arc<Client>,
7022 mut cx: AsyncAppContext,
7023 ) -> Result<()> {
7024 this.update(&mut cx, |this, cx| {
7025 let peer_id = envelope
7026 .payload
7027 .peer_id
7028 .ok_or_else(|| anyhow!("invalid peer id"))?;
7029 let replica_id = this
7030 .collaborators
7031 .remove(&peer_id)
7032 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7033 .replica_id;
7034 for buffer in this.opened_buffers.values() {
7035 if let Some(buffer) = buffer.upgrade() {
7036 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7037 }
7038 }
7039 this.shared_buffers.remove(&peer_id);
7040
7041 cx.emit(Event::CollaboratorLeft(peer_id));
7042 cx.notify();
7043 Ok(())
7044 })?
7045 }
7046
7047 async fn handle_update_project(
7048 this: Model<Self>,
7049 envelope: TypedEnvelope<proto::UpdateProject>,
7050 _: Arc<Client>,
7051 mut cx: AsyncAppContext,
7052 ) -> Result<()> {
7053 this.update(&mut cx, |this, cx| {
            // Ignore messages that were sent before we received the response to our join-project request.
7055 if envelope.message_id > this.join_project_response_message_id {
7056 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7057 }
7058 Ok(())
7059 })?
7060 }
7061
7062 async fn handle_update_worktree(
7063 this: Model<Self>,
7064 envelope: TypedEnvelope<proto::UpdateWorktree>,
7065 _: Arc<Client>,
7066 mut cx: AsyncAppContext,
7067 ) -> Result<()> {
7068 this.update(&mut cx, |this, cx| {
7069 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7070 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7071 worktree.update(cx, |worktree, _| {
7072 let worktree = worktree.as_remote_mut().unwrap();
7073 worktree.update_from_remote(envelope.payload);
7074 });
7075 }
7076 Ok(())
7077 })?
7078 }
7079
7080 async fn handle_update_worktree_settings(
7081 this: Model<Self>,
7082 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7083 _: Arc<Client>,
7084 mut cx: AsyncAppContext,
7085 ) -> Result<()> {
7086 this.update(&mut cx, |this, cx| {
7087 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7088 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7089 cx.update_global::<SettingsStore, _>(|store, cx| {
7090 store
7091 .set_local_settings(
7092 worktree.entity_id().as_u64() as usize,
7093 PathBuf::from(&envelope.payload.path).into(),
7094 envelope.payload.content.as_ref().map(String::as_str),
7095 cx,
7096 )
7097 .log_err();
7098 });
7099 }
7100 Ok(())
7101 })?
7102 }
7103
7104 async fn handle_create_project_entry(
7105 this: Model<Self>,
7106 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7107 _: Arc<Client>,
7108 mut cx: AsyncAppContext,
7109 ) -> Result<proto::ProjectEntryResponse> {
7110 let worktree = this.update(&mut cx, |this, cx| {
7111 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7112 this.worktree_for_id(worktree_id, cx)
7113 .ok_or_else(|| anyhow!("worktree not found"))
7114 })??;
7115 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7116 let entry = worktree
7117 .update(&mut cx, |worktree, cx| {
7118 let worktree = worktree.as_local_mut().unwrap();
7119 let path = PathBuf::from(envelope.payload.path);
7120 worktree.create_entry(path, envelope.payload.is_directory, cx)
7121 })?
7122 .await?;
7123 Ok(proto::ProjectEntryResponse {
7124 entry: entry.as_ref().map(|e| e.into()),
7125 worktree_scan_id: worktree_scan_id as u64,
7126 })
7127 }
7128
7129 async fn handle_rename_project_entry(
7130 this: Model<Self>,
7131 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7132 _: Arc<Client>,
7133 mut cx: AsyncAppContext,
7134 ) -> Result<proto::ProjectEntryResponse> {
7135 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7136 let worktree = this.update(&mut cx, |this, cx| {
7137 this.worktree_for_entry(entry_id, cx)
7138 .ok_or_else(|| anyhow!("worktree not found"))
7139 })??;
7140 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7141 let entry = worktree
7142 .update(&mut cx, |worktree, cx| {
7143 let new_path = PathBuf::from(envelope.payload.new_path);
7144 worktree
7145 .as_local_mut()
7146 .unwrap()
7147 .rename_entry(entry_id, new_path, cx)
7148 })?
7149 .await?;
7150 Ok(proto::ProjectEntryResponse {
7151 entry: entry.as_ref().map(|e| e.into()),
7152 worktree_scan_id: worktree_scan_id as u64,
7153 })
7154 }
7155
7156 async fn handle_copy_project_entry(
7157 this: Model<Self>,
7158 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7159 _: Arc<Client>,
7160 mut cx: AsyncAppContext,
7161 ) -> Result<proto::ProjectEntryResponse> {
7162 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7163 let worktree = this.update(&mut cx, |this, cx| {
7164 this.worktree_for_entry(entry_id, cx)
7165 .ok_or_else(|| anyhow!("worktree not found"))
7166 })??;
7167 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7168 let entry = worktree
7169 .update(&mut cx, |worktree, cx| {
7170 let new_path = PathBuf::from(envelope.payload.new_path);
7171 worktree
7172 .as_local_mut()
7173 .unwrap()
7174 .copy_entry(entry_id, new_path, cx)
7175 })?
7176 .await?;
7177 Ok(proto::ProjectEntryResponse {
7178 entry: entry.as_ref().map(|e| e.into()),
7179 worktree_scan_id: worktree_scan_id as u64,
7180 })
7181 }
7182
7183 async fn handle_delete_project_entry(
7184 this: Model<Self>,
7185 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7186 _: Arc<Client>,
7187 mut cx: AsyncAppContext,
7188 ) -> Result<proto::ProjectEntryResponse> {
7189 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7190
7191 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7192
7193 let worktree = this.update(&mut cx, |this, cx| {
7194 this.worktree_for_entry(entry_id, cx)
7195 .ok_or_else(|| anyhow!("worktree not found"))
7196 })??;
7197 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7198 worktree
7199 .update(&mut cx, |worktree, cx| {
7200 worktree
7201 .as_local_mut()
7202 .unwrap()
7203 .delete_entry(entry_id, cx)
7204 .ok_or_else(|| anyhow!("invalid entry"))
7205 })??
7206 .await?;
7207 Ok(proto::ProjectEntryResponse {
7208 entry: None,
7209 worktree_scan_id: worktree_scan_id as u64,
7210 })
7211 }
7212
7213 async fn handle_expand_project_entry(
7214 this: Model<Self>,
7215 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7216 _: Arc<Client>,
7217 mut cx: AsyncAppContext,
7218 ) -> Result<proto::ExpandProjectEntryResponse> {
7219 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7220 let worktree = this
7221 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7222 .ok_or_else(|| anyhow!("invalid request"))?;
7223 worktree
7224 .update(&mut cx, |worktree, cx| {
7225 worktree
7226 .as_local_mut()
7227 .unwrap()
7228 .expand_entry(entry_id, cx)
7229 .ok_or_else(|| anyhow!("invalid entry"))
7230 })??
7231 .await?;
7232 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7233 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7234 }
7235
7236 async fn handle_update_diagnostic_summary(
7237 this: Model<Self>,
7238 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7239 _: Arc<Client>,
7240 mut cx: AsyncAppContext,
7241 ) -> Result<()> {
7242 this.update(&mut cx, |this, cx| {
7243 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7244 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7245 if let Some(summary) = envelope.payload.summary {
7246 let project_path = ProjectPath {
7247 worktree_id,
7248 path: Path::new(&summary.path).into(),
7249 };
7250 worktree.update(cx, |worktree, _| {
7251 worktree
7252 .as_remote_mut()
7253 .unwrap()
7254 .update_diagnostic_summary(project_path.path.clone(), &summary);
7255 });
7256 cx.emit(Event::DiagnosticsUpdated {
7257 language_server_id: LanguageServerId(summary.language_server_id as usize),
7258 path: project_path,
7259 });
7260 }
7261 }
7262 Ok(())
7263 })?
7264 }
7265
7266 async fn handle_start_language_server(
7267 this: Model<Self>,
7268 envelope: TypedEnvelope<proto::StartLanguageServer>,
7269 _: Arc<Client>,
7270 mut cx: AsyncAppContext,
7271 ) -> Result<()> {
7272 let server = envelope
7273 .payload
7274 .server
7275 .ok_or_else(|| anyhow!("invalid server"))?;
7276 this.update(&mut cx, |this, cx| {
7277 this.language_server_statuses.insert(
7278 LanguageServerId(server.id as usize),
7279 LanguageServerStatus {
7280 name: server.name,
7281 pending_work: Default::default(),
7282 has_pending_diagnostic_updates: false,
7283 progress_tokens: Default::default(),
7284 },
7285 );
7286 cx.notify();
7287 })?;
7288 Ok(())
7289 }
7290
7291 async fn handle_update_language_server(
7292 this: Model<Self>,
7293 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7294 _: Arc<Client>,
7295 mut cx: AsyncAppContext,
7296 ) -> Result<()> {
7297 this.update(&mut cx, |this, cx| {
7298 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7299
7300 match envelope
7301 .payload
7302 .variant
7303 .ok_or_else(|| anyhow!("invalid variant"))?
7304 {
7305 proto::update_language_server::Variant::WorkStart(payload) => {
7306 this.on_lsp_work_start(
7307 language_server_id,
7308 payload.token,
7309 LanguageServerProgress {
7310 message: payload.message,
7311 percentage: payload.percentage.map(|p| p as usize),
7312 last_update_at: Instant::now(),
7313 },
7314 cx,
7315 );
7316 }
7317
7318 proto::update_language_server::Variant::WorkProgress(payload) => {
7319 this.on_lsp_work_progress(
7320 language_server_id,
7321 payload.token,
7322 LanguageServerProgress {
7323 message: payload.message,
7324 percentage: payload.percentage.map(|p| p as usize),
7325 last_update_at: Instant::now(),
7326 },
7327 cx,
7328 );
7329 }
7330
7331 proto::update_language_server::Variant::WorkEnd(payload) => {
7332 this.on_lsp_work_end(language_server_id, payload.token, cx);
7333 }
7334
7335 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7336 this.disk_based_diagnostics_started(language_server_id, cx);
7337 }
7338
7339 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7340 this.disk_based_diagnostics_finished(language_server_id, cx)
7341 }
7342 }
7343
7344 Ok(())
7345 })?
7346 }
7347
7348 async fn handle_update_buffer(
7349 this: Model<Self>,
7350 envelope: TypedEnvelope<proto::UpdateBuffer>,
7351 _: Arc<Client>,
7352 mut cx: AsyncAppContext,
7353 ) -> Result<proto::Ack> {
7354 this.update(&mut cx, |this, cx| {
7355 let payload = envelope.payload.clone();
7356 let buffer_id = payload.buffer_id;
7357 let ops = payload
7358 .operations
7359 .into_iter()
7360 .map(language::proto::deserialize_operation)
7361 .collect::<Result<Vec<_>, _>>()?;
7362 let is_remote = this.is_remote();
7363 match this.opened_buffers.entry(buffer_id) {
7364 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7365 OpenBuffer::Strong(buffer) => {
7366 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7367 }
7368 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7369 OpenBuffer::Weak(_) => {}
7370 },
7371 hash_map::Entry::Vacant(e) => {
7372 assert!(
7373 is_remote,
7374 "received buffer update from {:?}",
7375 envelope.original_sender_id
7376 );
7377 e.insert(OpenBuffer::Operations(ops));
7378 }
7379 }
7380 Ok(proto::Ack {})
7381 })?
7382 }
7383
7384 async fn handle_create_buffer_for_peer(
7385 this: Model<Self>,
7386 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7387 _: Arc<Client>,
7388 mut cx: AsyncAppContext,
7389 ) -> Result<()> {
7390 this.update(&mut cx, |this, cx| {
7391 match envelope
7392 .payload
7393 .variant
7394 .ok_or_else(|| anyhow!("missing variant"))?
7395 {
7396 proto::create_buffer_for_peer::Variant::State(mut state) => {
7397 let mut buffer_file = None;
7398 if let Some(file) = state.file.take() {
7399 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7400 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7401 anyhow!("no worktree found for id {}", file.worktree_id)
7402 })?;
7403 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7404 as Arc<dyn language::File>);
7405 }
7406
7407 let buffer_id = state.id;
7408 let buffer = cx.new_model(|_| {
7409 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
7410 .unwrap()
7411 });
7412 this.incomplete_remote_buffers
7413 .insert(buffer_id, Some(buffer));
7414 }
7415 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7416 let buffer = this
7417 .incomplete_remote_buffers
7418 .get(&chunk.buffer_id)
7419 .cloned()
7420 .flatten()
7421 .ok_or_else(|| {
7422 anyhow!(
7423 "received chunk for buffer {} without initial state",
7424 chunk.buffer_id
7425 )
7426 })?;
7427 let operations = chunk
7428 .operations
7429 .into_iter()
7430 .map(language::proto::deserialize_operation)
7431 .collect::<Result<Vec<_>>>()?;
7432 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7433
7434 if chunk.is_last {
7435 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7436 this.register_buffer(&buffer, cx)?;
7437 }
7438 }
7439 }
7440
7441 Ok(())
7442 })?
7443 }
7444
7445 async fn handle_update_diff_base(
7446 this: Model<Self>,
7447 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7448 _: Arc<Client>,
7449 mut cx: AsyncAppContext,
7450 ) -> Result<()> {
7451 this.update(&mut cx, |this, cx| {
7452 let buffer_id = envelope.payload.buffer_id;
7453 let diff_base = envelope.payload.diff_base;
7454 if let Some(buffer) = this
7455 .opened_buffers
7456 .get_mut(&buffer_id)
7457 .and_then(|b| b.upgrade())
7458 .or_else(|| {
7459 this.incomplete_remote_buffers
7460 .get(&buffer_id)
7461 .cloned()
7462 .flatten()
7463 })
7464 {
7465 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7466 }
7467 Ok(())
7468 })?
7469 }
7470
7471 async fn handle_update_buffer_file(
7472 this: Model<Self>,
7473 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7474 _: Arc<Client>,
7475 mut cx: AsyncAppContext,
7476 ) -> Result<()> {
7477 let buffer_id = envelope.payload.buffer_id;
7478
7479 this.update(&mut cx, |this, cx| {
7480 let payload = envelope.payload.clone();
7481 if let Some(buffer) = this
7482 .opened_buffers
7483 .get(&buffer_id)
7484 .and_then(|b| b.upgrade())
7485 .or_else(|| {
7486 this.incomplete_remote_buffers
7487 .get(&buffer_id)
7488 .cloned()
7489 .flatten()
7490 })
7491 {
7492 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7493 let worktree = this
7494 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7495 .ok_or_else(|| anyhow!("no such worktree"))?;
7496 let file = File::from_proto(file, worktree, cx)?;
7497 buffer.update(cx, |buffer, cx| {
7498 buffer.file_updated(Arc::new(file), cx);
7499 });
7500 this.detect_language_for_buffer(&buffer, cx);
7501 }
7502 Ok(())
7503 })?
7504 }
7505
7506 async fn handle_save_buffer(
7507 this: Model<Self>,
7508 envelope: TypedEnvelope<proto::SaveBuffer>,
7509 _: Arc<Client>,
7510 mut cx: AsyncAppContext,
7511 ) -> Result<proto::BufferSaved> {
7512 let buffer_id = envelope.payload.buffer_id;
7513 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7514 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7515 let buffer = this
7516 .opened_buffers
7517 .get(&buffer_id)
7518 .and_then(|buffer| buffer.upgrade())
7519 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7520 anyhow::Ok((project_id, buffer))
7521 })??;
7522 buffer
7523 .update(&mut cx, |buffer, _| {
7524 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7525 })?
7526 .await?;
7527 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
7528
7529 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
7530 .await?;
7531 Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
7532 project_id,
7533 buffer_id,
7534 version: serialize_version(buffer.saved_version()),
7535 mtime: Some(buffer.saved_mtime().into()),
7536 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7537 })?)
7538 }
7539
7540 async fn handle_reload_buffers(
7541 this: Model<Self>,
7542 envelope: TypedEnvelope<proto::ReloadBuffers>,
7543 _: Arc<Client>,
7544 mut cx: AsyncAppContext,
7545 ) -> Result<proto::ReloadBuffersResponse> {
7546 let sender_id = envelope.original_sender_id()?;
7547 let reload = this.update(&mut cx, |this, cx| {
7548 let mut buffers = HashSet::default();
7549 for buffer_id in &envelope.payload.buffer_ids {
7550 buffers.insert(
7551 this.opened_buffers
7552 .get(buffer_id)
7553 .and_then(|buffer| buffer.upgrade())
7554 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7555 );
7556 }
7557 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7558 })??;
7559
7560 let project_transaction = reload.await?;
7561 let project_transaction = this.update(&mut cx, |this, cx| {
7562 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7563 })?;
7564 Ok(proto::ReloadBuffersResponse {
7565 transaction: Some(project_transaction),
7566 })
7567 }
7568
7569 async fn handle_synchronize_buffers(
7570 this: Model<Self>,
7571 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7572 _: Arc<Client>,
7573 mut cx: AsyncAppContext,
7574 ) -> Result<proto::SynchronizeBuffersResponse> {
7575 let project_id = envelope.payload.project_id;
7576 let mut response = proto::SynchronizeBuffersResponse {
7577 buffers: Default::default(),
7578 };
7579
7580 this.update(&mut cx, |this, cx| {
7581 let Some(guest_id) = envelope.original_sender_id else {
7582 error!("missing original_sender_id on SynchronizeBuffers request");
7583 return;
7584 };
7585
7586 this.shared_buffers.entry(guest_id).or_default().clear();
7587 for buffer in envelope.payload.buffers {
7588 let buffer_id = buffer.id;
7589 let remote_version = language::proto::deserialize_version(&buffer.version);
7590 if let Some(buffer) = this.buffer_for_id(buffer_id) {
7591 this.shared_buffers
7592 .entry(guest_id)
7593 .or_default()
7594 .insert(buffer_id);
7595
7596 let buffer = buffer.read(cx);
7597 response.buffers.push(proto::BufferVersion {
7598 id: buffer_id,
7599 version: language::proto::serialize_version(&buffer.version),
7600 });
7601
7602 let operations = buffer.serialize_ops(Some(remote_version), cx);
7603 let client = this.client.clone();
7604 if let Some(file) = buffer.file() {
7605 client
7606 .send(proto::UpdateBufferFile {
7607 project_id,
7608 buffer_id: buffer_id as u64,
7609 file: Some(file.to_proto()),
7610 })
7611 .log_err();
7612 }
7613
7614 client
7615 .send(proto::UpdateDiffBase {
7616 project_id,
7617 buffer_id: buffer_id as u64,
7618 diff_base: buffer.diff_base().map(Into::into),
7619 })
7620 .log_err();
7621
7622 client
7623 .send(proto::BufferReloaded {
7624 project_id,
7625 buffer_id,
7626 version: language::proto::serialize_version(buffer.saved_version()),
7627 mtime: Some(buffer.saved_mtime().into()),
7628 fingerprint: language::proto::serialize_fingerprint(
7629 buffer.saved_version_fingerprint(),
7630 ),
7631 line_ending: language::proto::serialize_line_ending(
7632 buffer.line_ending(),
7633 ) as i32,
7634 })
7635 .log_err();
7636
7637 cx.background_executor()
7638 .spawn(
7639 async move {
7640 let operations = operations.await;
7641 for chunk in split_operations(operations) {
7642 client
7643 .request(proto::UpdateBuffer {
7644 project_id,
7645 buffer_id,
7646 operations: chunk,
7647 })
7648 .await?;
7649 }
7650 anyhow::Ok(())
7651 }
7652 .log_err(),
7653 )
7654 .detach();
7655 }
7656 }
7657 })?;
7658
7659 Ok(response)
7660 }
7661
7662 async fn handle_format_buffers(
7663 this: Model<Self>,
7664 envelope: TypedEnvelope<proto::FormatBuffers>,
7665 _: Arc<Client>,
7666 mut cx: AsyncAppContext,
7667 ) -> Result<proto::FormatBuffersResponse> {
7668 let sender_id = envelope.original_sender_id()?;
7669 let format = this.update(&mut cx, |this, cx| {
7670 let mut buffers = HashSet::default();
7671 for buffer_id in &envelope.payload.buffer_ids {
7672 buffers.insert(
7673 this.opened_buffers
7674 .get(buffer_id)
7675 .and_then(|buffer| buffer.upgrade())
7676 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7677 );
7678 }
7679 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7680 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7681 })??;
7682
7683 let project_transaction = format.await?;
7684 let project_transaction = this.update(&mut cx, |this, cx| {
7685 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7686 })?;
7687 Ok(proto::FormatBuffersResponse {
7688 transaction: Some(project_transaction),
7689 })
7690 }
7691
7692 async fn handle_apply_additional_edits_for_completion(
7693 this: Model<Self>,
7694 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7695 _: Arc<Client>,
7696 mut cx: AsyncAppContext,
7697 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7698 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7699 let buffer = this
7700 .opened_buffers
7701 .get(&envelope.payload.buffer_id)
7702 .and_then(|buffer| buffer.upgrade())
7703 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7704 let language = buffer.read(cx).language();
7705 let completion = language::proto::deserialize_completion(
7706 envelope
7707 .payload
7708 .completion
7709 .ok_or_else(|| anyhow!("invalid completion"))?,
7710 language.cloned(),
7711 );
7712 Ok::<_, anyhow::Error>((buffer, completion))
7713 })??;
7714
7715 let completion = completion.await?;
7716
7717 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7718 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7719 })?;
7720
7721 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7722 transaction: apply_additional_edits
7723 .await?
7724 .as_ref()
7725 .map(language::proto::serialize_transaction),
7726 })
7727 }
7728
7729 async fn handle_apply_code_action(
7730 this: Model<Self>,
7731 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7732 _: Arc<Client>,
7733 mut cx: AsyncAppContext,
7734 ) -> Result<proto::ApplyCodeActionResponse> {
7735 let sender_id = envelope.original_sender_id()?;
7736 let action = language::proto::deserialize_code_action(
7737 envelope
7738 .payload
7739 .action
7740 .ok_or_else(|| anyhow!("invalid action"))?,
7741 )?;
7742 let apply_code_action = this.update(&mut cx, |this, cx| {
7743 let buffer = this
7744 .opened_buffers
7745 .get(&envelope.payload.buffer_id)
7746 .and_then(|buffer| buffer.upgrade())
7747 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7748 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7749 })??;
7750
7751 let project_transaction = apply_code_action.await?;
7752 let project_transaction = this.update(&mut cx, |this, cx| {
7753 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7754 })?;
7755 Ok(proto::ApplyCodeActionResponse {
7756 transaction: Some(project_transaction),
7757 })
7758 }
7759
7760 async fn handle_on_type_formatting(
7761 this: Model<Self>,
7762 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7763 _: Arc<Client>,
7764 mut cx: AsyncAppContext,
7765 ) -> Result<proto::OnTypeFormattingResponse> {
7766 let on_type_formatting = this.update(&mut cx, |this, cx| {
7767 let buffer = this
7768 .opened_buffers
7769 .get(&envelope.payload.buffer_id)
7770 .and_then(|buffer| buffer.upgrade())
7771 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7772 let position = envelope
7773 .payload
7774 .position
7775 .and_then(deserialize_anchor)
7776 .ok_or_else(|| anyhow!("invalid position"))?;
7777 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7778 buffer,
7779 position,
7780 envelope.payload.trigger.clone(),
7781 cx,
7782 ))
7783 })??;
7784
7785 let transaction = on_type_formatting
7786 .await?
7787 .as_ref()
7788 .map(language::proto::serialize_transaction);
7789 Ok(proto::OnTypeFormattingResponse { transaction })
7790 }
7791
7792 async fn handle_inlay_hints(
7793 this: Model<Self>,
7794 envelope: TypedEnvelope<proto::InlayHints>,
7795 _: Arc<Client>,
7796 mut cx: AsyncAppContext,
7797 ) -> Result<proto::InlayHintsResponse> {
7798 let sender_id = envelope.original_sender_id()?;
7799 let buffer = this.update(&mut cx, |this, _| {
7800 this.opened_buffers
7801 .get(&envelope.payload.buffer_id)
7802 .and_then(|buffer| buffer.upgrade())
7803 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7804 })??;
7805 let buffer_version = deserialize_version(&envelope.payload.version);
7806
7807 buffer
7808 .update(&mut cx, |buffer, _| {
7809 buffer.wait_for_version(buffer_version.clone())
7810 })?
7811 .await
7812 .with_context(|| {
7813 format!(
7814 "waiting for version {:?} for buffer {}",
7815 buffer_version,
7816 buffer.entity_id()
7817 )
7818 })?;
7819
7820 let start = envelope
7821 .payload
7822 .start
7823 .and_then(deserialize_anchor)
7824 .context("missing range start")?;
7825 let end = envelope
7826 .payload
7827 .end
7828 .and_then(deserialize_anchor)
7829 .context("missing range end")?;
7830 let buffer_hints = this
7831 .update(&mut cx, |project, cx| {
7832 project.inlay_hints(buffer, start..end, cx)
7833 })?
7834 .await
7835 .context("inlay hints fetch")?;
7836
7837 Ok(this.update(&mut cx, |project, cx| {
7838 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7839 })?)
7840 }
7841
7842 async fn handle_resolve_inlay_hint(
7843 this: Model<Self>,
7844 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7845 _: Arc<Client>,
7846 mut cx: AsyncAppContext,
7847 ) -> Result<proto::ResolveInlayHintResponse> {
7848 let proto_hint = envelope
7849 .payload
7850 .hint
7851 .expect("incorrect protobuf resolve inlay hint message: missing the inlay hint");
7852 let hint = InlayHints::proto_to_project_hint(proto_hint)
7853 .context("resolved proto inlay hint conversion")?;
7854 let buffer = this.update(&mut cx, |this, _cx| {
7855 this.opened_buffers
7856 .get(&envelope.payload.buffer_id)
7857 .and_then(|buffer| buffer.upgrade())
7858 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7859 })??;
7860 let response_hint = this
7861 .update(&mut cx, |project, cx| {
7862 project.resolve_inlay_hint(
7863 hint,
7864 buffer,
7865 LanguageServerId(envelope.payload.language_server_id as usize),
7866 cx,
7867 )
7868 })?
7869 .await
7870            .context("inlay hint resolution")?;
7871 Ok(proto::ResolveInlayHintResponse {
7872 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7873 })
7874 }
7875
7876 async fn handle_refresh_inlay_hints(
7877 this: Model<Self>,
7878 _: TypedEnvelope<proto::RefreshInlayHints>,
7879 _: Arc<Client>,
7880 mut cx: AsyncAppContext,
7881 ) -> Result<proto::Ack> {
7882 this.update(&mut cx, |_, cx| {
7883 cx.emit(Event::RefreshInlayHints);
7884 })?;
7885 Ok(proto::Ack {})
7886 }
7887
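    /// Generic handler for proto requests that wrap an LSP command: resolves the
    /// buffer, deserializes the request via `T::from_proto`, runs it against the
    /// primary language server, and serializes the response back to the sender.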
7888 async fn handle_lsp_command<T: LspCommand>(
7889 this: Model<Self>,
7890 envelope: TypedEnvelope<T::ProtoRequest>,
7891 _: Arc<Client>,
7892 mut cx: AsyncAppContext,
7893 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7894 where
7895 <T::LspRequest as lsp::request::Request>::Params: Send,
7896 <T::LspRequest as lsp::request::Request>::Result: Send,
7897 {
7898 let sender_id = envelope.original_sender_id()?;
7899 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7900 let buffer_handle = this.update(&mut cx, |this, _cx| {
7901 this.opened_buffers
7902 .get(&buffer_id)
7903 .and_then(|buffer| buffer.upgrade())
7904 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7905 })??;
7906 let request = T::from_proto(
7907 envelope.payload,
7908 this.clone(),
7909 buffer_handle.clone(),
7910 cx.clone(),
7911 )
7912 .await?;
7913 let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
7914 let response = this
7915 .update(&mut cx, |this, cx| {
7916 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7917 })?
7918 .await?;
7919 this.update(&mut cx, |this, cx| {
7920 Ok(T::response_to_proto(
7921 response,
7922 this,
7923 sender_id,
7924 &buffer_version,
7925 cx,
7926 ))
7927 })?
7928 }
7929
7930 async fn handle_get_project_symbols(
7931 this: Model<Self>,
7932 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7933 _: Arc<Client>,
7934 mut cx: AsyncAppContext,
7935 ) -> Result<proto::GetProjectSymbolsResponse> {
7936 let symbols = this
7937 .update(&mut cx, |this, cx| {
7938 this.symbols(&envelope.payload.query, cx)
7939 })?
7940 .await?;
7941
7942 Ok(proto::GetProjectSymbolsResponse {
7943 symbols: symbols.iter().map(serialize_symbol).collect(),
7944 })
7945 }
7946
7947 async fn handle_search_project(
7948 this: Model<Self>,
7949 envelope: TypedEnvelope<proto::SearchProject>,
7950 _: Arc<Client>,
7951 mut cx: AsyncAppContext,
7952 ) -> Result<proto::SearchProjectResponse> {
7953 let peer_id = envelope.original_sender_id()?;
7954 let query = SearchQuery::from_proto(envelope.payload)?;
7955 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7956
7957 cx.spawn(move |mut cx| async move {
7958 let mut locations = Vec::new();
7959 while let Some((buffer, ranges)) = result.next().await {
7960 for range in ranges {
7961 let start = serialize_anchor(&range.start);
7962 let end = serialize_anchor(&range.end);
7963 let buffer_id = this.update(&mut cx, |this, cx| {
7964 this.create_buffer_for_peer(&buffer, peer_id, cx)
7965 })?;
7966 locations.push(proto::Location {
7967 buffer_id,
7968 start: Some(start),
7969 end: Some(end),
7970 });
7971 }
7972 }
7973 Ok(proto::SearchProjectResponse { locations })
7974 })
7975 .await
7976 }
7977
7978 async fn handle_open_buffer_for_symbol(
7979 this: Model<Self>,
7980 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7981 _: Arc<Client>,
7982 mut cx: AsyncAppContext,
7983 ) -> Result<proto::OpenBufferForSymbolResponse> {
7984 let peer_id = envelope.original_sender_id()?;
7985 let symbol = envelope
7986 .payload
7987 .symbol
7988 .ok_or_else(|| anyhow!("invalid symbol"))?;
7989 let symbol = this
7990 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
7991 .await?;
7992 let symbol = this.update(&mut cx, |this, _| {
7993 let signature = this.symbol_signature(&symbol.path);
7994 if signature == symbol.signature {
7995 Ok(symbol)
7996 } else {
7997 Err(anyhow!("invalid symbol signature"))
7998 }
7999 })??;
8000 let buffer = this
8001 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
8002 .await?;
8003
8004 Ok(proto::OpenBufferForSymbolResponse {
8005 buffer_id: this.update(&mut cx, |this, cx| {
8006 this.create_buffer_for_peer(&buffer, peer_id, cx)
8007 })?,
8008 })
8009 }
8010
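    /// Computes a SHA-256 digest over the symbol's worktree id, path, and this
    /// project's nonce, used to verify that symbols received from peers were
    /// originally produced by this project.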
8011 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
8012 let mut hasher = Sha256::new();
8013 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
8014 hasher.update(project_path.path.to_string_lossy().as_bytes());
8015 hasher.update(self.nonce.to_be_bytes());
8016 hasher.finalize().as_slice().try_into().unwrap()
8017 }
8018
8019 async fn handle_open_buffer_by_id(
8020 this: Model<Self>,
8021 envelope: TypedEnvelope<proto::OpenBufferById>,
8022 _: Arc<Client>,
8023 mut cx: AsyncAppContext,
8024 ) -> Result<proto::OpenBufferResponse> {
8025 let peer_id = envelope.original_sender_id()?;
8026 let buffer = this
8027 .update(&mut cx, |this, cx| {
8028 this.open_buffer_by_id(envelope.payload.id, cx)
8029 })?
8030 .await?;
8031 this.update(&mut cx, |this, cx| {
8032 Ok(proto::OpenBufferResponse {
8033 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
8034 })
8035 })?
8036 }
8037
8038 async fn handle_open_buffer_by_path(
8039 this: Model<Self>,
8040 envelope: TypedEnvelope<proto::OpenBufferByPath>,
8041 _: Arc<Client>,
8042 mut cx: AsyncAppContext,
8043 ) -> Result<proto::OpenBufferResponse> {
8044 let peer_id = envelope.original_sender_id()?;
8045 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8046 let open_buffer = this.update(&mut cx, |this, cx| {
8047 this.open_buffer(
8048 ProjectPath {
8049 worktree_id,
8050 path: PathBuf::from(envelope.payload.path).into(),
8051 },
8052 cx,
8053 )
8054 })?;
8055
8056 let buffer = open_buffer.await?;
8057 this.update(&mut cx, |this, cx| {
8058 Ok(proto::OpenBufferResponse {
8059 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
8060 })
8061 })?
8062 }
8063
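    /// Converts a `ProjectTransaction` into its proto representation, registering
    /// each affected buffer with the given peer so it can be opened remotely.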
8064 fn serialize_project_transaction_for_peer(
8065 &mut self,
8066 project_transaction: ProjectTransaction,
8067 peer_id: proto::PeerId,
8068 cx: &mut AppContext,
8069 ) -> proto::ProjectTransaction {
8070 let mut serialized_transaction = proto::ProjectTransaction {
8071 buffer_ids: Default::default(),
8072 transactions: Default::default(),
8073 };
8074 for (buffer, transaction) in project_transaction.0 {
8075 serialized_transaction
8076 .buffer_ids
8077 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
8078 serialized_transaction
8079 .transactions
8080 .push(language::proto::serialize_transaction(&transaction));
8081 }
8082 serialized_transaction
8083 }
8084
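    /// Reconstructs a `ProjectTransaction` received from the host, waiting for each
    /// remote buffer and its edits to arrive and optionally pushing the transactions
    /// onto the buffers' undo histories.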
8085 fn deserialize_project_transaction(
8086 &mut self,
8087 message: proto::ProjectTransaction,
8088 push_to_history: bool,
8089 cx: &mut ModelContext<Self>,
8090 ) -> Task<Result<ProjectTransaction>> {
8091 cx.spawn(move |this, mut cx| async move {
8092 let mut project_transaction = ProjectTransaction::default();
8093 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
8094 {
8095 let buffer = this
8096 .update(&mut cx, |this, cx| {
8097 this.wait_for_remote_buffer(buffer_id, cx)
8098 })?
8099 .await?;
8100 let transaction = language::proto::deserialize_transaction(transaction)?;
8101 project_transaction.0.insert(buffer, transaction);
8102 }
8103
8104 for (buffer, transaction) in &project_transaction.0 {
8105 buffer
8106 .update(&mut cx, |buffer, _| {
8107 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
8108 })?
8109 .await?;
8110
8111 if push_to_history {
8112 buffer.update(&mut cx, |buffer, _| {
8113 buffer.push_transaction(transaction.clone(), Instant::now());
8114 })?;
8115 }
8116 }
8117
8118 Ok(project_transaction)
8119 })
8120 }
8121
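    /// Returns the buffer's remote id and, if this project is shared, queues an
    /// update so the buffer's state is sent to the given peer.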
8122 fn create_buffer_for_peer(
8123 &mut self,
8124 buffer: &Model<Buffer>,
8125 peer_id: proto::PeerId,
8126 cx: &mut AppContext,
8127 ) -> u64 {
8128 let buffer_id = buffer.read(cx).remote_id();
8129 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
8130 updates_tx
8131 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
8132 .ok();
8133 }
8134 buffer_id
8135 }
8136
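    /// Waits until the buffer with the given remote id has been opened, marking it
    /// as incomplete so the host can be asked to re-send it if necessary. Fails if
    /// the project is dropped or disconnected first.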
8137 fn wait_for_remote_buffer(
8138 &mut self,
8139 id: u64,
8140 cx: &mut ModelContext<Self>,
8141 ) -> Task<Result<Model<Buffer>>> {
8142 let mut opened_buffer_rx = self.opened_buffer.1.clone();
8143
8144 cx.spawn(move |this, mut cx| async move {
8145 let buffer = loop {
8146 let Some(this) = this.upgrade() else {
8147 return Err(anyhow!("project dropped"));
8148 };
8149
8150 let buffer = this.update(&mut cx, |this, _cx| {
8151 this.opened_buffers
8152 .get(&id)
8153 .and_then(|buffer| buffer.upgrade())
8154 })?;
8155
8156 if let Some(buffer) = buffer {
8157 break buffer;
8158 } else if this.update(&mut cx, |this, _| this.is_disconnected())? {
8159 return Err(anyhow!("disconnected before buffer {} could be opened", id));
8160 }
8161
8162 this.update(&mut cx, |this, _| {
8163 this.incomplete_remote_buffers.entry(id).or_default();
8164 })?;
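                // Release the strong project handle before awaiting so we don't
                // keep the project alive while waiting for the buffer.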
8165 drop(this);
8166
8167 opened_buffer_rx
8168 .next()
8169 .await
8170 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
8171 };
8172
8173 Ok(buffer)
8174 })
8175 }
8176
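    /// On a remote project, reports the local version of every open buffer to the
    /// host, pushes any local operations the host is missing, and re-requests any
    /// buffers that never finished opening.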
8177 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8178 let project_id = match self.client_state {
8179 ProjectClientState::Remote {
8180 sharing_has_stopped,
8181 remote_id,
8182 ..
8183 } => {
8184 if sharing_has_stopped {
8185 return Task::ready(Err(anyhow!(
8186 "can't synchronize remote buffers on a readonly project"
8187 )));
8188 } else {
8189 remote_id
8190 }
8191 }
8192 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8193 return Task::ready(Err(anyhow!(
8194 "can't synchronize remote buffers on a local project"
8195 )))
8196 }
8197 };
8198
8199 let client = self.client.clone();
8200 cx.spawn(move |this, mut cx| async move {
8201 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8202 let buffers = this
8203 .opened_buffers
8204 .iter()
8205 .filter_map(|(id, buffer)| {
8206 let buffer = buffer.upgrade()?;
8207 Some(proto::BufferVersion {
8208 id: *id,
8209 version: language::proto::serialize_version(&buffer.read(cx).version),
8210 })
8211 })
8212 .collect();
8213 let incomplete_buffer_ids = this
8214 .incomplete_remote_buffers
8215 .keys()
8216 .copied()
8217 .collect::<Vec<_>>();
8218
8219 (buffers, incomplete_buffer_ids)
8220 })?;
8221 let response = client
8222 .request(proto::SynchronizeBuffers {
8223 project_id,
8224 buffers,
8225 })
8226 .await?;
8227
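            // For every buffer the host reported, send back any local operations it
            // is missing.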
8228 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8229 response
8230 .buffers
8231 .into_iter()
8232 .map(|buffer| {
8233 let client = client.clone();
8234 let buffer_id = buffer.id;
8235 let remote_version = language::proto::deserialize_version(&buffer.version);
8236 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8237 let operations =
8238 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8239 cx.background_executor().spawn(async move {
8240 let operations = operations.await;
8241 for chunk in split_operations(operations) {
8242 client
8243 .request(proto::UpdateBuffer {
8244 project_id,
8245 buffer_id,
8246 operations: chunk,
8247 })
8248 .await?;
8249 }
8250 anyhow::Ok(())
8251 })
8252 } else {
8253 Task::ready(Ok(()))
8254 }
8255 })
8256 .collect::<Vec<_>>()
8257 })?;
8258
8259            // Any incomplete buffers have open requests waiting. Ask the host to
8260            // re-create these buffers for us so that any waiting futures are unblocked.
8261 for id in incomplete_buffer_ids {
8262 cx.background_executor()
8263 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8264 .detach();
8265 }
8266
8267 futures::future::join_all(send_updates_for_buffers)
8268 .await
8269 .into_iter()
8270 .collect()
8271 })
8272 }
8273
8274 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8275 self.worktrees()
8276 .map(|worktree| {
8277 let worktree = worktree.read(cx);
8278 proto::WorktreeMetadata {
8279 id: worktree.id().to_proto(),
8280 root_name: worktree.root_name().into(),
8281 visible: worktree.is_visible(),
8282 abs_path: worktree.abs_path().to_string_lossy().into(),
8283 }
8284 })
8285 .collect()
8286 }
8287
8288 fn set_worktrees_from_proto(
8289 &mut self,
8290 worktrees: Vec<proto::WorktreeMetadata>,
8291 cx: &mut ModelContext<Project>,
8292 ) -> Result<()> {
8293 let replica_id = self.replica_id();
8294 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8295
8296 let mut old_worktrees_by_id = self
8297 .worktrees
8298 .drain(..)
8299 .filter_map(|worktree| {
8300 let worktree = worktree.upgrade()?;
8301 Some((worktree.read(cx).id(), worktree))
8302 })
8303 .collect::<HashMap<_, _>>();
8304
8305 for worktree in worktrees {
8306 if let Some(old_worktree) =
8307 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8308 {
8309 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8310 } else {
8311 let worktree =
8312 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8313 let _ = self.add_worktree(&worktree, cx);
8314 }
8315 }
8316
8317 self.metadata_changed(cx);
8318 for id in old_worktrees_by_id.keys() {
8319 cx.emit(Event::WorktreeRemoved(*id));
8320 }
8321
8322 Ok(())
8323 }
8324
8325 fn set_collaborators_from_proto(
8326 &mut self,
8327 messages: Vec<proto::Collaborator>,
8328 cx: &mut ModelContext<Self>,
8329 ) -> Result<()> {
8330 let mut collaborators = HashMap::default();
8331 for message in messages {
8332 let collaborator = Collaborator::from_proto(message)?;
8333 collaborators.insert(collaborator.peer_id, collaborator);
8334 }
8335 for old_peer_id in self.collaborators.keys() {
8336 if !collaborators.contains_key(old_peer_id) {
8337 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8338 }
8339 }
8340 self.collaborators = collaborators;
8341 Ok(())
8342 }
8343
8344 fn deserialize_symbol(
8345 &self,
8346 serialized_symbol: proto::Symbol,
8347 ) -> impl Future<Output = Result<Symbol>> {
8348 let languages = self.languages.clone();
8349 async move {
8350 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8351 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8352 let start = serialized_symbol
8353 .start
8354 .ok_or_else(|| anyhow!("invalid start"))?;
8355 let end = serialized_symbol
8356 .end
8357 .ok_or_else(|| anyhow!("invalid end"))?;
8358 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8359 let path = ProjectPath {
8360 worktree_id,
8361 path: PathBuf::from(serialized_symbol.path).into(),
8362 };
8363 let language = languages
8364 .language_for_file(&path.path, None)
8365 .await
8366 .log_err();
8367 Ok(Symbol {
8368 language_server_name: LanguageServerName(
8369 serialized_symbol.language_server_name.into(),
8370 ),
8371 source_worktree_id,
8372 path,
8373 label: {
8374 match language {
8375 Some(language) => {
8376 language
8377 .label_for_symbol(&serialized_symbol.name, kind)
8378 .await
8379 }
8380 None => None,
8381 }
8382 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8383 },
8384
8385 name: serialized_symbol.name,
8386 range: Unclipped(PointUtf16::new(start.row, start.column))
8387 ..Unclipped(PointUtf16::new(end.row, end.column)),
8388 kind,
8389 signature: serialized_symbol
8390 .signature
8391 .try_into()
8392 .map_err(|_| anyhow!("invalid signature"))?,
8393 })
8394 }
8395 }
8396
8397 async fn handle_buffer_saved(
8398 this: Model<Self>,
8399 envelope: TypedEnvelope<proto::BufferSaved>,
8400 _: Arc<Client>,
8401 mut cx: AsyncAppContext,
8402 ) -> Result<()> {
8403 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8404 let version = deserialize_version(&envelope.payload.version);
8405 let mtime = envelope
8406 .payload
8407 .mtime
8408 .ok_or_else(|| anyhow!("missing mtime"))?
8409 .into();
8410
8411 this.update(&mut cx, |this, cx| {
8412 let buffer = this
8413 .opened_buffers
8414 .get(&envelope.payload.buffer_id)
8415 .and_then(|buffer| buffer.upgrade())
8416 .or_else(|| {
8417 this.incomplete_remote_buffers
8418 .get(&envelope.payload.buffer_id)
8419 .and_then(|b| b.clone())
8420 });
8421 if let Some(buffer) = buffer {
8422 buffer.update(cx, |buffer, cx| {
8423 buffer.did_save(version, fingerprint, mtime, cx);
8424 });
8425 }
8426 Ok(())
8427 })?
8428 }
8429
8430 async fn handle_buffer_reloaded(
8431 this: Model<Self>,
8432 envelope: TypedEnvelope<proto::BufferReloaded>,
8433 _: Arc<Client>,
8434 mut cx: AsyncAppContext,
8435 ) -> Result<()> {
8436 let payload = envelope.payload;
8437 let version = deserialize_version(&payload.version);
8438 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8439 let line_ending = deserialize_line_ending(
8440 proto::LineEnding::from_i32(payload.line_ending)
8441 .ok_or_else(|| anyhow!("missing line ending"))?,
8442 );
8443 let mtime = payload
8444 .mtime
8445 .ok_or_else(|| anyhow!("missing mtime"))?
8446 .into();
8447 this.update(&mut cx, |this, cx| {
8448 let buffer = this
8449 .opened_buffers
8450 .get(&payload.buffer_id)
8451 .and_then(|buffer| buffer.upgrade())
8452 .or_else(|| {
8453 this.incomplete_remote_buffers
8454 .get(&payload.buffer_id)
8455 .cloned()
8456 .flatten()
8457 });
8458 if let Some(buffer) = buffer {
8459 buffer.update(cx, |buffer, cx| {
8460 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8461 });
8462 }
8463 Ok(())
8464 })?
8465 }
8466
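    /// Converts LSP `TextEdit`s into anchored buffer edits against the snapshot the
    /// server's edits were computed for: clips invalid ranges, merges adjacent or
    /// newline-separated edits, and diffs multi-line replacements so that anchors in
    /// unchanged regions are preserved.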
8467 #[allow(clippy::type_complexity)]
8468 fn edits_from_lsp(
8469 &mut self,
8470 buffer: &Model<Buffer>,
8471 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8472 server_id: LanguageServerId,
8473 version: Option<i32>,
8474 cx: &mut ModelContext<Self>,
8475 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8476 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8477 cx.background_executor().spawn(async move {
8478 let snapshot = snapshot?;
8479 let mut lsp_edits = lsp_edits
8480 .into_iter()
8481 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8482 .collect::<Vec<_>>();
8483 lsp_edits.sort_by_key(|(range, _)| range.start);
8484
8485 let mut lsp_edits = lsp_edits.into_iter().peekable();
8486 let mut edits = Vec::new();
8487 while let Some((range, mut new_text)) = lsp_edits.next() {
8488 // Clip invalid ranges provided by the language server.
8489 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8490 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8491
8492 // Combine any LSP edits that are adjacent.
8493 //
8494 // Also, combine LSP edits that are separated from each other by only
8495 // a newline. This is important because for some code actions,
8496 // Rust-analyzer rewrites the entire buffer via a series of edits that
8497 // are separated by unchanged newline characters.
8498 //
8499 // In order for the diffing logic below to work properly, any edits that
8500 // cancel each other out must be combined into one.
8501 while let Some((next_range, next_text)) = lsp_edits.peek() {
8502 if next_range.start.0 > range.end {
8503 if next_range.start.0.row > range.end.row + 1
8504 || next_range.start.0.column > 0
8505 || snapshot.clip_point_utf16(
8506 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8507 Bias::Left,
8508 ) > range.end
8509 {
8510 break;
8511 }
8512 new_text.push('\n');
8513 }
8514 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8515 new_text.push_str(next_text);
8516 lsp_edits.next();
8517 }
8518
8519 // For multiline edits, perform a diff of the old and new text so that
8520 // we can identify the changes more precisely, preserving the locations
8521 // of any anchors positioned in the unchanged regions.
8522 if range.end.row > range.start.row {
8523 let mut offset = range.start.to_offset(&snapshot);
8524 let old_text = snapshot.text_for_range(range).collect::<String>();
8525
8526 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8527 let mut moved_since_edit = true;
8528 for change in diff.iter_all_changes() {
8529 let tag = change.tag();
8530 let value = change.value();
8531 match tag {
8532 ChangeTag::Equal => {
8533 offset += value.len();
8534 moved_since_edit = true;
8535 }
8536 ChangeTag::Delete => {
8537 let start = snapshot.anchor_after(offset);
8538 let end = snapshot.anchor_before(offset + value.len());
8539 if moved_since_edit {
8540 edits.push((start..end, String::new()));
8541 } else {
8542 edits.last_mut().unwrap().0.end = end;
8543 }
8544 offset += value.len();
8545 moved_since_edit = false;
8546 }
8547 ChangeTag::Insert => {
8548 if moved_since_edit {
8549 let anchor = snapshot.anchor_after(offset);
8550 edits.push((anchor..anchor, value.to_string()));
8551 } else {
8552 edits.last_mut().unwrap().1.push_str(value);
8553 }
8554 moved_since_edit = false;
8555 }
8556 }
8557 }
8558 } else if range.end == range.start {
8559 let anchor = snapshot.anchor_after(range.start);
8560 edits.push((anchor..anchor, new_text));
8561 } else {
8562 let edit_start = snapshot.anchor_after(range.start);
8563 let edit_end = snapshot.anchor_before(range.end);
8564 edits.push((edit_start..edit_end, new_text));
8565 }
8566 }
8567
8568 Ok(edits)
8569 })
8570 }
8571
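    /// Returns the buffer snapshot matching the document version reported by the
    /// language server, pruning snapshots older than `OLD_VERSIONS_TO_RETAIN`. Falls
    /// back to the current text snapshot when no version is given.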
8572 fn buffer_snapshot_for_lsp_version(
8573 &mut self,
8574 buffer: &Model<Buffer>,
8575 server_id: LanguageServerId,
8576 version: Option<i32>,
8577 cx: &AppContext,
8578 ) -> Result<TextBufferSnapshot> {
8579 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8580
8581 if let Some(version) = version {
8582 let buffer_id = buffer.read(cx).remote_id();
8583 let snapshots = self
8584 .buffer_snapshots
8585 .get_mut(&buffer_id)
8586 .and_then(|m| m.get_mut(&server_id))
8587 .ok_or_else(|| {
8588 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8589 })?;
8590
8591 let found_snapshot = snapshots
8592 .binary_search_by_key(&version, |e| e.version)
8593 .map(|ix| snapshots[ix].snapshot.clone())
8594 .map_err(|_| {
8595 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8596 })?;
8597
8598 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8599 Ok(found_snapshot)
8600 } else {
8601            Ok(buffer.read(cx).text_snapshot())
8602 }
8603 }
8604
8605 pub fn language_servers(
8606 &self,
8607 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8608 self.language_server_ids
8609 .iter()
8610 .map(|((worktree_id, server_name), server_id)| {
8611 (*server_id, server_name.clone(), *worktree_id)
8612 })
8613 }
8614
8615 pub fn supplementary_language_servers(
8616 &self,
8617 ) -> impl '_
8618 + Iterator<
8619 Item = (
8620 &LanguageServerId,
8621 &(LanguageServerName, Arc<LanguageServer>),
8622 ),
8623 > {
8624 self.supplementary_language_servers.iter()
8625 }
8626
8627 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8628 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8629 Some(server.clone())
8630 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8631 Some(Arc::clone(server))
8632 } else {
8633 None
8634 }
8635 }
8636
8637 pub fn language_servers_for_buffer(
8638 &self,
8639 buffer: &Buffer,
8640 cx: &AppContext,
8641 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8642 self.language_server_ids_for_buffer(buffer, cx)
8643 .into_iter()
8644 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8645 LanguageServerState::Running {
8646 adapter, server, ..
8647 } => Some((adapter, server)),
8648 _ => None,
8649 })
8650 }
8651
8652 fn primary_language_server_for_buffer(
8653 &self,
8654 buffer: &Buffer,
8655 cx: &AppContext,
8656 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8657 self.language_servers_for_buffer(buffer, cx).next()
8658 }
8659
8660 pub fn language_server_for_buffer(
8661 &self,
8662 buffer: &Buffer,
8663 server_id: LanguageServerId,
8664 cx: &AppContext,
8665 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8666 self.language_servers_for_buffer(buffer, cx)
8667 .find(|(_, s)| s.server_id() == server_id)
8668 }
8669
8670 fn language_server_ids_for_buffer(
8671 &self,
8672 buffer: &Buffer,
8673 cx: &AppContext,
8674 ) -> Vec<LanguageServerId> {
8675 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8676 let worktree_id = file.worktree_id(cx);
8677 language
8678 .lsp_adapters()
8679 .iter()
8680 .flat_map(|adapter| {
8681 let key = (worktree_id, adapter.name.clone());
8682 self.language_server_ids.get(&key).copied()
8683 })
8684 .collect()
8685 } else {
8686 Vec::new()
8687 }
8688 }
8689}
8690
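/// Subscribes to Copilot events so that, once its language server starts, it is
/// registered as a supplementary language server and its log messages are surfaced
/// as `LanguageServerLog` events.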
8691fn subscribe_for_copilot_events(
8692 copilot: &Model<Copilot>,
8693 cx: &mut ModelContext<'_, Project>,
8694) -> gpui::Subscription {
8695 cx.subscribe(
8696 copilot,
8697 |project, copilot, copilot_event, cx| match copilot_event {
8698 copilot::Event::CopilotLanguageServerStarted => {
8699 match copilot.read(cx).language_server() {
8700 Some((name, copilot_server)) => {
8701                        // Another event may try to re-add a server that was already added and subscribed to; avoid doing it again.
8702 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8703 let new_server_id = copilot_server.server_id();
8704 let weak_project = cx.weak_model();
8705 let copilot_log_subscription = copilot_server
8706 .on_notification::<copilot::request::LogMessage, _>(
8707 move |params, mut cx| {
8708 weak_project.update(&mut cx, |_, cx| {
8709 cx.emit(Event::LanguageServerLog(
8710 new_server_id,
8711 params.message,
8712 ));
8713 }).ok();
8714 },
8715 );
8716 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8717 project.copilot_log_subscription = Some(copilot_log_subscription);
8718 cx.emit(Event::LanguageServerAdded(new_server_id));
8719 }
8720 }
8721 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8722 }
8723 }
8724 },
8725 )
8726}
8727
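/// Returns the longest leading run of path components in `glob` that contains no
/// glob metacharacters (`*`, `?`, `{`, `}`). For example, on platforms where `/` is
/// the path separator, the literal prefix of `node_modules/**/*.js` is
/// `node_modules`.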
8728fn glob_literal_prefix(glob: &str) -> &str {
8729 let mut literal_end = 0;
8730 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8731 if part.contains(&['*', '?', '{', '}']) {
8732 break;
8733 } else {
8734 if i > 0 {
8735 // Account for separator prior to this part
8736 literal_end += path::MAIN_SEPARATOR.len_utf8();
8737 }
8738 literal_end += part.len();
8739 }
8740 }
8741 &glob[..literal_end]
8742}
8743
8744impl WorktreeHandle {
8745 pub fn upgrade(&self) -> Option<Model<Worktree>> {
8746 match self {
8747 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8748 WorktreeHandle::Weak(handle) => handle.upgrade(),
8749 }
8750 }
8751
8752 pub fn handle_id(&self) -> usize {
8753 match self {
8754 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8755 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8756 }
8757 }
8758}
8759
8760impl OpenBuffer {
8761 pub fn upgrade(&self) -> Option<Model<Buffer>> {
8762 match self {
8763 OpenBuffer::Strong(handle) => Some(handle.clone()),
8764 OpenBuffer::Weak(handle) => handle.upgrade(),
8765 OpenBuffer::Operations(_) => None,
8766 }
8767 }
8768}
8769
8770pub struct PathMatchCandidateSet {
8771 pub snapshot: Snapshot,
8772 pub include_ignored: bool,
8773 pub include_root_name: bool,
8774}
8775
8776impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8777 type Candidates = PathMatchCandidateSetIter<'a>;
8778
8779 fn id(&self) -> usize {
8780 self.snapshot.id().to_usize()
8781 }
8782
8783 fn len(&self) -> usize {
8784 if self.include_ignored {
8785 self.snapshot.file_count()
8786 } else {
8787 self.snapshot.visible_file_count()
8788 }
8789 }
8790
8791 fn prefix(&self) -> Arc<str> {
8792 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8793 self.snapshot.root_name().into()
8794 } else if self.include_root_name {
8795 format!("{}/", self.snapshot.root_name()).into()
8796 } else {
8797 "".into()
8798 }
8799 }
8800
8801 fn candidates(&'a self, start: usize) -> Self::Candidates {
8802 PathMatchCandidateSetIter {
8803 traversal: self.snapshot.files(self.include_ignored, start),
8804 }
8805 }
8806}
8807
8808pub struct PathMatchCandidateSetIter<'a> {
8809 traversal: Traversal<'a>,
8810}
8811
8812impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8813 type Item = fuzzy::PathMatchCandidate<'a>;
8814
8815 fn next(&mut self) -> Option<Self::Item> {
8816 self.traversal.next().map(|entry| {
8817 if let EntryKind::File(char_bag) = entry.kind {
8818 fuzzy::PathMatchCandidate {
8819 path: &entry.path,
8820 char_bag,
8821 }
8822 } else {
8823 unreachable!()
8824 }
8825 })
8826 }
8827}
8828
8829impl EventEmitter<Event> for Project {}
8830
8831impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8832 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8833 Self {
8834 worktree_id,
8835 path: path.as_ref().into(),
8836 }
8837 }
8838}
8839
8840impl ProjectLspAdapterDelegate {
8841 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8842 Arc::new(Self {
8843 project: cx.handle(),
8844 http_client: project.client.http_client(),
8845 })
8846 }
8847}
8848
8849impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8850 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8851 self.project
8852 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8853 }
8854
8855 fn http_client(&self) -> Arc<dyn HttpClient> {
8856 self.http_client.clone()
8857 }
8858}
8859
8860fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8861 proto::Symbol {
8862 language_server_name: symbol.language_server_name.0.to_string(),
8863 source_worktree_id: symbol.source_worktree_id.to_proto(),
8864 worktree_id: symbol.path.worktree_id.to_proto(),
8865 path: symbol.path.path.to_string_lossy().to_string(),
8866 name: symbol.name.clone(),
8867 kind: unsafe { mem::transmute(symbol.kind) },
8868 start: Some(proto::PointUtf16 {
8869 row: symbol.range.start.0.row,
8870 column: symbol.range.start.0.column,
8871 }),
8872 end: Some(proto::PointUtf16 {
8873 row: symbol.range.end.0.row,
8874 column: symbol.range.end.0.column,
8875 }),
8876 signature: symbol.signature.to_vec(),
8877 }
8878}
8879
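/// Computes `path` relative to `base`, inserting `..` components where needed; for
/// example, relativizing `/a/b/c/d.rs` against `/a/b` yields `c/d.rs`.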
8880fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8881 let mut path_components = path.components();
8882 let mut base_components = base.components();
8883 let mut components: Vec<Component> = Vec::new();
8884 loop {
8885 match (path_components.next(), base_components.next()) {
8886 (None, None) => break,
8887 (Some(a), None) => {
8888 components.push(a);
8889 components.extend(path_components.by_ref());
8890 break;
8891 }
8892 (None, _) => components.push(Component::ParentDir),
8893 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8894 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8895 (Some(a), Some(_)) => {
8896 components.push(Component::ParentDir);
8897 for _ in base_components {
8898 components.push(Component::ParentDir);
8899 }
8900 components.push(a);
8901 components.extend(path_components.by_ref());
8902 break;
8903 }
8904 }
8905 }
8906 components.iter().map(|c| c.as_os_str()).collect()
8907}
8908
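/// Joins `path` onto `base`, resolving `.` and `..` components lexically (without
/// consulting the filesystem); for example, resolving `../c` against `/a/b` yields
/// `/a/c`.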
8909fn resolve_path(base: &Path, path: &Path) -> PathBuf {
8910 let mut result = base.to_path_buf();
8911 for component in path.components() {
8912 match component {
8913 Component::ParentDir => {
8914 result.pop();
8915 }
8916 Component::CurDir => (),
8917 _ => result.push(component),
8918 }
8919 }
8920 result
8921}
8922
8923impl Item for Buffer {
8924 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8925 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8926 }
8927
8928 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8929 File::from_dyn(self.file()).map(|file| ProjectPath {
8930 worktree_id: file.worktree_id(cx),
8931 path: file.path().clone(),
8932 })
8933 }
8934}
8935
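/// Polls the given watch channel until the in-flight buffer load resolves to either
/// a buffer or an error.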
8936async fn wait_for_loading_buffer(
8937 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
8938) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
8939 loop {
8940 if let Some(result) = receiver.borrow().as_ref() {
8941 match result {
8942 Ok(buffer) => return Ok(buffer.to_owned()),
8943 Err(e) => return Err(e.to_owned()),
8944 }
8945 }
8946 receiver.next().await;
8947 }
8948}
8949
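/// Returns whether the language server's declared save options ask for the full
/// document text to be included in `textDocument/didSave` notifications.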
8950fn include_text(server: &lsp::LanguageServer) -> bool {
8951 server
8952 .capabilities()
8953 .text_document_sync
8954 .as_ref()
8955 .and_then(|sync| match sync {
8956 lsp::TextDocumentSyncCapability::Kind(_) => None,
8957 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8958 })
8959 .and_then(|save_options| match save_options {
8960 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
8961 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8962 })
8963 .unwrap_or(false)
8964}