mod ignore;
pub mod lsp_command;
pub mod lsp_ext_command;
mod prettier_support;
pub mod project_settings;
pub mod search;
pub mod terminals;
pub mod worktree;

#[cfg(test)]
mod project_tests;
#[cfg(test)]
mod worktree_tests;

15use anyhow::{anyhow, Context as _, Result};
16use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
17use clock::ReplicaId;
18use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
19use copilot::Copilot;
20use futures::{
21 channel::{
22 mpsc::{self, UnboundedReceiver},
23 oneshot,
24 },
25 future::{try_join_all, Shared},
26 stream::FuturesUnordered,
27 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
28};
29use globset::{Glob, GlobSet, GlobSetBuilder};
30use gpui::{
31 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
32 Model, ModelContext, Task, WeakModel,
33};
34use itertools::Itertools;
35use language::{
36 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
37 point_to_lsp,
38 proto::{
39 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
40 serialize_anchor, serialize_version, split_operations,
41 },
42 range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability,
43 CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff,
44 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
45 LspAdapterDelegate, OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16,
46 TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, Unclipped,
47};
48use log::error;
49use lsp::{
50 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
51 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
52};
53use lsp_command::*;
54use node_runtime::NodeRuntime;
55use parking_lot::Mutex;
56use postage::watch;
57use prettier_support::{DefaultPrettier, PrettierInstance};
58use project_settings::{LspSettings, ProjectSettings};
59use rand::prelude::*;
60use search::SearchQuery;
61use serde::Serialize;
62use settings::{Settings, SettingsStore};
63use sha2::{Digest, Sha256};
64use similar::{ChangeTag, TextDiff};
65use smol::channel::{Receiver, Sender};
66use smol::lock::Semaphore;
67use std::{
68 cmp::{self, Ordering},
69 convert::TryInto,
70 hash::Hash,
71 mem,
72 num::NonZeroU32,
73 ops::Range,
74 path::{self, Component, Path, PathBuf},
75 process::Stdio,
76 str,
77 sync::{
78 atomic::{AtomicUsize, Ordering::SeqCst},
79 Arc,
80 },
81 time::{Duration, Instant},
82};
83use terminals::Terminals;
84use text::Anchor;
85use util::{
86 debug_panic, defer, http::HttpClient, merge_json_value_into,
87 paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
88};
89
90pub use fs::*;
91#[cfg(any(test, feature = "test-support"))]
92pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
93pub use worktree::*;
94
95const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
96
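/// An item that can report the project entry and path backing it, if any.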
97pub trait Item {
98 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
99 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
100}
101
102// Language server state is stored across 3 collections:
103// language_servers =>
104// a mapping from unique server id to LanguageServerState which can either be a task for a
105// server in the process of starting, or a running server with adapter and language server arcs
106// language_server_ids => a mapping from worktreeId and server name to the unique server id
107// language_server_statuses => a mapping from unique server id to the current server status
108//
109// Multiple worktrees can map to the same language server for example when you jump to the definition
110// of a file in the standard library. So language_server_ids is used to look up which server is active
111// for a given worktree and language server name
112//
113// When starting a language server, first the id map is checked to make sure a server isn't already available
114// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and and
115// the Starting variant of LanguageServerState is stored in the language_servers map.
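//
// A rough sketch of that flow (illustrative only; `allocate_server_id` and `start_task`
// are stand-ins for the real id allocation and startup task):
//
//     let key = (worktree_id, adapter.name.clone());
//     if self.language_server_ids.contains_key(&key) {
//         return; // a server is already running for this worktree + adapter
//     }
//     let server_id = allocate_server_id();
//     self.language_server_ids.insert(key, server_id);
//     self.language_servers
//         .insert(server_id, LanguageServerState::Starting(start_task));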
116pub struct Project {
117 worktrees: Vec<WorktreeHandle>,
118 active_entry: Option<ProjectEntryId>,
119 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
120 languages: Arc<LanguageRegistry>,
121 supplementary_language_servers:
122 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
123 language_servers: HashMap<LanguageServerId, LanguageServerState>,
124 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
125 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
126 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
127 client: Arc<client::Client>,
128 next_entry_id: Arc<AtomicUsize>,
129 join_project_response_message_id: u32,
130 next_diagnostic_group_id: usize,
131 user_store: Model<UserStore>,
132 fs: Arc<dyn Fs>,
133 client_state: ProjectClientState,
134 collaborators: HashMap<proto::PeerId, Collaborator>,
135 client_subscriptions: Vec<client::Subscription>,
136 _subscriptions: Vec<gpui::Subscription>,
137 next_buffer_id: u64,
138 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
139 shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
140 #[allow(clippy::type_complexity)]
141 loading_buffers_by_path: HashMap<
142 ProjectPath,
143 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
144 >,
145 #[allow(clippy::type_complexity)]
146 loading_local_worktrees:
147 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
148 opened_buffers: HashMap<u64, OpenBuffer>,
149 local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
150 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// An entry mapping a buffer ID to `None` means we've started waiting for that buffer but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
153 incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>,
154 buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
155 buffers_being_formatted: HashSet<u64>,
156 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
157 git_diff_debouncer: DelayedDebounced,
158 nonce: u128,
159 _maintain_buffer_languages: Task<()>,
160 _maintain_workspace_config: Task<Result<()>>,
161 terminals: Terminals,
162 copilot_lsp_subscription: Option<gpui::Subscription>,
163 copilot_log_subscription: Option<lsp::Subscription>,
164 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
165 node: Option<Arc<dyn NodeRuntime>>,
166 default_prettier: DefaultPrettier,
167 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
168 prettier_instances: HashMap<PathBuf, PrettierInstance>,
169}
170
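/// Debounces a unit of work on the project: each call to `fire_new` cancels the
/// previously scheduled invocation and runs `func` once `delay` has elapsed and any
/// still-running previous task has finished.
///
/// A minimal usage sketch (the closure body is illustrative; `recalculate_buffer_diffs`
/// is a hypothetical helper returning `Task<()>`):
///
/// ```ignore
/// self.git_diff_debouncer
///     .fire_new(Duration::from_millis(250), cx, |project, cx| {
///         project.recalculate_buffer_diffs(cx)
///     });
/// ```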
171struct DelayedDebounced {
172 task: Option<Task<()>>,
173 cancel_channel: Option<oneshot::Sender<()>>,
174}
175
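/// Selects which language server should handle a request: the buffer's primary server,
/// or a specific server identified by id.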
176pub enum LanguageServerToQuery {
177 Primary,
178 Other(LanguageServerId),
179}
180
181impl DelayedDebounced {
182 fn new() -> DelayedDebounced {
183 DelayedDebounced {
184 task: None,
185 cancel_channel: None,
186 }
187 }
188
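    /// Cancels any pending invocation and schedules `func` to run once `delay` has
    /// elapsed and the previously spawned task (if any) has completed.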
189 fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
190 where
191 F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
192 {
193 if let Some(channel) = self.cancel_channel.take() {
194 _ = channel.send(());
195 }
196
197 let (sender, mut receiver) = oneshot::channel::<()>();
198 self.cancel_channel = Some(sender);
199
200 let previous_task = self.task.take();
201 self.task = Some(cx.spawn(move |project, mut cx| async move {
202 let mut timer = cx.background_executor().timer(delay).fuse();
203 if let Some(previous_task) = previous_task {
204 previous_task.await;
205 }
206
207 futures::select_biased! {
208 _ = receiver => return,
209 _ = timer => {}
210 }
211
212 if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
213 task.await;
214 }
215 }));
216 }
217}
218
219struct LspBufferSnapshot {
220 version: i32,
221 snapshot: TextBufferSnapshot,
222}
223
224/// Message ordered with respect to buffer operations
225enum BufferOrderedMessage {
226 Operation {
227 buffer_id: u64,
228 operation: proto::Operation,
229 },
230 LanguageServerUpdate {
231 language_server_id: LanguageServerId,
232 message: proto::update_language_server::Variant,
233 },
234 Resync,
235}
236
237enum LocalProjectUpdate {
238 WorktreesChanged,
239 CreateBufferForPeer {
240 peer_id: proto::PeerId,
241 buffer_id: u64,
242 },
243}
244
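/// The project's handle to an open buffer. Buffers are held strongly while the project
/// is shared and weakly otherwise; `Operations` holds operations received for a buffer
/// before the buffer itself is available.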
245enum OpenBuffer {
246 Strong(Model<Buffer>),
247 Weak(WeakModel<Buffer>),
248 Operations(Vec<Operation>),
249}
250
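/// Worktrees are held strongly while the project is shared (or the worktree is visible)
/// and weakly otherwise.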
251#[derive(Clone)]
252enum WorktreeHandle {
253 Strong(Model<Worktree>),
254 Weak(WeakModel<Worktree>),
255}
256
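/// Whether this project is purely local, shared by this client as the host, or a remote
/// project joined from another host.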
257#[derive(Debug)]
258enum ProjectClientState {
259 Local,
260 Shared {
261 remote_id: u64,
262 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
263 _send_updates: Task<Result<()>>,
264 },
265 Remote {
266 sharing_has_stopped: bool,
267 capability: Capability,
268 remote_id: u64,
269 replica_id: ReplicaId,
270 },
271}
272
273#[derive(Clone, Debug, PartialEq)]
274pub enum Event {
275 LanguageServerAdded(LanguageServerId),
276 LanguageServerRemoved(LanguageServerId),
277 LanguageServerLog(LanguageServerId, String),
278 Notification(String),
279 ActiveEntryChanged(Option<ProjectEntryId>),
280 ActivateProjectPanel,
281 WorktreeAdded,
282 WorktreeRemoved(WorktreeId),
283 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
284 DiskBasedDiagnosticsStarted {
285 language_server_id: LanguageServerId,
286 },
287 DiskBasedDiagnosticsFinished {
288 language_server_id: LanguageServerId,
289 },
290 DiagnosticsUpdated {
291 path: ProjectPath,
292 language_server_id: LanguageServerId,
293 },
294 RemoteIdChanged(Option<u64>),
295 DisconnectedFromHost,
296 Closed,
297 DeletedEntry(ProjectEntryId),
298 CollaboratorUpdated {
299 old_peer_id: proto::PeerId,
300 new_peer_id: proto::PeerId,
301 },
302 CollaboratorJoined(proto::PeerId),
303 CollaboratorLeft(proto::PeerId),
304 RefreshInlayHints,
305 RevealInProjectPanel(ProjectEntryId),
306}
307
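/// A language server that is either still starting (a task resolving to the server, or
/// `None` if startup failed) or fully running.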
308pub enum LanguageServerState {
309 Starting(Task<Option<Arc<LanguageServer>>>),
310
311 Running {
312 language: Arc<Language>,
313 adapter: Arc<CachedLspAdapter>,
314 server: Arc<LanguageServer>,
315 watched_paths: HashMap<WorktreeId, GlobSet>,
316 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
317 },
318}
319
320#[derive(Serialize)]
321pub struct LanguageServerStatus {
322 pub name: String,
323 pub pending_work: BTreeMap<String, LanguageServerProgress>,
324 pub has_pending_diagnostic_updates: bool,
325 progress_tokens: HashSet<String>,
326}
327
328#[derive(Clone, Debug, Serialize)]
329pub struct LanguageServerProgress {
330 pub message: Option<String>,
331 pub percentage: Option<usize>,
332 #[serde(skip_serializing)]
333 pub last_update_at: Instant,
334}
335
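/// A path to an entry, relative to the root of the worktree identified by `worktree_id`.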
336#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
337pub struct ProjectPath {
338 pub worktree_id: WorktreeId,
339 pub path: Arc<Path>,
340}
341
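/// Counts of primary error and warning diagnostics reported for a single path.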
342#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
343pub struct DiagnosticSummary {
344 pub error_count: usize,
345 pub warning_count: usize,
346}
347
348#[derive(Debug, Clone, PartialEq, Eq, Hash)]
349pub struct Location {
350 pub buffer: Model<Buffer>,
351 pub range: Range<language::Anchor>,
352}
353
354#[derive(Debug, Clone, PartialEq, Eq)]
355pub struct InlayHint {
356 pub position: language::Anchor,
357 pub label: InlayHintLabel,
358 pub kind: Option<InlayHintKind>,
359 pub padding_left: bool,
360 pub padding_right: bool,
361 pub tooltip: Option<InlayHintTooltip>,
362 pub resolve_state: ResolveState,
363}
364
365#[derive(Debug, Clone, PartialEq, Eq)]
366pub enum ResolveState {
367 Resolved,
368 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
369 Resolving,
370}
371
372impl InlayHint {
373 pub fn text(&self) -> String {
374 match &self.label {
375 InlayHintLabel::String(s) => s.to_owned(),
376 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
377 }
378 }
379}
380
381#[derive(Debug, Clone, PartialEq, Eq)]
382pub enum InlayHintLabel {
383 String(String),
384 LabelParts(Vec<InlayHintLabelPart>),
385}
386
387#[derive(Debug, Clone, PartialEq, Eq)]
388pub struct InlayHintLabelPart {
389 pub value: String,
390 pub tooltip: Option<InlayHintLabelPartTooltip>,
391 pub location: Option<(LanguageServerId, lsp::Location)>,
392}
393
394#[derive(Debug, Clone, PartialEq, Eq)]
395pub enum InlayHintTooltip {
396 String(String),
397 MarkupContent(MarkupContent),
398}
399
400#[derive(Debug, Clone, PartialEq, Eq)]
401pub enum InlayHintLabelPartTooltip {
402 String(String),
403 MarkupContent(MarkupContent),
404}
405
406#[derive(Debug, Clone, PartialEq, Eq)]
407pub struct MarkupContent {
408 pub kind: HoverBlockKind,
409 pub value: String,
410}
411
412#[derive(Debug, Clone)]
413pub struct LocationLink {
414 pub origin: Option<Location>,
415 pub target: Location,
416}
417
418#[derive(Debug)]
419pub struct DocumentHighlight {
420 pub range: Range<language::Anchor>,
421 pub kind: DocumentHighlightKind,
422}
423
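/// A symbol reported by a language server, along with the worktree and path it was
/// found in so that its definition can be opened.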
424#[derive(Clone, Debug)]
425pub struct Symbol {
426 pub language_server_name: LanguageServerName,
427 pub source_worktree_id: WorktreeId,
428 pub path: ProjectPath,
429 pub label: CodeLabel,
430 pub name: String,
431 pub kind: lsp::SymbolKind,
432 pub range: Range<Unclipped<PointUtf16>>,
433 pub signature: [u8; 32],
434}
435
436#[derive(Clone, Debug, PartialEq)]
437pub struct HoverBlock {
438 pub text: String,
439 pub kind: HoverBlockKind,
440}
441
442#[derive(Clone, Debug, PartialEq, Eq)]
443pub enum HoverBlockKind {
444 PlainText,
445 Markdown,
446 Code { language: String },
447}
448
449#[derive(Debug)]
450pub struct Hover {
451 pub contents: Vec<HoverBlock>,
452 pub range: Option<Range<language::Anchor>>,
453 pub language: Option<Arc<Language>>,
454}
455
456impl Hover {
457 pub fn is_empty(&self) -> bool {
458 self.contents.iter().all(|block| block.text.is_empty())
459 }
460}
461
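/// Edits grouped by the buffer they apply to, e.g. the result of a workspace edit
/// performed by a language server.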
462#[derive(Default)]
463pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
464
465impl DiagnosticSummary {
466 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
467 let mut this = Self {
468 error_count: 0,
469 warning_count: 0,
470 };
471
472 for entry in diagnostics {
473 if entry.diagnostic.is_primary {
474 match entry.diagnostic.severity {
475 DiagnosticSeverity::ERROR => this.error_count += 1,
476 DiagnosticSeverity::WARNING => this.warning_count += 1,
477 _ => {}
478 }
479 }
480 }
481
482 this
483 }
484
485 pub fn is_empty(&self) -> bool {
486 self.error_count == 0 && self.warning_count == 0
487 }
488
489 pub fn to_proto(
490 &self,
491 language_server_id: LanguageServerId,
492 path: &Path,
493 ) -> proto::DiagnosticSummary {
494 proto::DiagnosticSummary {
495 path: path.to_string_lossy().to_string(),
496 language_server_id: language_server_id.0 as u64,
497 error_count: self.error_count as u32,
498 warning_count: self.warning_count as u32,
499 }
500 }
501}
502
503#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
504pub struct ProjectEntryId(usize);
505
506impl ProjectEntryId {
507 pub const MAX: Self = Self(usize::MAX);
508
509 pub fn new(counter: &AtomicUsize) -> Self {
510 Self(counter.fetch_add(1, SeqCst))
511 }
512
513 pub fn from_proto(id: u64) -> Self {
514 Self(id as usize)
515 }
516
517 pub fn to_proto(&self) -> u64 {
518 self.0 as u64
519 }
520
521 pub fn to_usize(&self) -> usize {
522 self.0
523 }
524}
525
526#[derive(Debug, Clone, Copy, PartialEq, Eq)]
527pub enum FormatTrigger {
528 Save,
529 Manual,
530}
531
532struct ProjectLspAdapterDelegate {
533 project: Model<Project>,
534 http_client: Arc<dyn HttpClient>,
535}
536
537// Currently, formatting operations are represented differently depending on
538// whether they come from a language server or an external command.
539enum FormatOperation {
540 Lsp(Vec<(Range<Anchor>, String)>),
541 External(Diff),
542 Prettier(Diff),
543}
544
545impl FormatTrigger {
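    /// Converts the wire representation into a trigger; unrecognized values fall back to `Save`.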
546 fn from_proto(value: i32) -> FormatTrigger {
547 match value {
548 0 => FormatTrigger::Save,
549 1 => FormatTrigger::Manual,
550 _ => FormatTrigger::Save,
551 }
552 }
553}

#[derive(Clone, Debug, PartialEq)]
555enum SearchMatchCandidate {
556 OpenBuffer {
557 buffer: Model<Buffer>,
        // This might be an unnamed file without representation on the filesystem
559 path: Option<Arc<Path>>,
560 },
561 Path {
562 worktree_id: WorktreeId,
563 is_ignored: bool,
564 path: Arc<Path>,
565 },
566}
567
568type SearchMatchCandidateIndex = usize;

impl SearchMatchCandidate {
570 fn path(&self) -> Option<Arc<Path>> {
571 match self {
572 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
573 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
574 }
575 }
576}
577
578impl Project {
579 pub fn init_settings(cx: &mut AppContext) {
580 ProjectSettings::register(cx);
581 }
582
583 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
584 Self::init_settings(cx);
585
586 client.add_model_message_handler(Self::handle_add_collaborator);
587 client.add_model_message_handler(Self::handle_update_project_collaborator);
588 client.add_model_message_handler(Self::handle_remove_collaborator);
589 client.add_model_message_handler(Self::handle_buffer_reloaded);
590 client.add_model_message_handler(Self::handle_buffer_saved);
591 client.add_model_message_handler(Self::handle_start_language_server);
592 client.add_model_message_handler(Self::handle_update_language_server);
593 client.add_model_message_handler(Self::handle_update_project);
594 client.add_model_message_handler(Self::handle_unshare_project);
595 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
596 client.add_model_message_handler(Self::handle_update_buffer_file);
597 client.add_model_request_handler(Self::handle_update_buffer);
598 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
599 client.add_model_message_handler(Self::handle_update_worktree);
600 client.add_model_message_handler(Self::handle_update_worktree_settings);
601 client.add_model_request_handler(Self::handle_create_project_entry);
602 client.add_model_request_handler(Self::handle_rename_project_entry);
603 client.add_model_request_handler(Self::handle_copy_project_entry);
604 client.add_model_request_handler(Self::handle_delete_project_entry);
605 client.add_model_request_handler(Self::handle_expand_project_entry);
606 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
607 client.add_model_request_handler(Self::handle_apply_code_action);
608 client.add_model_request_handler(Self::handle_on_type_formatting);
609 client.add_model_request_handler(Self::handle_inlay_hints);
610 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
611 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
612 client.add_model_request_handler(Self::handle_reload_buffers);
613 client.add_model_request_handler(Self::handle_synchronize_buffers);
614 client.add_model_request_handler(Self::handle_format_buffers);
615 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
616 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
617 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
618 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
619 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
620 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
621 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
622 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
623 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
624 client.add_model_request_handler(Self::handle_search_project);
625 client.add_model_request_handler(Self::handle_get_project_symbols);
626 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
627 client.add_model_request_handler(Self::handle_open_buffer_by_id);
628 client.add_model_request_handler(Self::handle_open_buffer_by_path);
629 client.add_model_request_handler(Self::handle_save_buffer);
630 client.add_model_message_handler(Self::handle_update_diff_base);
631 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
632 }
633
634 pub fn local(
635 client: Arc<Client>,
636 node: Arc<dyn NodeRuntime>,
637 user_store: Model<UserStore>,
638 languages: Arc<LanguageRegistry>,
639 fs: Arc<dyn Fs>,
640 cx: &mut AppContext,
641 ) -> Model<Self> {
642 cx.new_model(|cx: &mut ModelContext<Self>| {
643 let (tx, rx) = mpsc::unbounded();
644 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
645 .detach();
646 let copilot_lsp_subscription =
647 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
648 Self {
649 worktrees: Default::default(),
650 buffer_ordered_messages_tx: tx,
651 collaborators: Default::default(),
652 next_buffer_id: 0,
653 opened_buffers: Default::default(),
654 shared_buffers: Default::default(),
655 incomplete_remote_buffers: Default::default(),
656 loading_buffers_by_path: Default::default(),
657 loading_local_worktrees: Default::default(),
658 local_buffer_ids_by_path: Default::default(),
659 local_buffer_ids_by_entry_id: Default::default(),
660 buffer_snapshots: Default::default(),
661 join_project_response_message_id: 0,
662 client_state: ProjectClientState::Local,
663 opened_buffer: watch::channel(),
664 client_subscriptions: Vec::new(),
665 _subscriptions: vec![
666 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
667 cx.on_release(Self::release),
668 cx.on_app_quit(Self::shutdown_language_servers),
669 ],
670 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
671 _maintain_workspace_config: Self::maintain_workspace_config(cx),
672 active_entry: None,
673 languages,
674 client,
675 user_store,
676 fs,
677 next_entry_id: Default::default(),
678 next_diagnostic_group_id: Default::default(),
679 supplementary_language_servers: HashMap::default(),
680 language_servers: Default::default(),
681 language_server_ids: Default::default(),
682 language_server_statuses: Default::default(),
683 last_workspace_edits_by_language_server: Default::default(),
684 buffers_being_formatted: Default::default(),
685 buffers_needing_diff: Default::default(),
686 git_diff_debouncer: DelayedDebounced::new(),
687 nonce: StdRng::from_entropy().gen(),
688 terminals: Terminals {
689 local_handles: Vec::new(),
690 },
691 copilot_lsp_subscription,
692 copilot_log_subscription: None,
693 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
694 node: Some(node),
695 default_prettier: DefaultPrettier::default(),
696 prettiers_per_worktree: HashMap::default(),
697 prettier_instances: HashMap::default(),
698 }
699 })
700 }
701
702 pub async fn remote(
703 remote_id: u64,
704 client: Arc<Client>,
705 user_store: Model<UserStore>,
706 languages: Arc<LanguageRegistry>,
707 fs: Arc<dyn Fs>,
708 role: proto::ChannelRole,
709 mut cx: AsyncAppContext,
710 ) -> Result<Model<Self>> {
711 client.authenticate_and_connect(true, &cx).await?;
712
713 let subscription = client.subscribe_to_entity(remote_id)?;
714 let response = client
715 .request_envelope(proto::JoinProject {
716 project_id: remote_id,
717 })
718 .await?;
719 let this = cx.new_model(|cx| {
720 let replica_id = response.payload.replica_id as ReplicaId;
721
722 let mut worktrees = Vec::new();
723 for worktree in response.payload.worktrees {
724 let worktree =
725 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
726 worktrees.push(worktree);
727 }
728
729 let (tx, rx) = mpsc::unbounded();
730 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
731 .detach();
732 let copilot_lsp_subscription =
733 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
734 let mut this = Self {
735 worktrees: Vec::new(),
736 buffer_ordered_messages_tx: tx,
737 loading_buffers_by_path: Default::default(),
738 next_buffer_id: 0,
739 opened_buffer: watch::channel(),
740 shared_buffers: Default::default(),
741 incomplete_remote_buffers: Default::default(),
742 loading_local_worktrees: Default::default(),
743 local_buffer_ids_by_path: Default::default(),
744 local_buffer_ids_by_entry_id: Default::default(),
745 active_entry: None,
746 collaborators: Default::default(),
747 join_project_response_message_id: response.message_id,
748 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
749 _maintain_workspace_config: Self::maintain_workspace_config(cx),
750 languages,
751 user_store: user_store.clone(),
752 fs,
753 next_entry_id: Default::default(),
754 next_diagnostic_group_id: Default::default(),
755 client_subscriptions: Default::default(),
756 _subscriptions: vec![
757 cx.on_release(Self::release),
758 cx.on_app_quit(Self::shutdown_language_servers),
759 ],
760 client: client.clone(),
761 client_state: ProjectClientState::Remote {
762 sharing_has_stopped: false,
763 capability: Capability::ReadWrite,
764 remote_id,
765 replica_id,
766 },
767 supplementary_language_servers: HashMap::default(),
768 language_servers: Default::default(),
769 language_server_ids: Default::default(),
770 language_server_statuses: response
771 .payload
772 .language_servers
773 .into_iter()
774 .map(|server| {
775 (
776 LanguageServerId(server.id as usize),
777 LanguageServerStatus {
778 name: server.name,
779 pending_work: Default::default(),
780 has_pending_diagnostic_updates: false,
781 progress_tokens: Default::default(),
782 },
783 )
784 })
785 .collect(),
786 last_workspace_edits_by_language_server: Default::default(),
787 opened_buffers: Default::default(),
788 buffers_being_formatted: Default::default(),
789 buffers_needing_diff: Default::default(),
790 git_diff_debouncer: DelayedDebounced::new(),
791 buffer_snapshots: Default::default(),
792 nonce: StdRng::from_entropy().gen(),
793 terminals: Terminals {
794 local_handles: Vec::new(),
795 },
796 copilot_lsp_subscription,
797 copilot_log_subscription: None,
798 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
799 node: None,
800 default_prettier: DefaultPrettier::default(),
801 prettiers_per_worktree: HashMap::default(),
802 prettier_instances: HashMap::default(),
803 };
804 this.set_role(role, cx);
805 for worktree in worktrees {
806 let _ = this.add_worktree(&worktree, cx);
807 }
808 this
809 })?;
810 let subscription = subscription.set_model(&this, &mut cx);
811
812 let user_ids = response
813 .payload
814 .collaborators
815 .iter()
816 .map(|peer| peer.user_id)
817 .collect();
818 user_store
819 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
820 .await?;
821
822 this.update(&mut cx, |this, cx| {
823 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
824 this.client_subscriptions.push(subscription);
825 anyhow::Ok(())
826 })??;
827
828 Ok(this)
829 }
830
831 fn release(&mut self, cx: &mut AppContext) {
832 match &self.client_state {
833 ProjectClientState::Local => {}
834 ProjectClientState::Shared { .. } => {
835 let _ = self.unshare_internal(cx);
836 }
837 ProjectClientState::Remote { remote_id, .. } => {
838 let _ = self.client.send(proto::LeaveProject {
839 project_id: *remote_id,
840 });
841 self.disconnected_from_host_internal(cx);
842 }
843 }
844 }
845
846 fn shutdown_language_servers(
847 &mut self,
848 _cx: &mut ModelContext<Self>,
849 ) -> impl Future<Output = ()> {
850 let shutdown_futures = self
851 .language_servers
852 .drain()
853 .map(|(_, server_state)| async {
854 use LanguageServerState::*;
855 match server_state {
856 Running { server, .. } => server.shutdown()?.await,
857 Starting(task) => task.await?.shutdown()?.await,
858 }
859 })
860 .collect::<Vec<_>>();
861
862 async move {
863 futures::future::join_all(shutdown_futures).await;
864 }
865 }
866
867 #[cfg(any(test, feature = "test-support"))]
868 pub async fn test(
869 fs: Arc<dyn Fs>,
870 root_paths: impl IntoIterator<Item = &Path>,
871 cx: &mut gpui::TestAppContext,
872 ) -> Model<Project> {
873 let mut languages = LanguageRegistry::test();
874 languages.set_executor(cx.executor());
875 let http_client = util::http::FakeHttpClient::with_404_response();
876 let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
877 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
878 let project = cx.update(|cx| {
879 Project::local(
880 client,
881 node_runtime::FakeNodeRuntime::new(),
882 user_store,
883 Arc::new(languages),
884 fs,
885 cx,
886 )
887 });
888 for path in root_paths {
889 let (tree, _) = project
890 .update(cx, |project, cx| {
891 project.find_or_create_local_worktree(path, true, cx)
892 })
893 .await
894 .unwrap();
895 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
896 .await;
897 }
898 project
899 }
900
901 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
902 let mut language_servers_to_start = Vec::new();
903 let mut language_formatters_to_check = Vec::new();
904 for buffer in self.opened_buffers.values() {
905 if let Some(buffer) = buffer.upgrade() {
906 let buffer = buffer.read(cx);
907 let buffer_file = File::from_dyn(buffer.file());
908 let buffer_language = buffer.language();
909 let settings = language_settings(buffer_language, buffer.file(), cx);
910 if let Some(language) = buffer_language {
911 if settings.enable_language_server {
912 if let Some(file) = buffer_file {
913 language_servers_to_start
914 .push((file.worktree.clone(), Arc::clone(language)));
915 }
916 }
917 language_formatters_to_check.push((
918 buffer_file.map(|f| f.worktree_id(cx)),
919 Arc::clone(language),
920 settings.clone(),
921 ));
922 }
923 }
924 }
925
926 let mut language_servers_to_stop = Vec::new();
927 let mut language_servers_to_restart = Vec::new();
928 let languages = self.languages.to_vec();
929
930 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
931 let current_lsp_settings = &self.current_lsp_settings;
932 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
933 let language = languages.iter().find_map(|l| {
934 let adapter = l
935 .lsp_adapters()
936 .iter()
937 .find(|adapter| &adapter.name == started_lsp_name)?;
938 Some((l, adapter))
939 });
940 if let Some((language, adapter)) = language {
941 let worktree = self.worktree_for_id(*worktree_id, cx);
942 let file = worktree.as_ref().and_then(|tree| {
943 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
944 });
945 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
946 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
947 } else if let Some(worktree) = worktree {
948 let server_name = &adapter.name.0;
949 match (
950 current_lsp_settings.get(server_name),
951 new_lsp_settings.get(server_name),
952 ) {
953 (None, None) => {}
954 (Some(_), None) | (None, Some(_)) => {
955 language_servers_to_restart.push((worktree, Arc::clone(language)));
956 }
957 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
958 if current_lsp_settings != new_lsp_settings {
959 language_servers_to_restart.push((worktree, Arc::clone(language)));
960 }
961 }
962 }
963 }
964 }
965 }
966 self.current_lsp_settings = new_lsp_settings;
967
968 // Stop all newly-disabled language servers.
969 for (worktree_id, adapter_name) in language_servers_to_stop {
970 self.stop_language_server(worktree_id, adapter_name, cx)
971 .detach();
972 }
973
974 let mut prettier_plugins_by_worktree = HashMap::default();
975 for (worktree, language, settings) in language_formatters_to_check {
976 if let Some(plugins) =
977 prettier_support::prettier_plugins_for_language(&language, &settings)
978 {
979 prettier_plugins_by_worktree
980 .entry(worktree)
981 .or_insert_with(|| HashSet::default())
982 .extend(plugins);
983 }
984 }
985 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
986 self.install_default_prettier(worktree, prettier_plugins, cx);
987 }
988
989 // Start all the newly-enabled language servers.
990 for (worktree, language) in language_servers_to_start {
991 let worktree_path = worktree.read(cx).abs_path();
992 self.start_language_servers(&worktree, worktree_path, language, cx);
993 }
994
995 // Restart all language servers with changed initialization options.
996 for (worktree, language) in language_servers_to_restart {
997 self.restart_language_servers(worktree, language, cx);
998 }
999
1000 if self.copilot_lsp_subscription.is_none() {
1001 if let Some(copilot) = Copilot::global(cx) {
1002 for buffer in self.opened_buffers.values() {
1003 if let Some(buffer) = buffer.upgrade() {
1004 self.register_buffer_with_copilot(&buffer, cx);
1005 }
1006 }
1007 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
1008 }
1009 }
1010
1011 cx.notify();
1012 }
1013
1014 pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> {
1015 self.opened_buffers
1016 .get(&remote_id)
1017 .and_then(|buffer| buffer.upgrade())
1018 }
1019
1020 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1021 &self.languages
1022 }
1023
1024 pub fn client(&self) -> Arc<Client> {
1025 self.client.clone()
1026 }
1027
1028 pub fn user_store(&self) -> Model<UserStore> {
1029 self.user_store.clone()
1030 }
1031
1032 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1033 self.opened_buffers
1034 .values()
1035 .filter_map(|b| b.upgrade())
1036 .collect()
1037 }
1038
1039 #[cfg(any(test, feature = "test-support"))]
1040 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1041 let path = path.into();
1042 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1043 self.opened_buffers.iter().any(|(_, buffer)| {
1044 if let Some(buffer) = buffer.upgrade() {
1045 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1046 if file.worktree == worktree && file.path() == &path.path {
1047 return true;
1048 }
1049 }
1050 }
1051 false
1052 })
1053 } else {
1054 false
1055 }
1056 }
1057
1058 pub fn fs(&self) -> &Arc<dyn Fs> {
1059 &self.fs
1060 }
1061
1062 pub fn remote_id(&self) -> Option<u64> {
1063 match self.client_state {
1064 ProjectClientState::Local => None,
1065 ProjectClientState::Shared { remote_id, .. }
1066 | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
1067 }
1068 }
1069
1070 pub fn replica_id(&self) -> ReplicaId {
1071 match self.client_state {
1072 ProjectClientState::Remote { replica_id, .. } => replica_id,
1073 _ => 0,
1074 }
1075 }
1076
1077 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1078 if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
1079 updates_tx
1080 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1081 .ok();
1082 }
1083 cx.notify();
1084 }
1085
1086 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1087 &self.collaborators
1088 }
1089
1090 pub fn host(&self) -> Option<&Collaborator> {
1091 self.collaborators.values().find(|c| c.replica_id == 0)
1092 }
1093
1094 /// Collect all worktrees, including ones that don't appear in the project panel
1095 pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1096 self.worktrees
1097 .iter()
1098 .filter_map(move |worktree| worktree.upgrade())
1099 }
1100
1101 /// Collect all user-visible worktrees, the ones that appear in the project panel
1102 pub fn visible_worktrees<'a>(
1103 &'a self,
1104 cx: &'a AppContext,
1105 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1106 self.worktrees.iter().filter_map(|worktree| {
1107 worktree.upgrade().and_then(|worktree| {
1108 if worktree.read(cx).is_visible() {
1109 Some(worktree)
1110 } else {
1111 None
1112 }
1113 })
1114 })
1115 }
1116
1117 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1118 self.visible_worktrees(cx)
1119 .map(|tree| tree.read(cx).root_name())
1120 }
1121
1122 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1123 self.worktrees()
1124 .find(|worktree| worktree.read(cx).id() == id)
1125 }
1126
1127 pub fn worktree_for_entry(
1128 &self,
1129 entry_id: ProjectEntryId,
1130 cx: &AppContext,
1131 ) -> Option<Model<Worktree>> {
1132 self.worktrees()
1133 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1134 }
1135
1136 pub fn worktree_id_for_entry(
1137 &self,
1138 entry_id: ProjectEntryId,
1139 cx: &AppContext,
1140 ) -> Option<WorktreeId> {
1141 self.worktree_for_entry(entry_id, cx)
1142 .map(|worktree| worktree.read(cx).id())
1143 }
1144
1145 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1146 paths.iter().all(|path| self.contains_path(path, cx))
1147 }
1148
1149 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1150 for worktree in self.worktrees() {
1151 let worktree = worktree.read(cx).as_local();
1152 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1153 return true;
1154 }
1155 }
1156 false
1157 }
1158
1159 pub fn create_entry(
1160 &mut self,
1161 project_path: impl Into<ProjectPath>,
1162 is_directory: bool,
1163 cx: &mut ModelContext<Self>,
1164 ) -> Task<Result<Option<Entry>>> {
1165 let project_path = project_path.into();
1166 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1167 return Task::ready(Ok(None));
1168 };
1169 if self.is_local() {
1170 worktree.update(cx, |worktree, cx| {
1171 worktree
1172 .as_local_mut()
1173 .unwrap()
1174 .create_entry(project_path.path, is_directory, cx)
1175 })
1176 } else {
1177 let client = self.client.clone();
1178 let project_id = self.remote_id().unwrap();
1179 cx.spawn(move |_, mut cx| async move {
1180 let response = client
1181 .request(proto::CreateProjectEntry {
1182 worktree_id: project_path.worktree_id.to_proto(),
1183 project_id,
1184 path: project_path.path.to_string_lossy().into(),
1185 is_directory,
1186 })
1187 .await?;
1188 match response.entry {
1189 Some(entry) => worktree
1190 .update(&mut cx, |worktree, cx| {
1191 worktree.as_remote_mut().unwrap().insert_entry(
1192 entry,
1193 response.worktree_scan_id as usize,
1194 cx,
1195 )
1196 })?
1197 .await
1198 .map(Some),
1199 None => Ok(None),
1200 }
1201 })
1202 }
1203 }
1204
1205 pub fn copy_entry(
1206 &mut self,
1207 entry_id: ProjectEntryId,
1208 new_path: impl Into<Arc<Path>>,
1209 cx: &mut ModelContext<Self>,
1210 ) -> Task<Result<Option<Entry>>> {
1211 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1212 return Task::ready(Ok(None));
1213 };
1214 let new_path = new_path.into();
1215 if self.is_local() {
1216 worktree.update(cx, |worktree, cx| {
1217 worktree
1218 .as_local_mut()
1219 .unwrap()
1220 .copy_entry(entry_id, new_path, cx)
1221 })
1222 } else {
1223 let client = self.client.clone();
1224 let project_id = self.remote_id().unwrap();
1225
1226 cx.spawn(move |_, mut cx| async move {
1227 let response = client
1228 .request(proto::CopyProjectEntry {
1229 project_id,
1230 entry_id: entry_id.to_proto(),
1231 new_path: new_path.to_string_lossy().into(),
1232 })
1233 .await?;
1234 match response.entry {
1235 Some(entry) => worktree
1236 .update(&mut cx, |worktree, cx| {
1237 worktree.as_remote_mut().unwrap().insert_entry(
1238 entry,
1239 response.worktree_scan_id as usize,
1240 cx,
1241 )
1242 })?
1243 .await
1244 .map(Some),
1245 None => Ok(None),
1246 }
1247 })
1248 }
1249 }
1250
1251 pub fn rename_entry(
1252 &mut self,
1253 entry_id: ProjectEntryId,
1254 new_path: impl Into<Arc<Path>>,
1255 cx: &mut ModelContext<Self>,
1256 ) -> Task<Result<Option<Entry>>> {
1257 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1258 return Task::ready(Ok(None));
1259 };
1260 let new_path = new_path.into();
1261 if self.is_local() {
1262 worktree.update(cx, |worktree, cx| {
1263 worktree
1264 .as_local_mut()
1265 .unwrap()
1266 .rename_entry(entry_id, new_path, cx)
1267 })
1268 } else {
1269 let client = self.client.clone();
1270 let project_id = self.remote_id().unwrap();
1271
1272 cx.spawn(move |_, mut cx| async move {
1273 let response = client
1274 .request(proto::RenameProjectEntry {
1275 project_id,
1276 entry_id: entry_id.to_proto(),
1277 new_path: new_path.to_string_lossy().into(),
1278 })
1279 .await?;
1280 match response.entry {
1281 Some(entry) => worktree
1282 .update(&mut cx, |worktree, cx| {
1283 worktree.as_remote_mut().unwrap().insert_entry(
1284 entry,
1285 response.worktree_scan_id as usize,
1286 cx,
1287 )
1288 })?
1289 .await
1290 .map(Some),
1291 None => Ok(None),
1292 }
1293 })
1294 }
1295 }
1296
1297 pub fn delete_entry(
1298 &mut self,
1299 entry_id: ProjectEntryId,
1300 cx: &mut ModelContext<Self>,
1301 ) -> Option<Task<Result<()>>> {
1302 let worktree = self.worktree_for_entry(entry_id, cx)?;
1303
1304 cx.emit(Event::DeletedEntry(entry_id));
1305
1306 if self.is_local() {
1307 worktree.update(cx, |worktree, cx| {
1308 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1309 })
1310 } else {
1311 let client = self.client.clone();
1312 let project_id = self.remote_id().unwrap();
1313 Some(cx.spawn(move |_, mut cx| async move {
1314 let response = client
1315 .request(proto::DeleteProjectEntry {
1316 project_id,
1317 entry_id: entry_id.to_proto(),
1318 })
1319 .await?;
1320 worktree
1321 .update(&mut cx, move |worktree, cx| {
1322 worktree.as_remote_mut().unwrap().delete_entry(
1323 entry_id,
1324 response.worktree_scan_id as usize,
1325 cx,
1326 )
1327 })?
1328 .await
1329 }))
1330 }
1331 }
1332
1333 pub fn expand_entry(
1334 &mut self,
1335 worktree_id: WorktreeId,
1336 entry_id: ProjectEntryId,
1337 cx: &mut ModelContext<Self>,
1338 ) -> Option<Task<Result<()>>> {
1339 let worktree = self.worktree_for_id(worktree_id, cx)?;
1340 if self.is_local() {
1341 worktree.update(cx, |worktree, cx| {
1342 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1343 })
1344 } else {
1345 let worktree = worktree.downgrade();
1346 let request = self.client.request(proto::ExpandProjectEntry {
1347 project_id: self.remote_id().unwrap(),
1348 entry_id: entry_id.to_proto(),
1349 });
1350 Some(cx.spawn(move |_, mut cx| async move {
1351 let response = request.await?;
1352 if let Some(worktree) = worktree.upgrade() {
1353 worktree
1354 .update(&mut cx, |worktree, _| {
1355 worktree
1356 .as_remote_mut()
1357 .unwrap()
1358 .wait_for_snapshot(response.worktree_scan_id as usize)
1359 })?
1360 .await?;
1361 }
1362 Ok(())
1363 }))
1364 }
1365 }
1366
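    /// Begins sharing this local project under the given `project_id`: buffer and worktree
    /// handles are upgraded to strong references, current language server and worktree
    /// settings state is sent to the server, and a task is spawned to stream subsequent
    /// updates to collaborators.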
1367 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1368 if !matches!(self.client_state, ProjectClientState::Local) {
1369 return Err(anyhow!("project was already shared"));
1370 }
1371 self.client_subscriptions.push(
1372 self.client
1373 .subscribe_to_entity(project_id)?
1374 .set_model(&cx.handle(), &mut cx.to_async()),
1375 );
1376
1377 for open_buffer in self.opened_buffers.values_mut() {
1378 match open_buffer {
1379 OpenBuffer::Strong(_) => {}
1380 OpenBuffer::Weak(buffer) => {
1381 if let Some(buffer) = buffer.upgrade() {
1382 *open_buffer = OpenBuffer::Strong(buffer);
1383 }
1384 }
1385 OpenBuffer::Operations(_) => unreachable!(),
1386 }
1387 }
1388
1389 for worktree_handle in self.worktrees.iter_mut() {
1390 match worktree_handle {
1391 WorktreeHandle::Strong(_) => {}
1392 WorktreeHandle::Weak(worktree) => {
1393 if let Some(worktree) = worktree.upgrade() {
1394 *worktree_handle = WorktreeHandle::Strong(worktree);
1395 }
1396 }
1397 }
1398 }
1399
1400 for (server_id, status) in &self.language_server_statuses {
1401 self.client
1402 .send(proto::StartLanguageServer {
1403 project_id,
1404 server: Some(proto::LanguageServer {
1405 id: server_id.0 as u64,
1406 name: status.name.clone(),
1407 }),
1408 })
1409 .log_err();
1410 }
1411
1412 let store = cx.global::<SettingsStore>();
1413 for worktree in self.worktrees() {
1414 let worktree_id = worktree.read(cx).id().to_proto();
1415 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1416 self.client
1417 .send(proto::UpdateWorktreeSettings {
1418 project_id,
1419 worktree_id,
1420 path: path.to_string_lossy().into(),
1421 content: Some(content),
1422 })
1423 .log_err();
1424 }
1425 }
1426
1427 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1428 let client = self.client.clone();
1429 self.client_state = ProjectClientState::Shared {
1430 remote_id: project_id,
1431 updates_tx,
1432 _send_updates: cx.spawn(move |this, mut cx| async move {
1433 while let Some(update) = updates_rx.next().await {
1434 match update {
1435 LocalProjectUpdate::WorktreesChanged => {
1436 let worktrees = this.update(&mut cx, |this, _cx| {
1437 this.worktrees().collect::<Vec<_>>()
1438 })?;
1439 let update_project = this
1440 .update(&mut cx, |this, cx| {
1441 this.client.request(proto::UpdateProject {
1442 project_id,
1443 worktrees: this.worktree_metadata_protos(cx),
1444 })
1445 })?
1446 .await;
1447 if update_project.is_ok() {
1448 for worktree in worktrees {
1449 worktree.update(&mut cx, |worktree, cx| {
1450 let worktree = worktree.as_local_mut().unwrap();
1451 worktree.share(project_id, cx).detach_and_log_err(cx)
1452 })?;
1453 }
1454 }
1455 }
1456 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1457 let buffer = this.update(&mut cx, |this, _| {
1458 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1459 let shared_buffers =
1460 this.shared_buffers.entry(peer_id).or_default();
1461 if shared_buffers.insert(buffer_id) {
1462 if let OpenBuffer::Strong(buffer) = buffer {
1463 Some(buffer.clone())
1464 } else {
1465 None
1466 }
1467 } else {
1468 None
1469 }
1470 })?;
1471
1472 let Some(buffer) = buffer else { continue };
1473 let operations =
1474 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1475 let operations = operations.await;
1476 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1477
1478 let initial_state = proto::CreateBufferForPeer {
1479 project_id,
1480 peer_id: Some(peer_id),
1481 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1482 };
1483 if client.send(initial_state).log_err().is_some() {
1484 let client = client.clone();
1485 cx.background_executor()
1486 .spawn(async move {
1487 let mut chunks = split_operations(operations).peekable();
1488 while let Some(chunk) = chunks.next() {
1489 let is_last = chunks.peek().is_none();
1490 client.send(proto::CreateBufferForPeer {
1491 project_id,
1492 peer_id: Some(peer_id),
1493 variant: Some(
1494 proto::create_buffer_for_peer::Variant::Chunk(
1495 proto::BufferChunk {
1496 buffer_id,
1497 operations: chunk,
1498 is_last,
1499 },
1500 ),
1501 ),
1502 })?;
1503 }
1504 anyhow::Ok(())
1505 })
1506 .await
1507 .log_err();
1508 }
1509 }
1510 }
1511 }
1512 Ok(())
1513 }),
1514 };
1515
1516 self.metadata_changed(cx);
1517 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1518 cx.notify();
1519 Ok(())
1520 }
1521
1522 pub fn reshared(
1523 &mut self,
1524 message: proto::ResharedProject,
1525 cx: &mut ModelContext<Self>,
1526 ) -> Result<()> {
1527 self.shared_buffers.clear();
1528 self.set_collaborators_from_proto(message.collaborators, cx)?;
1529 self.metadata_changed(cx);
1530 Ok(())
1531 }
1532
1533 pub fn rejoined(
1534 &mut self,
1535 message: proto::RejoinedProject,
1536 message_id: u32,
1537 cx: &mut ModelContext<Self>,
1538 ) -> Result<()> {
1539 cx.update_global::<SettingsStore, _>(|store, cx| {
1540 for worktree in &self.worktrees {
1541 store
1542 .clear_local_settings(worktree.handle_id(), cx)
1543 .log_err();
1544 }
1545 });
1546
1547 self.join_project_response_message_id = message_id;
1548 self.set_worktrees_from_proto(message.worktrees, cx)?;
1549 self.set_collaborators_from_proto(message.collaborators, cx)?;
1550 self.language_server_statuses = message
1551 .language_servers
1552 .into_iter()
1553 .map(|server| {
1554 (
1555 LanguageServerId(server.id as usize),
1556 LanguageServerStatus {
1557 name: server.name,
1558 pending_work: Default::default(),
1559 has_pending_diagnostic_updates: false,
1560 progress_tokens: Default::default(),
1561 },
1562 )
1563 })
1564 .collect();
1565 self.buffer_ordered_messages_tx
1566 .unbounded_send(BufferOrderedMessage::Resync)
1567 .unwrap();
1568 cx.notify();
1569 Ok(())
1570 }
1571
1572 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1573 self.unshare_internal(cx)?;
1574 self.metadata_changed(cx);
1575 cx.notify();
1576 Ok(())
1577 }
1578
1579 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1580 if self.is_remote() {
1581 return Err(anyhow!("attempted to unshare a remote project"));
1582 }
1583
1584 if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
1585 self.client_state = ProjectClientState::Local;
1586 self.collaborators.clear();
1587 self.shared_buffers.clear();
1588 self.client_subscriptions.clear();
1589
1590 for worktree_handle in self.worktrees.iter_mut() {
1591 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1592 let is_visible = worktree.update(cx, |worktree, _| {
1593 worktree.as_local_mut().unwrap().unshare();
1594 worktree.is_visible()
1595 });
1596 if !is_visible {
1597 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1598 }
1599 }
1600 }
1601
1602 for open_buffer in self.opened_buffers.values_mut() {
1603 // Wake up any tasks waiting for peers' edits to this buffer.
1604 if let Some(buffer) = open_buffer.upgrade() {
1605 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1606 }
1607
1608 if let OpenBuffer::Strong(buffer) = open_buffer {
1609 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1610 }
1611 }
1612
1613 self.client.send(proto::UnshareProject {
1614 project_id: remote_id,
1615 })?;
1616
1617 Ok(())
1618 } else {
1619 Err(anyhow!("attempted to unshare an unshared project"))
1620 }
1621 }
1622
1623 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1624 self.disconnected_from_host_internal(cx);
1625 cx.emit(Event::DisconnectedFromHost);
1626 cx.notify();
1627 }
1628
1629 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1630 let new_capability =
1631 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1632 Capability::ReadWrite
1633 } else {
1634 Capability::ReadOnly
1635 };
1636 if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
1637 if *capability == new_capability {
1638 return;
1639 }
1640
1641 *capability = new_capability;
1642 for buffer in self.opened_buffers() {
1643 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1644 }
1645 }
1646 }
1647
1648 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1649 if let ProjectClientState::Remote {
1650 sharing_has_stopped,
1651 ..
1652 } = &mut self.client_state
1653 {
1654 *sharing_has_stopped = true;
1655
1656 self.collaborators.clear();
1657
1658 for worktree in &self.worktrees {
1659 if let Some(worktree) = worktree.upgrade() {
1660 worktree.update(cx, |worktree, _| {
1661 if let Some(worktree) = worktree.as_remote_mut() {
1662 worktree.disconnected_from_host();
1663 }
1664 });
1665 }
1666 }
1667
1668 for open_buffer in self.opened_buffers.values_mut() {
1669 // Wake up any tasks waiting for peers' edits to this buffer.
1670 if let Some(buffer) = open_buffer.upgrade() {
1671 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1672 }
1673
1674 if let OpenBuffer::Strong(buffer) = open_buffer {
1675 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1676 }
1677 }
1678
1679 // Wake up all futures currently waiting on a buffer to get opened,
1680 // to give them a chance to fail now that we've disconnected.
1681 *self.opened_buffer.0.borrow_mut() = ();
1682 }
1683 }
1684
1685 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1686 cx.emit(Event::Closed);
1687 }
1688
1689 pub fn is_disconnected(&self) -> bool {
1690 match &self.client_state {
1691 ProjectClientState::Remote {
1692 sharing_has_stopped,
1693 ..
1694 } => *sharing_has_stopped,
1695 _ => false,
1696 }
1697 }
1698
1699 pub fn capability(&self) -> Capability {
1700 match &self.client_state {
1701 ProjectClientState::Remote { capability, .. } => *capability,
1702 ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
1703 }
1704 }
1705
1706 pub fn is_read_only(&self) -> bool {
1707 self.is_disconnected() || self.capability() == Capability::ReadOnly
1708 }
1709
1710 pub fn is_local(&self) -> bool {
1711 match &self.client_state {
1712 ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
1713 ProjectClientState::Remote { .. } => false,
1714 }
1715 }
1716
1717 pub fn is_remote(&self) -> bool {
1718 !self.is_local()
1719 }
1720
1721 pub fn create_buffer(
1722 &mut self,
1723 text: &str,
1724 language: Option<Arc<Language>>,
1725 cx: &mut ModelContext<Self>,
1726 ) -> Result<Model<Buffer>> {
1727 if self.is_remote() {
1728 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1729 }
1730 let id = post_inc(&mut self.next_buffer_id);
1731 let buffer = cx.new_model(|cx| {
1732 Buffer::new(self.replica_id(), id, text)
1733 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1734 });
1735 self.register_buffer(&buffer, cx)?;
1736 Ok(buffer)
1737 }
1738
1739 pub fn open_path(
1740 &mut self,
1741 path: ProjectPath,
1742 cx: &mut ModelContext<Self>,
1743 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1744 let task = self.open_buffer(path.clone(), cx);
1745 cx.spawn(move |_, cx| async move {
1746 let buffer = task.await?;
1747 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1748 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1749 })?;
1750
1751 let buffer: &AnyModel = &buffer;
1752 Ok((project_entry_id, buffer.clone()))
1753 })
1754 }
1755
1756 pub fn open_local_buffer(
1757 &mut self,
1758 abs_path: impl AsRef<Path>,
1759 cx: &mut ModelContext<Self>,
1760 ) -> Task<Result<Model<Buffer>>> {
1761 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1762 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1763 } else {
1764 Task::ready(Err(anyhow!("no such path")))
1765 }
1766 }
1767
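    /// Opens the buffer at the given project path, returning an already-open buffer or
    /// awaiting an in-flight load for the same path when possible.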
1768 pub fn open_buffer(
1769 &mut self,
1770 path: impl Into<ProjectPath>,
1771 cx: &mut ModelContext<Self>,
1772 ) -> Task<Result<Model<Buffer>>> {
1773 let project_path = path.into();
1774 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1775 worktree
1776 } else {
1777 return Task::ready(Err(anyhow!("no such worktree")));
1778 };
1779
1780 // If there is already a buffer for the given path, then return it.
1781 let existing_buffer = self.get_open_buffer(&project_path, cx);
1782 if let Some(existing_buffer) = existing_buffer {
1783 return Task::ready(Ok(existing_buffer));
1784 }
1785
1786 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1787 // If the given path is already being loaded, then wait for that existing
1788 // task to complete and return the same buffer.
1789 hash_map::Entry::Occupied(e) => e.get().clone(),
1790
1791 // Otherwise, record the fact that this path is now being loaded.
1792 hash_map::Entry::Vacant(entry) => {
1793 let (mut tx, rx) = postage::watch::channel();
1794 entry.insert(rx.clone());
1795
1796 let load_buffer = if worktree.read(cx).is_local() {
1797 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1798 } else {
1799 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1800 };
1801
1802 let project_path = project_path.clone();
1803 cx.spawn(move |this, mut cx| async move {
1804 let load_result = load_buffer.await;
1805 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1806 // Record the fact that the buffer is no longer loading.
1807 this.loading_buffers_by_path.remove(&project_path);
1808 let buffer = load_result.map_err(Arc::new)?;
1809 Ok(buffer)
1810 })?);
1811 anyhow::Ok(())
1812 })
1813 .detach();
1814 rx
1815 }
1816 };
1817
1818 cx.background_executor().spawn(async move {
1819 wait_for_loading_buffer(loading_watch)
1820 .await
1821 .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
1822 })
1823 }
1824
1825 fn open_local_buffer_internal(
1826 &mut self,
1827 path: &Arc<Path>,
1828 worktree: &Model<Worktree>,
1829 cx: &mut ModelContext<Self>,
1830 ) -> Task<Result<Model<Buffer>>> {
1831 let buffer_id = post_inc(&mut self.next_buffer_id);
1832 let load_buffer = worktree.update(cx, |worktree, cx| {
1833 let worktree = worktree.as_local_mut().unwrap();
1834 worktree.load_buffer(buffer_id, path, cx)
1835 });
1836 cx.spawn(move |this, mut cx| async move {
1837 let buffer = load_buffer.await?;
1838 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1839 Ok(buffer)
1840 })
1841 }
1842
1843 fn open_remote_buffer_internal(
1844 &mut self,
1845 path: &Arc<Path>,
1846 worktree: &Model<Worktree>,
1847 cx: &mut ModelContext<Self>,
1848 ) -> Task<Result<Model<Buffer>>> {
1849 let rpc = self.client.clone();
1850 let project_id = self.remote_id().unwrap();
1851 let remote_worktree_id = worktree.read(cx).id();
1852 let path = path.clone();
1853 let path_string = path.to_string_lossy().to_string();
1854 cx.spawn(move |this, mut cx| async move {
1855 let response = rpc
1856 .request(proto::OpenBufferByPath {
1857 project_id,
1858 worktree_id: remote_worktree_id.to_proto(),
1859 path: path_string,
1860 })
1861 .await?;
1862 this.update(&mut cx, |this, cx| {
1863 this.wait_for_remote_buffer(response.buffer_id, cx)
1864 })?
1865 .await
1866 })
1867 }
1868
1869 /// LanguageServerName is taken by value because it is inserted into a map.
1870 pub fn open_local_buffer_via_lsp(
1871 &mut self,
1872 abs_path: lsp::Url,
1873 language_server_id: LanguageServerId,
1874 language_server_name: LanguageServerName,
1875 cx: &mut ModelContext<Self>,
1876 ) -> Task<Result<Model<Buffer>>> {
1877 cx.spawn(move |this, mut cx| async move {
1878 let abs_path = abs_path
1879 .to_file_path()
1880 .map_err(|_| anyhow!("can't convert URI to path"))?;
1881 let (worktree, relative_path) = if let Some(result) =
1882 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1883 {
1884 result
1885 } else {
1886 let worktree = this
1887 .update(&mut cx, |this, cx| {
1888 this.create_local_worktree(&abs_path, false, cx)
1889 })?
1890 .await?;
1891 this.update(&mut cx, |this, cx| {
1892 this.language_server_ids.insert(
1893 (worktree.read(cx).id(), language_server_name),
1894 language_server_id,
1895 );
1896 })
1897 .ok();
1898 (worktree, PathBuf::new())
1899 };
1900
1901 let project_path = ProjectPath {
1902 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1903 path: relative_path.into(),
1904 };
1905 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1906 .await
1907 })
1908 }
1909
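/// Opens a buffer by its id. Local projects can only return buffers that are
/// already open; remote projects ask the host for the buffer.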
1910 pub fn open_buffer_by_id(
1911 &mut self,
1912 id: u64,
1913 cx: &mut ModelContext<Self>,
1914 ) -> Task<Result<Model<Buffer>>> {
1915 if let Some(buffer) = self.buffer_for_id(id) {
1916 Task::ready(Ok(buffer))
1917 } else if self.is_local() {
1918 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1919 } else if let Some(project_id) = self.remote_id() {
1920 let request = self
1921 .client
1922 .request(proto::OpenBufferById { project_id, id });
1923 cx.spawn(move |this, mut cx| async move {
1924 let buffer_id = request.await?.buffer_id;
1925 this.update(&mut cx, |this, cx| {
1926 this.wait_for_remote_buffer(buffer_id, cx)
1927 })?
1928 .await
1929 })
1930 } else {
1931 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1932 }
1933 }
1934
1935 pub fn save_buffers(
1936 &self,
1937 buffers: HashSet<Model<Buffer>>,
1938 cx: &mut ModelContext<Self>,
1939 ) -> Task<Result<()>> {
1940 cx.spawn(move |this, mut cx| async move {
1941 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1942 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1943 .ok()
1944 });
1945 try_join_all(save_tasks).await?;
1946 Ok(())
1947 })
1948 }
1949
1950 pub fn save_buffer(
1951 &self,
1952 buffer: Model<Buffer>,
1953 cx: &mut ModelContext<Self>,
1954 ) -> Task<Result<()>> {
1955 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1956 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1957 };
1958 let worktree = file.worktree.clone();
1959 let path = file.path.clone();
1960 worktree.update(cx, |worktree, cx| match worktree {
1961 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1962 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1963 })
1964 }
1965
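/// Saves the buffer under a new path, unregistering it from the language servers
/// for its old path and re-registering it once the save completes.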
1966 pub fn save_buffer_as(
1967 &mut self,
1968 buffer: Model<Buffer>,
1969 abs_path: PathBuf,
1970 cx: &mut ModelContext<Self>,
1971 ) -> Task<Result<()>> {
1972 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1973 let old_file = File::from_dyn(buffer.read(cx).file())
1974 .filter(|f| f.is_local())
1975 .cloned();
1976 cx.spawn(move |this, mut cx| async move {
1977 if let Some(old_file) = &old_file {
1978 this.update(&mut cx, |this, cx| {
1979 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1980 })?;
1981 }
1982 let (worktree, path) = worktree_task.await?;
1983 worktree
1984 .update(&mut cx, |worktree, cx| match worktree {
1985 Worktree::Local(worktree) => {
1986 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1987 }
1988 Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1989 })?
1990 .await?;
1991
1992 this.update(&mut cx, |this, cx| {
1993 this.detect_language_for_buffer(&buffer, cx);
1994 this.register_buffer_with_language_servers(&buffer, cx);
1995 })?;
1996 Ok(())
1997 })
1998 }
1999
2000 pub fn get_open_buffer(
2001 &mut self,
2002 path: &ProjectPath,
2003 cx: &mut ModelContext<Self>,
2004 ) -> Option<Model<Buffer>> {
2005 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
2006 self.opened_buffers.values().find_map(|buffer| {
2007 let buffer = buffer.upgrade()?;
2008 let file = File::from_dyn(buffer.read(cx).file())?;
2009 if file.worktree == worktree && file.path() == &path.path {
2010 Some(buffer)
2011 } else {
2012 None
2013 }
2014 })
2015 }
2016
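// Wires a newly opened buffer into the project: tracks it in `opened_buffers`,
// subscribes to its events, records its path and entry-id mappings, and registers
// it with language servers and Copilot.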
2017 fn register_buffer(
2018 &mut self,
2019 buffer: &Model<Buffer>,
2020 cx: &mut ModelContext<Self>,
2021 ) -> Result<()> {
2022 self.request_buffer_diff_recalculation(buffer, cx);
2023 buffer.update(cx, |buffer, _| {
2024 buffer.set_language_registry(self.languages.clone())
2025 });
2026
2027 let remote_id = buffer.read(cx).remote_id();
2028 let is_remote = self.is_remote();
2029 let open_buffer = if is_remote || self.is_shared() {
2030 OpenBuffer::Strong(buffer.clone())
2031 } else {
2032 OpenBuffer::Weak(buffer.downgrade())
2033 };
2034
2035 match self.opened_buffers.entry(remote_id) {
2036 hash_map::Entry::Vacant(entry) => {
2037 entry.insert(open_buffer);
2038 }
2039 hash_map::Entry::Occupied(mut entry) => {
2040 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2041 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2042 } else if entry.get().upgrade().is_some() {
2043 if is_remote {
2044 return Ok(());
2045 } else {
2046 debug_panic!("buffer {} was already registered", remote_id);
2047 Err(anyhow!("buffer {} was already registered", remote_id))?;
2048 }
2049 }
2050 entry.insert(open_buffer);
2051 }
2052 }
2053 cx.subscribe(buffer, |this, buffer, event, cx| {
2054 this.on_buffer_event(buffer, event, cx);
2055 })
2056 .detach();
2057
2058 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2059 if file.is_local {
2060 self.local_buffer_ids_by_path.insert(
2061 ProjectPath {
2062 worktree_id: file.worktree_id(cx),
2063 path: file.path.clone(),
2064 },
2065 remote_id,
2066 );
2067
2068 if let Some(entry_id) = file.entry_id {
2069 self.local_buffer_ids_by_entry_id
2070 .insert(entry_id, remote_id);
2071 }
2072 }
2073 }
2074
2075 self.detect_language_for_buffer(buffer, cx);
2076 self.register_buffer_with_language_servers(buffer, cx);
2077 self.register_buffer_with_copilot(buffer, cx);
2078 cx.observe_release(buffer, |this, buffer, cx| {
2079 if let Some(file) = File::from_dyn(buffer.file()) {
2080 if file.is_local() {
2081 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2082 for server in this.language_servers_for_buffer(buffer, cx) {
2083 server
2084 .1
2085 .notify::<lsp::notification::DidCloseTextDocument>(
2086 lsp::DidCloseTextDocumentParams {
2087 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2088 },
2089 )
2090 .log_err();
2091 }
2092 }
2093 }
2094 })
2095 .detach();
2096
2097 *self.opened_buffer.0.borrow_mut() = ();
2098 Ok(())
2099 }
2100
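// Announces a local buffer to every running language server for its language by
// sending `textDocument/didOpen`, seeding completion triggers, and storing an
// initial snapshot for incremental synchronization.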
2101 fn register_buffer_with_language_servers(
2102 &mut self,
2103 buffer_handle: &Model<Buffer>,
2104 cx: &mut ModelContext<Self>,
2105 ) {
2106 let buffer = buffer_handle.read(cx);
2107 let buffer_id = buffer.remote_id();
2108
2109 if let Some(file) = File::from_dyn(buffer.file()) {
2110 if !file.is_local() {
2111 return;
2112 }
2113
2114 let abs_path = file.abs_path(cx);
2115 let uri = lsp::Url::from_file_path(&abs_path)
2116 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2117 let initial_snapshot = buffer.text_snapshot();
2118 let language = buffer.language().cloned();
2119 let worktree_id = file.worktree_id(cx);
2120
2121 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2122 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2123 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2124 .log_err();
2125 }
2126 }
2127
2128 if let Some(language) = language {
2129 for adapter in language.lsp_adapters() {
2130 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2131 let server = self
2132 .language_server_ids
2133 .get(&(worktree_id, adapter.name.clone()))
2134 .and_then(|id| self.language_servers.get(id))
2135 .and_then(|server_state| {
2136 if let LanguageServerState::Running { server, .. } = server_state {
2137 Some(server.clone())
2138 } else {
2139 None
2140 }
2141 });
2142 let server = match server {
2143 Some(server) => server,
2144 None => continue,
2145 };
2146
2147 server
2148 .notify::<lsp::notification::DidOpenTextDocument>(
2149 lsp::DidOpenTextDocumentParams {
2150 text_document: lsp::TextDocumentItem::new(
2151 uri.clone(),
2152 language_id.unwrap_or_default(),
2153 0,
2154 initial_snapshot.text(),
2155 ),
2156 },
2157 )
2158 .log_err();
2159
2160 buffer_handle.update(cx, |buffer, cx| {
2161 buffer.set_completion_triggers(
2162 server
2163 .capabilities()
2164 .completion_provider
2165 .as_ref()
2166 .and_then(|provider| provider.trigger_characters.clone())
2167 .unwrap_or_default(),
2168 cx,
2169 );
2170 });
2171
2172 let snapshot = LspBufferSnapshot {
2173 version: 0,
2174 snapshot: initial_snapshot.clone(),
2175 };
2176 self.buffer_snapshots
2177 .entry(buffer_id)
2178 .or_default()
2179 .insert(server.server_id(), vec![snapshot]);
2180 }
2181 }
2182 }
2183 }
2184
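// Clears the buffer's diagnostics and snapshots and sends `textDocument/didClose`
// for its old path to every language server attached to the buffer.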
2185 fn unregister_buffer_from_language_servers(
2186 &mut self,
2187 buffer: &Model<Buffer>,
2188 old_file: &File,
2189 cx: &mut ModelContext<Self>,
2190 ) {
2191 let old_path = match old_file.as_local() {
2192 Some(local) => local.abs_path(cx),
2193 None => return,
2194 };
2195
2196 buffer.update(cx, |buffer, cx| {
2197 let worktree_id = old_file.worktree_id(cx);
2198 let ids = &self.language_server_ids;
2199
2200 let language = buffer.language().cloned();
2201 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2202 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2203 buffer.update_diagnostics(server_id, Default::default(), cx);
2204 }
2205
2206 self.buffer_snapshots.remove(&buffer.remote_id());
2207 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2208 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2209 language_server
2210 .notify::<lsp::notification::DidCloseTextDocument>(
2211 lsp::DidCloseTextDocumentParams {
2212 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2213 },
2214 )
2215 .log_err();
2216 }
2217 });
2218 }
2219
2220 fn register_buffer_with_copilot(
2221 &self,
2222 buffer_handle: &Model<Buffer>,
2223 cx: &mut ModelContext<Self>,
2224 ) {
2225 if let Some(copilot) = Copilot::global(cx) {
2226 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2227 }
2228 }
2229
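// Drains buffer-ordered messages in batches, accumulating buffer operations and
// flushing them to the host before forwarding any language server update, so that
// messages are delivered in the order they were produced.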
2230 async fn send_buffer_ordered_messages(
2231 this: WeakModel<Self>,
2232 rx: UnboundedReceiver<BufferOrderedMessage>,
2233 mut cx: AsyncAppContext,
2234 ) -> Result<()> {
2235 const MAX_BATCH_SIZE: usize = 128;
2236
2237 let mut operations_by_buffer_id = HashMap::default();
2238 async fn flush_operations(
2239 this: &WeakModel<Project>,
2240 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2241 needs_resync_with_host: &mut bool,
2242 is_local: bool,
2243 cx: &mut AsyncAppContext,
2244 ) -> Result<()> {
2245 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2246 let request = this.update(cx, |this, _| {
2247 let project_id = this.remote_id()?;
2248 Some(this.client.request(proto::UpdateBuffer {
2249 buffer_id,
2250 project_id,
2251 operations,
2252 }))
2253 })?;
2254 if let Some(request) = request {
2255 if request.await.is_err() && !is_local {
2256 *needs_resync_with_host = true;
2257 break;
2258 }
2259 }
2260 }
2261 Ok(())
2262 }
2263
2264 let mut needs_resync_with_host = false;
2265 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2266
2267 while let Some(changes) = changes.next().await {
2268 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2269
2270 for change in changes {
2271 match change {
2272 BufferOrderedMessage::Operation {
2273 buffer_id,
2274 operation,
2275 } => {
2276 if needs_resync_with_host {
2277 continue;
2278 }
2279
2280 operations_by_buffer_id
2281 .entry(buffer_id)
2282 .or_insert(Vec::new())
2283 .push(operation);
2284 }
2285
2286 BufferOrderedMessage::Resync => {
2287 operations_by_buffer_id.clear();
2288 if this
2289 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2290 .await
2291 .is_ok()
2292 {
2293 needs_resync_with_host = false;
2294 }
2295 }
2296
2297 BufferOrderedMessage::LanguageServerUpdate {
2298 language_server_id,
2299 message,
2300 } => {
2301 flush_operations(
2302 &this,
2303 &mut operations_by_buffer_id,
2304 &mut needs_resync_with_host,
2305 is_local,
2306 &mut cx,
2307 )
2308 .await?;
2309
2310 this.update(&mut cx, |this, _| {
2311 if let Some(project_id) = this.remote_id() {
2312 this.client
2313 .send(proto::UpdateLanguageServer {
2314 project_id,
2315 language_server_id: language_server_id.0 as u64,
2316 variant: Some(message),
2317 })
2318 .log_err();
2319 }
2320 })?;
2321 }
2322 }
2323 }
2324
2325 flush_operations(
2326 &this,
2327 &mut operations_by_buffer_id,
2328 &mut needs_resync_with_host,
2329 is_local,
2330 &mut cx,
2331 )
2332 .await?;
2333 }
2334
2335 Ok(())
2336 }
2337
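// Reacts to buffer events: forwards operations to collaborators, notifies language
// servers of edits and saves, and keeps the path and entry-id indices up to date
// when the buffer's file handle changes.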
2338 fn on_buffer_event(
2339 &mut self,
2340 buffer: Model<Buffer>,
2341 event: &BufferEvent,
2342 cx: &mut ModelContext<Self>,
2343 ) -> Option<()> {
2344 if matches!(
2345 event,
2346 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2347 ) {
2348 self.request_buffer_diff_recalculation(&buffer, cx);
2349 }
2350
2351 match event {
2352 BufferEvent::Operation(operation) => {
2353 self.buffer_ordered_messages_tx
2354 .unbounded_send(BufferOrderedMessage::Operation {
2355 buffer_id: buffer.read(cx).remote_id(),
2356 operation: language::proto::serialize_operation(operation),
2357 })
2358 .ok();
2359 }
2360
2361 BufferEvent::Edited { .. } => {
2362 let buffer = buffer.read(cx);
2363 let file = File::from_dyn(buffer.file())?;
2364 let abs_path = file.as_local()?.abs_path(cx);
2365 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2366 let next_snapshot = buffer.text_snapshot();
2367
2368 let language_servers: Vec<_> = self
2369 .language_servers_for_buffer(buffer, cx)
2370 .map(|i| i.1.clone())
2371 .collect();
2372
2373 for language_server in language_servers {
2374 let language_server = language_server.clone();
2375
2376 let buffer_snapshots = self
2377 .buffer_snapshots
2378 .get_mut(&buffer.remote_id())
2379 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2380 let previous_snapshot = buffer_snapshots.last()?;
2381
2382 let build_incremental_change = || {
2383 buffer
2384 .edits_since::<(PointUtf16, usize)>(
2385 previous_snapshot.snapshot.version(),
2386 )
2387 .map(|edit| {
2388 let edit_start = edit.new.start.0;
2389 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2390 let new_text = next_snapshot
2391 .text_for_range(edit.new.start.1..edit.new.end.1)
2392 .collect();
2393 lsp::TextDocumentContentChangeEvent {
2394 range: Some(lsp::Range::new(
2395 point_to_lsp(edit_start),
2396 point_to_lsp(edit_end),
2397 )),
2398 range_length: None,
2399 text: new_text,
2400 }
2401 })
2402 .collect()
2403 };
2404
2405 let document_sync_kind = language_server
2406 .capabilities()
2407 .text_document_sync
2408 .as_ref()
2409 .and_then(|sync| match sync {
2410 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2411 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2412 });
2413
2414 let content_changes: Vec<_> = match document_sync_kind {
2415 Some(lsp::TextDocumentSyncKind::FULL) => {
2416 vec![lsp::TextDocumentContentChangeEvent {
2417 range: None,
2418 range_length: None,
2419 text: next_snapshot.text(),
2420 }]
2421 }
2422 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2423 _ => {
2424 #[cfg(any(test, feature = "test-support"))]
2425 {
2426 build_incremental_change()
2427 }
2428
2429 #[cfg(not(any(test, feature = "test-support")))]
2430 {
2431 continue;
2432 }
2433 }
2434 };
2435
2436 let next_version = previous_snapshot.version + 1;
2437
2438 buffer_snapshots.push(LspBufferSnapshot {
2439 version: next_version,
2440 snapshot: next_snapshot.clone(),
2441 });
2442
2443 language_server
2444 .notify::<lsp::notification::DidChangeTextDocument>(
2445 lsp::DidChangeTextDocumentParams {
2446 text_document: lsp::VersionedTextDocumentIdentifier::new(
2447 uri.clone(),
2448 next_version,
2449 ),
2450 content_changes,
2451 },
2452 )
2453 .log_err();
2454 }
2455 }
2456
2457 BufferEvent::Saved => {
2458 let file = File::from_dyn(buffer.read(cx).file())?;
2459 let worktree_id = file.worktree_id(cx);
2460 let abs_path = file.as_local()?.abs_path(cx);
2461 let text_document = lsp::TextDocumentIdentifier {
2462 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2463 };
2464
2465 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2466 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2467
2468 server
2469 .notify::<lsp::notification::DidSaveTextDocument>(
2470 lsp::DidSaveTextDocumentParams {
2471 text_document: text_document.clone(),
2472 text,
2473 },
2474 )
2475 .log_err();
2476 }
2477
2478 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2479 for language_server_id in language_server_ids {
2480 if let Some(LanguageServerState::Running {
2481 adapter,
2482 simulate_disk_based_diagnostics_completion,
2483 ..
2484 }) = self.language_servers.get_mut(&language_server_id)
2485 {
2486 // After saving a buffer using a language server that doesn't provide
2487 // a disk-based progress token, kick off a timer that will reset every
2488 // time the buffer is saved. If the timer eventually fires, simulate
2489 // disk-based diagnostics being finished so that other pieces of UI
2490 // (e.g., project diagnostics view, diagnostic status bar) can update.
2491 // We don't emit an event right away because the language server might take
2492 // some time to publish diagnostics.
2493 if adapter.disk_based_diagnostics_progress_token.is_none() {
2494 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2495 Duration::from_secs(1);
2496
2497 let task = cx.spawn(move |this, mut cx| async move {
2498 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2499 if let Some(this) = this.upgrade() {
2500 this.update(&mut cx, |this, cx| {
2501 this.disk_based_diagnostics_finished(
2502 language_server_id,
2503 cx,
2504 );
2505 this.buffer_ordered_messages_tx
2506 .unbounded_send(
2507 BufferOrderedMessage::LanguageServerUpdate {
2508 language_server_id,
2509 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2510 },
2511 )
2512 .ok();
2513 }).ok();
2514 }
2515 });
2516 *simulate_disk_based_diagnostics_completion = Some(task);
2517 }
2518 }
2519 }
2520 }
2521 BufferEvent::FileHandleChanged => {
2522 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2523 return None;
2524 };
2525
2526 let remote_id = buffer.read(cx).remote_id();
2527 if let Some(entry_id) = file.entry_id {
2528 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2529 Some(_) => {
2530 return None;
2531 }
2532 None => {
2533 self.local_buffer_ids_by_entry_id
2534 .insert(entry_id, remote_id);
2535 }
2536 }
2537 };
2538 self.local_buffer_ids_by_path.insert(
2539 ProjectPath {
2540 worktree_id: file.worktree_id(cx),
2541 path: file.path.clone(),
2542 },
2543 remote_id,
2544 );
2545 }
2546 _ => {}
2547 }
2548
2549 None
2550 }
2551
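// Schedules a git diff recalculation for the buffer. Without a configured
// `git.gutter_debounce` delay, the recalculation is deferred with `cx.defer`;
// otherwise it is debounced, with a minimum delay of `MIN_DELAY` milliseconds.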
2552 fn request_buffer_diff_recalculation(
2553 &mut self,
2554 buffer: &Model<Buffer>,
2555 cx: &mut ModelContext<Self>,
2556 ) {
2557 self.buffers_needing_diff.insert(buffer.downgrade());
2558 let first_insertion = self.buffers_needing_diff.len() == 1;
2559
2560 let settings = ProjectSettings::get_global(cx);
2561 let delay = if let Some(delay) = settings.git.gutter_debounce {
2562 delay
2563 } else {
2564 if first_insertion {
2565 let this = cx.weak_model();
2566 cx.defer(move |cx| {
2567 if let Some(this) = this.upgrade() {
2568 this.update(cx, |this, cx| {
2569 this.recalculate_buffer_diffs(cx).detach();
2570 });
2571 }
2572 });
2573 }
2574 return;
2575 };
2576
2577 const MIN_DELAY: u64 = 50;
2578 let delay = delay.max(MIN_DELAY);
2579 let duration = Duration::from_millis(delay);
2580
2581 self.git_diff_debouncer
2582 .fire_new(duration, cx, move |this, cx| {
2583 this.recalculate_buffer_diffs(cx)
2584 });
2585 }
2586
2587 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2588 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2589 cx.spawn(move |this, mut cx| async move {
2590 let tasks: Vec<_> = buffers
2591 .iter()
2592 .filter_map(|buffer| {
2593 let buffer = buffer.upgrade()?;
2594 buffer
2595 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2596 .ok()
2597 .flatten()
2598 })
2599 .collect();
2600
2601 futures::future::join_all(tasks).await;
2602
2603 this.update(&mut cx, |this, cx| {
2604 if !this.buffers_needing_diff.is_empty() {
2605 this.recalculate_buffer_diffs(cx).detach();
2606 } else {
2607 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2608 for buffer in buffers {
2609 if let Some(buffer) = buffer.upgrade() {
2610 buffer.update(cx, |_, cx| cx.notify());
2611 }
2612 }
2613 }
2614 })
2615 .ok();
2616 })
2617 }
2618
2619 fn language_servers_for_worktree(
2620 &self,
2621 worktree_id: WorktreeId,
2622 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2623 self.language_server_ids
2624 .iter()
2625 .filter_map(move |((language_server_worktree_id, _), id)| {
2626 if *language_server_worktree_id == worktree_id {
2627 if let Some(LanguageServerState::Running {
2628 adapter,
2629 language,
2630 server,
2631 ..
2632 }) = self.language_servers.get(id)
2633 {
2634 return Some((adapter, language, server));
2635 }
2636 }
2637 None
2638 })
2639 }
2640
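// Long-running task that listens for language registry changes, re-detecting
// languages for plain-text buffers and reparsing buffers with unknown injections;
// a registry reload also clears and re-assigns the language of every open buffer.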
2641 fn maintain_buffer_languages(
2642 languages: Arc<LanguageRegistry>,
2643 cx: &mut ModelContext<Project>,
2644 ) -> Task<()> {
2645 let mut subscription = languages.subscribe();
2646 let mut prev_reload_count = languages.reload_count();
2647 cx.spawn(move |project, mut cx| async move {
2648 while let Some(()) = subscription.next().await {
2649 if let Some(project) = project.upgrade() {
2650 // If the language registry has been reloaded, then remove and
2651 // re-assign the languages on all open buffers.
2652 let reload_count = languages.reload_count();
2653 if reload_count > prev_reload_count {
2654 prev_reload_count = reload_count;
2655 project
2656 .update(&mut cx, |this, cx| {
2657 let buffers = this
2658 .opened_buffers
2659 .values()
2660 .filter_map(|b| b.upgrade())
2661 .collect::<Vec<_>>();
2662 for buffer in buffers {
2663 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2664 {
2665 this.unregister_buffer_from_language_servers(
2666 &buffer, &f, cx,
2667 );
2668 buffer
2669 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2670 }
2671 }
2672 })
2673 .ok();
2674 }
2675
2676 project
2677 .update(&mut cx, |project, cx| {
2678 let mut plain_text_buffers = Vec::new();
2679 let mut buffers_with_unknown_injections = Vec::new();
2680 for buffer in project.opened_buffers.values() {
2681 if let Some(handle) = buffer.upgrade() {
2682 let buffer = &handle.read(cx);
2683 if buffer.language().is_none()
2684 || buffer.language() == Some(&*language::PLAIN_TEXT)
2685 {
2686 plain_text_buffers.push(handle);
2687 } else if buffer.contains_unknown_injections() {
2688 buffers_with_unknown_injections.push(handle);
2689 }
2690 }
2691 }
2692
2693 for buffer in plain_text_buffers {
2694 project.detect_language_for_buffer(&buffer, cx);
2695 project.register_buffer_with_language_servers(&buffer, cx);
2696 }
2697
2698 for buffer in buffers_with_unknown_injections {
2699 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2700 }
2701 })
2702 .ok();
2703 }
2704 }
2705 })
2706 }
2707
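// Observes the global settings store and pushes the updated workspace configuration
// to every running language server via `workspace/didChangeConfiguration`.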
2708 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2709 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2710 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2711
2712 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2713 *settings_changed_tx.borrow_mut() = ();
2714 });
2715
2716 cx.spawn(move |this, mut cx| async move {
2717 while let Some(_) = settings_changed_rx.next().await {
2718 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2719 this.language_servers
2720 .values()
2721 .filter_map(|state| match state {
2722 LanguageServerState::Starting(_) => None,
2723 LanguageServerState::Running {
2724 adapter, server, ..
2725 } => Some((adapter.clone(), server.clone())),
2726 })
2727 .collect()
2728 })?;
2729
2730 for (adapter, server) in servers {
2731 let workspace_config = cx
2732 .update(|cx| adapter.workspace_configuration(server.root_path(), cx))?
2733 .await;
2734 server
2735 .notify::<lsp::notification::DidChangeConfiguration>(
2736 lsp::DidChangeConfigurationParams {
2737 settings: workspace_config.clone(),
2738 },
2739 )
2740 .ok();
2741 }
2742 }
2743
2744 drop(settings_observation);
2745 anyhow::Ok(())
2746 })
2747 }
2748
2749 fn detect_language_for_buffer(
2750 &mut self,
2751 buffer_handle: &Model<Buffer>,
2752 cx: &mut ModelContext<Self>,
2753 ) -> Option<()> {
2754 // Detect the buffer's language from its path and contents; if one is found, assign it and start its language servers if they aren't running already.
2755 let buffer = buffer_handle.read(cx);
2756 let full_path = buffer.file()?.full_path(cx);
2757 let content = buffer.as_rope();
2758 let new_language = self
2759 .languages
2760 .language_for_file(&full_path, Some(content))
2761 .now_or_never()?
2762 .ok()?;
2763 self.set_language_for_buffer(buffer_handle, new_language, cx);
2764 None
2765 }
2766
2767 pub fn set_language_for_buffer(
2768 &mut self,
2769 buffer: &Model<Buffer>,
2770 new_language: Arc<Language>,
2771 cx: &mut ModelContext<Self>,
2772 ) {
2773 buffer.update(cx, |buffer, cx| {
2774 if buffer.language().map_or(true, |old_language| {
2775 !Arc::ptr_eq(old_language, &new_language)
2776 }) {
2777 buffer.set_language(Some(new_language.clone()), cx);
2778 }
2779 });
2780
2781 let buffer_file = buffer.read(cx).file().cloned();
2782 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2783 let buffer_file = File::from_dyn(buffer_file.as_ref());
2784 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2785 if let Some(prettier_plugins) =
2786 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2787 {
2788 self.install_default_prettier(worktree, prettier_plugins, cx);
2789 };
2790 if let Some(file) = buffer_file {
2791 let worktree = file.worktree.clone();
2792 if let Some(tree) = worktree.read(cx).as_local() {
2793 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2794 }
2795 }
2796 }
2797
2798 fn start_language_servers(
2799 &mut self,
2800 worktree: &Model<Worktree>,
2801 worktree_path: Arc<Path>,
2802 language: Arc<Language>,
2803 cx: &mut ModelContext<Self>,
2804 ) {
2805 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2806 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2807 if !settings.enable_language_server {
2808 return;
2809 }
2810
2811 let worktree_id = worktree.read(cx).id();
2812 for adapter in language.lsp_adapters() {
2813 self.start_language_server(
2814 worktree_id,
2815 worktree_path.clone(),
2816 adapter.clone(),
2817 language.clone(),
2818 cx,
2819 );
2820 }
2821 }
2822
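// Starts a single language server for the given worktree and adapter, unless one is
// already registered for that (worktree, server name) pair or the adapter has hit
// its reinstall attempt limit. Startup happens asynchronously in a
// `LanguageServerState::Starting` task; on failure, the server is checked and
// possibly reinstalled.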
2823 fn start_language_server(
2824 &mut self,
2825 worktree_id: WorktreeId,
2826 worktree_path: Arc<Path>,
2827 adapter: Arc<CachedLspAdapter>,
2828 language: Arc<Language>,
2829 cx: &mut ModelContext<Self>,
2830 ) {
2831 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2832 return;
2833 }
2834
2835 let key = (worktree_id, adapter.name.clone());
2836 if self.language_server_ids.contains_key(&key) {
2837 return;
2838 }
2839
2840 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2841 let pending_server = match self.languages.create_pending_language_server(
2842 stderr_capture.clone(),
2843 language.clone(),
2844 adapter.clone(),
2845 Arc::clone(&worktree_path),
2846 ProjectLspAdapterDelegate::new(self, cx),
2847 cx,
2848 ) {
2849 Some(pending_server) => pending_server,
2850 None => return,
2851 };
2852
2853 let project_settings = ProjectSettings::get_global(cx);
2854 let lsp = project_settings.lsp.get(&adapter.name.0);
2855 let override_options = lsp.map(|s| s.initialization_options.clone()).flatten();
2856 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2857 let server_id = pending_server.server_id;
2858 let container_dir = pending_server.container_dir.clone();
2859 let state = LanguageServerState::Starting({
2860 let adapter = adapter.clone();
2861 let server_name = adapter.name.0.clone();
2862 let language = language.clone();
2863 let key = key.clone();
2864
2865 cx.spawn(move |this, mut cx| async move {
2866 let result = Self::setup_and_insert_language_server(
2867 this.clone(),
2868 &worktree_path,
2869 override_options,
2870 pending_server,
2871 adapter.clone(),
2872 language.clone(),
2873 server_id,
2874 key,
2875 &mut cx,
2876 )
2877 .await;
2878
2879 match result {
2880 Ok(server) => {
2881 stderr_capture.lock().take();
2882 server
2883 }
2884
2885 Err(err) => {
2886 log::error!("failed to start language server {server_name:?}: {err}");
2887 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2888
2889 let this = this.upgrade()?;
2890 let container_dir = container_dir?;
2891
2892 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2893 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2894 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2895 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2896 return None;
2897 }
2898
2899 let installation_test_binary = adapter
2900 .installation_test_binary(container_dir.to_path_buf())
2901 .await;
2902
2903 this.update(&mut cx, |_, cx| {
2904 Self::check_errored_server(
2905 language,
2906 adapter,
2907 server_id,
2908 installation_test_binary,
2909 cx,
2910 )
2911 })
2912 .ok();
2913
2914 None
2915 }
2916 }
2917 })
2918 });
2919
2920 self.language_servers.insert(server_id, state);
2921 self.language_server_ids.insert(key, server_id);
2922 }
2923
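// Shuts down the errored server, deletes its installation directory, and restarts
// it for every worktree in the project.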
2924 fn reinstall_language_server(
2925 &mut self,
2926 language: Arc<Language>,
2927 adapter: Arc<CachedLspAdapter>,
2928 server_id: LanguageServerId,
2929 cx: &mut ModelContext<Self>,
2930 ) -> Option<Task<()>> {
2931 log::info!("beginning to reinstall server");
2932
2933 let existing_server = match self.language_servers.remove(&server_id) {
2934 Some(LanguageServerState::Running { server, .. }) => Some(server),
2935 _ => None,
2936 };
2937
2938 for worktree in &self.worktrees {
2939 if let Some(worktree) = worktree.upgrade() {
2940 let key = (worktree.read(cx).id(), adapter.name.clone());
2941 self.language_server_ids.remove(&key);
2942 }
2943 }
2944
2945 Some(cx.spawn(move |this, mut cx| async move {
2946 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2947 log::info!("shutting down existing server");
2948 task.await;
2949 }
2950
2951 // TODO: This is race-safe with regards to preventing new instances from
2952 // starting while deleting, but existing instances in other projects will be
2953 // left in an inconsistent state.
2954 let Some(task) = this
2955 .update(&mut cx, |this, cx| {
2956 this.languages.delete_server_container(adapter.clone(), cx)
2957 })
2958 .log_err()
2959 else {
2960 return;
2961 };
2962 task.await;
2963
2964 this.update(&mut cx, |this, mut cx| {
2965 let worktrees = this.worktrees.clone();
2966 for worktree in worktrees {
2967 let worktree = match worktree.upgrade() {
2968 Some(worktree) => worktree.read(cx),
2969 None => continue,
2970 };
2971 let worktree_id = worktree.id();
2972 let root_path = worktree.abs_path();
2973
2974 this.start_language_server(
2975 worktree_id,
2976 root_path,
2977 adapter.clone(),
2978 language.clone(),
2979 &mut cx,
2980 );
2981 }
2982 })
2983 .ok();
2984 }))
2985 }
2986
2987 async fn setup_and_insert_language_server(
2988 this: WeakModel<Self>,
2989 worktree_path: &Path,
2990 override_initialization_options: Option<serde_json::Value>,
2991 pending_server: PendingLanguageServer,
2992 adapter: Arc<CachedLspAdapter>,
2993 language: Arc<Language>,
2994 server_id: LanguageServerId,
2995 key: (WorktreeId, LanguageServerName),
2996 cx: &mut AsyncAppContext,
2997 ) -> Result<Option<Arc<LanguageServer>>> {
2998 let language_server = Self::setup_pending_language_server(
2999 this.clone(),
3000 override_initialization_options,
3001 pending_server,
3002 worktree_path,
3003 adapter.clone(),
3004 server_id,
3005 cx,
3006 )
3007 .await?;
3008
3009 let this = match this.upgrade() {
3010 Some(this) => this,
3011 None => return Err(anyhow!("failed to upgrade project handle")),
3012 };
3013
3014 this.update(cx, |this, cx| {
3015 this.insert_newly_running_language_server(
3016 language,
3017 adapter,
3018 language_server.clone(),
3019 server_id,
3020 key,
3021 cx,
3022 )
3023 })??;
3024
3025 Ok(Some(language_server))
3026 }
3027
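// Finishes starting a pending language server: installs handlers for diagnostics,
// workspace configuration, progress, file-watch registration, workspace edits, and
// inlay hint refreshes, then initializes the server and sends its initial
// configuration.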
3028 async fn setup_pending_language_server(
3029 this: WeakModel<Self>,
3030 override_options: Option<serde_json::Value>,
3031 pending_server: PendingLanguageServer,
3032 worktree_path: &Path,
3033 adapter: Arc<CachedLspAdapter>,
3034 server_id: LanguageServerId,
3035 cx: &mut AsyncAppContext,
3036 ) -> Result<Arc<LanguageServer>> {
3037 let workspace_config = cx
3038 .update(|cx| adapter.workspace_configuration(worktree_path, cx))?
3039 .await;
3040 let language_server = pending_server.task.await?;
3041
3042 language_server
3043 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3044 let adapter = adapter.clone();
3045 let this = this.clone();
3046 move |mut params, mut cx| {
3047 let adapter = adapter.clone();
3048 if let Some(this) = this.upgrade() {
3049 adapter.process_diagnostics(&mut params);
3050 this.update(&mut cx, |this, cx| {
3051 this.update_diagnostics(
3052 server_id,
3053 params,
3054 &adapter.disk_based_diagnostic_sources,
3055 cx,
3056 )
3057 .log_err();
3058 })
3059 .ok();
3060 }
3061 }
3062 })
3063 .detach();
3064
3065 language_server
3066 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3067 let adapter = adapter.clone();
3068 let worktree_path = worktree_path.to_path_buf();
3069 move |params, cx| {
3070 let adapter = adapter.clone();
3071 let worktree_path = worktree_path.clone();
3072 async move {
3073 let workspace_config = cx
3074 .update(|cx| adapter.workspace_configuration(&worktree_path, cx))?
3075 .await;
3076 Ok(params
3077 .items
3078 .into_iter()
3079 .map(|item| {
3080 if let Some(section) = &item.section {
3081 workspace_config
3082 .get(section)
3083 .cloned()
3084 .unwrap_or(serde_json::Value::Null)
3085 } else {
3086 workspace_config.clone()
3087 }
3088 })
3089 .collect())
3090 }
3091 }
3092 })
3093 .detach();
3094
3095 // Even though we don't have handling for these requests, respond to them to
3096 // avoid stalling any language server like `gopls` which waits for a response
3097 // to these requests when initializing.
3098 language_server
3099 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3100 let this = this.clone();
3101 move |params, mut cx| {
3102 let this = this.clone();
3103 async move {
3104 this.update(&mut cx, |this, _| {
3105 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3106 {
3107 if let lsp::NumberOrString::String(token) = params.token {
3108 status.progress_tokens.insert(token);
3109 }
3110 }
3111 })?;
3112
3113 Ok(())
3114 }
3115 }
3116 })
3117 .detach();
3118
3119 language_server
3120 .on_request::<lsp::request::RegisterCapability, _, _>({
3121 let this = this.clone();
3122 move |params, mut cx| {
3123 let this = this.clone();
3124 async move {
3125 for reg in params.registrations {
3126 if reg.method == "workspace/didChangeWatchedFiles" {
3127 if let Some(options) = reg.register_options {
3128 let options = serde_json::from_value(options)?;
3129 this.update(&mut cx, |this, cx| {
3130 this.on_lsp_did_change_watched_files(
3131 server_id, options, cx,
3132 );
3133 })?;
3134 }
3135 }
3136 }
3137 Ok(())
3138 }
3139 }
3140 })
3141 .detach();
3142
3143 language_server
3144 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3145 let adapter = adapter.clone();
3146 let this = this.clone();
3147 move |params, cx| {
3148 Self::on_lsp_workspace_edit(
3149 this.clone(),
3150 params,
3151 server_id,
3152 adapter.clone(),
3153 cx,
3154 )
3155 }
3156 })
3157 .detach();
3158
3159 language_server
3160 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3161 let this = this.clone();
3162 move |(), mut cx| {
3163 let this = this.clone();
3164 async move {
3165 this.update(&mut cx, |project, cx| {
3166 cx.emit(Event::RefreshInlayHints);
3167 project.remote_id().map(|project_id| {
3168 project.client.send(proto::RefreshInlayHints { project_id })
3169 })
3170 })?
3171 .transpose()?;
3172 Ok(())
3173 }
3174 }
3175 })
3176 .detach();
3177
3178 let disk_based_diagnostics_progress_token =
3179 adapter.disk_based_diagnostics_progress_token.clone();
3180
3181 language_server
3182 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3183 if let Some(this) = this.upgrade() {
3184 this.update(&mut cx, |this, cx| {
3185 this.on_lsp_progress(
3186 params,
3187 server_id,
3188 disk_based_diagnostics_progress_token.clone(),
3189 cx,
3190 );
3191 })
3192 .ok();
3193 }
3194 })
3195 .detach();
3196 let mut initialization_options = adapter.adapter.initialization_options().await;
3197 match (&mut initialization_options, override_options) {
3198 (Some(initialization_options), Some(override_options)) => {
3199 merge_json_value_into(override_options, initialization_options);
3200 }
3201 (None, override_options) => initialization_options = override_options,
3202 _ => {}
3203 }
3204 let language_server = language_server.initialize(initialization_options).await?;
3205
3206 language_server
3207 .notify::<lsp::notification::DidChangeConfiguration>(
3208 lsp::DidChangeConfigurationParams {
3209 settings: workspace_config,
3210 },
3211 )
3212 .ok();
3213
3214 Ok(language_server)
3215 }
3216
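// Records a newly started language server as running and notifies it about every
// already-open buffer in the worktree whose language it serves.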
3217 fn insert_newly_running_language_server(
3218 &mut self,
3219 language: Arc<Language>,
3220 adapter: Arc<CachedLspAdapter>,
3221 language_server: Arc<LanguageServer>,
3222 server_id: LanguageServerId,
3223 key: (WorktreeId, LanguageServerName),
3224 cx: &mut ModelContext<Self>,
3225 ) -> Result<()> {
3226 // If the language server for this key doesn't match the server id, don't store the
3227 // server. This causes it to be dropped, killing the process.
3228 if self
3229 .language_server_ids
3230 .get(&key)
3231 .map(|id| id != &server_id)
3232 .unwrap_or(false)
3233 {
3234 return Ok(());
3235 }
3236
3237 // Update the language_servers collection with the Running variant of LanguageServerState,
3238 // indicating that the server is up and ready to receive requests.
3239 self.language_servers.insert(
3240 server_id,
3241 LanguageServerState::Running {
3242 adapter: adapter.clone(),
3243 language: language.clone(),
3244 watched_paths: Default::default(),
3245 server: language_server.clone(),
3246 simulate_disk_based_diagnostics_completion: None,
3247 },
3248 );
3249
3250 self.language_server_statuses.insert(
3251 server_id,
3252 LanguageServerStatus {
3253 name: language_server.name().to_string(),
3254 pending_work: Default::default(),
3255 has_pending_diagnostic_updates: false,
3256 progress_tokens: Default::default(),
3257 },
3258 );
3259
3260 cx.emit(Event::LanguageServerAdded(server_id));
3261
3262 if let Some(project_id) = self.remote_id() {
3263 self.client.send(proto::StartLanguageServer {
3264 project_id,
3265 server: Some(proto::LanguageServer {
3266 id: server_id.0 as u64,
3267 name: language_server.name().to_string(),
3268 }),
3269 })?;
3270 }
3271
3272 // Tell the language server about every open buffer in the worktree that matches the language.
3273 for buffer in self.opened_buffers.values() {
3274 if let Some(buffer_handle) = buffer.upgrade() {
3275 let buffer = buffer_handle.read(cx);
3276 let file = match File::from_dyn(buffer.file()) {
3277 Some(file) => file,
3278 None => continue,
3279 };
3280 let language = match buffer.language() {
3281 Some(language) => language,
3282 None => continue,
3283 };
3284
3285 if file.worktree.read(cx).id() != key.0
3286 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3287 {
3288 continue;
3289 }
3290
3291 let file = match file.as_local() {
3292 Some(file) => file,
3293 None => continue,
3294 };
3295
3296 let versions = self
3297 .buffer_snapshots
3298 .entry(buffer.remote_id())
3299 .or_default()
3300 .entry(server_id)
3301 .or_insert_with(|| {
3302 vec![LspBufferSnapshot {
3303 version: 0,
3304 snapshot: buffer.text_snapshot(),
3305 }]
3306 });
3307
3308 let snapshot = versions.last().unwrap();
3309 let version = snapshot.version;
3310 let initial_snapshot = &snapshot.snapshot;
3311 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3312 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3313 lsp::DidOpenTextDocumentParams {
3314 text_document: lsp::TextDocumentItem::new(
3315 uri,
3316 adapter
3317 .language_ids
3318 .get(language.name().as_ref())
3319 .cloned()
3320 .unwrap_or_default(),
3321 version,
3322 initial_snapshot.text(),
3323 ),
3324 },
3325 )?;
3326
3327 buffer_handle.update(cx, |buffer, cx| {
3328 buffer.set_completion_triggers(
3329 language_server
3330 .capabilities()
3331 .completion_provider
3332 .as_ref()
3333 .and_then(|provider| provider.trigger_characters.clone())
3334 .unwrap_or_default(),
3335 cx,
3336 )
3337 });
3338 }
3339 }
3340
3341 cx.notify();
3342 Ok(())
3343 }
3344
3345 // Returns the root path of the stopped server, along with the ids of all worktrees
3346 // that no longer have a language server as a result of stopping it
3347 fn stop_language_server(
3348 &mut self,
3349 worktree_id: WorktreeId,
3350 adapter_name: LanguageServerName,
3351 cx: &mut ModelContext<Self>,
3352 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
3353 let key = (worktree_id, adapter_name);
3354 if let Some(server_id) = self.language_server_ids.remove(&key) {
3355 log::info!("stopping language server {}", key.1 .0);
3356
3357 // Remove other entries for this language server as well
3358 let mut orphaned_worktrees = vec![worktree_id];
3359 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3360 for other_key in other_keys {
3361 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3362 self.language_server_ids.remove(&other_key);
3363 orphaned_worktrees.push(other_key.0);
3364 }
3365 }
3366
3367 for buffer in self.opened_buffers.values() {
3368 if let Some(buffer) = buffer.upgrade() {
3369 buffer.update(cx, |buffer, cx| {
3370 buffer.update_diagnostics(server_id, Default::default(), cx);
3371 });
3372 }
3373 }
3374 for worktree in &self.worktrees {
3375 if let Some(worktree) = worktree.upgrade() {
3376 worktree.update(cx, |worktree, cx| {
3377 if let Some(worktree) = worktree.as_local_mut() {
3378 worktree.clear_diagnostics_for_language_server(server_id, cx);
3379 }
3380 });
3381 }
3382 }
3383
3384 self.language_server_statuses.remove(&server_id);
3385 cx.notify();
3386
3387 let server_state = self.language_servers.remove(&server_id);
3388 cx.emit(Event::LanguageServerRemoved(server_id));
3389 cx.spawn(move |this, mut cx| async move {
3390 let mut root_path = None;
3391
3392 let server = match server_state {
3393 Some(LanguageServerState::Starting(task)) => task.await,
3394 Some(LanguageServerState::Running { server, .. }) => Some(server),
3395 None => None,
3396 };
3397
3398 if let Some(server) = server {
3399 root_path = Some(server.root_path().clone());
3400 if let Some(shutdown) = server.shutdown() {
3401 shutdown.await;
3402 }
3403 }
3404
3405 if let Some(this) = this.upgrade() {
3406 this.update(&mut cx, |this, cx| {
3407 this.language_server_statuses.remove(&server_id);
3408 cx.notify();
3409 })
3410 .ok();
3411 }
3412
3413 (root_path, orphaned_worktrees)
3414 })
3415 } else {
3416 Task::ready((None, Vec::new()))
3417 }
3418 }
3419
3420 pub fn restart_language_servers_for_buffers(
3421 &mut self,
3422 buffers: impl IntoIterator<Item = Model<Buffer>>,
3423 cx: &mut ModelContext<Self>,
3424 ) -> Option<()> {
3425 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3426 .into_iter()
3427 .filter_map(|buffer| {
3428 let buffer = buffer.read(cx);
3429 let file = File::from_dyn(buffer.file())?;
3430 let full_path = file.full_path(cx);
3431 let language = self
3432 .languages
3433 .language_for_file(&full_path, Some(buffer.as_rope()))
3434 .now_or_never()?
3435 .ok()?;
3436 Some((file.worktree.clone(), language))
3437 })
3438 .collect();
3439 for (worktree, language) in language_server_lookup_info {
3440 self.restart_language_servers(worktree, language, cx);
3441 }
3442
3443 None
3444 }
3445
3446 // TODO This will break in the case where the adapter's root paths and worktrees are not equal
3447 fn restart_language_servers(
3448 &mut self,
3449 worktree: Model<Worktree>,
3450 language: Arc<Language>,
3451 cx: &mut ModelContext<Self>,
3452 ) {
3453 let worktree_id = worktree.read(cx).id();
3454 let fallback_path = worktree.read(cx).abs_path();
3455
3456 let mut stops = Vec::new();
3457 for adapter in language.lsp_adapters() {
3458 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
3459 }
3460
3461 if stops.is_empty() {
3462 return;
3463 }
3464 let mut stops = stops.into_iter();
3465
3466 cx.spawn(move |this, mut cx| async move {
3467 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
3468 for stop in stops {
3469 let (_, worktrees) = stop.await;
3470 orphaned_worktrees.extend_from_slice(&worktrees);
3471 }
3472
3473 let this = match this.upgrade() {
3474 Some(this) => this,
3475 None => return,
3476 };
3477
3478 this.update(&mut cx, |this, cx| {
3479 // Attempt to restart using the original server path. Fall back to the passed-in
3480 // path if we could not retrieve the root path
3481 let root_path = original_root_path
3482 .map(|path_buf| Arc::from(path_buf.as_path()))
3483 .unwrap_or(fallback_path);
3484
3485 this.start_language_servers(&worktree, root_path, language.clone(), cx);
3486
3487 // Look up the new server ids and set them for each of the orphaned worktrees
3488 for adapter in language.lsp_adapters() {
3489 if let Some(new_server_id) = this
3490 .language_server_ids
3491 .get(&(worktree_id, adapter.name.clone()))
3492 .cloned()
3493 {
3494 for &orphaned_worktree in &orphaned_worktrees {
3495 this.language_server_ids
3496 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
3497 }
3498 }
3499 }
3500 })
3501 .ok();
3502 })
3503 .detach();
3504 }
3505
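// Runs the adapter's installation test binary to decide whether a failed server
// should be reinstalled. A missing binary or a non-zero exit status counts as a
// failure, while a timeout is treated as success.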
3506 fn check_errored_server(
3507 language: Arc<Language>,
3508 adapter: Arc<CachedLspAdapter>,
3509 server_id: LanguageServerId,
3510 installation_test_binary: Option<LanguageServerBinary>,
3511 cx: &mut ModelContext<Self>,
3512 ) {
3513 if !adapter.can_be_reinstalled() {
3514 log::info!(
3515 "Validation check requested for {:?} but it cannot be reinstalled",
3516 adapter.name.0
3517 );
3518 return;
3519 }
3520
3521 cx.spawn(move |this, mut cx| async move {
3522 log::info!("About to spawn test binary");
3523
3524 // A lack of test binary counts as a failure
3525 let process = installation_test_binary.and_then(|binary| {
3526 smol::process::Command::new(&binary.path)
3527 .current_dir(&binary.path)
3528 .args(binary.arguments)
3529 .stdin(Stdio::piped())
3530 .stdout(Stdio::piped())
3531 .stderr(Stdio::inherit())
3532 .kill_on_drop(true)
3533 .spawn()
3534 .ok()
3535 });
3536
3537 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3538 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3539
3540 let mut errored = false;
3541 if let Some(mut process) = process {
3542 futures::select! {
3543 status = process.status().fuse() => match status {
3544 Ok(status) => errored = !status.success(),
3545 Err(_) => errored = true,
3546 },
3547
3548 _ = timeout => {
3549 log::info!("test binary timed out; this counts as a success");
3550 _ = process.kill();
3551 }
3552 }
3553 } else {
3554 log::warn!("test binary failed to launch");
3555 errored = true;
3556 }
3557
3558 if errored {
3559 log::warn!("test binary check failed");
3560 let task = this
3561 .update(&mut cx, move |this, mut cx| {
3562 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3563 })
3564 .ok()
3565 .flatten();
3566
3567 if let Some(task) = task {
3568 task.await;
3569 }
3570 }
3571 })
3572 .detach();
3573 }
3574
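// Translates `$/progress` notifications into local status updates and forwarded
// protobuf messages. Progress on the adapter's disk-based diagnostics token marks
// the start and end of a disk-based diagnostics pass.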
3575 fn on_lsp_progress(
3576 &mut self,
3577 progress: lsp::ProgressParams,
3578 language_server_id: LanguageServerId,
3579 disk_based_diagnostics_progress_token: Option<String>,
3580 cx: &mut ModelContext<Self>,
3581 ) {
3582 let token = match progress.token {
3583 lsp::NumberOrString::String(token) => token,
3584 lsp::NumberOrString::Number(token) => {
3585 log::info!("skipping numeric progress token {}", token);
3586 return;
3587 }
3588 };
3589 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3590 let language_server_status =
3591 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3592 status
3593 } else {
3594 return;
3595 };
3596
3597 if !language_server_status.progress_tokens.contains(&token) {
3598 return;
3599 }
3600
3601 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3602 .as_ref()
3603 .map_or(false, |disk_based_token| {
3604 token.starts_with(disk_based_token)
3605 });
3606
3607 match progress {
3608 lsp::WorkDoneProgress::Begin(report) => {
3609 if is_disk_based_diagnostics_progress {
3610 language_server_status.has_pending_diagnostic_updates = true;
3611 self.disk_based_diagnostics_started(language_server_id, cx);
3612 self.buffer_ordered_messages_tx
3613 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3614 language_server_id,
3615 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3616 })
3617 .ok();
3618 } else {
3619 self.on_lsp_work_start(
3620 language_server_id,
3621 token.clone(),
3622 LanguageServerProgress {
3623 message: report.message.clone(),
3624 percentage: report.percentage.map(|p| p as usize),
3625 last_update_at: Instant::now(),
3626 },
3627 cx,
3628 );
3629 self.buffer_ordered_messages_tx
3630 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3631 language_server_id,
3632 message: proto::update_language_server::Variant::WorkStart(
3633 proto::LspWorkStart {
3634 token,
3635 message: report.message,
3636 percentage: report.percentage.map(|p| p as u32),
3637 },
3638 ),
3639 })
3640 .ok();
3641 }
3642 }
3643 lsp::WorkDoneProgress::Report(report) => {
3644 if !is_disk_based_diagnostics_progress {
3645 self.on_lsp_work_progress(
3646 language_server_id,
3647 token.clone(),
3648 LanguageServerProgress {
3649 message: report.message.clone(),
3650 percentage: report.percentage.map(|p| p as usize),
3651 last_update_at: Instant::now(),
3652 },
3653 cx,
3654 );
3655 self.buffer_ordered_messages_tx
3656 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3657 language_server_id,
3658 message: proto::update_language_server::Variant::WorkProgress(
3659 proto::LspWorkProgress {
3660 token,
3661 message: report.message,
3662 percentage: report.percentage.map(|p| p as u32),
3663 },
3664 ),
3665 })
3666 .ok();
3667 }
3668 }
3669 lsp::WorkDoneProgress::End(_) => {
3670 language_server_status.progress_tokens.remove(&token);
3671
3672 if is_disk_based_diagnostics_progress {
3673 language_server_status.has_pending_diagnostic_updates = false;
3674 self.disk_based_diagnostics_finished(language_server_id, cx);
3675 self.buffer_ordered_messages_tx
3676 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3677 language_server_id,
3678 message:
3679 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3680 Default::default(),
3681 ),
3682 })
3683 .ok();
3684 } else {
3685 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3686 self.buffer_ordered_messages_tx
3687 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3688 language_server_id,
3689 message: proto::update_language_server::Variant::WorkEnd(
3690 proto::LspWorkEnd { token },
3691 ),
3692 })
3693 .ok();
3694 }
3695 }
3696 }
3697 }
3698
3699 fn on_lsp_work_start(
3700 &mut self,
3701 language_server_id: LanguageServerId,
3702 token: String,
3703 progress: LanguageServerProgress,
3704 cx: &mut ModelContext<Self>,
3705 ) {
3706 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3707 status.pending_work.insert(token, progress);
3708 cx.notify();
3709 }
3710 }
3711
3712 fn on_lsp_work_progress(
3713 &mut self,
3714 language_server_id: LanguageServerId,
3715 token: String,
3716 progress: LanguageServerProgress,
3717 cx: &mut ModelContext<Self>,
3718 ) {
3719 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3720 let entry = status
3721 .pending_work
3722 .entry(token)
3723 .or_insert(LanguageServerProgress {
3724 message: Default::default(),
3725 percentage: Default::default(),
3726 last_update_at: progress.last_update_at,
3727 });
3728 if progress.message.is_some() {
3729 entry.message = progress.message;
3730 }
3731 if progress.percentage.is_some() {
3732 entry.percentage = progress.percentage;
3733 }
3734 entry.last_update_at = progress.last_update_at;
3735 cx.notify();
3736 }
3737 }
3738
3739 fn on_lsp_work_end(
3740 &mut self,
3741 language_server_id: LanguageServerId,
3742 token: String,
3743 cx: &mut ModelContext<Self>,
3744 ) {
3745 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3746 cx.emit(Event::RefreshInlayHints);
3747 status.pending_work.remove(&token);
3748 cx.notify();
3749 }
3750 }
3751
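// Handles a `workspace/didChangeWatchedFiles` registration by converting each glob
// pattern that falls inside a worktree into a per-worktree glob set stored on the
// running server's `watched_paths`.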
3752 fn on_lsp_did_change_watched_files(
3753 &mut self,
3754 language_server_id: LanguageServerId,
3755 params: DidChangeWatchedFilesRegistrationOptions,
3756 cx: &mut ModelContext<Self>,
3757 ) {
3758 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3759 self.language_servers.get_mut(&language_server_id)
3760 {
3761 let mut builders = HashMap::default();
3762 for watcher in params.watchers {
3763 for worktree in &self.worktrees {
3764 if let Some(worktree) = worktree.upgrade() {
3765 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3766 if let Some(abs_path) = tree.abs_path().to_str() {
3767 let relative_glob_pattern = match &watcher.glob_pattern {
3768 lsp::GlobPattern::String(s) => s
3769 .strip_prefix(abs_path)
3770 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3771 lsp::GlobPattern::Relative(rp) => {
3772 let base_uri = match &rp.base_uri {
3773 lsp::OneOf::Left(workspace_folder) => {
3774 &workspace_folder.uri
3775 }
3776 lsp::OneOf::Right(base_uri) => base_uri,
3777 };
3778 base_uri.to_file_path().ok().and_then(|file_path| {
3779 (file_path.to_str() == Some(abs_path))
3780 .then_some(rp.pattern.as_str())
3781 })
3782 }
3783 };
3784 if let Some(relative_glob_pattern) = relative_glob_pattern {
3785 let literal_prefix =
3786 glob_literal_prefix(&relative_glob_pattern);
3787 tree.as_local_mut()
3788 .unwrap()
3789 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3790 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3791 builders
3792 .entry(tree.id())
3793 .or_insert_with(|| GlobSetBuilder::new())
3794 .add(glob);
3795 }
3796 return true;
3797 }
3798 }
3799 false
3800 });
3801 if glob_is_inside_worktree {
3802 break;
3803 }
3804 }
3805 }
3806 }
3807
3808 watched_paths.clear();
3809 for (worktree_id, builder) in builders {
3810 if let Ok(globset) = builder.build() {
3811 watched_paths.insert(worktree_id, globset);
3812 }
3813 }
3814
3815 cx.notify();
3816 }
3817 }
3818
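    // Handle a `workspace/applyEdit` request from a language server, recording the resulting
    // transaction in `last_workspace_edits_by_language_server` so that the operation which
    // triggered it (e.g. `apply_code_action`) can retrieve it.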
3819 async fn on_lsp_workspace_edit(
3820 this: WeakModel<Self>,
3821 params: lsp::ApplyWorkspaceEditParams,
3822 server_id: LanguageServerId,
3823 adapter: Arc<CachedLspAdapter>,
3824 mut cx: AsyncAppContext,
3825 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3826 let this = this
3827 .upgrade()
3828             .ok_or_else(|| anyhow!("project closed"))?;
3829 let language_server = this
3830 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3831 .ok_or_else(|| anyhow!("language server not found"))?;
3832 let transaction = Self::deserialize_workspace_edit(
3833 this.clone(),
3834 params.edit,
3835 true,
3836 adapter.clone(),
3837 language_server.clone(),
3838 &mut cx,
3839 )
3840 .await
3841 .log_err();
3842 this.update(&mut cx, |this, _| {
3843 if let Some(transaction) = transaction {
3844 this.last_workspace_edits_by_language_server
3845 .insert(server_id, transaction);
3846 }
3847 })?;
3848 Ok(lsp::ApplyWorkspaceEditResponse {
3849 applied: true,
3850 failed_change: None,
3851 failure_reason: None,
3852 })
3853 }
3854
3855 pub fn language_server_statuses(
3856 &self,
3857 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3858 self.language_server_statuses.values()
3859 }
3860
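    // Convert an LSP `textDocument/publishDiagnostics` notification into grouped
    // `DiagnosticEntry` values, pairing each primary diagnostic with its related information,
    // and record them for the corresponding path.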
3861 pub fn update_diagnostics(
3862 &mut self,
3863 language_server_id: LanguageServerId,
3864 mut params: lsp::PublishDiagnosticsParams,
3865 disk_based_sources: &[String],
3866 cx: &mut ModelContext<Self>,
3867 ) -> Result<()> {
3868 let abs_path = params
3869 .uri
3870 .to_file_path()
3871 .map_err(|_| anyhow!("URI is not a file"))?;
3872 let mut diagnostics = Vec::default();
3873 let mut primary_diagnostic_group_ids = HashMap::default();
3874 let mut sources_by_group_id = HashMap::default();
3875 let mut supporting_diagnostics = HashMap::default();
3876
3877 // Ensure that primary diagnostics are always the most severe
3878 params.diagnostics.sort_by_key(|item| item.severity);
3879
3880         for diagnostic in &params.diagnostics {
3881 let source = diagnostic.source.as_ref();
3882 let code = diagnostic.code.as_ref().map(|code| match code {
3883 lsp::NumberOrString::Number(code) => code.to_string(),
3884 lsp::NumberOrString::String(code) => code.clone(),
3885 });
3886 let range = range_from_lsp(diagnostic.range);
3887 let is_supporting = diagnostic
3888 .related_information
3889 .as_ref()
3890 .map_or(false, |infos| {
3891 infos.iter().any(|info| {
3892 primary_diagnostic_group_ids.contains_key(&(
3893 source,
3894 code.clone(),
3895 range_from_lsp(info.location.range),
3896 ))
3897 })
3898 });
3899
3900 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3901 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3902 });
3903
3904 if is_supporting {
3905 supporting_diagnostics.insert(
3906 (source, code.clone(), range),
3907 (diagnostic.severity, is_unnecessary),
3908 );
3909 } else {
3910 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3911 let is_disk_based =
3912 source.map_or(false, |source| disk_based_sources.contains(source));
3913
3914 sources_by_group_id.insert(group_id, source);
3915 primary_diagnostic_group_ids
3916 .insert((source, code.clone(), range.clone()), group_id);
3917
3918 diagnostics.push(DiagnosticEntry {
3919 range,
3920 diagnostic: Diagnostic {
3921 source: diagnostic.source.clone(),
3922 code: code.clone(),
3923 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3924 message: diagnostic.message.clone(),
3925 group_id,
3926 is_primary: true,
3927 is_valid: true,
3928 is_disk_based,
3929 is_unnecessary,
3930 },
3931 });
3932 if let Some(infos) = &diagnostic.related_information {
3933 for info in infos {
3934 if info.location.uri == params.uri && !info.message.is_empty() {
3935 let range = range_from_lsp(info.location.range);
3936 diagnostics.push(DiagnosticEntry {
3937 range,
3938 diagnostic: Diagnostic {
3939 source: diagnostic.source.clone(),
3940 code: code.clone(),
3941 severity: DiagnosticSeverity::INFORMATION,
3942 message: info.message.clone(),
3943 group_id,
3944 is_primary: false,
3945 is_valid: true,
3946 is_disk_based,
3947 is_unnecessary: false,
3948 },
3949 });
3950 }
3951 }
3952 }
3953 }
3954 }
3955
3956 for entry in &mut diagnostics {
3957 let diagnostic = &mut entry.diagnostic;
3958 if !diagnostic.is_primary {
3959 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3960 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3961 source,
3962 diagnostic.code.clone(),
3963 entry.range.clone(),
3964 )) {
3965 if let Some(severity) = severity {
3966 diagnostic.severity = severity;
3967 }
3968 diagnostic.is_unnecessary = is_unnecessary;
3969 }
3970 }
3971 }
3972
3973 self.update_diagnostic_entries(
3974 language_server_id,
3975 abs_path,
3976 params.version,
3977 diagnostics,
3978 cx,
3979 )?;
3980 Ok(())
3981 }
3982
3983 pub fn update_diagnostic_entries(
3984 &mut self,
3985 server_id: LanguageServerId,
3986 abs_path: PathBuf,
3987 version: Option<i32>,
3988 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3989 cx: &mut ModelContext<Project>,
3990 ) -> Result<(), anyhow::Error> {
3991 let (worktree, relative_path) = self
3992 .find_local_worktree(&abs_path, cx)
3993 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3994
3995 let project_path = ProjectPath {
3996 worktree_id: worktree.read(cx).id(),
3997 path: relative_path.into(),
3998 };
3999
4000 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
4001 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
4002 }
4003
4004 let updated = worktree.update(cx, |worktree, cx| {
4005 worktree
4006 .as_local_mut()
4007 .ok_or_else(|| anyhow!("not a local worktree"))?
4008 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
4009 })?;
4010 if updated {
4011 cx.emit(Event::DiagnosticsUpdated {
4012 language_server_id: server_id,
4013 path: project_path,
4014 });
4015 }
4016 Ok(())
4017 }
4018
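    // Resolve diagnostics against the buffer snapshot that matches the reported LSP version,
    // adjusting disk-based diagnostics for unsaved edits and clipping ranges to valid positions
    // before storing them on the buffer.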
4019 fn update_buffer_diagnostics(
4020 &mut self,
4021 buffer: &Model<Buffer>,
4022 server_id: LanguageServerId,
4023 version: Option<i32>,
4024 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4025 cx: &mut ModelContext<Self>,
4026 ) -> Result<()> {
4027 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4028 Ordering::Equal
4029 .then_with(|| b.is_primary.cmp(&a.is_primary))
4030 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4031 .then_with(|| a.severity.cmp(&b.severity))
4032 .then_with(|| a.message.cmp(&b.message))
4033 }
4034
4035 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4036
4037 diagnostics.sort_unstable_by(|a, b| {
4038 Ordering::Equal
4039 .then_with(|| a.range.start.cmp(&b.range.start))
4040 .then_with(|| b.range.end.cmp(&a.range.end))
4041 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4042 });
4043
4044 let mut sanitized_diagnostics = Vec::new();
4045 let edits_since_save = Patch::new(
4046 snapshot
4047 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4048 .collect(),
4049 );
4050 for entry in diagnostics {
4051 let start;
4052 let end;
4053 if entry.diagnostic.is_disk_based {
4054 // Some diagnostics are based on files on disk instead of buffers'
4055 // current contents. Adjust these diagnostics' ranges to reflect
4056 // any unsaved edits.
4057 start = edits_since_save.old_to_new(entry.range.start);
4058 end = edits_since_save.old_to_new(entry.range.end);
4059 } else {
4060 start = entry.range.start;
4061 end = entry.range.end;
4062 }
4063
4064 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4065 ..snapshot.clip_point_utf16(end, Bias::Right);
4066
4067 // Expand empty ranges by one codepoint
4068 if range.start == range.end {
4069                 // This will go to the next boundary when being clipped
4070 range.end.column += 1;
4071 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4072 if range.start == range.end && range.end.column > 0 {
4073 range.start.column -= 1;
4074 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
4075 }
4076 }
4077
4078 sanitized_diagnostics.push(DiagnosticEntry {
4079 range,
4080 diagnostic: entry.diagnostic,
4081 });
4082 }
4083 drop(edits_since_save);
4084
4085 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4086 buffer.update(cx, |buffer, cx| {
4087 buffer.update_diagnostics(server_id, set, cx)
4088 });
4089 Ok(())
4090 }
4091
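    // Reload the dirty buffers among `buffers` from disk. Remote buffers are reloaded via a
    // request to the host; local buffers are reloaded directly. The resulting transactions are
    // collected into a single `ProjectTransaction`.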
4092 pub fn reload_buffers(
4093 &self,
4094 buffers: HashSet<Model<Buffer>>,
4095 push_to_history: bool,
4096 cx: &mut ModelContext<Self>,
4097 ) -> Task<Result<ProjectTransaction>> {
4098 let mut local_buffers = Vec::new();
4099 let mut remote_buffers = None;
4100 for buffer_handle in buffers {
4101 let buffer = buffer_handle.read(cx);
4102 if buffer.is_dirty() {
4103 if let Some(file) = File::from_dyn(buffer.file()) {
4104 if file.is_local() {
4105 local_buffers.push(buffer_handle);
4106 } else {
4107 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4108 }
4109 }
4110 }
4111 }
4112
4113 let remote_buffers = self.remote_id().zip(remote_buffers);
4114 let client = self.client.clone();
4115
4116 cx.spawn(move |this, mut cx| async move {
4117 let mut project_transaction = ProjectTransaction::default();
4118
4119 if let Some((project_id, remote_buffers)) = remote_buffers {
4120 let response = client
4121 .request(proto::ReloadBuffers {
4122 project_id,
4123 buffer_ids: remote_buffers
4124 .iter()
4125 .filter_map(|buffer| {
4126 buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
4127 })
4128 .collect(),
4129 })
4130 .await?
4131 .transaction
4132 .ok_or_else(|| anyhow!("missing transaction"))?;
4133 project_transaction = this
4134 .update(&mut cx, |this, cx| {
4135 this.deserialize_project_transaction(response, push_to_history, cx)
4136 })?
4137 .await?;
4138 }
4139
4140 for buffer in local_buffers {
4141 let transaction = buffer
4142 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4143 .await?;
4144 buffer.update(&mut cx, |buffer, cx| {
4145 if let Some(transaction) = transaction {
4146 if !push_to_history {
4147 buffer.forget_transaction(transaction.id);
4148 }
4149 project_transaction.0.insert(cx.handle(), transaction);
4150 }
4151 })?;
4152 }
4153
4154 Ok(project_transaction)
4155 })
4156 }
4157
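    // Format the given buffers. Locally, whitespace fixups (trailing whitespace, final newline)
    // are applied first, followed by language-server, external-command, or prettier formatting,
    // with both steps grouped into one undo transaction. Remote projects forward the request to
    // the host.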
4158 pub fn format(
4159 &mut self,
4160 buffers: HashSet<Model<Buffer>>,
4161 push_to_history: bool,
4162 trigger: FormatTrigger,
4163 cx: &mut ModelContext<Project>,
4164 ) -> Task<anyhow::Result<ProjectTransaction>> {
4165 if self.is_local() {
4166 let mut buffers_with_paths_and_servers = buffers
4167 .into_iter()
4168 .filter_map(|buffer_handle| {
4169 let buffer = buffer_handle.read(cx);
4170 let file = File::from_dyn(buffer.file())?;
4171 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4172 let server = self
4173 .primary_language_server_for_buffer(buffer, cx)
4174 .map(|s| s.1.clone());
4175 Some((buffer_handle, buffer_abs_path, server))
4176 })
4177 .collect::<Vec<_>>();
4178
4179 cx.spawn(move |project, mut cx| async move {
4180 // Do not allow multiple concurrent formatting requests for the
4181 // same buffer.
4182 project.update(&mut cx, |this, cx| {
4183 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4184 this.buffers_being_formatted
4185 .insert(buffer.read(cx).remote_id())
4186 });
4187 })?;
4188
4189 let _cleanup = defer({
4190 let this = project.clone();
4191 let mut cx = cx.clone();
4192 let buffers = &buffers_with_paths_and_servers;
4193 move || {
4194 this.update(&mut cx, |this, cx| {
4195 for (buffer, _, _) in buffers {
4196 this.buffers_being_formatted
4197 .remove(&buffer.read(cx).remote_id());
4198 }
4199 })
4200 .ok();
4201 }
4202 });
4203
4204 let mut project_transaction = ProjectTransaction::default();
4205 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4206 let settings = buffer.update(&mut cx, |buffer, cx| {
4207 language_settings(buffer.language(), buffer.file(), cx).clone()
4208 })?;
4209
4210 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4211 let ensure_final_newline = settings.ensure_final_newline_on_save;
4212 let tab_size = settings.tab_size;
4213
4214                 // First, format the buffer's whitespace according to the settings.
4215 let trailing_whitespace_diff = if remove_trailing_whitespace {
4216 Some(
4217 buffer
4218 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4219 .await,
4220 )
4221 } else {
4222 None
4223 };
4224 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4225 buffer.finalize_last_transaction();
4226 buffer.start_transaction();
4227 if let Some(diff) = trailing_whitespace_diff {
4228 buffer.apply_diff(diff, cx);
4229 }
4230 if ensure_final_newline {
4231 buffer.ensure_final_newline(cx);
4232 }
4233 buffer.end_transaction(cx)
4234 })?;
4235
4236 // Apply language-specific formatting using either a language server
4237 // or external command.
4238 let mut format_operation = None;
4239 match (&settings.formatter, &settings.format_on_save) {
4240 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4241
4242 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4243 | (_, FormatOnSave::LanguageServer) => {
4244 if let Some((language_server, buffer_abs_path)) =
4245 language_server.as_ref().zip(buffer_abs_path.as_ref())
4246 {
4247 format_operation = Some(FormatOperation::Lsp(
4248 Self::format_via_lsp(
4249 &project,
4250 &buffer,
4251 buffer_abs_path,
4252 &language_server,
4253 tab_size,
4254 &mut cx,
4255 )
4256 .await
4257 .context("failed to format via language server")?,
4258 ));
4259 }
4260 }
4261
4262 (
4263 Formatter::External { command, arguments },
4264 FormatOnSave::On | FormatOnSave::Off,
4265 )
4266 | (_, FormatOnSave::External { command, arguments }) => {
4267 if let Some(buffer_abs_path) = buffer_abs_path {
4268 format_operation = Self::format_via_external_command(
4269 buffer,
4270 buffer_abs_path,
4271 &command,
4272 &arguments,
4273 &mut cx,
4274 )
4275 .await
4276 .context(format!(
4277 "failed to format via external command {:?}",
4278 command
4279 ))?
4280 .map(FormatOperation::External);
4281 }
4282 }
4283 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4284 if let Some(new_operation) =
4285 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4286 .await
4287 {
4288 format_operation = Some(new_operation);
4289 } else if let Some((language_server, buffer_abs_path)) =
4290 language_server.as_ref().zip(buffer_abs_path.as_ref())
4291 {
4292 format_operation = Some(FormatOperation::Lsp(
4293 Self::format_via_lsp(
4294 &project,
4295 &buffer,
4296 buffer_abs_path,
4297 &language_server,
4298 tab_size,
4299 &mut cx,
4300 )
4301 .await
4302 .context("failed to format via language server")?,
4303 ));
4304 }
4305 }
4306 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4307 if let Some(new_operation) =
4308 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4309 .await
4310 {
4311 format_operation = Some(new_operation);
4312 }
4313 }
4314 };
4315
4316 buffer.update(&mut cx, |b, cx| {
4317 // If the buffer had its whitespace formatted and was edited while the language-specific
4318 // formatting was being computed, avoid applying the language-specific formatting, because
4319 // it can't be grouped with the whitespace formatting in the undo history.
4320 if let Some(transaction_id) = whitespace_transaction_id {
4321 if b.peek_undo_stack()
4322 .map_or(true, |e| e.transaction_id() != transaction_id)
4323 {
4324 format_operation.take();
4325 }
4326 }
4327
4328 // Apply any language-specific formatting, and group the two formatting operations
4329 // in the buffer's undo history.
4330 if let Some(operation) = format_operation {
4331 match operation {
4332 FormatOperation::Lsp(edits) => {
4333 b.edit(edits, None, cx);
4334 }
4335 FormatOperation::External(diff) => {
4336 b.apply_diff(diff, cx);
4337 }
4338 FormatOperation::Prettier(diff) => {
4339 b.apply_diff(diff, cx);
4340 }
4341 }
4342
4343 if let Some(transaction_id) = whitespace_transaction_id {
4344 b.group_until_transaction(transaction_id);
4345 }
4346 }
4347
4348 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4349 if !push_to_history {
4350 b.forget_transaction(transaction.id);
4351 }
4352 project_transaction.0.insert(buffer.clone(), transaction);
4353 }
4354 })?;
4355 }
4356
4357 Ok(project_transaction)
4358 })
4359 } else {
4360 let remote_id = self.remote_id();
4361 let client = self.client.clone();
4362 cx.spawn(move |this, mut cx| async move {
4363 let mut project_transaction = ProjectTransaction::default();
4364 if let Some(project_id) = remote_id {
4365 let response = client
4366 .request(proto::FormatBuffers {
4367 project_id,
4368 trigger: trigger as i32,
4369 buffer_ids: buffers
4370 .iter()
4371 .map(|buffer| {
4372 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4373 })
4374 .collect::<Result<_>>()?,
4375 })
4376 .await?
4377 .transaction
4378 .ok_or_else(|| anyhow!("missing transaction"))?;
4379 project_transaction = this
4380 .update(&mut cx, |this, cx| {
4381 this.deserialize_project_transaction(response, push_to_history, cx)
4382 })?
4383 .await?;
4384 }
4385 Ok(project_transaction)
4386 })
4387 }
4388 }
4389
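    // Request formatting from a language server, preferring `textDocument/formatting` and
    // falling back to `textDocument/rangeFormatting` over the whole buffer when only range
    // formatting is supported.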
4390 async fn format_via_lsp(
4391 this: &WeakModel<Self>,
4392 buffer: &Model<Buffer>,
4393 abs_path: &Path,
4394 language_server: &Arc<LanguageServer>,
4395 tab_size: NonZeroU32,
4396 cx: &mut AsyncAppContext,
4397 ) -> Result<Vec<(Range<Anchor>, String)>> {
4398 let uri = lsp::Url::from_file_path(abs_path)
4399 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4400 let text_document = lsp::TextDocumentIdentifier::new(uri);
4401 let capabilities = &language_server.capabilities();
4402
4403 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4404 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4405
4406 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4407 language_server
4408 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4409 text_document,
4410 options: lsp_command::lsp_formatting_options(tab_size.get()),
4411 work_done_progress_params: Default::default(),
4412 })
4413 .await?
4414 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4415 let buffer_start = lsp::Position::new(0, 0);
4416 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4417
4418 language_server
4419 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4420 text_document,
4421 range: lsp::Range::new(buffer_start, buffer_end),
4422 options: lsp_command::lsp_formatting_options(tab_size.get()),
4423 work_done_progress_params: Default::default(),
4424 })
4425 .await?
4426 } else {
4427 None
4428 };
4429
4430 if let Some(lsp_edits) = lsp_edits {
4431 this.update(cx, |this, cx| {
4432 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4433 })?
4434 .await
4435 } else {
4436 Ok(Vec::new())
4437 }
4438 }
4439
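    // Format by piping the buffer's contents into an external command (substituting
    // `{buffer_path}` in its arguments) and diffing the command's stdout against the buffer.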
4440 async fn format_via_external_command(
4441 buffer: &Model<Buffer>,
4442 buffer_abs_path: &Path,
4443 command: &str,
4444 arguments: &[String],
4445 cx: &mut AsyncAppContext,
4446 ) -> Result<Option<Diff>> {
4447 let working_dir_path = buffer.update(cx, |buffer, cx| {
4448 let file = File::from_dyn(buffer.file())?;
4449 let worktree = file.worktree.read(cx).as_local()?;
4450 let mut worktree_path = worktree.abs_path().to_path_buf();
4451 if worktree.root_entry()?.is_file() {
4452 worktree_path.pop();
4453 }
4454 Some(worktree_path)
4455 })?;
4456
4457 if let Some(working_dir_path) = working_dir_path {
4458 let mut child =
4459 smol::process::Command::new(command)
4460 .args(arguments.iter().map(|arg| {
4461 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4462 }))
4463 .current_dir(&working_dir_path)
4464 .stdin(smol::process::Stdio::piped())
4465 .stdout(smol::process::Stdio::piped())
4466 .stderr(smol::process::Stdio::piped())
4467 .spawn()?;
4468 let stdin = child
4469 .stdin
4470 .as_mut()
4471 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4472 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4473 for chunk in text.chunks() {
4474 stdin.write_all(chunk.as_bytes()).await?;
4475 }
4476 stdin.flush().await?;
4477
4478 let output = child.output().await?;
4479 if !output.status.success() {
4480 return Err(anyhow!(
4481 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4482 output.status.code(),
4483 String::from_utf8_lossy(&output.stdout),
4484 String::from_utf8_lossy(&output.stderr),
4485 ));
4486 }
4487
4488 let stdout = String::from_utf8(output.stdout)?;
4489 Ok(Some(
4490 buffer
4491 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4492 .await,
4493 ))
4494 } else {
4495 Ok(None)
4496 }
4497 }
4498
4499 pub fn definition<T: ToPointUtf16>(
4500 &self,
4501 buffer: &Model<Buffer>,
4502 position: T,
4503 cx: &mut ModelContext<Self>,
4504 ) -> Task<Result<Vec<LocationLink>>> {
4505 let position = position.to_point_utf16(buffer.read(cx));
4506 self.request_lsp(
4507 buffer.clone(),
4508 LanguageServerToQuery::Primary,
4509 GetDefinition { position },
4510 cx,
4511 )
4512 }
4513
4514 pub fn type_definition<T: ToPointUtf16>(
4515 &self,
4516 buffer: &Model<Buffer>,
4517 position: T,
4518 cx: &mut ModelContext<Self>,
4519 ) -> Task<Result<Vec<LocationLink>>> {
4520 let position = position.to_point_utf16(buffer.read(cx));
4521 self.request_lsp(
4522 buffer.clone(),
4523 LanguageServerToQuery::Primary,
4524 GetTypeDefinition { position },
4525 cx,
4526 )
4527 }
4528
4529 pub fn references<T: ToPointUtf16>(
4530 &self,
4531 buffer: &Model<Buffer>,
4532 position: T,
4533 cx: &mut ModelContext<Self>,
4534 ) -> Task<Result<Vec<Location>>> {
4535 let position = position.to_point_utf16(buffer.read(cx));
4536 self.request_lsp(
4537 buffer.clone(),
4538 LanguageServerToQuery::Primary,
4539 GetReferences { position },
4540 cx,
4541 )
4542 }
4543
4544 pub fn document_highlights<T: ToPointUtf16>(
4545 &self,
4546 buffer: &Model<Buffer>,
4547 position: T,
4548 cx: &mut ModelContext<Self>,
4549 ) -> Task<Result<Vec<DocumentHighlight>>> {
4550 let position = position.to_point_utf16(buffer.read(cx));
4551 self.request_lsp(
4552 buffer.clone(),
4553 LanguageServerToQuery::Primary,
4554 GetDocumentHighlights { position },
4555 cx,
4556 )
4557 }
4558
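    // Search for workspace symbols matching `query`. Locally, every running language server is
    // queried and its results are resolved to project paths; remote projects forward the
    // request to the host.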
4559 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4560 if self.is_local() {
4561 let mut requests = Vec::new();
4562 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4563 let worktree_id = *worktree_id;
4564 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4565 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4566 Some(worktree) => worktree,
4567 None => continue,
4568 };
4569 let worktree_abs_path = worktree.abs_path().clone();
4570
4571 let (adapter, language, server) = match self.language_servers.get(server_id) {
4572 Some(LanguageServerState::Running {
4573 adapter,
4574 language,
4575 server,
4576 ..
4577 }) => (adapter.clone(), language.clone(), server),
4578
4579 _ => continue,
4580 };
4581
4582 requests.push(
4583 server
4584 .request::<lsp::request::WorkspaceSymbolRequest>(
4585 lsp::WorkspaceSymbolParams {
4586 query: query.to_string(),
4587 ..Default::default()
4588 },
4589 )
4590 .log_err()
4591 .map(move |response| {
4592 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4593 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4594 flat_responses.into_iter().map(|lsp_symbol| {
4595 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4596 }).collect::<Vec<_>>()
4597 }
4598 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4599 nested_responses.into_iter().filter_map(|lsp_symbol| {
4600 let location = match lsp_symbol.location {
4601 OneOf::Left(location) => location,
4602 OneOf::Right(_) => {
4603 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4604 return None
4605 }
4606 };
4607 Some((lsp_symbol.name, lsp_symbol.kind, location))
4608 }).collect::<Vec<_>>()
4609 }
4610 }).unwrap_or_default();
4611
4612 (
4613 adapter,
4614 language,
4615 worktree_id,
4616 worktree_abs_path,
4617 lsp_symbols,
4618 )
4619 }),
4620 );
4621 }
4622
4623 cx.spawn(move |this, mut cx| async move {
4624 let responses = futures::future::join_all(requests).await;
4625 let this = match this.upgrade() {
4626 Some(this) => this,
4627 None => return Ok(Vec::new()),
4628 };
4629
4630 let symbols = this.update(&mut cx, |this, cx| {
4631 let mut symbols = Vec::new();
4632 for (
4633 adapter,
4634 adapter_language,
4635 source_worktree_id,
4636 worktree_abs_path,
4637 lsp_symbols,
4638 ) in responses
4639 {
4640 symbols.extend(lsp_symbols.into_iter().filter_map(
4641 |(symbol_name, symbol_kind, symbol_location)| {
4642 let abs_path = symbol_location.uri.to_file_path().ok()?;
4643 let mut worktree_id = source_worktree_id;
4644 let path;
4645 if let Some((worktree, rel_path)) =
4646 this.find_local_worktree(&abs_path, cx)
4647 {
4648 worktree_id = worktree.read(cx).id();
4649 path = rel_path;
4650 } else {
4651 path = relativize_path(&worktree_abs_path, &abs_path);
4652 }
4653
4654 let project_path = ProjectPath {
4655 worktree_id,
4656 path: path.into(),
4657 };
4658 let signature = this.symbol_signature(&project_path);
4659 let adapter_language = adapter_language.clone();
4660 let language = this
4661 .languages
4662 .language_for_file(&project_path.path, None)
4663 .unwrap_or_else(move |_| adapter_language);
4664 let language_server_name = adapter.name.clone();
4665 Some(async move {
4666 let language = language.await;
4667 let label =
4668 language.label_for_symbol(&symbol_name, symbol_kind).await;
4669
4670 Symbol {
4671 language_server_name,
4672 source_worktree_id,
4673 path: project_path,
4674 label: label.unwrap_or_else(|| {
4675 CodeLabel::plain(symbol_name.clone(), None)
4676 }),
4677 kind: symbol_kind,
4678 name: symbol_name,
4679 range: range_from_lsp(symbol_location.range),
4680 signature,
4681 }
4682 })
4683 },
4684 ));
4685 }
4686
4687 symbols
4688 })?;
4689
4690 Ok(futures::future::join_all(symbols).await)
4691 })
4692 } else if let Some(project_id) = self.remote_id() {
4693 let request = self.client.request(proto::GetProjectSymbols {
4694 project_id,
4695 query: query.to_string(),
4696 });
4697 cx.spawn(move |this, mut cx| async move {
4698 let response = request.await?;
4699 let mut symbols = Vec::new();
4700 if let Some(this) = this.upgrade() {
4701 let new_symbols = this.update(&mut cx, |this, _| {
4702 response
4703 .symbols
4704 .into_iter()
4705 .map(|symbol| this.deserialize_symbol(symbol))
4706 .collect::<Vec<_>>()
4707 })?;
4708 symbols = futures::future::join_all(new_symbols)
4709 .await
4710 .into_iter()
4711 .filter_map(|symbol| symbol.log_err())
4712 .collect::<Vec<_>>();
4713 }
4714 Ok(symbols)
4715 })
4716 } else {
4717 Task::ready(Ok(Default::default()))
4718 }
4719 }
4720
4721 pub fn open_buffer_for_symbol(
4722 &mut self,
4723 symbol: &Symbol,
4724 cx: &mut ModelContext<Self>,
4725 ) -> Task<Result<Model<Buffer>>> {
4726 if self.is_local() {
4727 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4728 symbol.source_worktree_id,
4729 symbol.language_server_name.clone(),
4730 )) {
4731 *id
4732 } else {
4733 return Task::ready(Err(anyhow!(
4734 "language server for worktree and language not found"
4735 )));
4736 };
4737
4738 let worktree_abs_path = if let Some(worktree_abs_path) = self
4739 .worktree_for_id(symbol.path.worktree_id, cx)
4740 .and_then(|worktree| worktree.read(cx).as_local())
4741 .map(|local_worktree| local_worktree.abs_path())
4742 {
4743 worktree_abs_path
4744 } else {
4745 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4746 };
4747
4748 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
4749 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4750 uri
4751 } else {
4752 return Task::ready(Err(anyhow!("invalid symbol path")));
4753 };
4754
4755 self.open_local_buffer_via_lsp(
4756 symbol_uri,
4757 language_server_id,
4758 symbol.language_server_name.clone(),
4759 cx,
4760 )
4761 } else if let Some(project_id) = self.remote_id() {
4762 let request = self.client.request(proto::OpenBufferForSymbol {
4763 project_id,
4764 symbol: Some(serialize_symbol(symbol)),
4765 });
4766 cx.spawn(move |this, mut cx| async move {
4767 let response = request.await?;
4768 this.update(&mut cx, |this, cx| {
4769 this.wait_for_remote_buffer(response.buffer_id, cx)
4770 })?
4771 .await
4772 })
4773 } else {
4774 Task::ready(Err(anyhow!("project does not have a remote id")))
4775 }
4776 }
4777
4778 pub fn hover<T: ToPointUtf16>(
4779 &self,
4780 buffer: &Model<Buffer>,
4781 position: T,
4782 cx: &mut ModelContext<Self>,
4783 ) -> Task<Result<Option<Hover>>> {
4784 let position = position.to_point_utf16(buffer.read(cx));
4785 self.request_lsp(
4786 buffer.clone(),
4787 LanguageServerToQuery::Primary,
4788 GetHover { position },
4789 cx,
4790 )
4791 }
4792
4793 pub fn completions<T: ToOffset + ToPointUtf16>(
4794 &self,
4795 buffer: &Model<Buffer>,
4796 position: T,
4797 cx: &mut ModelContext<Self>,
4798 ) -> Task<Result<Vec<Completion>>> {
4799 let position = position.to_point_utf16(buffer.read(cx));
4800 if self.is_local() {
4801 let snapshot = buffer.read(cx).snapshot();
4802 let offset = position.to_offset(&snapshot);
4803 let scope = snapshot.language_scope_at(offset);
4804
4805 let server_ids: Vec<_> = self
4806 .language_servers_for_buffer(buffer.read(cx), cx)
4807 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4808 .filter(|(adapter, _)| {
4809 scope
4810 .as_ref()
4811 .map(|scope| scope.language_allowed(&adapter.name))
4812 .unwrap_or(true)
4813 })
4814 .map(|(_, server)| server.server_id())
4815 .collect();
4816
4817 let buffer = buffer.clone();
4818 cx.spawn(move |this, mut cx| async move {
4819 let mut tasks = Vec::with_capacity(server_ids.len());
4820 this.update(&mut cx, |this, cx| {
4821 for server_id in server_ids {
4822 tasks.push(this.request_lsp(
4823 buffer.clone(),
4824 LanguageServerToQuery::Other(server_id),
4825 GetCompletions { position },
4826 cx,
4827 ));
4828 }
4829 })?;
4830
4831 let mut completions = Vec::new();
4832 for task in tasks {
4833 if let Ok(new_completions) = task.await {
4834 completions.extend_from_slice(&new_completions);
4835 }
4836 }
4837
4838 Ok(completions)
4839 })
4840 } else if let Some(project_id) = self.remote_id() {
4841 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4842 } else {
4843 Task::ready(Ok(Default::default()))
4844 }
4845 }
4846
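    // Apply a completion's additional text edits, resolving the completion item first when the
    // server supports `completionItem/resolve` and skipping any edit that overlaps the primary
    // completion range.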
4847 pub fn apply_additional_edits_for_completion(
4848 &self,
4849 buffer_handle: Model<Buffer>,
4850 completion: Completion,
4851 push_to_history: bool,
4852 cx: &mut ModelContext<Self>,
4853 ) -> Task<Result<Option<Transaction>>> {
4854 let buffer = buffer_handle.read(cx);
4855 let buffer_id = buffer.remote_id();
4856
4857 if self.is_local() {
4858 let server_id = completion.server_id;
4859 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4860 Some((_, server)) => server.clone(),
4861 _ => return Task::ready(Ok(Default::default())),
4862 };
4863
4864 cx.spawn(move |this, mut cx| async move {
4865 let can_resolve = lang_server
4866 .capabilities()
4867 .completion_provider
4868 .as_ref()
4869 .and_then(|options| options.resolve_provider)
4870 .unwrap_or(false);
4871 let additional_text_edits = if can_resolve {
4872 lang_server
4873 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4874 .await?
4875 .additional_text_edits
4876 } else {
4877 completion.lsp_completion.additional_text_edits
4878 };
4879 if let Some(edits) = additional_text_edits {
4880 let edits = this
4881 .update(&mut cx, |this, cx| {
4882 this.edits_from_lsp(
4883 &buffer_handle,
4884 edits,
4885 lang_server.server_id(),
4886 None,
4887 cx,
4888 )
4889 })?
4890 .await?;
4891
4892 buffer_handle.update(&mut cx, |buffer, cx| {
4893 buffer.finalize_last_transaction();
4894 buffer.start_transaction();
4895
4896 for (range, text) in edits {
4897 let primary = &completion.old_range;
4898 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4899 && primary.end.cmp(&range.start, buffer).is_ge();
4900 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4901 && range.end.cmp(&primary.end, buffer).is_ge();
4902
4903                         // Skip additional edits which overlap with the primary completion edit
4904                         // https://github.com/zed-industries/zed/pull/1871
4905 if !start_within && !end_within {
4906 buffer.edit([(range, text)], None, cx);
4907 }
4908 }
4909
4910 let transaction = if buffer.end_transaction(cx).is_some() {
4911 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4912 if !push_to_history {
4913 buffer.forget_transaction(transaction.id);
4914 }
4915 Some(transaction)
4916 } else {
4917 None
4918 };
4919 Ok(transaction)
4920 })?
4921 } else {
4922 Ok(None)
4923 }
4924 })
4925 } else if let Some(project_id) = self.remote_id() {
4926 let client = self.client.clone();
4927 cx.spawn(move |_, mut cx| async move {
4928 let response = client
4929 .request(proto::ApplyCompletionAdditionalEdits {
4930 project_id,
4931 buffer_id,
4932 completion: Some(language::proto::serialize_completion(&completion)),
4933 })
4934 .await?;
4935
4936 if let Some(transaction) = response.transaction {
4937 let transaction = language::proto::deserialize_transaction(transaction)?;
4938 buffer_handle
4939 .update(&mut cx, |buffer, _| {
4940 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4941 })?
4942 .await?;
4943 if push_to_history {
4944 buffer_handle.update(&mut cx, |buffer, _| {
4945 buffer.push_transaction(transaction.clone(), Instant::now());
4946 })?;
4947 }
4948 Ok(Some(transaction))
4949 } else {
4950 Ok(None)
4951 }
4952 })
4953 } else {
4954 Task::ready(Err(anyhow!("project does not have a remote id")))
4955 }
4956 }
4957
4958 pub fn code_actions<T: Clone + ToOffset>(
4959 &self,
4960 buffer_handle: &Model<Buffer>,
4961 range: Range<T>,
4962 cx: &mut ModelContext<Self>,
4963 ) -> Task<Result<Vec<CodeAction>>> {
4964 let buffer = buffer_handle.read(cx);
4965 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4966 self.request_lsp(
4967 buffer_handle.clone(),
4968 LanguageServerToQuery::Primary,
4969 GetCodeActions { range },
4970 cx,
4971 )
4972 }
4973
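    // Apply a code action: resolve or re-request it as needed, then either apply its workspace
    // edit directly or execute its command and return the edits the server sends back via
    // `workspace/applyEdit`.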
4974 pub fn apply_code_action(
4975 &self,
4976 buffer_handle: Model<Buffer>,
4977 mut action: CodeAction,
4978 push_to_history: bool,
4979 cx: &mut ModelContext<Self>,
4980 ) -> Task<Result<ProjectTransaction>> {
4981 if self.is_local() {
4982 let buffer = buffer_handle.read(cx);
4983 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4984 self.language_server_for_buffer(buffer, action.server_id, cx)
4985 {
4986 (adapter.clone(), server.clone())
4987 } else {
4988 return Task::ready(Ok(Default::default()));
4989 };
4990 let range = action.range.to_point_utf16(buffer);
4991
4992 cx.spawn(move |this, mut cx| async move {
4993 if let Some(lsp_range) = action
4994 .lsp_action
4995 .data
4996 .as_mut()
4997 .and_then(|d| d.get_mut("codeActionParams"))
4998 .and_then(|d| d.get_mut("range"))
4999 {
5000 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
5001 action.lsp_action = lang_server
5002 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
5003 .await?;
5004 } else {
5005 let actions = this
5006 .update(&mut cx, |this, cx| {
5007 this.code_actions(&buffer_handle, action.range, cx)
5008 })?
5009 .await?;
5010 action.lsp_action = actions
5011 .into_iter()
5012 .find(|a| a.lsp_action.title == action.lsp_action.title)
5013 .ok_or_else(|| anyhow!("code action is outdated"))?
5014 .lsp_action;
5015 }
5016
5017 if let Some(edit) = action.lsp_action.edit {
5018 if edit.changes.is_some() || edit.document_changes.is_some() {
5019 return Self::deserialize_workspace_edit(
5020 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5021 edit,
5022 push_to_history,
5023 lsp_adapter.clone(),
5024 lang_server.clone(),
5025 &mut cx,
5026 )
5027 .await;
5028 }
5029 }
5030
5031 if let Some(command) = action.lsp_action.command {
5032 this.update(&mut cx, |this, _| {
5033 this.last_workspace_edits_by_language_server
5034 .remove(&lang_server.server_id());
5035 })?;
5036
5037 let result = lang_server
5038 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5039 command: command.command,
5040 arguments: command.arguments.unwrap_or_default(),
5041 ..Default::default()
5042 })
5043 .await;
5044
5045 if let Err(err) = result {
5046 // TODO: LSP ERROR
5047 return Err(err);
5048 }
5049
5050 return Ok(this.update(&mut cx, |this, _| {
5051 this.last_workspace_edits_by_language_server
5052 .remove(&lang_server.server_id())
5053 .unwrap_or_default()
5054 })?);
5055 }
5056
5057 Ok(ProjectTransaction::default())
5058 })
5059 } else if let Some(project_id) = self.remote_id() {
5060 let client = self.client.clone();
5061 let request = proto::ApplyCodeAction {
5062 project_id,
5063 buffer_id: buffer_handle.read(cx).remote_id(),
5064 action: Some(language::proto::serialize_code_action(&action)),
5065 };
5066 cx.spawn(move |this, mut cx| async move {
5067 let response = client
5068 .request(request)
5069 .await?
5070 .transaction
5071 .ok_or_else(|| anyhow!("missing transaction"))?;
5072 this.update(&mut cx, |this, cx| {
5073 this.deserialize_project_transaction(response, push_to_history, cx)
5074 })?
5075 .await
5076 })
5077 } else {
5078 Task::ready(Err(anyhow!("project does not have a remote id")))
5079 }
5080 }
5081
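    // Handle an on-type formatting request locally by tracking the buffer as being formatted,
    // waiting for the triggering edit to arrive, and then running `on_type_format`; remote
    // projects forward the request to the host.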
5082 fn apply_on_type_formatting(
5083 &self,
5084 buffer: Model<Buffer>,
5085 position: Anchor,
5086 trigger: String,
5087 cx: &mut ModelContext<Self>,
5088 ) -> Task<Result<Option<Transaction>>> {
5089 if self.is_local() {
5090 cx.spawn(move |this, mut cx| async move {
5091 // Do not allow multiple concurrent formatting requests for the
5092 // same buffer.
5093 this.update(&mut cx, |this, cx| {
5094 this.buffers_being_formatted
5095 .insert(buffer.read(cx).remote_id())
5096 })?;
5097
5098 let _cleanup = defer({
5099 let this = this.clone();
5100 let mut cx = cx.clone();
5101 let closure_buffer = buffer.clone();
5102 move || {
5103 this.update(&mut cx, |this, cx| {
5104 this.buffers_being_formatted
5105 .remove(&closure_buffer.read(cx).remote_id());
5106 })
5107 .ok();
5108 }
5109 });
5110
5111 buffer
5112 .update(&mut cx, |buffer, _| {
5113 buffer.wait_for_edits(Some(position.timestamp))
5114 })?
5115 .await?;
5116 this.update(&mut cx, |this, cx| {
5117 let position = position.to_point_utf16(buffer.read(cx));
5118 this.on_type_format(buffer, position, trigger, false, cx)
5119 })?
5120 .await
5121 })
5122 } else if let Some(project_id) = self.remote_id() {
5123 let client = self.client.clone();
5124 let request = proto::OnTypeFormatting {
5125 project_id,
5126 buffer_id: buffer.read(cx).remote_id(),
5127 position: Some(serialize_anchor(&position)),
5128 trigger,
5129 version: serialize_version(&buffer.read(cx).version()),
5130 };
5131 cx.spawn(move |_, _| async move {
5132 client
5133 .request(request)
5134 .await?
5135 .transaction
5136 .map(language::proto::deserialize_transaction)
5137 .transpose()
5138 })
5139 } else {
5140 Task::ready(Err(anyhow!("project does not have a remote id")))
5141 }
5142 }
5143
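    // Apply a flat list of LSP text edits to a buffer, grouping them into a single transaction.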
5144 async fn deserialize_edits(
5145 this: Model<Self>,
5146 buffer_to_edit: Model<Buffer>,
5147 edits: Vec<lsp::TextEdit>,
5148 push_to_history: bool,
5149 _: Arc<CachedLspAdapter>,
5150 language_server: Arc<LanguageServer>,
5151 cx: &mut AsyncAppContext,
5152 ) -> Result<Option<Transaction>> {
5153 let edits = this
5154 .update(cx, |this, cx| {
5155 this.edits_from_lsp(
5156 &buffer_to_edit,
5157 edits,
5158 language_server.server_id(),
5159 None,
5160 cx,
5161 )
5162 })?
5163 .await?;
5164
5165 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5166 buffer.finalize_last_transaction();
5167 buffer.start_transaction();
5168 for (range, text) in edits {
5169 buffer.edit([(range, text)], None, cx);
5170 }
5171
5172 if buffer.end_transaction(cx).is_some() {
5173 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5174 if !push_to_history {
5175 buffer.forget_transaction(transaction.id);
5176 }
5177 Some(transaction)
5178 } else {
5179 None
5180 }
5181 })?;
5182
5183 Ok(transaction)
5184 }
5185
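    // Apply an LSP `WorkspaceEdit`: perform any create/rename/delete resource operations on
    // disk, apply text edits to the affected buffers, and collect the resulting transactions
    // into a `ProjectTransaction`.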
5186 async fn deserialize_workspace_edit(
5187 this: Model<Self>,
5188 edit: lsp::WorkspaceEdit,
5189 push_to_history: bool,
5190 lsp_adapter: Arc<CachedLspAdapter>,
5191 language_server: Arc<LanguageServer>,
5192 cx: &mut AsyncAppContext,
5193 ) -> Result<ProjectTransaction> {
5194 let fs = this.update(cx, |this, _| this.fs.clone())?;
5195 let mut operations = Vec::new();
5196 if let Some(document_changes) = edit.document_changes {
5197 match document_changes {
5198 lsp::DocumentChanges::Edits(edits) => {
5199 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5200 }
5201 lsp::DocumentChanges::Operations(ops) => operations = ops,
5202 }
5203 } else if let Some(changes) = edit.changes {
5204 operations.extend(changes.into_iter().map(|(uri, edits)| {
5205 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5206 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5207 uri,
5208 version: None,
5209 },
5210 edits: edits.into_iter().map(OneOf::Left).collect(),
5211 })
5212 }));
5213 }
5214
5215 let mut project_transaction = ProjectTransaction::default();
5216 for operation in operations {
5217 match operation {
5218 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5219 let abs_path = op
5220 .uri
5221 .to_file_path()
5222 .map_err(|_| anyhow!("can't convert URI to path"))?;
5223
5224 if let Some(parent_path) = abs_path.parent() {
5225 fs.create_dir(parent_path).await?;
5226 }
5227 if abs_path.ends_with("/") {
5228 fs.create_dir(&abs_path).await?;
5229 } else {
5230 fs.create_file(
5231 &abs_path,
5232 op.options
5233 .map(|options| fs::CreateOptions {
5234 overwrite: options.overwrite.unwrap_or(false),
5235 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5236 })
5237 .unwrap_or_default(),
5238 )
5239 .await?;
5240 }
5241 }
5242
5243 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5244 let source_abs_path = op
5245 .old_uri
5246 .to_file_path()
5247 .map_err(|_| anyhow!("can't convert URI to path"))?;
5248 let target_abs_path = op
5249 .new_uri
5250 .to_file_path()
5251 .map_err(|_| anyhow!("can't convert URI to path"))?;
5252 fs.rename(
5253 &source_abs_path,
5254 &target_abs_path,
5255 op.options
5256 .map(|options| fs::RenameOptions {
5257 overwrite: options.overwrite.unwrap_or(false),
5258 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5259 })
5260 .unwrap_or_default(),
5261 )
5262 .await?;
5263 }
5264
5265 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5266 let abs_path = op
5267 .uri
5268 .to_file_path()
5269 .map_err(|_| anyhow!("can't convert URI to path"))?;
5270 let options = op
5271 .options
5272 .map(|options| fs::RemoveOptions {
5273 recursive: options.recursive.unwrap_or(false),
5274 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5275 })
5276 .unwrap_or_default();
5277 if abs_path.ends_with("/") {
5278 fs.remove_dir(&abs_path, options).await?;
5279 } else {
5280 fs.remove_file(&abs_path, options).await?;
5281 }
5282 }
5283
5284 lsp::DocumentChangeOperation::Edit(op) => {
5285 let buffer_to_edit = this
5286 .update(cx, |this, cx| {
5287 this.open_local_buffer_via_lsp(
5288 op.text_document.uri,
5289 language_server.server_id(),
5290 lsp_adapter.name.clone(),
5291 cx,
5292 )
5293 })?
5294 .await?;
5295
5296 let edits = this
5297 .update(cx, |this, cx| {
5298 let edits = op.edits.into_iter().map(|edit| match edit {
5299 OneOf::Left(edit) => edit,
5300 OneOf::Right(edit) => edit.text_edit,
5301 });
5302 this.edits_from_lsp(
5303 &buffer_to_edit,
5304 edits,
5305 language_server.server_id(),
5306 op.text_document.version,
5307 cx,
5308 )
5309 })?
5310 .await?;
5311
5312 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5313 buffer.finalize_last_transaction();
5314 buffer.start_transaction();
5315 for (range, text) in edits {
5316 buffer.edit([(range, text)], None, cx);
5317 }
5318 let transaction = if buffer.end_transaction(cx).is_some() {
5319 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5320 if !push_to_history {
5321 buffer.forget_transaction(transaction.id);
5322 }
5323 Some(transaction)
5324 } else {
5325 None
5326 };
5327
5328 transaction
5329 })?;
5330 if let Some(transaction) = transaction {
5331 project_transaction.0.insert(buffer_to_edit, transaction);
5332 }
5333 }
5334 }
5335 }
5336
5337 Ok(project_transaction)
5338 }
5339
5340 pub fn prepare_rename<T: ToPointUtf16>(
5341 &self,
5342 buffer: Model<Buffer>,
5343 position: T,
5344 cx: &mut ModelContext<Self>,
5345 ) -> Task<Result<Option<Range<Anchor>>>> {
5346 let position = position.to_point_utf16(buffer.read(cx));
5347 self.request_lsp(
5348 buffer,
5349 LanguageServerToQuery::Primary,
5350 PrepareRename { position },
5351 cx,
5352 )
5353 }
5354
5355 pub fn perform_rename<T: ToPointUtf16>(
5356 &self,
5357 buffer: Model<Buffer>,
5358 position: T,
5359 new_name: String,
5360 push_to_history: bool,
5361 cx: &mut ModelContext<Self>,
5362 ) -> Task<Result<ProjectTransaction>> {
5363 let position = position.to_point_utf16(buffer.read(cx));
5364 self.request_lsp(
5365 buffer,
5366 LanguageServerToQuery::Primary,
5367 PerformRename {
5368 position,
5369 new_name,
5370 push_to_history,
5371 },
5372 cx,
5373 )
5374 }
5375
5376 pub fn on_type_format<T: ToPointUtf16>(
5377 &self,
5378 buffer: Model<Buffer>,
5379 position: T,
5380 trigger: String,
5381 push_to_history: bool,
5382 cx: &mut ModelContext<Self>,
5383 ) -> Task<Result<Option<Transaction>>> {
5384 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5385 let position = position.to_point_utf16(buffer);
5386 (
5387 position,
5388 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5389 .tab_size,
5390 )
5391 });
5392 self.request_lsp(
5393 buffer.clone(),
5394 LanguageServerToQuery::Primary,
5395 OnTypeFormatting {
5396 position,
5397 trigger,
5398 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5399 push_to_history,
5400 },
5401 cx,
5402 )
5403 }
5404
5405 pub fn inlay_hints<T: ToOffset>(
5406 &self,
5407 buffer_handle: Model<Buffer>,
5408 range: Range<T>,
5409 cx: &mut ModelContext<Self>,
5410 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5411 let buffer = buffer_handle.read(cx);
5412 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5413 let range_start = range.start;
5414 let range_end = range.end;
5415 let buffer_id = buffer.remote_id();
5416 let buffer_version = buffer.version().clone();
5417 let lsp_request = InlayHints { range };
5418
5419 if self.is_local() {
5420 let lsp_request_task = self.request_lsp(
5421 buffer_handle.clone(),
5422 LanguageServerToQuery::Primary,
5423 lsp_request,
5424 cx,
5425 );
5426 cx.spawn(move |_, mut cx| async move {
5427 buffer_handle
5428 .update(&mut cx, |buffer, _| {
5429 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5430 })?
5431 .await
5432 .context("waiting for inlay hint request range edits")?;
5433 lsp_request_task.await.context("inlay hints LSP request")
5434 })
5435 } else if let Some(project_id) = self.remote_id() {
5436 let client = self.client.clone();
5437 let request = proto::InlayHints {
5438 project_id,
5439 buffer_id,
5440 start: Some(serialize_anchor(&range_start)),
5441 end: Some(serialize_anchor(&range_end)),
5442 version: serialize_version(&buffer_version),
5443 };
5444 cx.spawn(move |project, cx| async move {
5445 let response = client
5446 .request(request)
5447 .await
5448 .context("inlay hints proto request")?;
5449 let hints_request_result = LspCommand::response_from_proto(
5450 lsp_request,
5451 response,
5452 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5453 buffer_handle.clone(),
5454 cx,
5455 )
5456 .await;
5457
5458 hints_request_result.context("inlay hints proto response conversion")
5459 })
5460 } else {
5461 Task::ready(Err(anyhow!("project does not have a remote id")))
5462 }
5463 }
5464
5465 pub fn resolve_inlay_hint(
5466 &self,
5467 hint: InlayHint,
5468 buffer_handle: Model<Buffer>,
5469 server_id: LanguageServerId,
5470 cx: &mut ModelContext<Self>,
5471 ) -> Task<anyhow::Result<InlayHint>> {
5472 if self.is_local() {
5473 let buffer = buffer_handle.read(cx);
5474 let (_, lang_server) = if let Some((adapter, server)) =
5475 self.language_server_for_buffer(buffer, server_id, cx)
5476 {
5477 (adapter.clone(), server.clone())
5478 } else {
5479 return Task::ready(Ok(hint));
5480 };
5481 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5482 return Task::ready(Ok(hint));
5483 }
5484
5485 let buffer_snapshot = buffer.snapshot();
5486 cx.spawn(move |_, mut cx| async move {
5487 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5488 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5489 );
5490 let resolved_hint = resolve_task
5491 .await
5492 .context("inlay hint resolve LSP request")?;
5493 let resolved_hint = InlayHints::lsp_to_project_hint(
5494 resolved_hint,
5495 &buffer_handle,
5496 server_id,
5497 ResolveState::Resolved,
5498 false,
5499 &mut cx,
5500 )
5501 .await?;
5502 Ok(resolved_hint)
5503 })
5504 } else if let Some(project_id) = self.remote_id() {
5505 let client = self.client.clone();
5506 let request = proto::ResolveInlayHint {
5507 project_id,
5508 buffer_id: buffer_handle.read(cx).remote_id(),
5509 language_server_id: server_id.0 as u64,
5510 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5511 };
5512 cx.spawn(move |_, _| async move {
5513 let response = client
5514 .request(request)
5515 .await
5516 .context("inlay hints proto request")?;
5517 match response.hint {
5518 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5519 .context("inlay hints proto resolve response conversion"),
5520 None => Ok(hint),
5521 }
5522 })
5523 } else {
5524 Task::ready(Err(anyhow!("project does not have a remote id")))
5525 }
5526 }
5527
5528 #[allow(clippy::type_complexity)]
5529 pub fn search(
5530 &self,
5531 query: SearchQuery,
5532 cx: &mut ModelContext<Self>,
5533 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5534 if self.is_local() {
5535 self.search_local(query, cx)
5536 } else if let Some(project_id) = self.remote_id() {
5537 let (tx, rx) = smol::channel::unbounded();
5538 let request = self.client.request(query.to_proto(project_id));
5539 cx.spawn(move |this, mut cx| async move {
5540 let response = request.await?;
5541 let mut result = HashMap::default();
5542 for location in response.locations {
5543 let target_buffer = this
5544 .update(&mut cx, |this, cx| {
5545 this.wait_for_remote_buffer(location.buffer_id, cx)
5546 })?
5547 .await?;
5548 let start = location
5549 .start
5550 .and_then(deserialize_anchor)
5551 .ok_or_else(|| anyhow!("missing target start"))?;
5552 let end = location
5553 .end
5554 .and_then(deserialize_anchor)
5555 .ok_or_else(|| anyhow!("missing target end"))?;
5556 result
5557 .entry(target_buffer)
5558 .or_insert(Vec::new())
5559 .push(start..end)
5560 }
5561 for (buffer, ranges) in result {
5562 let _ = tx.send((buffer, ranges)).await;
5563 }
5564 Result::<(), anyhow::Error>::Ok(())
5565 })
5566 .detach_and_log_err(cx);
5567 rx
5568 } else {
5569 unimplemented!();
5570 }
5571 }
5572
5573 pub fn search_local(
5574 &self,
5575 query: SearchQuery,
5576 cx: &mut ModelContext<Self>,
5577 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5578 // Local search is split into several phases.
5579         // TL;DR is that we do 2 passes: an initial pass to pick files that contain at least one match,
5580         // and a second pass that finds the positions of all matches within those candidate files.
5581 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5582 //
5583 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5584 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5585 //
5586 // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
5587 // Then, we go through a worktree and check for files that do match a predicate. If the file had an opened version, we skip the scan
5588 // of FS version for that file altogether - after all, what we have in memory is more up-to-date than what's in FS.
5589 // 2. At this point, we have a list of all potentially matching buffers/files.
5590 // We sort that list by buffer path - this list is retained for later use.
5591 // We ensure that all buffers are now opened and available in project.
5592 // 3. We run a scan over all the candidate buffers on multiple background threads.
5593 // We cannot assume that there will even be a match - while at least one match
5594 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
5595         // There is also an auxiliary background thread responsible for result gathering.
5596         // This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
5597         // it records that result. It reports matches in sorted order, though it accepts them in unsorted order as well.
5598         // As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
5599 // entry - which might already be available thanks to out-of-order processing.
5600 //
5601 // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
5602 // This however would mean that project search (that is the main user of this function) would have to do the sorting itself, on the go.
5603         // This isn't as straightforward as running an insertion sort, sadly, and would also mean that it would have to care about maintaining the match index
5604         // in the face of a constantly updating list of sorted matches.
5605 // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
5606 let snapshots = self
5607 .visible_worktrees(cx)
5608 .filter_map(|tree| {
5609 let tree = tree.read(cx).as_local()?;
5610 Some(tree.snapshot())
5611 })
5612 .collect::<Vec<_>>();
5613
5614 let background = cx.background_executor().clone();
5615 let path_count: usize = snapshots
5616 .iter()
5617 .map(|s| {
5618 if query.include_ignored() {
5619 s.file_count()
5620 } else {
5621 s.visible_file_count()
5622 }
5623 })
5624 .sum();
5625 if path_count == 0 {
5626 let (_, rx) = smol::channel::bounded(1024);
5627 return rx;
5628 }
5629 let workers = background.num_cpus().min(path_count);
5630 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5631 let mut unnamed_files = vec![];
5632 let opened_buffers = self
5633 .opened_buffers
5634 .iter()
5635 .filter_map(|(_, b)| {
5636 let buffer = b.upgrade()?;
5637 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5638 let is_ignored = buffer
5639 .project_path(cx)
5640 .and_then(|path| self.entry_for_path(&path, cx))
5641 .map_or(false, |entry| entry.is_ignored);
5642 (is_ignored, buffer.snapshot())
5643 });
5644 if is_ignored && !query.include_ignored() {
5645 return None;
5646 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5647 Some((path.clone(), (buffer, snapshot)))
5648 } else {
5649 unnamed_files.push(buffer);
5650 None
5651 }
5652 })
5653 .collect();
5654 cx.background_executor()
5655 .spawn(Self::background_search(
5656 unnamed_files,
5657 opened_buffers,
5658 cx.background_executor().clone(),
5659 self.fs.clone(),
5660 workers,
5661 query.clone(),
5662 path_count,
5663 snapshots,
5664 matching_paths_tx,
5665 ))
5666 .detach();
5667
5668 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5669 let background = cx.background_executor().clone();
5670 let (result_tx, result_rx) = smol::channel::bounded(1024);
5671 cx.background_executor()
5672 .spawn(async move {
5673 let Ok(buffers) = buffers.await else {
5674 return;
5675 };
5676
5677 let buffers_len = buffers.len();
5678 if buffers_len == 0 {
5679 return;
5680 }
5681 let query = &query;
5682 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5683 background
5684 .scoped(|scope| {
5685 #[derive(Clone)]
5686 struct FinishedStatus {
5687 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
5688 buffer_index: SearchMatchCandidateIndex,
5689 }
5690
5691 for _ in 0..workers {
5692 let finished_tx = finished_tx.clone();
5693 let mut buffers_rx = buffers_rx.clone();
5694 scope.spawn(async move {
5695 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5696 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5697 {
5698 if query.file_matches(
5699 snapshot.file().map(|file| file.path().as_ref()),
5700 ) {
5701 query
5702 .search(&snapshot, None)
5703 .await
5704 .iter()
5705 .map(|range| {
5706 snapshot.anchor_before(range.start)
5707 ..snapshot.anchor_after(range.end)
5708 })
5709 .collect()
5710 } else {
5711 Vec::new()
5712 }
5713 } else {
5714 Vec::new()
5715 };
5716
5717 let status = if !buffer_matches.is_empty() {
5718 let entry = if let Some((buffer, _)) = entry.as_ref() {
5719 Some((buffer.clone(), buffer_matches))
5720 } else {
5721 None
5722 };
5723 FinishedStatus {
5724 entry,
5725 buffer_index,
5726 }
5727 } else {
5728 FinishedStatus {
5729 entry: None,
5730 buffer_index,
5731 }
5732 };
5733 if finished_tx.send(status).await.is_err() {
5734 break;
5735 }
5736 }
5737 });
5738 }
5739 // Report sorted matches
5740 scope.spawn(async move {
5741 let mut current_index = 0;
5742 let mut scratch = vec![None; buffers_len];
5743 while let Some(status) = finished_rx.next().await {
5744 debug_assert!(
5745 scratch[status.buffer_index].is_none(),
5746 "Got match status of position {} twice",
5747 status.buffer_index
5748 );
5749 let index = status.buffer_index;
5750 scratch[index] = Some(status);
5751 while current_index < buffers_len {
5752 let Some(current_entry) = scratch[current_index].take() else {
                                        // We intentionally **do not** increment `current_index` here. When the next element arrives
                                        // from `finished_rx`, we will inspect the same position again, hoping for it to be `Some(_)`
                                        // this time.
5756 break;
5757 };
5758 if let Some(entry) = current_entry.entry {
5759 result_tx.send(entry).await.log_err();
5760 }
5761 current_index += 1;
5762 }
5763 if current_index == buffers_len {
5764 break;
5765 }
5766 }
5767 });
5768 })
5769 .await;
5770 })
5771 .detach();
5772 result_rx
5773 }
5774
    /// Pick paths that might contain a match for the given search query.
5776 async fn background_search(
5777 unnamed_buffers: Vec<Model<Buffer>>,
5778 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
5779 executor: BackgroundExecutor,
5780 fs: Arc<dyn Fs>,
5781 workers: usize,
5782 query: SearchQuery,
5783 path_count: usize,
5784 snapshots: Vec<LocalSnapshot>,
5785 matching_paths_tx: Sender<SearchMatchCandidate>,
5786 ) {
5787 let fs = &fs;
5788 let query = &query;
5789 let matching_paths_tx = &matching_paths_tx;
5790 let snapshots = &snapshots;
5791 let paths_per_worker = (path_count + workers - 1) / workers;
5792 for buffer in unnamed_buffers {
5793 matching_paths_tx
5794 .send(SearchMatchCandidate::OpenBuffer {
5795 buffer: buffer.clone(),
5796 path: None,
5797 })
5798 .await
5799 .log_err();
5800 }
5801 for (path, (buffer, _)) in opened_buffers.iter() {
5802 matching_paths_tx
5803 .send(SearchMatchCandidate::OpenBuffer {
5804 buffer: buffer.clone(),
5805 path: Some(path.clone()),
5806 })
5807 .await
5808 .log_err();
5809 }
5810 executor
5811 .scoped(|scope| {
5812 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5813
5814 for worker_ix in 0..workers {
5815 let worker_start_ix = worker_ix * paths_per_worker;
5816 let worker_end_ix = worker_start_ix + paths_per_worker;
                    let opened_buffers = opened_buffers.clone();
5818 let limiter = Arc::clone(&max_concurrent_workers);
5819 scope.spawn(async move {
5820 let _guard = limiter.acquire().await;
5821 let mut snapshot_start_ix = 0;
5822 let mut abs_path = PathBuf::new();
5823 for snapshot in snapshots {
5824 let snapshot_end_ix = snapshot_start_ix
5825 + if query.include_ignored() {
5826 snapshot.file_count()
5827 } else {
5828 snapshot.visible_file_count()
5829 };
5830 if worker_end_ix <= snapshot_start_ix {
5831 break;
5832 } else if worker_start_ix > snapshot_end_ix {
5833 snapshot_start_ix = snapshot_end_ix;
5834 continue;
5835 } else {
5836 let start_in_snapshot =
5837 worker_start_ix.saturating_sub(snapshot_start_ix);
5838 let end_in_snapshot =
5839 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5840
5841 for entry in snapshot
5842 .files(query.include_ignored(), start_in_snapshot)
5843 .take(end_in_snapshot - start_in_snapshot)
5844 {
5845 if matching_paths_tx.is_closed() {
5846 break;
5847 }
                                    if opened_buffers.contains_key(&entry.path) {
                                        continue;
                                    }
5851 let matches = if query.file_matches(Some(&entry.path)) {
5852 abs_path.clear();
5853 abs_path.push(&snapshot.abs_path());
5854 abs_path.push(&entry.path);
5855 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5856 {
5857 query.detect(file).unwrap_or(false)
5858 } else {
5859 false
5860 }
5861 } else {
5862 false
5863 };
5864
5865 if matches {
5866 let project_path = SearchMatchCandidate::Path {
5867 worktree_id: snapshot.id(),
5868 path: entry.path.clone(),
5869 is_ignored: entry.is_ignored,
5870 };
5871 if matching_paths_tx.send(project_path).await.is_err() {
5872 break;
5873 }
5874 }
5875 }
5876
5877 snapshot_start_ix = snapshot_end_ix;
5878 }
5879 }
5880 });
5881 }
5882
5883 if query.include_ignored() {
5884 for snapshot in snapshots {
5885 for ignored_entry in snapshot
5886 .entries(query.include_ignored())
5887 .filter(|e| e.is_ignored)
5888 {
5889 let limiter = Arc::clone(&max_concurrent_workers);
5890 scope.spawn(async move {
5891 let _guard = limiter.acquire().await;
5892 let mut ignored_paths_to_process =
5893 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
5894 while let Some(ignored_abs_path) =
5895 ignored_paths_to_process.pop_front()
5896 {
5897 if let Some(fs_metadata) = fs
5898 .metadata(&ignored_abs_path)
5899 .await
5900 .with_context(|| {
5901 format!("fetching fs metadata for {ignored_abs_path:?}")
5902 })
5903 .log_err()
5904 .flatten()
5905 {
5906 if fs_metadata.is_dir {
5907 if let Some(mut subfiles) = fs
5908 .read_dir(&ignored_abs_path)
5909 .await
5910 .with_context(|| {
5911 format!(
5912 "listing ignored path {ignored_abs_path:?}"
5913 )
5914 })
5915 .log_err()
5916 {
5917 while let Some(subfile) = subfiles.next().await {
5918 if let Some(subfile) = subfile.log_err() {
5919 ignored_paths_to_process.push_back(subfile);
5920 }
5921 }
5922 }
5923 } else if !fs_metadata.is_symlink {
5924 if !query.file_matches(Some(&ignored_abs_path))
5925 || snapshot.is_path_excluded(
5926 ignored_entry.path.to_path_buf(),
5927 )
5928 {
5929 continue;
5930 }
5931 let matches = if let Some(file) = fs
5932 .open_sync(&ignored_abs_path)
5933 .await
5934 .with_context(|| {
5935 format!(
5936 "Opening ignored path {ignored_abs_path:?}"
5937 )
5938 })
5939 .log_err()
5940 {
5941 query.detect(file).unwrap_or(false)
5942 } else {
5943 false
5944 };
5945 if matches {
5946 let project_path = SearchMatchCandidate::Path {
5947 worktree_id: snapshot.id(),
5948 path: Arc::from(
5949 ignored_abs_path
5950 .strip_prefix(snapshot.abs_path())
5951 .expect(
5952 "scanning worktree-related files",
5953 ),
5954 ),
5955 is_ignored: true,
5956 };
5957 if matching_paths_tx
5958 .send(project_path)
5959 .await
5960 .is_err()
5961 {
5962 return;
5963 }
5964 }
5965 }
5966 }
5967 }
5968 });
5969 }
5970 }
5971 }
5972 })
5973 .await;
5974 }
5975
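    /// Sends an LSP request to one of the buffer's language servers.
    ///
    /// For local projects, the request goes directly to the selected server (the primary one,
    /// or a specific server id); for remote projects, it is forwarded to the host over RPC.
    /// Returns a default response if no matching server is available or the server lacks the
    /// required capability.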
5976 pub fn request_lsp<R: LspCommand>(
5977 &self,
5978 buffer_handle: Model<Buffer>,
5979 server: LanguageServerToQuery,
5980 request: R,
5981 cx: &mut ModelContext<Self>,
5982 ) -> Task<Result<R::Response>>
5983 where
5984 <R::LspRequest as lsp::request::Request>::Result: Send,
5985 <R::LspRequest as lsp::request::Request>::Params: Send,
5986 {
5987 let buffer = buffer_handle.read(cx);
5988 if self.is_local() {
5989 let language_server = match server {
5990 LanguageServerToQuery::Primary => {
5991 match self.primary_language_server_for_buffer(buffer, cx) {
5992 Some((_, server)) => Some(Arc::clone(server)),
5993 None => return Task::ready(Ok(Default::default())),
5994 }
5995 }
5996 LanguageServerToQuery::Other(id) => self
5997 .language_server_for_buffer(buffer, id, cx)
5998 .map(|(_, server)| Arc::clone(server)),
5999 };
6000 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6001 if let (Some(file), Some(language_server)) = (file, language_server) {
6002 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6003 return cx.spawn(move |this, cx| async move {
6004 if !request.check_capabilities(language_server.capabilities()) {
6005 return Ok(Default::default());
6006 }
6007
6008 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6009 let response = match result {
6010 Ok(response) => response,
6011
6012 Err(err) => {
6013 log::warn!(
6014 "Generic lsp request to {} failed: {}",
6015 language_server.name(),
6016 err
6017 );
6018 return Err(err);
6019 }
6020 };
6021
6022 request
6023 .response_from_lsp(
6024 response,
6025 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6026 buffer_handle,
6027 language_server.server_id(),
6028 cx,
6029 )
6030 .await
6031 });
6032 }
6033 } else if let Some(project_id) = self.remote_id() {
6034 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6035 }
6036
6037 Task::ready(Ok(Default::default()))
6038 }
6039
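    /// Forwards an LSP-style request to the project host over RPC and converts the
    /// protobuf response back into the command's native response type.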
6040 fn send_lsp_proto_request<R: LspCommand>(
6041 &self,
6042 buffer: Model<Buffer>,
6043 project_id: u64,
6044 request: R,
6045 cx: &mut ModelContext<'_, Project>,
6046 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6047 let rpc = self.client.clone();
6048 let message = request.to_proto(project_id, buffer.read(cx));
6049 cx.spawn(move |this, mut cx| async move {
6050 // Ensure the project is still alive by the time the task
6051 // is scheduled.
6052 this.upgrade().context("project dropped")?;
6053 let response = rpc.request(message).await?;
6054 let this = this.upgrade().context("project dropped")?;
6055 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6056 Err(anyhow!("disconnected before completing request"))
6057 } else {
6058 request
6059 .response_from_proto(response, this, buffer, cx)
6060 .await
6061 }
6062 })
6063 }
6064
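    /// Drains the stream of search match candidates, sorts them by path (with ignored entries
    /// last), and opens a buffer for each candidate. Returns the sorted candidate list along
    /// with a channel yielding each candidate's opened buffer and snapshot (or `None` if the
    /// buffer could not be opened), tagged with its index in that sorted list.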
6065 fn sort_candidates_and_open_buffers(
6066 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6067 cx: &mut ModelContext<Self>,
6068 ) -> (
6069 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6070 Receiver<(
6071 Option<(Model<Buffer>, BufferSnapshot)>,
6072 SearchMatchCandidateIndex,
6073 )>,
6074 ) {
6075 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6076 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6077 cx.spawn(move |this, cx| async move {
6078 let mut buffers = Vec::new();
6079 let mut ignored_buffers = Vec::new();
6080 while let Some(entry) = matching_paths_rx.next().await {
6081 if matches!(
6082 entry,
6083 SearchMatchCandidate::Path {
6084 is_ignored: true,
6085 ..
6086 }
6087 ) {
6088 ignored_buffers.push(entry);
6089 } else {
6090 buffers.push(entry);
6091 }
6092 }
6093 buffers.sort_by_key(|candidate| candidate.path());
6094 ignored_buffers.sort_by_key(|candidate| candidate.path());
6095 buffers.extend(ignored_buffers);
6096 let matching_paths = buffers.clone();
6097 let _ = sorted_buffers_tx.send(buffers);
6098 for (index, candidate) in matching_paths.into_iter().enumerate() {
6099 if buffers_tx.is_closed() {
6100 break;
6101 }
6102 let this = this.clone();
6103 let buffers_tx = buffers_tx.clone();
6104 cx.spawn(move |mut cx| async move {
6105 let buffer = match candidate {
6106 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6107 SearchMatchCandidate::Path {
6108 worktree_id, path, ..
6109 } => this
6110 .update(&mut cx, |this, cx| {
6111 this.open_buffer((worktree_id, path), cx)
6112 })?
6113 .await
6114 .log_err(),
6115 };
6116 if let Some(buffer) = buffer {
6117 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6118 buffers_tx
6119 .send((Some((buffer, snapshot)), index))
6120 .await
6121 .log_err();
6122 } else {
6123 buffers_tx.send((None, index)).await.log_err();
6124 }
6125
6126 Ok::<_, anyhow::Error>(())
6127 })
6128 .detach();
6129 }
6130 })
6131 .detach();
6132 (sorted_buffers_rx, buffers_rx)
6133 }
6134
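    /// Returns the worktree containing the given absolute path and the path relative to that
    /// worktree's root, creating a new worktree if no existing one contains the path.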
6135 pub fn find_or_create_local_worktree(
6136 &mut self,
6137 abs_path: impl AsRef<Path>,
6138 visible: bool,
6139 cx: &mut ModelContext<Self>,
6140 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6141 let abs_path = abs_path.as_ref();
6142 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6143 Task::ready(Ok((tree, relative_path)))
6144 } else {
6145 let worktree = self.create_local_worktree(abs_path, visible, cx);
6146 cx.background_executor()
6147 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6148 }
6149 }
6150
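    /// Finds the local worktree whose root contains the given absolute path, returning it
    /// along with the path relative to the worktree root.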
6151 pub fn find_local_worktree(
6152 &self,
6153 abs_path: &Path,
6154 cx: &AppContext,
6155 ) -> Option<(Model<Worktree>, PathBuf)> {
6156 for tree in &self.worktrees {
6157 if let Some(tree) = tree.upgrade() {
6158 if let Some(relative_path) = tree
6159 .read(cx)
6160 .as_local()
6161 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6162 {
6163 return Some((tree.clone(), relative_path.into()));
6164 }
6165 }
6166 }
6167 None
6168 }
6169
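    /// Returns whether this project is currently shared with collaborators.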
6170 pub fn is_shared(&self) -> bool {
6171 match &self.client_state {
6172 ProjectClientState::Shared { .. } => true,
6173 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6174 }
6175 }
6176
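    /// Starts loading a local worktree for the given path, deduplicating concurrent requests
    /// for the same path via `loading_local_worktrees`.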
6177 fn create_local_worktree(
6178 &mut self,
6179 abs_path: impl AsRef<Path>,
6180 visible: bool,
6181 cx: &mut ModelContext<Self>,
6182 ) -> Task<Result<Model<Worktree>>> {
6183 let fs = self.fs.clone();
6184 let client = self.client.clone();
6185 let next_entry_id = self.next_entry_id.clone();
6186 let path: Arc<Path> = abs_path.as_ref().into();
6187 let task = self
6188 .loading_local_worktrees
6189 .entry(path.clone())
6190 .or_insert_with(|| {
6191 cx.spawn(move |project, mut cx| {
6192 async move {
6193 let worktree = Worktree::local(
6194 client.clone(),
6195 path.clone(),
6196 visible,
6197 fs,
6198 next_entry_id,
6199 &mut cx,
6200 )
6201 .await;
6202
6203 project.update(&mut cx, |project, _| {
6204 project.loading_local_worktrees.remove(&path);
6205 })?;
6206
6207 let worktree = worktree?;
6208 project
6209 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6210 Ok(worktree)
6211 }
6212 .map_err(Arc::new)
6213 })
6214 .shared()
6215 })
6216 .clone();
6217 cx.background_executor().spawn(async move {
6218 match task.await {
6219 Ok(worktree) => Ok(worktree),
6220 Err(err) => Err(anyhow!("{}", err)),
6221 }
6222 })
6223 }
6224
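    /// Removes the worktree with the given id from the project and emits a
    /// `WorktreeRemoved` event for it.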
6225 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6226 self.worktrees.retain(|worktree| {
6227 if let Some(worktree) = worktree.upgrade() {
6228 let id = worktree.read(cx).id();
6229 if id == id_to_remove {
6230 cx.emit(Event::WorktreeRemoved(id));
6231 false
6232 } else {
6233 true
6234 }
6235 } else {
6236 false
6237 }
6238 });
6239 self.metadata_changed(cx);
6240 }
6241
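    /// Registers a new worktree with the project: subscribes to its entry and git updates,
    /// stores a strong or weak handle depending on whether the project is shared and the
    /// worktree is visible or remote, and clears its local settings when it is released.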
6242 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6243 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6244 if worktree.read(cx).is_local() {
6245 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6246 worktree::Event::UpdatedEntries(changes) => {
6247 this.update_local_worktree_buffers(&worktree, changes, cx);
6248 this.update_local_worktree_language_servers(&worktree, changes, cx);
6249 this.update_local_worktree_settings(&worktree, changes, cx);
6250 this.update_prettier_settings(&worktree, changes, cx);
6251 cx.emit(Event::WorktreeUpdatedEntries(
6252 worktree.read(cx).id(),
6253 changes.clone(),
6254 ));
6255 }
6256 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6257 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6258 }
6259 })
6260 .detach();
6261 }
6262
6263 let push_strong_handle = {
6264 let worktree = worktree.read(cx);
6265 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6266 };
6267 if push_strong_handle {
6268 self.worktrees
6269 .push(WorktreeHandle::Strong(worktree.clone()));
6270 } else {
6271 self.worktrees
6272 .push(WorktreeHandle::Weak(worktree.downgrade()));
6273 }
6274
6275 let handle_id = worktree.entity_id();
6276 cx.observe_release(worktree, move |this, worktree, cx| {
6277 let _ = this.remove_worktree(worktree.id(), cx);
6278 cx.update_global::<SettingsStore, _>(|store, cx| {
6279 store
6280 .clear_local_settings(handle_id.as_u64() as usize, cx)
6281 .log_err()
6282 });
6283 })
6284 .detach();
6285
6286 cx.emit(Event::WorktreeAdded);
6287 self.metadata_changed(cx);
6288 }
6289
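    /// Keeps open buffers' file metadata in sync with changes reported by a local worktree,
    /// handling renames and deletions and re-registering renamed buffers with language servers.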
6290 fn update_local_worktree_buffers(
6291 &mut self,
6292 worktree_handle: &Model<Worktree>,
6293 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6294 cx: &mut ModelContext<Self>,
6295 ) {
6296 let snapshot = worktree_handle.read(cx).snapshot();
6297
6298 let mut renamed_buffers = Vec::new();
6299 for (path, entry_id, _) in changes {
6300 let worktree_id = worktree_handle.read(cx).id();
6301 let project_path = ProjectPath {
6302 worktree_id,
6303 path: path.clone(),
6304 };
6305
6306 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6307 Some(&buffer_id) => buffer_id,
6308 None => match self.local_buffer_ids_by_path.get(&project_path) {
6309 Some(&buffer_id) => buffer_id,
6310 None => {
6311 continue;
6312 }
6313 },
6314 };
6315
6316 let open_buffer = self.opened_buffers.get(&buffer_id);
6317 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6318 buffer
6319 } else {
6320 self.opened_buffers.remove(&buffer_id);
6321 self.local_buffer_ids_by_path.remove(&project_path);
6322 self.local_buffer_ids_by_entry_id.remove(entry_id);
6323 continue;
6324 };
6325
6326 buffer.update(cx, |buffer, cx| {
6327 if let Some(old_file) = File::from_dyn(buffer.file()) {
6328 if old_file.worktree != *worktree_handle {
6329 return;
6330 }
6331
6332 let new_file = if let Some(entry) = old_file
6333 .entry_id
6334 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6335 {
6336 File {
6337 is_local: true,
6338 entry_id: Some(entry.id),
6339 mtime: entry.mtime,
6340 path: entry.path.clone(),
6341 worktree: worktree_handle.clone(),
6342 is_deleted: false,
6343 }
6344 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6345 File {
6346 is_local: true,
6347 entry_id: Some(entry.id),
6348 mtime: entry.mtime,
6349 path: entry.path.clone(),
6350 worktree: worktree_handle.clone(),
6351 is_deleted: false,
6352 }
6353 } else {
6354 File {
6355 is_local: true,
6356 entry_id: old_file.entry_id,
6357 path: old_file.path().clone(),
6358 mtime: old_file.mtime(),
6359 worktree: worktree_handle.clone(),
6360 is_deleted: true,
6361 }
6362 };
6363
6364 let old_path = old_file.abs_path(cx);
6365 if new_file.abs_path(cx) != old_path {
6366 renamed_buffers.push((cx.handle(), old_file.clone()));
6367 self.local_buffer_ids_by_path.remove(&project_path);
6368 self.local_buffer_ids_by_path.insert(
6369 ProjectPath {
6370 worktree_id,
6371 path: path.clone(),
6372 },
6373 buffer_id,
6374 );
6375 }
6376
6377 if new_file.entry_id != Some(*entry_id) {
6378 self.local_buffer_ids_by_entry_id.remove(entry_id);
6379 if let Some(entry_id) = new_file.entry_id {
6380 self.local_buffer_ids_by_entry_id
6381 .insert(entry_id, buffer_id);
6382 }
6383 }
6384
6385 if new_file != *old_file {
6386 if let Some(project_id) = self.remote_id() {
6387 self.client
6388 .send(proto::UpdateBufferFile {
6389 project_id,
6390 buffer_id: buffer_id as u64,
6391 file: Some(new_file.to_proto()),
6392 })
6393 .log_err();
6394 }
6395
6396 buffer.file_updated(Arc::new(new_file), cx);
6397 }
6398 }
6399 });
6400 }
6401
6402 for (buffer, old_file) in renamed_buffers {
6403 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6404 self.detect_language_for_buffer(&buffer, cx);
6405 self.register_buffer_with_language_servers(&buffer, cx);
6406 }
6407 }
6408
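    /// Notifies the language servers attached to a worktree about changed files they have
    /// registered watchers for, via `workspace/didChangeWatchedFiles`.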
6409 fn update_local_worktree_language_servers(
6410 &mut self,
6411 worktree_handle: &Model<Worktree>,
6412 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6413 cx: &mut ModelContext<Self>,
6414 ) {
6415 if changes.is_empty() {
6416 return;
6417 }
6418
6419 let worktree_id = worktree_handle.read(cx).id();
6420 let mut language_server_ids = self
6421 .language_server_ids
6422 .iter()
6423 .filter_map(|((server_worktree_id, _), server_id)| {
6424 (*server_worktree_id == worktree_id).then_some(*server_id)
6425 })
6426 .collect::<Vec<_>>();
6427 language_server_ids.sort();
6428 language_server_ids.dedup();
6429
6430 let abs_path = worktree_handle.read(cx).abs_path();
6431 for server_id in &language_server_ids {
6432 if let Some(LanguageServerState::Running {
6433 server,
6434 watched_paths,
6435 ..
6436 }) = self.language_servers.get(server_id)
6437 {
6438 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
6439 let params = lsp::DidChangeWatchedFilesParams {
6440 changes: changes
6441 .iter()
6442 .filter_map(|(path, _, change)| {
6443 if !watched_paths.is_match(&path) {
6444 return None;
6445 }
6446 let typ = match change {
6447 PathChange::Loaded => return None,
6448 PathChange::Added => lsp::FileChangeType::CREATED,
6449 PathChange::Removed => lsp::FileChangeType::DELETED,
6450 PathChange::Updated => lsp::FileChangeType::CHANGED,
6451 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6452 };
6453 Some(lsp::FileEvent {
6454 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6455 typ,
6456 })
6457 })
6458 .collect(),
6459 };
6460
6461 if !params.changes.is_empty() {
6462 server
6463 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6464 .log_err();
6465 }
6466 }
6467 }
6468 }
6469 }
6470
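    /// Recomputes the git diff base for every open (or loading) buffer whose containing
    /// repository has changed, and forwards the new diff bases to collaborators when the
    /// project is shared.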
6471 fn update_local_worktree_buffers_git_repos(
6472 &mut self,
6473 worktree_handle: Model<Worktree>,
6474 changed_repos: &UpdatedGitRepositoriesSet,
6475 cx: &mut ModelContext<Self>,
6476 ) {
6477 debug_assert!(worktree_handle.read(cx).is_local());
6478
        // Identify the loading buffers whose containing repository has changed.
6480 let future_buffers = self
6481 .loading_buffers_by_path
6482 .iter()
6483 .filter_map(|(project_path, receiver)| {
6484 if project_path.worktree_id != worktree_handle.read(cx).id() {
6485 return None;
6486 }
6487 let path = &project_path.path;
6488 changed_repos
6489 .iter()
6490 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6491 let receiver = receiver.clone();
6492 let path = path.clone();
6493 Some(async move {
6494 wait_for_loading_buffer(receiver)
6495 .await
6496 .ok()
6497 .map(|buffer| (buffer, path))
6498 })
6499 })
6500 .collect::<FuturesUnordered<_>>();
6501
6502 // Identify the current buffers whose containing repository has changed.
6503 let current_buffers = self
6504 .opened_buffers
6505 .values()
6506 .filter_map(|buffer| {
6507 let buffer = buffer.upgrade()?;
6508 let file = File::from_dyn(buffer.read(cx).file())?;
6509 if file.worktree != worktree_handle {
6510 return None;
6511 }
6512 let path = file.path();
6513 changed_repos
6514 .iter()
6515 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6516 Some((buffer, path.clone()))
6517 })
6518 .collect::<Vec<_>>();
6519
6520 if future_buffers.len() + current_buffers.len() == 0 {
6521 return;
6522 }
6523
6524 let remote_id = self.remote_id();
6525 let client = self.client.clone();
6526 cx.spawn(move |_, mut cx| async move {
6527 // Wait for all of the buffers to load.
6528 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6529
6530 // Reload the diff base for every buffer whose containing git repository has changed.
6531 let snapshot =
6532 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
6533 let diff_bases_by_buffer = cx
6534 .background_executor()
6535 .spawn(async move {
6536 future_buffers
6537 .into_iter()
                        .flatten()
6539 .chain(current_buffers)
6540 .filter_map(|(buffer, path)| {
6541 let (work_directory, repo) =
6542 snapshot.repository_and_work_directory_for_path(&path)?;
6543 let repo = snapshot.get_local_repo(&repo)?;
6544 let relative_path = path.strip_prefix(&work_directory).ok()?;
6545 let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
6546 Some((buffer, base_text))
6547 })
6548 .collect::<Vec<_>>()
6549 })
6550 .await;
6551
6552 // Assign the new diff bases on all of the buffers.
6553 for (buffer, diff_base) in diff_bases_by_buffer {
6554 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6555 buffer.set_diff_base(diff_base.clone(), cx);
6556 buffer.remote_id()
6557 })?;
6558 if let Some(project_id) = remote_id {
6559 client
6560 .send(proto::UpdateDiffBase {
6561 project_id,
6562 buffer_id,
6563 diff_base,
6564 })
6565 .log_err();
6566 }
6567 }
6568
6569 anyhow::Ok(())
6570 })
6571 .detach();
6572 }
6573
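    /// Reloads local settings files that changed within a worktree, updates the global
    /// settings store, and forwards the new contents to remote collaborators.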
6574 fn update_local_worktree_settings(
6575 &mut self,
6576 worktree: &Model<Worktree>,
6577 changes: &UpdatedEntriesSet,
6578 cx: &mut ModelContext<Self>,
6579 ) {
6580 let project_id = self.remote_id();
6581 let worktree_id = worktree.entity_id();
6582 let worktree = worktree.read(cx).as_local().unwrap();
6583 let remote_worktree_id = worktree.id();
6584
6585 let mut settings_contents = Vec::new();
6586 for (path, _, change) in changes.iter() {
6587 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6588 let settings_dir = Arc::from(
6589 path.ancestors()
6590 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6591 .unwrap(),
6592 );
6593 let fs = self.fs.clone();
6594 let removed = *change == PathChange::Removed;
6595 let abs_path = worktree.absolutize(path);
6596 settings_contents.push(async move {
6597 (
6598 settings_dir,
6599 if removed {
6600 None
6601 } else {
6602 Some(async move { fs.load(&abs_path?).await }.await)
6603 },
6604 )
6605 });
6606 }
6607 }
6608
6609 if settings_contents.is_empty() {
6610 return;
6611 }
6612
6613 let client = self.client.clone();
6614 cx.spawn(move |_, cx| async move {
6615 let settings_contents: Vec<(Arc<Path>, _)> =
6616 futures::future::join_all(settings_contents).await;
6617 cx.update(|cx| {
6618 cx.update_global::<SettingsStore, _>(|store, cx| {
6619 for (directory, file_content) in settings_contents {
6620 let file_content = file_content.and_then(|content| content.log_err());
6621 store
6622 .set_local_settings(
6623 worktree_id.as_u64() as usize,
6624 directory.clone(),
6625 file_content.as_ref().map(String::as_str),
6626 cx,
6627 )
6628 .log_err();
6629 if let Some(remote_id) = project_id {
6630 client
6631 .send(proto::UpdateWorktreeSettings {
6632 project_id: remote_id,
6633 worktree_id: remote_worktree_id.to_proto(),
6634 path: directory.to_string_lossy().into_owned(),
6635 content: file_content,
6636 })
6637 .log_err();
6638 }
6639 }
6640 });
6641 })
6642 .ok();
6643 })
6644 .detach();
6645 }
6646
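    /// Updates the project's active entry based on the given path and emits an
    /// `ActiveEntryChanged` event if it changed.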
6647 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6648 let new_active_entry = entry.and_then(|project_path| {
6649 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6650 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6651 Some(entry.id)
6652 });
6653 if new_active_entry != self.active_entry {
6654 self.active_entry = new_active_entry;
6655 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6656 }
6657 }
6658
6659 pub fn language_servers_running_disk_based_diagnostics(
6660 &self,
6661 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6662 self.language_server_statuses
6663 .iter()
6664 .filter_map(|(id, status)| {
6665 if status.has_pending_diagnostic_updates {
6666 Some(*id)
6667 } else {
6668 None
6669 }
6670 })
6671 }
6672
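    /// Returns the aggregate error and warning counts across all visible worktrees,
    /// optionally including ignored files.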
6673 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
6674 let mut summary = DiagnosticSummary::default();
6675 for (_, _, path_summary) in
6676 self.diagnostic_summaries(include_ignored, cx)
6677 .filter(|(path, _, _)| {
                    let is_ignored = self.entry_for_path(&path, cx).map(|entry| entry.is_ignored);
                    include_ignored || is_ignored == Some(false)
6680 })
6681 {
6682 summary.error_count += path_summary.error_count;
6683 summary.warning_count += path_summary.warning_count;
6684 }
6685 summary
6686 }
6687
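    /// Iterates over the per-path diagnostic summaries of all visible worktrees,
    /// optionally including ignored files.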
6688 pub fn diagnostic_summaries<'a>(
6689 &'a self,
6690 include_ignored: bool,
6691 cx: &'a AppContext,
6692 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6693 self.visible_worktrees(cx)
6694 .flat_map(move |worktree| {
6695 let worktree = worktree.read(cx);
6696 let worktree_id = worktree.id();
6697 worktree
6698 .diagnostic_summaries()
6699 .map(move |(path, server_id, summary)| {
6700 (ProjectPath { worktree_id, path }, server_id, summary)
6701 })
6702 })
6703 .filter(move |(path, _, _)| {
                let is_ignored = self.entry_for_path(&path, cx).map(|entry| entry.is_ignored);
                include_ignored || is_ignored == Some(false)
6706 })
6707 }
6708
6709 pub fn disk_based_diagnostics_started(
6710 &mut self,
6711 language_server_id: LanguageServerId,
6712 cx: &mut ModelContext<Self>,
6713 ) {
6714 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6715 }
6716
6717 pub fn disk_based_diagnostics_finished(
6718 &mut self,
6719 language_server_id: LanguageServerId,
6720 cx: &mut ModelContext<Self>,
6721 ) {
6722 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6723 }
6724
6725 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6726 self.active_entry
6727 }
6728
6729 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6730 self.worktree_for_id(path.worktree_id, cx)?
6731 .read(cx)
6732 .entry_for_path(&path.path)
6733 .cloned()
6734 }
6735
6736 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6737 let worktree = self.worktree_for_entry(entry_id, cx)?;
6738 let worktree = worktree.read(cx);
6739 let worktree_id = worktree.id();
6740 let path = worktree.entry_for_id(entry_id)?.path.clone();
6741 Some(ProjectPath { worktree_id, path })
6742 }
6743
6744 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6745 let workspace_root = self
6746 .worktree_for_id(project_path.worktree_id, cx)?
6747 .read(cx)
6748 .abs_path();
6749 let project_path = project_path.path.as_ref();
6750
6751 Some(if project_path == Path::new("") {
6752 workspace_root.to_path_buf()
6753 } else {
6754 workspace_root.join(project_path)
6755 })
6756 }
6757
6758 // RPC message handlers
6759
6760 async fn handle_unshare_project(
6761 this: Model<Self>,
6762 _: TypedEnvelope<proto::UnshareProject>,
6763 _: Arc<Client>,
6764 mut cx: AsyncAppContext,
6765 ) -> Result<()> {
6766 this.update(&mut cx, |this, cx| {
6767 if this.is_local() {
6768 this.unshare(cx)?;
6769 } else {
6770 this.disconnected_from_host(cx);
6771 }
6772 Ok(())
6773 })?
6774 }
6775
6776 async fn handle_add_collaborator(
6777 this: Model<Self>,
6778 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6779 _: Arc<Client>,
6780 mut cx: AsyncAppContext,
6781 ) -> Result<()> {
6782 let collaborator = envelope
6783 .payload
6784 .collaborator
6785 .take()
6786 .ok_or_else(|| anyhow!("empty collaborator"))?;
6787
6788 let collaborator = Collaborator::from_proto(collaborator)?;
6789 this.update(&mut cx, |this, cx| {
6790 this.shared_buffers.remove(&collaborator.peer_id);
6791 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6792 this.collaborators
6793 .insert(collaborator.peer_id, collaborator);
6794 cx.notify();
6795 })?;
6796
6797 Ok(())
6798 }
6799
6800 async fn handle_update_project_collaborator(
6801 this: Model<Self>,
6802 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6803 _: Arc<Client>,
6804 mut cx: AsyncAppContext,
6805 ) -> Result<()> {
6806 let old_peer_id = envelope
6807 .payload
6808 .old_peer_id
6809 .ok_or_else(|| anyhow!("missing old peer id"))?;
6810 let new_peer_id = envelope
6811 .payload
6812 .new_peer_id
6813 .ok_or_else(|| anyhow!("missing new peer id"))?;
6814 this.update(&mut cx, |this, cx| {
6815 let collaborator = this
6816 .collaborators
6817 .remove(&old_peer_id)
6818 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6819 let is_host = collaborator.replica_id == 0;
6820 this.collaborators.insert(new_peer_id, collaborator);
6821
6822 let buffers = this.shared_buffers.remove(&old_peer_id);
6823 log::info!(
6824 "peer {} became {}. moving buffers {:?}",
6825 old_peer_id,
6826 new_peer_id,
6827 &buffers
6828 );
6829 if let Some(buffers) = buffers {
6830 this.shared_buffers.insert(new_peer_id, buffers);
6831 }
6832
6833 if is_host {
6834 this.opened_buffers
6835 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
6836 this.buffer_ordered_messages_tx
6837 .unbounded_send(BufferOrderedMessage::Resync)
6838 .unwrap();
6839 }
6840
6841 cx.emit(Event::CollaboratorUpdated {
6842 old_peer_id,
6843 new_peer_id,
6844 });
6845 cx.notify();
6846 Ok(())
6847 })?
6848 }
6849
6850 async fn handle_remove_collaborator(
6851 this: Model<Self>,
6852 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
6853 _: Arc<Client>,
6854 mut cx: AsyncAppContext,
6855 ) -> Result<()> {
6856 this.update(&mut cx, |this, cx| {
6857 let peer_id = envelope
6858 .payload
6859 .peer_id
6860 .ok_or_else(|| anyhow!("invalid peer id"))?;
6861 let replica_id = this
6862 .collaborators
6863 .remove(&peer_id)
6864 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
6865 .replica_id;
6866 for buffer in this.opened_buffers.values() {
6867 if let Some(buffer) = buffer.upgrade() {
6868 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
6869 }
6870 }
6871 this.shared_buffers.remove(&peer_id);
6872
6873 cx.emit(Event::CollaboratorLeft(peer_id));
6874 cx.notify();
6875 Ok(())
6876 })?
6877 }
6878
6879 async fn handle_update_project(
6880 this: Model<Self>,
6881 envelope: TypedEnvelope<proto::UpdateProject>,
6882 _: Arc<Client>,
6883 mut cx: AsyncAppContext,
6884 ) -> Result<()> {
6885 this.update(&mut cx, |this, cx| {
            // Don't handle messages that were sent before we received the response to our request to join the project.
6887 if envelope.message_id > this.join_project_response_message_id {
6888 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6889 }
6890 Ok(())
6891 })?
6892 }
6893
6894 async fn handle_update_worktree(
6895 this: Model<Self>,
6896 envelope: TypedEnvelope<proto::UpdateWorktree>,
6897 _: Arc<Client>,
6898 mut cx: AsyncAppContext,
6899 ) -> Result<()> {
6900 this.update(&mut cx, |this, cx| {
6901 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6902 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6903 worktree.update(cx, |worktree, _| {
6904 let worktree = worktree.as_remote_mut().unwrap();
6905 worktree.update_from_remote(envelope.payload);
6906 });
6907 }
6908 Ok(())
6909 })?
6910 }
6911
6912 async fn handle_update_worktree_settings(
6913 this: Model<Self>,
6914 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
6915 _: Arc<Client>,
6916 mut cx: AsyncAppContext,
6917 ) -> Result<()> {
6918 this.update(&mut cx, |this, cx| {
6919 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6920 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6921 cx.update_global::<SettingsStore, _>(|store, cx| {
6922 store
6923 .set_local_settings(
6924 worktree.entity_id().as_u64() as usize,
6925 PathBuf::from(&envelope.payload.path).into(),
6926 envelope.payload.content.as_ref().map(String::as_str),
6927 cx,
6928 )
6929 .log_err();
6930 });
6931 }
6932 Ok(())
6933 })?
6934 }
6935
6936 async fn handle_create_project_entry(
6937 this: Model<Self>,
6938 envelope: TypedEnvelope<proto::CreateProjectEntry>,
6939 _: Arc<Client>,
6940 mut cx: AsyncAppContext,
6941 ) -> Result<proto::ProjectEntryResponse> {
6942 let worktree = this.update(&mut cx, |this, cx| {
6943 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6944 this.worktree_for_id(worktree_id, cx)
6945 .ok_or_else(|| anyhow!("worktree not found"))
6946 })??;
6947 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6948 let entry = worktree
6949 .update(&mut cx, |worktree, cx| {
6950 let worktree = worktree.as_local_mut().unwrap();
6951 let path = PathBuf::from(envelope.payload.path);
6952 worktree.create_entry(path, envelope.payload.is_directory, cx)
6953 })?
6954 .await?;
6955 Ok(proto::ProjectEntryResponse {
6956 entry: entry.as_ref().map(|e| e.into()),
6957 worktree_scan_id: worktree_scan_id as u64,
6958 })
6959 }
6960
6961 async fn handle_rename_project_entry(
6962 this: Model<Self>,
6963 envelope: TypedEnvelope<proto::RenameProjectEntry>,
6964 _: Arc<Client>,
6965 mut cx: AsyncAppContext,
6966 ) -> Result<proto::ProjectEntryResponse> {
6967 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6968 let worktree = this.update(&mut cx, |this, cx| {
6969 this.worktree_for_entry(entry_id, cx)
6970 .ok_or_else(|| anyhow!("worktree not found"))
6971 })??;
6972 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6973 let entry = worktree
6974 .update(&mut cx, |worktree, cx| {
6975 let new_path = PathBuf::from(envelope.payload.new_path);
6976 worktree
6977 .as_local_mut()
6978 .unwrap()
6979 .rename_entry(entry_id, new_path, cx)
6980 })?
6981 .await?;
6982 Ok(proto::ProjectEntryResponse {
6983 entry: entry.as_ref().map(|e| e.into()),
6984 worktree_scan_id: worktree_scan_id as u64,
6985 })
6986 }
6987
6988 async fn handle_copy_project_entry(
6989 this: Model<Self>,
6990 envelope: TypedEnvelope<proto::CopyProjectEntry>,
6991 _: Arc<Client>,
6992 mut cx: AsyncAppContext,
6993 ) -> Result<proto::ProjectEntryResponse> {
6994 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6995 let worktree = this.update(&mut cx, |this, cx| {
6996 this.worktree_for_entry(entry_id, cx)
6997 .ok_or_else(|| anyhow!("worktree not found"))
6998 })??;
6999 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7000 let entry = worktree
7001 .update(&mut cx, |worktree, cx| {
7002 let new_path = PathBuf::from(envelope.payload.new_path);
7003 worktree
7004 .as_local_mut()
7005 .unwrap()
7006 .copy_entry(entry_id, new_path, cx)
7007 })?
7008 .await?;
7009 Ok(proto::ProjectEntryResponse {
7010 entry: entry.as_ref().map(|e| e.into()),
7011 worktree_scan_id: worktree_scan_id as u64,
7012 })
7013 }
7014
7015 async fn handle_delete_project_entry(
7016 this: Model<Self>,
7017 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7018 _: Arc<Client>,
7019 mut cx: AsyncAppContext,
7020 ) -> Result<proto::ProjectEntryResponse> {
7021 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7022
7023 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7024
7025 let worktree = this.update(&mut cx, |this, cx| {
7026 this.worktree_for_entry(entry_id, cx)
7027 .ok_or_else(|| anyhow!("worktree not found"))
7028 })??;
7029 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7030 worktree
7031 .update(&mut cx, |worktree, cx| {
7032 worktree
7033 .as_local_mut()
7034 .unwrap()
7035 .delete_entry(entry_id, cx)
7036 .ok_or_else(|| anyhow!("invalid entry"))
7037 })??
7038 .await?;
7039 Ok(proto::ProjectEntryResponse {
7040 entry: None,
7041 worktree_scan_id: worktree_scan_id as u64,
7042 })
7043 }
7044
7045 async fn handle_expand_project_entry(
7046 this: Model<Self>,
7047 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7048 _: Arc<Client>,
7049 mut cx: AsyncAppContext,
7050 ) -> Result<proto::ExpandProjectEntryResponse> {
7051 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7052 let worktree = this
7053 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7054 .ok_or_else(|| anyhow!("invalid request"))?;
7055 worktree
7056 .update(&mut cx, |worktree, cx| {
7057 worktree
7058 .as_local_mut()
7059 .unwrap()
7060 .expand_entry(entry_id, cx)
7061 .ok_or_else(|| anyhow!("invalid entry"))
7062 })??
7063 .await?;
7064 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7065 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7066 }
7067
7068 async fn handle_update_diagnostic_summary(
7069 this: Model<Self>,
7070 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7071 _: Arc<Client>,
7072 mut cx: AsyncAppContext,
7073 ) -> Result<()> {
7074 this.update(&mut cx, |this, cx| {
7075 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7076 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7077 if let Some(summary) = envelope.payload.summary {
7078 let project_path = ProjectPath {
7079 worktree_id,
7080 path: Path::new(&summary.path).into(),
7081 };
7082 worktree.update(cx, |worktree, _| {
7083 worktree
7084 .as_remote_mut()
7085 .unwrap()
7086 .update_diagnostic_summary(project_path.path.clone(), &summary);
7087 });
7088 cx.emit(Event::DiagnosticsUpdated {
7089 language_server_id: LanguageServerId(summary.language_server_id as usize),
7090 path: project_path,
7091 });
7092 }
7093 }
7094 Ok(())
7095 })?
7096 }
7097
7098 async fn handle_start_language_server(
7099 this: Model<Self>,
7100 envelope: TypedEnvelope<proto::StartLanguageServer>,
7101 _: Arc<Client>,
7102 mut cx: AsyncAppContext,
7103 ) -> Result<()> {
7104 let server = envelope
7105 .payload
7106 .server
7107 .ok_or_else(|| anyhow!("invalid server"))?;
7108 this.update(&mut cx, |this, cx| {
7109 this.language_server_statuses.insert(
7110 LanguageServerId(server.id as usize),
7111 LanguageServerStatus {
7112 name: server.name,
7113 pending_work: Default::default(),
7114 has_pending_diagnostic_updates: false,
7115 progress_tokens: Default::default(),
7116 },
7117 );
7118 cx.notify();
7119 })?;
7120 Ok(())
7121 }
7122
7123 async fn handle_update_language_server(
7124 this: Model<Self>,
7125 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7126 _: Arc<Client>,
7127 mut cx: AsyncAppContext,
7128 ) -> Result<()> {
7129 this.update(&mut cx, |this, cx| {
7130 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7131
7132 match envelope
7133 .payload
7134 .variant
7135 .ok_or_else(|| anyhow!("invalid variant"))?
7136 {
7137 proto::update_language_server::Variant::WorkStart(payload) => {
7138 this.on_lsp_work_start(
7139 language_server_id,
7140 payload.token,
7141 LanguageServerProgress {
7142 message: payload.message,
7143 percentage: payload.percentage.map(|p| p as usize),
7144 last_update_at: Instant::now(),
7145 },
7146 cx,
7147 );
7148 }
7149
7150 proto::update_language_server::Variant::WorkProgress(payload) => {
7151 this.on_lsp_work_progress(
7152 language_server_id,
7153 payload.token,
7154 LanguageServerProgress {
7155 message: payload.message,
7156 percentage: payload.percentage.map(|p| p as usize),
7157 last_update_at: Instant::now(),
7158 },
7159 cx,
7160 );
7161 }
7162
7163 proto::update_language_server::Variant::WorkEnd(payload) => {
7164 this.on_lsp_work_end(language_server_id, payload.token, cx);
7165 }
7166
7167 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7168 this.disk_based_diagnostics_started(language_server_id, cx);
7169 }
7170
7171 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7172 this.disk_based_diagnostics_finished(language_server_id, cx)
7173 }
7174 }
7175
7176 Ok(())
7177 })?
7178 }
7179
7180 async fn handle_update_buffer(
7181 this: Model<Self>,
7182 envelope: TypedEnvelope<proto::UpdateBuffer>,
7183 _: Arc<Client>,
7184 mut cx: AsyncAppContext,
7185 ) -> Result<proto::Ack> {
7186 this.update(&mut cx, |this, cx| {
7187 let payload = envelope.payload.clone();
7188 let buffer_id = payload.buffer_id;
7189 let ops = payload
7190 .operations
7191 .into_iter()
7192 .map(language::proto::deserialize_operation)
7193 .collect::<Result<Vec<_>, _>>()?;
7194 let is_remote = this.is_remote();
7195 match this.opened_buffers.entry(buffer_id) {
7196 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7197 OpenBuffer::Strong(buffer) => {
7198 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7199 }
7200 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7201 OpenBuffer::Weak(_) => {}
7202 },
7203 hash_map::Entry::Vacant(e) => {
7204 assert!(
7205 is_remote,
7206 "received buffer update from {:?}",
7207 envelope.original_sender_id
7208 );
7209 e.insert(OpenBuffer::Operations(ops));
7210 }
7211 }
7212 Ok(proto::Ack {})
7213 })?
7214 }
7215
7216 async fn handle_create_buffer_for_peer(
7217 this: Model<Self>,
7218 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7219 _: Arc<Client>,
7220 mut cx: AsyncAppContext,
7221 ) -> Result<()> {
7222 this.update(&mut cx, |this, cx| {
7223 match envelope
7224 .payload
7225 .variant
7226 .ok_or_else(|| anyhow!("missing variant"))?
7227 {
7228 proto::create_buffer_for_peer::Variant::State(mut state) => {
7229 let mut buffer_file = None;
7230 if let Some(file) = state.file.take() {
7231 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7232 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7233 anyhow!("no worktree found for id {}", file.worktree_id)
7234 })?;
7235 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7236 as Arc<dyn language::File>);
7237 }
7238
7239 let buffer_id = state.id;
7240 let buffer = cx.new_model(|_| {
7241 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
7242 .unwrap()
7243 });
7244 this.incomplete_remote_buffers
7245 .insert(buffer_id, Some(buffer));
7246 }
7247 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7248 let buffer = this
7249 .incomplete_remote_buffers
7250 .get(&chunk.buffer_id)
7251 .cloned()
7252 .flatten()
7253 .ok_or_else(|| {
7254 anyhow!(
7255 "received chunk for buffer {} without initial state",
7256 chunk.buffer_id
7257 )
7258 })?;
7259 let operations = chunk
7260 .operations
7261 .into_iter()
7262 .map(language::proto::deserialize_operation)
7263 .collect::<Result<Vec<_>>>()?;
7264 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7265
7266 if chunk.is_last {
7267 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7268 this.register_buffer(&buffer, cx)?;
7269 }
7270 }
7271 }
7272
7273 Ok(())
7274 })?
7275 }
7276
7277 async fn handle_update_diff_base(
7278 this: Model<Self>,
7279 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7280 _: Arc<Client>,
7281 mut cx: AsyncAppContext,
7282 ) -> Result<()> {
7283 this.update(&mut cx, |this, cx| {
7284 let buffer_id = envelope.payload.buffer_id;
7285 let diff_base = envelope.payload.diff_base;
7286 if let Some(buffer) = this
7287 .opened_buffers
7288 .get_mut(&buffer_id)
7289 .and_then(|b| b.upgrade())
7290 .or_else(|| {
7291 this.incomplete_remote_buffers
7292 .get(&buffer_id)
7293 .cloned()
7294 .flatten()
7295 })
7296 {
7297 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7298 }
7299 Ok(())
7300 })?
7301 }
7302
7303 async fn handle_update_buffer_file(
7304 this: Model<Self>,
7305 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7306 _: Arc<Client>,
7307 mut cx: AsyncAppContext,
7308 ) -> Result<()> {
7309 let buffer_id = envelope.payload.buffer_id;
7310
7311 this.update(&mut cx, |this, cx| {
7312 let payload = envelope.payload.clone();
7313 if let Some(buffer) = this
7314 .opened_buffers
7315 .get(&buffer_id)
7316 .and_then(|b| b.upgrade())
7317 .or_else(|| {
7318 this.incomplete_remote_buffers
7319 .get(&buffer_id)
7320 .cloned()
7321 .flatten()
7322 })
7323 {
7324 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7325 let worktree = this
7326 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7327 .ok_or_else(|| anyhow!("no such worktree"))?;
7328 let file = File::from_proto(file, worktree, cx)?;
7329 buffer.update(cx, |buffer, cx| {
7330 buffer.file_updated(Arc::new(file), cx);
7331 });
7332 this.detect_language_for_buffer(&buffer, cx);
7333 }
7334 Ok(())
7335 })?
7336 }
7337
7338 async fn handle_save_buffer(
7339 this: Model<Self>,
7340 envelope: TypedEnvelope<proto::SaveBuffer>,
7341 _: Arc<Client>,
7342 mut cx: AsyncAppContext,
7343 ) -> Result<proto::BufferSaved> {
7344 let buffer_id = envelope.payload.buffer_id;
7345 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7346 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7347 let buffer = this
7348 .opened_buffers
7349 .get(&buffer_id)
7350 .and_then(|buffer| buffer.upgrade())
7351 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7352 anyhow::Ok((project_id, buffer))
7353 })??;
7354 buffer
7355 .update(&mut cx, |buffer, _| {
7356 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7357 })?
7358 .await?;
7359 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
7360
7361 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
7362 .await?;
7363 Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
7364 project_id,
7365 buffer_id,
7366 version: serialize_version(buffer.saved_version()),
7367 mtime: Some(buffer.saved_mtime().into()),
7368 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7369 })?)
7370 }
7371
7372 async fn handle_reload_buffers(
7373 this: Model<Self>,
7374 envelope: TypedEnvelope<proto::ReloadBuffers>,
7375 _: Arc<Client>,
7376 mut cx: AsyncAppContext,
7377 ) -> Result<proto::ReloadBuffersResponse> {
7378 let sender_id = envelope.original_sender_id()?;
7379 let reload = this.update(&mut cx, |this, cx| {
7380 let mut buffers = HashSet::default();
7381 for buffer_id in &envelope.payload.buffer_ids {
7382 buffers.insert(
7383 this.opened_buffers
7384 .get(buffer_id)
7385 .and_then(|buffer| buffer.upgrade())
7386 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7387 );
7388 }
7389 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7390 })??;
7391
7392 let project_transaction = reload.await?;
7393 let project_transaction = this.update(&mut cx, |this, cx| {
7394 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7395 })?;
7396 Ok(proto::ReloadBuffersResponse {
7397 transaction: Some(project_transaction),
7398 })
7399 }
7400
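    /// Handles a guest's request to synchronize buffers after (re)connecting: records which
    /// buffers the guest has open, replies with the host's buffer versions, and re-sends any
    /// file, diff base, saved-state, and operation data the guest may have missed.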
7401 async fn handle_synchronize_buffers(
7402 this: Model<Self>,
7403 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7404 _: Arc<Client>,
7405 mut cx: AsyncAppContext,
7406 ) -> Result<proto::SynchronizeBuffersResponse> {
7407 let project_id = envelope.payload.project_id;
7408 let mut response = proto::SynchronizeBuffersResponse {
7409 buffers: Default::default(),
7410 };
7411
7412 this.update(&mut cx, |this, cx| {
7413 let Some(guest_id) = envelope.original_sender_id else {
7414 error!("missing original_sender_id on SynchronizeBuffers request");
7415 return;
7416 };
7417
7418 this.shared_buffers.entry(guest_id).or_default().clear();
7419 for buffer in envelope.payload.buffers {
7420 let buffer_id = buffer.id;
7421 let remote_version = language::proto::deserialize_version(&buffer.version);
7422 if let Some(buffer) = this.buffer_for_id(buffer_id) {
7423 this.shared_buffers
7424 .entry(guest_id)
7425 .or_default()
7426 .insert(buffer_id);
7427
7428 let buffer = buffer.read(cx);
7429 response.buffers.push(proto::BufferVersion {
7430 id: buffer_id,
7431 version: language::proto::serialize_version(&buffer.version),
7432 });
7433
7434 let operations = buffer.serialize_ops(Some(remote_version), cx);
7435 let client = this.client.clone();
7436 if let Some(file) = buffer.file() {
7437 client
7438 .send(proto::UpdateBufferFile {
7439 project_id,
7440 buffer_id: buffer_id as u64,
7441 file: Some(file.to_proto()),
7442 })
7443 .log_err();
7444 }
7445
7446 client
7447 .send(proto::UpdateDiffBase {
7448 project_id,
7449 buffer_id: buffer_id as u64,
7450 diff_base: buffer.diff_base().map(Into::into),
7451 })
7452 .log_err();
7453
7454 client
7455 .send(proto::BufferReloaded {
7456 project_id,
7457 buffer_id,
7458 version: language::proto::serialize_version(buffer.saved_version()),
7459 mtime: Some(buffer.saved_mtime().into()),
7460 fingerprint: language::proto::serialize_fingerprint(
7461 buffer.saved_version_fingerprint(),
7462 ),
7463 line_ending: language::proto::serialize_line_ending(
7464 buffer.line_ending(),
7465 ) as i32,
7466 })
7467 .log_err();
7468
7469 cx.background_executor()
7470 .spawn(
7471 async move {
7472 let operations = operations.await;
7473 for chunk in split_operations(operations) {
7474 client
7475 .request(proto::UpdateBuffer {
7476 project_id,
7477 buffer_id,
7478 operations: chunk,
7479 })
7480 .await?;
7481 }
7482 anyhow::Ok(())
7483 }
7484 .log_err(),
7485 )
7486 .detach();
7487 }
7488 }
7489 })?;
7490
7491 Ok(response)
7492 }
7493
7494 async fn handle_format_buffers(
7495 this: Model<Self>,
7496 envelope: TypedEnvelope<proto::FormatBuffers>,
7497 _: Arc<Client>,
7498 mut cx: AsyncAppContext,
7499 ) -> Result<proto::FormatBuffersResponse> {
7500 let sender_id = envelope.original_sender_id()?;
7501 let format = this.update(&mut cx, |this, cx| {
7502 let mut buffers = HashSet::default();
7503 for buffer_id in &envelope.payload.buffer_ids {
7504 buffers.insert(
7505 this.opened_buffers
7506 .get(buffer_id)
7507 .and_then(|buffer| buffer.upgrade())
7508 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7509 );
7510 }
7511 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7512 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7513 })??;
7514
7515 let project_transaction = format.await?;
7516 let project_transaction = this.update(&mut cx, |this, cx| {
7517 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7518 })?;
7519 Ok(proto::FormatBuffersResponse {
7520 transaction: Some(project_transaction),
7521 })
7522 }
7523
7524 async fn handle_apply_additional_edits_for_completion(
7525 this: Model<Self>,
7526 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7527 _: Arc<Client>,
7528 mut cx: AsyncAppContext,
7529 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7530 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7531 let buffer = this
7532 .opened_buffers
7533 .get(&envelope.payload.buffer_id)
7534 .and_then(|buffer| buffer.upgrade())
7535 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7536 let language = buffer.read(cx).language();
7537 let completion = language::proto::deserialize_completion(
7538 envelope
7539 .payload
7540 .completion
7541 .ok_or_else(|| anyhow!("invalid completion"))?,
7542 language.cloned(),
7543 );
7544 Ok::<_, anyhow::Error>((buffer, completion))
7545 })??;
7546
7547 let completion = completion.await?;
7548
7549 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7550 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7551 })?;
7552
7553 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7554 transaction: apply_additional_edits
7555 .await?
7556 .as_ref()
7557 .map(language::proto::serialize_transaction),
7558 })
7559 }
7560
7561 async fn handle_apply_code_action(
7562 this: Model<Self>,
7563 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7564 _: Arc<Client>,
7565 mut cx: AsyncAppContext,
7566 ) -> Result<proto::ApplyCodeActionResponse> {
7567 let sender_id = envelope.original_sender_id()?;
7568 let action = language::proto::deserialize_code_action(
7569 envelope
7570 .payload
7571 .action
7572 .ok_or_else(|| anyhow!("invalid action"))?,
7573 )?;
7574 let apply_code_action = this.update(&mut cx, |this, cx| {
7575 let buffer = this
7576 .opened_buffers
7577 .get(&envelope.payload.buffer_id)
7578 .and_then(|buffer| buffer.upgrade())
7579 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7580 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7581 })??;
7582
7583 let project_transaction = apply_code_action.await?;
7584 let project_transaction = this.update(&mut cx, |this, cx| {
7585 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7586 })?;
7587 Ok(proto::ApplyCodeActionResponse {
7588 transaction: Some(project_transaction),
7589 })
7590 }
7591
7592 async fn handle_on_type_formatting(
7593 this: Model<Self>,
7594 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7595 _: Arc<Client>,
7596 mut cx: AsyncAppContext,
7597 ) -> Result<proto::OnTypeFormattingResponse> {
7598 let on_type_formatting = this.update(&mut cx, |this, cx| {
7599 let buffer = this
7600 .opened_buffers
7601 .get(&envelope.payload.buffer_id)
7602 .and_then(|buffer| buffer.upgrade())
7603 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7604 let position = envelope
7605 .payload
7606 .position
7607 .and_then(deserialize_anchor)
7608 .ok_or_else(|| anyhow!("invalid position"))?;
7609 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7610 buffer,
7611 position,
7612 envelope.payload.trigger.clone(),
7613 cx,
7614 ))
7615 })??;
7616
7617 let transaction = on_type_formatting
7618 .await?
7619 .as_ref()
7620 .map(language::proto::serialize_transaction);
7621 Ok(proto::OnTypeFormattingResponse { transaction })
7622 }
7623
7624 async fn handle_inlay_hints(
7625 this: Model<Self>,
7626 envelope: TypedEnvelope<proto::InlayHints>,
7627 _: Arc<Client>,
7628 mut cx: AsyncAppContext,
7629 ) -> Result<proto::InlayHintsResponse> {
7630 let sender_id = envelope.original_sender_id()?;
7631 let buffer = this.update(&mut cx, |this, _| {
7632 this.opened_buffers
7633 .get(&envelope.payload.buffer_id)
7634 .and_then(|buffer| buffer.upgrade())
7635 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7636 })??;
7637 let buffer_version = deserialize_version(&envelope.payload.version);
7638
7639 buffer
7640 .update(&mut cx, |buffer, _| {
7641 buffer.wait_for_version(buffer_version.clone())
7642 })?
7643 .await
7644 .with_context(|| {
7645 format!(
7646 "waiting for version {:?} for buffer {}",
7647 buffer_version,
7648 buffer.entity_id()
7649 )
7650 })?;
7651
7652 let start = envelope
7653 .payload
7654 .start
7655 .and_then(deserialize_anchor)
7656 .context("missing range start")?;
7657 let end = envelope
7658 .payload
7659 .end
7660 .and_then(deserialize_anchor)
7661 .context("missing range end")?;
7662 let buffer_hints = this
7663 .update(&mut cx, |project, cx| {
7664 project.inlay_hints(buffer, start..end, cx)
7665 })?
7666 .await
7667 .context("inlay hints fetch")?;
7668
7669 Ok(this.update(&mut cx, |project, cx| {
7670 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7671 })?)
7672 }
7673
7674 async fn handle_resolve_inlay_hint(
7675 this: Model<Self>,
7676 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7677 _: Arc<Client>,
7678 mut cx: AsyncAppContext,
7679 ) -> Result<proto::ResolveInlayHintResponse> {
7680 let proto_hint = envelope
7681 .payload
7682 .hint
            .context("incorrect protobuf resolve inlay hint message: missing the inlay hint")?;
7684 let hint = InlayHints::proto_to_project_hint(proto_hint)
7685 .context("resolved proto inlay hint conversion")?;
7686 let buffer = this.update(&mut cx, |this, _cx| {
7687 this.opened_buffers
7688 .get(&envelope.payload.buffer_id)
7689 .and_then(|buffer| buffer.upgrade())
7690 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7691 })??;
7692 let response_hint = this
7693 .update(&mut cx, |project, cx| {
7694 project.resolve_inlay_hint(
7695 hint,
7696 buffer,
7697 LanguageServerId(envelope.payload.language_server_id as usize),
7698 cx,
7699 )
7700 })?
7701 .await
            .context("inlay hint resolution")?;
7703 Ok(proto::ResolveInlayHintResponse {
7704 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7705 })
7706 }
7707
7708 async fn handle_refresh_inlay_hints(
7709 this: Model<Self>,
7710 _: TypedEnvelope<proto::RefreshInlayHints>,
7711 _: Arc<Client>,
7712 mut cx: AsyncAppContext,
7713 ) -> Result<proto::Ack> {
7714 this.update(&mut cx, |_, cx| {
7715 cx.emit(Event::RefreshInlayHints);
7716 })?;
7717 Ok(proto::Ack {})
7718 }
7719
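    /// Generic handler for proto requests that wrap an LSP command: looks up the target
    /// buffer, forwards the request to its primary language server, and serializes the
    /// response for the requesting peer at the buffer version it was computed against.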
7720 async fn handle_lsp_command<T: LspCommand>(
7721 this: Model<Self>,
7722 envelope: TypedEnvelope<T::ProtoRequest>,
7723 _: Arc<Client>,
7724 mut cx: AsyncAppContext,
7725 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7726 where
7727 <T::LspRequest as lsp::request::Request>::Params: Send,
7728 <T::LspRequest as lsp::request::Request>::Result: Send,
7729 {
7730 let sender_id = envelope.original_sender_id()?;
7731 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7732 let buffer_handle = this.update(&mut cx, |this, _cx| {
7733 this.opened_buffers
7734 .get(&buffer_id)
7735 .and_then(|buffer| buffer.upgrade())
7736 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7737 })??;
7738 let request = T::from_proto(
7739 envelope.payload,
7740 this.clone(),
7741 buffer_handle.clone(),
7742 cx.clone(),
7743 )
7744 .await?;
7745 let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
7746 let response = this
7747 .update(&mut cx, |this, cx| {
7748 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7749 })?
7750 .await?;
7751 this.update(&mut cx, |this, cx| {
7752 Ok(T::response_to_proto(
7753 response,
7754 this,
7755 sender_id,
7756 &buffer_version,
7757 cx,
7758 ))
7759 })?
7760 }
7761
7762 async fn handle_get_project_symbols(
7763 this: Model<Self>,
7764 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7765 _: Arc<Client>,
7766 mut cx: AsyncAppContext,
7767 ) -> Result<proto::GetProjectSymbolsResponse> {
7768 let symbols = this
7769 .update(&mut cx, |this, cx| {
7770 this.symbols(&envelope.payload.query, cx)
7771 })?
7772 .await?;
7773
7774 Ok(proto::GetProjectSymbolsResponse {
7775 symbols: symbols.iter().map(serialize_symbol).collect(),
7776 })
7777 }
7778
7779 async fn handle_search_project(
7780 this: Model<Self>,
7781 envelope: TypedEnvelope<proto::SearchProject>,
7782 _: Arc<Client>,
7783 mut cx: AsyncAppContext,
7784 ) -> Result<proto::SearchProjectResponse> {
7785 let peer_id = envelope.original_sender_id()?;
7786 let query = SearchQuery::from_proto(envelope.payload)?;
7787 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7788
7789 cx.spawn(move |mut cx| async move {
7790 let mut locations = Vec::new();
7791 while let Some((buffer, ranges)) = result.next().await {
7792 for range in ranges {
7793 let start = serialize_anchor(&range.start);
7794 let end = serialize_anchor(&range.end);
7795 let buffer_id = this.update(&mut cx, |this, cx| {
7796 this.create_buffer_for_peer(&buffer, peer_id, cx)
7797 })?;
7798 locations.push(proto::Location {
7799 buffer_id,
7800 start: Some(start),
7801 end: Some(end),
7802 });
7803 }
7804 }
7805 Ok(proto::SearchProjectResponse { locations })
7806 })
7807 .await
7808 }
7809
7810 async fn handle_open_buffer_for_symbol(
7811 this: Model<Self>,
7812 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7813 _: Arc<Client>,
7814 mut cx: AsyncAppContext,
7815 ) -> Result<proto::OpenBufferForSymbolResponse> {
7816 let peer_id = envelope.original_sender_id()?;
7817 let symbol = envelope
7818 .payload
7819 .symbol
7820 .ok_or_else(|| anyhow!("invalid symbol"))?;
7821 let symbol = this
7822 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
7823 .await?;
7824 let symbol = this.update(&mut cx, |this, _| {
7825 let signature = this.symbol_signature(&symbol.path);
7826 if signature == symbol.signature {
7827 Ok(symbol)
7828 } else {
7829 Err(anyhow!("invalid symbol signature"))
7830 }
7831 })??;
7832 let buffer = this
7833 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
7834 .await?;
7835
7836 Ok(proto::OpenBufferForSymbolResponse {
7837 buffer_id: this.update(&mut cx, |this, cx| {
7838 this.create_buffer_for_peer(&buffer, peer_id, cx)
7839 })?,
7840 })
7841 }
7842
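    /// Hashes a project path together with this project's nonce, producing the signature
    /// used to verify that symbols received from peers were originally produced by this
    /// project (see `handle_open_buffer_for_symbol`).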
7843 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
7844 let mut hasher = Sha256::new();
7845 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
7846 hasher.update(project_path.path.to_string_lossy().as_bytes());
7847 hasher.update(self.nonce.to_be_bytes());
7848 hasher.finalize().as_slice().try_into().unwrap()
7849 }
7850
7851 async fn handle_open_buffer_by_id(
7852 this: Model<Self>,
7853 envelope: TypedEnvelope<proto::OpenBufferById>,
7854 _: Arc<Client>,
7855 mut cx: AsyncAppContext,
7856 ) -> Result<proto::OpenBufferResponse> {
7857 let peer_id = envelope.original_sender_id()?;
7858 let buffer = this
7859 .update(&mut cx, |this, cx| {
7860 this.open_buffer_by_id(envelope.payload.id, cx)
7861 })?
7862 .await?;
7863 this.update(&mut cx, |this, cx| {
7864 Ok(proto::OpenBufferResponse {
7865 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7866 })
7867 })?
7868 }
7869
7870 async fn handle_open_buffer_by_path(
7871 this: Model<Self>,
7872 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7873 _: Arc<Client>,
7874 mut cx: AsyncAppContext,
7875 ) -> Result<proto::OpenBufferResponse> {
7876 let peer_id = envelope.original_sender_id()?;
7877 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7878 let open_buffer = this.update(&mut cx, |this, cx| {
7879 this.open_buffer(
7880 ProjectPath {
7881 worktree_id,
7882 path: PathBuf::from(envelope.payload.path).into(),
7883 },
7884 cx,
7885 )
7886 })?;
7887
7888 let buffer = open_buffer.await?;
7889 this.update(&mut cx, |this, cx| {
7890 Ok(proto::OpenBufferResponse {
7891 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7892 })
7893 })?
7894 }
7895
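    /// Converts a `ProjectTransaction` into its protobuf form, registering each affected
    /// buffer with the given peer so the buffer ids in the message can be resolved remotely.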
7896 fn serialize_project_transaction_for_peer(
7897 &mut self,
7898 project_transaction: ProjectTransaction,
7899 peer_id: proto::PeerId,
7900 cx: &mut AppContext,
7901 ) -> proto::ProjectTransaction {
7902 let mut serialized_transaction = proto::ProjectTransaction {
7903 buffer_ids: Default::default(),
7904 transactions: Default::default(),
7905 };
7906 for (buffer, transaction) in project_transaction.0 {
7907 serialized_transaction
7908 .buffer_ids
7909 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7910 serialized_transaction
7911 .transactions
7912 .push(language::proto::serialize_transaction(&transaction));
7913 }
7914 serialized_transaction
7915 }
7916
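    /// Rebuilds a `ProjectTransaction` from its protobuf form, waiting for each referenced
    /// remote buffer and for the edits in each transaction to arrive before optionally
    /// pushing the transactions onto the buffers' histories.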
7917 fn deserialize_project_transaction(
7918 &mut self,
7919 message: proto::ProjectTransaction,
7920 push_to_history: bool,
7921 cx: &mut ModelContext<Self>,
7922 ) -> Task<Result<ProjectTransaction>> {
7923 cx.spawn(move |this, mut cx| async move {
7924 let mut project_transaction = ProjectTransaction::default();
7925 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
7926 {
7927 let buffer = this
7928 .update(&mut cx, |this, cx| {
7929 this.wait_for_remote_buffer(buffer_id, cx)
7930 })?
7931 .await?;
7932 let transaction = language::proto::deserialize_transaction(transaction)?;
7933 project_transaction.0.insert(buffer, transaction);
7934 }
7935
7936 for (buffer, transaction) in &project_transaction.0 {
7937 buffer
7938 .update(&mut cx, |buffer, _| {
7939 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
7940 })?
7941 .await?;
7942
7943 if push_to_history {
7944 buffer.update(&mut cx, |buffer, _| {
7945 buffer.push_transaction(transaction.clone(), Instant::now());
7946 })?;
7947 }
7948 }
7949
7950 Ok(project_transaction)
7951 })
7952 }
7953
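    /// Returns the buffer's remote id and, if this project is currently shared, queues a
    /// `CreateBufferForPeer` update so the peer receives the buffer's state.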
7954 fn create_buffer_for_peer(
7955 &mut self,
7956 buffer: &Model<Buffer>,
7957 peer_id: proto::PeerId,
7958 cx: &mut AppContext,
7959 ) -> u64 {
7960 let buffer_id = buffer.read(cx).remote_id();
7961 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
7962 updates_tx
7963 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7964 .ok();
7965 }
7966 buffer_id
7967 }
7968
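    /// Waits until the buffer with the given remote id has been opened locally, marking it
    /// as incomplete so the host will resend it during buffer synchronization. Fails if the
    /// project is dropped or the connection to the host is lost first.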
7969 fn wait_for_remote_buffer(
7970 &mut self,
7971 id: u64,
7972 cx: &mut ModelContext<Self>,
7973 ) -> Task<Result<Model<Buffer>>> {
7974 let mut opened_buffer_rx = self.opened_buffer.1.clone();
7975
7976 cx.spawn(move |this, mut cx| async move {
7977 let buffer = loop {
7978 let Some(this) = this.upgrade() else {
7979 return Err(anyhow!("project dropped"));
7980 };
7981
7982 let buffer = this.update(&mut cx, |this, _cx| {
7983 this.opened_buffers
7984 .get(&id)
7985 .and_then(|buffer| buffer.upgrade())
7986 })?;
7987
7988 if let Some(buffer) = buffer {
7989 break buffer;
7990 } else if this.update(&mut cx, |this, _| this.is_disconnected())? {
7991 return Err(anyhow!("disconnected before buffer {} could be opened", id));
7992 }
7993
7994 this.update(&mut cx, |this, _| {
7995 this.incomplete_remote_buffers.entry(id).or_default();
7996 })?;
7997 drop(this);
7998
7999 opened_buffer_rx
8000 .next()
8001 .await
8002 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
8003 };
8004
8005 Ok(buffer)
8006 })
8007 }
8008
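    /// On a remote project, reports the local versions of all opened buffers to the host,
    /// pushes any operations the host is missing in chunks, and re-requests buffers that
    /// never finished opening so their waiting futures can complete.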
8009 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8010 let project_id = match self.client_state {
8011 ProjectClientState::Remote {
8012 sharing_has_stopped,
8013 remote_id,
8014 ..
8015 } => {
8016 if sharing_has_stopped {
8017 return Task::ready(Err(anyhow!(
8018 "can't synchronize remote buffers on a readonly project"
8019 )));
8020 } else {
8021 remote_id
8022 }
8023 }
8024 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8025 return Task::ready(Err(anyhow!(
8026 "can't synchronize remote buffers on a local project"
8027 )))
8028 }
8029 };
8030
8031 let client = self.client.clone();
8032 cx.spawn(move |this, mut cx| async move {
8033 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8034 let buffers = this
8035 .opened_buffers
8036 .iter()
8037 .filter_map(|(id, buffer)| {
8038 let buffer = buffer.upgrade()?;
8039 Some(proto::BufferVersion {
8040 id: *id,
8041 version: language::proto::serialize_version(&buffer.read(cx).version),
8042 })
8043 })
8044 .collect();
8045 let incomplete_buffer_ids = this
8046 .incomplete_remote_buffers
8047 .keys()
8048 .copied()
8049 .collect::<Vec<_>>();
8050
8051 (buffers, incomplete_buffer_ids)
8052 })?;
8053 let response = client
8054 .request(proto::SynchronizeBuffers {
8055 project_id,
8056 buffers,
8057 })
8058 .await?;
8059
8060 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8061 response
8062 .buffers
8063 .into_iter()
8064 .map(|buffer| {
8065 let client = client.clone();
8066 let buffer_id = buffer.id;
8067 let remote_version = language::proto::deserialize_version(&buffer.version);
8068 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8069 let operations =
8070 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8071 cx.background_executor().spawn(async move {
8072 let operations = operations.await;
8073 for chunk in split_operations(operations) {
8074 client
8075 .request(proto::UpdateBuffer {
8076 project_id,
8077 buffer_id,
8078 operations: chunk,
8079 })
8080 .await?;
8081 }
8082 anyhow::Ok(())
8083 })
8084 } else {
8085 Task::ready(Ok(()))
8086 }
8087 })
8088 .collect::<Vec<_>>()
8089 })?;
8090
            // Any incomplete buffers have open requests waiting. Ask the host to send the
            // create messages for these buffers again, so any waiting futures can complete.
8093 for id in incomplete_buffer_ids {
8094 cx.background_executor()
8095 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8096 .detach();
8097 }
8098
8099 futures::future::join_all(send_updates_for_buffers)
8100 .await
8101 .into_iter()
8102 .collect()
8103 })
8104 }
8105
8106 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8107 self.worktrees()
8108 .map(|worktree| {
8109 let worktree = worktree.read(cx);
8110 proto::WorktreeMetadata {
8111 id: worktree.id().to_proto(),
8112 root_name: worktree.root_name().into(),
8113 visible: worktree.is_visible(),
8114 abs_path: worktree.abs_path().to_string_lossy().into(),
8115 }
8116 })
8117 .collect()
8118 }
8119
8120 fn set_worktrees_from_proto(
8121 &mut self,
8122 worktrees: Vec<proto::WorktreeMetadata>,
8123 cx: &mut ModelContext<Project>,
8124 ) -> Result<()> {
8125 let replica_id = self.replica_id();
8126 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8127
8128 let mut old_worktrees_by_id = self
8129 .worktrees
8130 .drain(..)
8131 .filter_map(|worktree| {
8132 let worktree = worktree.upgrade()?;
8133 Some((worktree.read(cx).id(), worktree))
8134 })
8135 .collect::<HashMap<_, _>>();
8136
8137 for worktree in worktrees {
8138 if let Some(old_worktree) =
8139 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8140 {
8141 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8142 } else {
8143 let worktree =
8144 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8145 let _ = self.add_worktree(&worktree, cx);
8146 }
8147 }
8148
8149 self.metadata_changed(cx);
8150 for id in old_worktrees_by_id.keys() {
8151 cx.emit(Event::WorktreeRemoved(*id));
8152 }
8153
8154 Ok(())
8155 }
8156
8157 fn set_collaborators_from_proto(
8158 &mut self,
8159 messages: Vec<proto::Collaborator>,
8160 cx: &mut ModelContext<Self>,
8161 ) -> Result<()> {
8162 let mut collaborators = HashMap::default();
8163 for message in messages {
8164 let collaborator = Collaborator::from_proto(message)?;
8165 collaborators.insert(collaborator.peer_id, collaborator);
8166 }
8167 for old_peer_id in self.collaborators.keys() {
8168 if !collaborators.contains_key(old_peer_id) {
8169 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8170 }
8171 }
8172 self.collaborators = collaborators;
8173 Ok(())
8174 }
8175
8176 fn deserialize_symbol(
8177 &self,
8178 serialized_symbol: proto::Symbol,
8179 ) -> impl Future<Output = Result<Symbol>> {
8180 let languages = self.languages.clone();
8181 async move {
8182 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8183 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8184 let start = serialized_symbol
8185 .start
8186 .ok_or_else(|| anyhow!("invalid start"))?;
8187 let end = serialized_symbol
8188 .end
8189 .ok_or_else(|| anyhow!("invalid end"))?;
8190 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8191 let path = ProjectPath {
8192 worktree_id,
8193 path: PathBuf::from(serialized_symbol.path).into(),
8194 };
8195 let language = languages
8196 .language_for_file(&path.path, None)
8197 .await
8198 .log_err();
8199 Ok(Symbol {
8200 language_server_name: LanguageServerName(
8201 serialized_symbol.language_server_name.into(),
8202 ),
8203 source_worktree_id,
8204 path,
8205 label: {
8206 match language {
8207 Some(language) => {
8208 language
8209 .label_for_symbol(&serialized_symbol.name, kind)
8210 .await
8211 }
8212 None => None,
8213 }
8214 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8215 },
8216
8217 name: serialized_symbol.name,
8218 range: Unclipped(PointUtf16::new(start.row, start.column))
8219 ..Unclipped(PointUtf16::new(end.row, end.column)),
8220 kind,
8221 signature: serialized_symbol
8222 .signature
8223 .try_into()
8224 .map_err(|_| anyhow!("invalid signature"))?,
8225 })
8226 }
8227 }
8228
8229 async fn handle_buffer_saved(
8230 this: Model<Self>,
8231 envelope: TypedEnvelope<proto::BufferSaved>,
8232 _: Arc<Client>,
8233 mut cx: AsyncAppContext,
8234 ) -> Result<()> {
8235 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8236 let version = deserialize_version(&envelope.payload.version);
8237 let mtime = envelope
8238 .payload
8239 .mtime
8240 .ok_or_else(|| anyhow!("missing mtime"))?
8241 .into();
8242
8243 this.update(&mut cx, |this, cx| {
8244 let buffer = this
8245 .opened_buffers
8246 .get(&envelope.payload.buffer_id)
8247 .and_then(|buffer| buffer.upgrade())
8248 .or_else(|| {
8249 this.incomplete_remote_buffers
8250 .get(&envelope.payload.buffer_id)
8251 .and_then(|b| b.clone())
8252 });
8253 if let Some(buffer) = buffer {
8254 buffer.update(cx, |buffer, cx| {
8255 buffer.did_save(version, fingerprint, mtime, cx);
8256 });
8257 }
8258 Ok(())
8259 })?
8260 }
8261
8262 async fn handle_buffer_reloaded(
8263 this: Model<Self>,
8264 envelope: TypedEnvelope<proto::BufferReloaded>,
8265 _: Arc<Client>,
8266 mut cx: AsyncAppContext,
8267 ) -> Result<()> {
8268 let payload = envelope.payload;
8269 let version = deserialize_version(&payload.version);
8270 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8271 let line_ending = deserialize_line_ending(
8272 proto::LineEnding::from_i32(payload.line_ending)
8273 .ok_or_else(|| anyhow!("missing line ending"))?,
8274 );
8275 let mtime = payload
8276 .mtime
8277 .ok_or_else(|| anyhow!("missing mtime"))?
8278 .into();
8279 this.update(&mut cx, |this, cx| {
8280 let buffer = this
8281 .opened_buffers
8282 .get(&payload.buffer_id)
8283 .and_then(|buffer| buffer.upgrade())
8284 .or_else(|| {
8285 this.incomplete_remote_buffers
8286 .get(&payload.buffer_id)
8287 .cloned()
8288 .flatten()
8289 });
8290 if let Some(buffer) = buffer {
8291 buffer.update(cx, |buffer, cx| {
8292 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8293 });
8294 }
8295 Ok(())
8296 })?
8297 }
8298
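    /// Converts LSP text edits into anchor-ranged edits against the snapshot that was sent
    /// to the server at the given document version, coalescing adjacent edits and diffing
    /// multi-line replacements so anchors in unchanged regions are preserved.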
8299 #[allow(clippy::type_complexity)]
8300 fn edits_from_lsp(
8301 &mut self,
8302 buffer: &Model<Buffer>,
8303 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8304 server_id: LanguageServerId,
8305 version: Option<i32>,
8306 cx: &mut ModelContext<Self>,
8307 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8308 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8309 cx.background_executor().spawn(async move {
8310 let snapshot = snapshot?;
8311 let mut lsp_edits = lsp_edits
8312 .into_iter()
8313 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8314 .collect::<Vec<_>>();
8315 lsp_edits.sort_by_key(|(range, _)| range.start);
8316
8317 let mut lsp_edits = lsp_edits.into_iter().peekable();
8318 let mut edits = Vec::new();
8319 while let Some((range, mut new_text)) = lsp_edits.next() {
8320 // Clip invalid ranges provided by the language server.
8321 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8322 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8323
8324 // Combine any LSP edits that are adjacent.
8325 //
8326 // Also, combine LSP edits that are separated from each other by only
8327 // a newline. This is important because for some code actions,
8328 // Rust-analyzer rewrites the entire buffer via a series of edits that
8329 // are separated by unchanged newline characters.
8330 //
8331 // In order for the diffing logic below to work properly, any edits that
8332 // cancel each other out must be combined into one.
8333 while let Some((next_range, next_text)) = lsp_edits.peek() {
8334 if next_range.start.0 > range.end {
8335 if next_range.start.0.row > range.end.row + 1
8336 || next_range.start.0.column > 0
8337 || snapshot.clip_point_utf16(
8338 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8339 Bias::Left,
8340 ) > range.end
8341 {
8342 break;
8343 }
8344 new_text.push('\n');
8345 }
8346 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8347 new_text.push_str(next_text);
8348 lsp_edits.next();
8349 }
8350
8351 // For multiline edits, perform a diff of the old and new text so that
8352 // we can identify the changes more precisely, preserving the locations
8353 // of any anchors positioned in the unchanged regions.
8354 if range.end.row > range.start.row {
8355 let mut offset = range.start.to_offset(&snapshot);
8356 let old_text = snapshot.text_for_range(range).collect::<String>();
8357
8358 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8359 let mut moved_since_edit = true;
8360 for change in diff.iter_all_changes() {
8361 let tag = change.tag();
8362 let value = change.value();
8363 match tag {
8364 ChangeTag::Equal => {
8365 offset += value.len();
8366 moved_since_edit = true;
8367 }
8368 ChangeTag::Delete => {
8369 let start = snapshot.anchor_after(offset);
8370 let end = snapshot.anchor_before(offset + value.len());
8371 if moved_since_edit {
8372 edits.push((start..end, String::new()));
8373 } else {
8374 edits.last_mut().unwrap().0.end = end;
8375 }
8376 offset += value.len();
8377 moved_since_edit = false;
8378 }
8379 ChangeTag::Insert => {
8380 if moved_since_edit {
8381 let anchor = snapshot.anchor_after(offset);
8382 edits.push((anchor..anchor, value.to_string()));
8383 } else {
8384 edits.last_mut().unwrap().1.push_str(value);
8385 }
8386 moved_since_edit = false;
8387 }
8388 }
8389 }
8390 } else if range.end == range.start {
8391 let anchor = snapshot.anchor_after(range.start);
8392 edits.push((anchor..anchor, new_text));
8393 } else {
8394 let edit_start = snapshot.anchor_after(range.start);
8395 let edit_end = snapshot.anchor_before(range.end);
8396 edits.push((edit_start..edit_end, new_text));
8397 }
8398 }
8399
8400 Ok(edits)
8401 })
8402 }
8403
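    /// Returns the snapshot that was sent to the given language server at the given LSP
    /// document version, pruning snapshots older than the retained window; with no version,
    /// falls back to the buffer's current text snapshot.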
8404 fn buffer_snapshot_for_lsp_version(
8405 &mut self,
8406 buffer: &Model<Buffer>,
8407 server_id: LanguageServerId,
8408 version: Option<i32>,
8409 cx: &AppContext,
8410 ) -> Result<TextBufferSnapshot> {
8411 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8412
8413 if let Some(version) = version {
8414 let buffer_id = buffer.read(cx).remote_id();
8415 let snapshots = self
8416 .buffer_snapshots
8417 .get_mut(&buffer_id)
8418 .and_then(|m| m.get_mut(&server_id))
8419 .ok_or_else(|| {
8420 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8421 })?;
8422
8423 let found_snapshot = snapshots
8424 .binary_search_by_key(&version, |e| e.version)
8425 .map(|ix| snapshots[ix].snapshot.clone())
8426 .map_err(|_| {
8427 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8428 })?;
8429
8430 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8431 Ok(found_snapshot)
8432 } else {
8433 Ok((buffer.read(cx)).text_snapshot())
8434 }
8435 }
8436
8437 pub fn language_servers(
8438 &self,
8439 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8440 self.language_server_ids
8441 .iter()
8442 .map(|((worktree_id, server_name), server_id)| {
8443 (*server_id, server_name.clone(), *worktree_id)
8444 })
8445 }
8446
8447 pub fn supplementary_language_servers(
8448 &self,
8449 ) -> impl '_
8450 + Iterator<
8451 Item = (
8452 &LanguageServerId,
8453 &(LanguageServerName, Arc<LanguageServer>),
8454 ),
8455 > {
8456 self.supplementary_language_servers.iter()
8457 }
8458
8459 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8460 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8461 Some(server.clone())
8462 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8463 Some(Arc::clone(server))
8464 } else {
8465 None
8466 }
8467 }
8468
8469 pub fn language_servers_for_buffer(
8470 &self,
8471 buffer: &Buffer,
8472 cx: &AppContext,
8473 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8474 self.language_server_ids_for_buffer(buffer, cx)
8475 .into_iter()
8476 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8477 LanguageServerState::Running {
8478 adapter, server, ..
8479 } => Some((adapter, server)),
8480 _ => None,
8481 })
8482 }
8483
8484 fn primary_language_server_for_buffer(
8485 &self,
8486 buffer: &Buffer,
8487 cx: &AppContext,
8488 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8489 self.language_servers_for_buffer(buffer, cx).next()
8490 }
8491
8492 pub fn language_server_for_buffer(
8493 &self,
8494 buffer: &Buffer,
8495 server_id: LanguageServerId,
8496 cx: &AppContext,
8497 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8498 self.language_servers_for_buffer(buffer, cx)
8499 .find(|(_, s)| s.server_id() == server_id)
8500 }
8501
8502 fn language_server_ids_for_buffer(
8503 &self,
8504 buffer: &Buffer,
8505 cx: &AppContext,
8506 ) -> Vec<LanguageServerId> {
8507 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8508 let worktree_id = file.worktree_id(cx);
8509 language
8510 .lsp_adapters()
8511 .iter()
8512 .flat_map(|adapter| {
8513 let key = (worktree_id, adapter.name.clone());
8514 self.language_server_ids.get(&key).copied()
8515 })
8516 .collect()
8517 } else {
8518 Vec::new()
8519 }
8520 }
8521}
8522
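/// Subscribes to Copilot events so that, once its language server has started, it is
/// registered as a supplementary language server and its log messages are forwarded as
/// `LanguageServerLog` events.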
8523fn subscribe_for_copilot_events(
8524 copilot: &Model<Copilot>,
8525 cx: &mut ModelContext<'_, Project>,
8526) -> gpui::Subscription {
8527 cx.subscribe(
8528 copilot,
8529 |project, copilot, copilot_event, cx| match copilot_event {
8530 copilot::Event::CopilotLanguageServerStarted => {
8531 match copilot.read(cx).language_server() {
8532 Some((name, copilot_server)) => {
                        // Another event may try to re-add a server that was already added and subscribed to; avoid doing it again.
8534 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8535 let new_server_id = copilot_server.server_id();
8536 let weak_project = cx.weak_model();
8537 let copilot_log_subscription = copilot_server
8538 .on_notification::<copilot::request::LogMessage, _>(
8539 move |params, mut cx| {
8540 weak_project.update(&mut cx, |_, cx| {
8541 cx.emit(Event::LanguageServerLog(
8542 new_server_id,
8543 params.message,
8544 ));
8545 }).ok();
8546 },
8547 );
8548 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8549 project.copilot_log_subscription = Some(copilot_log_subscription);
8550 cx.emit(Event::LanguageServerAdded(new_server_id));
8551 }
8552 }
8553 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8554 }
8555 }
8556 },
8557 )
8558}
8559
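/// Returns the longest prefix of a glob pattern that contains no glob metacharacters,
/// measured in whole path components. For example, with `/` as the path separator, the
/// literal prefix of `src/**/*.rs` is `src`.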
8560fn glob_literal_prefix<'a>(glob: &'a str) -> &'a str {
8561 let mut literal_end = 0;
8562 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8563 if part.contains(&['*', '?', '{', '}']) {
8564 break;
8565 } else {
8566 if i > 0 {
                // Account for the separator preceding this part
8568 literal_end += path::MAIN_SEPARATOR.len_utf8();
8569 }
8570 literal_end += part.len();
8571 }
8572 }
8573 &glob[..literal_end]
8574}
8575
8576impl WorktreeHandle {
8577 pub fn upgrade(&self) -> Option<Model<Worktree>> {
8578 match self {
8579 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8580 WorktreeHandle::Weak(handle) => handle.upgrade(),
8581 }
8582 }
8583
8584 pub fn handle_id(&self) -> usize {
8585 match self {
8586 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8587 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8588 }
8589 }
8590}
8591
8592impl OpenBuffer {
8593 pub fn upgrade(&self) -> Option<Model<Buffer>> {
8594 match self {
8595 OpenBuffer::Strong(handle) => Some(handle.clone()),
8596 OpenBuffer::Weak(handle) => handle.upgrade(),
8597 OpenBuffer::Operations(_) => None,
8598 }
8599 }
8600}
8601
8602pub struct PathMatchCandidateSet {
8603 pub snapshot: Snapshot,
8604 pub include_ignored: bool,
8605 pub include_root_name: bool,
8606}
8607
8608impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8609 type Candidates = PathMatchCandidateSetIter<'a>;
8610
8611 fn id(&self) -> usize {
8612 self.snapshot.id().to_usize()
8613 }
8614
8615 fn len(&self) -> usize {
8616 if self.include_ignored {
8617 self.snapshot.file_count()
8618 } else {
8619 self.snapshot.visible_file_count()
8620 }
8621 }
8622
8623 fn prefix(&self) -> Arc<str> {
8624 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8625 self.snapshot.root_name().into()
8626 } else if self.include_root_name {
8627 format!("{}/", self.snapshot.root_name()).into()
8628 } else {
8629 "".into()
8630 }
8631 }
8632
8633 fn candidates(&'a self, start: usize) -> Self::Candidates {
8634 PathMatchCandidateSetIter {
8635 traversal: self.snapshot.files(self.include_ignored, start),
8636 }
8637 }
8638}
8639
8640pub struct PathMatchCandidateSetIter<'a> {
8641 traversal: Traversal<'a>,
8642}
8643
8644impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8645 type Item = fuzzy::PathMatchCandidate<'a>;
8646
8647 fn next(&mut self) -> Option<Self::Item> {
8648 self.traversal.next().map(|entry| {
8649 if let EntryKind::File(char_bag) = entry.kind {
8650 fuzzy::PathMatchCandidate {
8651 path: &entry.path,
8652 char_bag,
8653 }
8654 } else {
8655 unreachable!()
8656 }
8657 })
8658 }
8659}
8660
8661impl EventEmitter<Event> for Project {}
8662
8663impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8664 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8665 Self {
8666 worktree_id,
8667 path: path.as_ref().into(),
8668 }
8669 }
8670}
8671
8672impl ProjectLspAdapterDelegate {
8673 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8674 Arc::new(Self {
8675 project: cx.handle(),
8676 http_client: project.client.http_client(),
8677 })
8678 }
8679}
8680
8681impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8682 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8683 self.project
8684 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8685 }
8686
8687 fn http_client(&self) -> Arc<dyn HttpClient> {
8688 self.http_client.clone()
8689 }
8690}
8691
8692fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8693 proto::Symbol {
8694 language_server_name: symbol.language_server_name.0.to_string(),
8695 source_worktree_id: symbol.source_worktree_id.to_proto(),
8696 worktree_id: symbol.path.worktree_id.to_proto(),
8697 path: symbol.path.path.to_string_lossy().to_string(),
8698 name: symbol.name.clone(),
8699 kind: unsafe { mem::transmute(symbol.kind) },
8700 start: Some(proto::PointUtf16 {
8701 row: symbol.range.start.0.row,
8702 column: symbol.range.start.0.column,
8703 }),
8704 end: Some(proto::PointUtf16 {
8705 row: symbol.range.end.0.row,
8706 column: symbol.range.end.0.column,
8707 }),
8708 signature: symbol.signature.to_vec(),
8709 }
8710}
8711
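/// Computes the path of `path` relative to `base`, inserting `..` components where the two
/// paths diverge.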
8712fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8713 let mut path_components = path.components();
8714 let mut base_components = base.components();
8715 let mut components: Vec<Component> = Vec::new();
8716 loop {
8717 match (path_components.next(), base_components.next()) {
8718 (None, None) => break,
8719 (Some(a), None) => {
8720 components.push(a);
8721 components.extend(path_components.by_ref());
8722 break;
8723 }
8724 (None, _) => components.push(Component::ParentDir),
8725 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8726 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8727 (Some(a), Some(_)) => {
8728 components.push(Component::ParentDir);
8729 for _ in base_components {
8730 components.push(Component::ParentDir);
8731 }
8732 components.push(a);
8733 components.extend(path_components.by_ref());
8734 break;
8735 }
8736 }
8737 }
8738 components.iter().map(|c| c.as_os_str()).collect()
8739}
8740
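/// Joins `path` onto `base`, resolving `.` and `..` components lexically, without touching
/// the file system.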
8741fn resolve_path(base: &Path, path: &Path) -> PathBuf {
8742 let mut result = base.to_path_buf();
8743 for component in path.components() {
8744 match component {
8745 Component::ParentDir => {
8746 result.pop();
8747 }
8748 Component::CurDir => (),
8749 _ => result.push(component),
8750 }
8751 }
8752 result
8753}
8754
8755impl Item for Buffer {
8756 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8757 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8758 }
8759
8760 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8761 File::from_dyn(self.file()).map(|file| ProjectPath {
8762 worktree_id: file.worktree_id(cx),
8763 path: file.path().clone(),
8764 })
8765 }
8766}
8767
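/// Polls the watch channel for an in-flight buffer load until it completes, returning the
/// loaded buffer or the error the load produced.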
8768async fn wait_for_loading_buffer(
8769 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
8770) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
8771 loop {
8772 if let Some(result) = receiver.borrow().as_ref() {
8773 match result {
8774 Ok(buffer) => return Ok(buffer.to_owned()),
8775 Err(e) => return Err(e.to_owned()),
8776 }
8777 }
8778 receiver.next().await;
8779 }
8780}
8781
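/// Returns whether the language server has asked, via its advertised save options, to
/// receive the full document text in `didSave` notifications.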
8782fn include_text(server: &lsp::LanguageServer) -> bool {
8783 server
8784 .capabilities()
8785 .text_document_sync
8786 .as_ref()
8787 .and_then(|sync| match sync {
8788 lsp::TextDocumentSyncCapability::Kind(_) => None,
8789 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8790 })
8791 .and_then(|save_options| match save_options {
8792 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
8793 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8794 })
8795 .unwrap_or(false)
8796}