1mod ignore;
2mod lsp_command;
3mod prettier_support;
4pub mod project_settings;
5pub mod search;
6pub mod terminals;
7pub mod worktree;
8
9#[cfg(test)]
10mod project_tests;
11#[cfg(test)]
12mod worktree_tests;
13
14use anyhow::{anyhow, Context as _, Result};
15use client::{proto, Client, Collaborator, TypedEnvelope, UserStore};
16use clock::ReplicaId;
17use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
18use copilot::Copilot;
19use futures::{
20 channel::{
21 mpsc::{self, UnboundedReceiver},
22 oneshot,
23 },
24 future::{try_join_all, Shared},
25 stream::FuturesUnordered,
26 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
27};
28use globset::{Glob, GlobSet, GlobSetBuilder};
29use gpui::{
30 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
31 Model, ModelContext, Task, WeakModel,
32};
33use itertools::Itertools;
34use language::{
35 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
36 point_to_lsp,
37 proto::{
38 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
39 serialize_anchor, serialize_version, split_operations,
40 },
41 range_from_lsp, range_to_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, CodeAction,
42 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent,
43 File as _, Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate,
44 OffsetRangeExt, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
45 ToOffset, ToPointUtf16, Transaction, Unclipped,
46};
47use log::error;
48use lsp::{
49 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
50 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId, OneOf,
51};
52use lsp_command::*;
53use node_runtime::NodeRuntime;
54use parking_lot::Mutex;
55use postage::watch;
56use prettier_support::{DefaultPrettier, PrettierInstance};
57use project_settings::{LspSettings, ProjectSettings};
58use rand::prelude::*;
59use search::SearchQuery;
60use serde::Serialize;
61use settings::{Settings, SettingsStore};
62use sha2::{Digest, Sha256};
63use similar::{ChangeTag, TextDiff};
64use smol::channel::{Receiver, Sender};
65use smol::lock::Semaphore;
66use std::{
67 cmp::{self, Ordering},
68 convert::TryInto,
69 hash::Hash,
70 mem,
71 num::NonZeroU32,
72 ops::Range,
73 path::{self, Component, Path, PathBuf},
74 process::Stdio,
75 str,
76 sync::{
77 atomic::{AtomicUsize, Ordering::SeqCst},
78 Arc,
79 },
80 time::{Duration, Instant},
81};
82use terminals::Terminals;
83use text::Anchor;
84use util::{
85 debug_panic, defer, http::HttpClient, merge_json_value_into,
86 paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
87};
88
89pub use fs::*;
90#[cfg(any(test, feature = "test-support"))]
91pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
92pub use worktree::*;
93
94const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
95
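/// An item that may correspond to an entry and path within a project, such as a buffer
/// opened from a worktree.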
96pub trait Item {
97 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
98 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
99}
100
// Language server state is stored across three collections:
//     language_servers =>
//         a mapping from a unique server id to a LanguageServerState, which is either a task for a
//         server that is still starting, or a running server with its adapter and language server arcs
//     language_server_ids => a mapping from a worktree id and server name to the unique server id
//     language_server_statuses => a mapping from a unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, the id map is checked first to make sure a server isn't already
// running for that worktree. If there is one, we finish early. Otherwise, a new id is allocated and
// the Starting variant of LanguageServerState is stored in the language_servers map.
115pub struct Project {
116 worktrees: Vec<WorktreeHandle>,
117 active_entry: Option<ProjectEntryId>,
118 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
119 languages: Arc<LanguageRegistry>,
120 supplementary_language_servers:
121 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
122 language_servers: HashMap<LanguageServerId, LanguageServerState>,
123 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
124 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
125 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
126 client: Arc<client::Client>,
127 next_entry_id: Arc<AtomicUsize>,
128 join_project_response_message_id: u32,
129 next_diagnostic_group_id: usize,
130 user_store: Model<UserStore>,
131 fs: Arc<dyn Fs>,
132 client_state: Option<ProjectClientState>,
133 collaborators: HashMap<proto::PeerId, Collaborator>,
134 client_subscriptions: Vec<client::Subscription>,
135 _subscriptions: Vec<gpui::Subscription>,
136 next_buffer_id: u64,
137 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
138 shared_buffers: HashMap<proto::PeerId, HashSet<u64>>,
139 #[allow(clippy::type_complexity)]
140 loading_buffers_by_path: HashMap<
141 ProjectPath,
142 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
143 >,
144 #[allow(clippy::type_complexity)]
145 loading_local_worktrees:
146 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
147 opened_buffers: HashMap<u64, OpenBuffer>,
148 local_buffer_ids_by_path: HashMap<ProjectPath, u64>,
149 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, u64>,
    /// Remote buffers that have been requested but not yet fully loaded: a buffer ID mapped to
    /// `None` means we have started waiting for the buffer but haven't finished loading it.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
152 incomplete_remote_buffers: HashMap<u64, Option<Model<Buffer>>>,
153 buffer_snapshots: HashMap<u64, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
154 buffers_being_formatted: HashSet<u64>,
155 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
156 git_diff_debouncer: DelayedDebounced,
157 nonce: u128,
158 _maintain_buffer_languages: Task<()>,
159 _maintain_workspace_config: Task<Result<()>>,
160 terminals: Terminals,
161 copilot_lsp_subscription: Option<gpui::Subscription>,
162 copilot_log_subscription: Option<lsp::Subscription>,
163 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
164 node: Option<Arc<dyn NodeRuntime>>,
165 default_prettier: DefaultPrettier,
166 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
167 prettier_instances: HashMap<PathBuf, PrettierInstance>,
168}
169
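/// Debounces work: each call to [`fire_new`](DelayedDebounced::fire_new) cancels the previously
/// scheduled invocation and schedules a new one to run after the given delay.
///
/// A sketch of typical usage (illustrative only, not a doc test); `do_work` stands in for a
/// hypothetical method returning a `Task<()>`:
///
/// ```ignore
/// self.git_diff_debouncer.fire_new(Duration::from_millis(50), cx, |project, cx| {
///     project.do_work(cx)
/// });
/// ```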
170struct DelayedDebounced {
171 task: Option<Task<()>>,
172 cancel_channel: Option<oneshot::Sender<()>>,
173}
174
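/// Which language server to route a request to: the primary server for the buffer's language,
/// or a specific server identified by id.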
175enum LanguageServerToQuery {
176 Primary,
177 Other(LanguageServerId),
178}
179
180impl DelayedDebounced {
181 fn new() -> DelayedDebounced {
182 DelayedDebounced {
183 task: None,
184 cancel_channel: None,
185 }
186 }
187
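    /// Cancels any previously scheduled invocation, then schedules `func` to run after `delay`,
    /// waiting for the previously spawned task (if any) to finish first.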
188 fn fire_new<F>(&mut self, delay: Duration, cx: &mut ModelContext<Project>, func: F)
189 where
190 F: 'static + Send + FnOnce(&mut Project, &mut ModelContext<Project>) -> Task<()>,
191 {
192 if let Some(channel) = self.cancel_channel.take() {
193 _ = channel.send(());
194 }
195
196 let (sender, mut receiver) = oneshot::channel::<()>();
197 self.cancel_channel = Some(sender);
198
199 let previous_task = self.task.take();
200 self.task = Some(cx.spawn(move |project, mut cx| async move {
201 let mut timer = cx.background_executor().timer(delay).fuse();
202 if let Some(previous_task) = previous_task {
203 previous_task.await;
204 }
205
206 futures::select_biased! {
207 _ = receiver => return,
208 _ = timer => {}
209 }
210
211 if let Ok(task) = project.update(&mut cx, |project, cx| (func)(project, cx)) {
212 task.await;
213 }
214 }));
215 }
216}
217
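/// A snapshot of a buffer's text at the version number that was reported to a language server.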
218struct LspBufferSnapshot {
219 version: i32,
220 snapshot: TextBufferSnapshot,
221}
222
/// A message that must be delivered in order with respect to buffer operations.
224enum BufferOrderedMessage {
225 Operation {
226 buffer_id: u64,
227 operation: proto::Operation,
228 },
229 LanguageServerUpdate {
230 language_server_id: LanguageServerId,
231 message: proto::update_language_server::Variant,
232 },
233 Resync,
234}
235
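/// An update queued by the host of a shared project to be sent to collaborators.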
236enum LocalProjectUpdate {
237 WorktreesChanged,
238 CreateBufferForPeer {
239 peer_id: proto::PeerId,
240 buffer_id: u64,
241 },
242}
243
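/// The project's handle to an open buffer: held strongly while the buffer must be kept alive
/// (for example, while the project is shared), weakly otherwise, or as a backlog of operations
/// received before the buffer finished opening.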
244enum OpenBuffer {
245 Strong(Model<Buffer>),
246 Weak(WeakModel<Buffer>),
247 Operations(Vec<Operation>),
248}
249
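/// A strong or weak handle to a worktree, depending on whether the project currently needs to
/// keep the worktree alive.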
250#[derive(Clone)]
251enum WorktreeHandle {
252 Strong(Model<Worktree>),
253 Weak(WeakModel<Worktree>),
254}
255
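/// The collaboration state of the project: `Local` when this instance is hosting a shared
/// project, `Remote` when it has joined a project hosted by someone else.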
256enum ProjectClientState {
257 Local {
258 remote_id: u64,
259 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
260 _send_updates: Task<Result<()>>,
261 },
262 Remote {
263 sharing_has_stopped: bool,
264 remote_id: u64,
265 replica_id: ReplicaId,
266 },
267}
268
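/// Events emitted by a [`Project`] for observers to react to.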
269#[derive(Clone, Debug, PartialEq)]
270pub enum Event {
271 LanguageServerAdded(LanguageServerId),
272 LanguageServerRemoved(LanguageServerId),
273 LanguageServerLog(LanguageServerId, String),
274 Notification(String),
275 ActiveEntryChanged(Option<ProjectEntryId>),
276 ActivateProjectPanel,
277 WorktreeAdded,
278 WorktreeRemoved(WorktreeId),
279 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
280 DiskBasedDiagnosticsStarted {
281 language_server_id: LanguageServerId,
282 },
283 DiskBasedDiagnosticsFinished {
284 language_server_id: LanguageServerId,
285 },
286 DiagnosticsUpdated {
287 path: ProjectPath,
288 language_server_id: LanguageServerId,
289 },
290 RemoteIdChanged(Option<u64>),
291 DisconnectedFromHost,
292 Closed,
293 DeletedEntry(ProjectEntryId),
294 CollaboratorUpdated {
295 old_peer_id: proto::PeerId,
296 new_peer_id: proto::PeerId,
297 },
298 CollaboratorJoined(proto::PeerId),
299 CollaboratorLeft(proto::PeerId),
300 RefreshInlayHints,
301}
302
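/// The lifecycle state of a language server: either a task that resolves to the server once it
/// has finished starting, or a running server together with its adapter and watched paths.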
303pub enum LanguageServerState {
304 Starting(Task<Option<Arc<LanguageServer>>>),
305
306 Running {
307 language: Arc<Language>,
308 adapter: Arc<CachedLspAdapter>,
309 server: Arc<LanguageServer>,
310 watched_paths: HashMap<WorktreeId, GlobSet>,
311 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
312 },
313}
314
315#[derive(Serialize)]
316pub struct LanguageServerStatus {
317 pub name: String,
318 pub pending_work: BTreeMap<String, LanguageServerProgress>,
319 pub has_pending_diagnostic_updates: bool,
320 progress_tokens: HashSet<String>,
321}
322
323#[derive(Clone, Debug, Serialize)]
324pub struct LanguageServerProgress {
325 pub message: Option<String>,
326 pub percentage: Option<usize>,
327 #[serde(skip_serializing)]
328 pub last_update_at: Instant,
329}
330
331#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
332pub struct ProjectPath {
333 pub worktree_id: WorktreeId,
334 pub path: Arc<Path>,
335}
336
337#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
338pub struct DiagnosticSummary {
339 pub error_count: usize,
340 pub warning_count: usize,
341}
342
343#[derive(Debug, Clone, PartialEq, Eq, Hash)]
344pub struct Location {
345 pub buffer: Model<Buffer>,
346 pub range: Range<language::Anchor>,
347}
348
349#[derive(Debug, Clone, PartialEq, Eq)]
350pub struct InlayHint {
351 pub position: language::Anchor,
352 pub label: InlayHintLabel,
353 pub kind: Option<InlayHintKind>,
354 pub padding_left: bool,
355 pub padding_right: bool,
356 pub tooltip: Option<InlayHintTooltip>,
357 pub resolve_state: ResolveState,
358}
359
360#[derive(Debug, Clone, PartialEq, Eq)]
361pub enum ResolveState {
362 Resolved,
363 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
364 Resolving,
365}
366
367impl InlayHint {
368 pub fn text(&self) -> String {
369 match &self.label {
370 InlayHintLabel::String(s) => s.to_owned(),
371 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
372 }
373 }
374}
375
376#[derive(Debug, Clone, PartialEq, Eq)]
377pub enum InlayHintLabel {
378 String(String),
379 LabelParts(Vec<InlayHintLabelPart>),
380}
381
382#[derive(Debug, Clone, PartialEq, Eq)]
383pub struct InlayHintLabelPart {
384 pub value: String,
385 pub tooltip: Option<InlayHintLabelPartTooltip>,
386 pub location: Option<(LanguageServerId, lsp::Location)>,
387}
388
389#[derive(Debug, Clone, PartialEq, Eq)]
390pub enum InlayHintTooltip {
391 String(String),
392 MarkupContent(MarkupContent),
393}
394
395#[derive(Debug, Clone, PartialEq, Eq)]
396pub enum InlayHintLabelPartTooltip {
397 String(String),
398 MarkupContent(MarkupContent),
399}
400
401#[derive(Debug, Clone, PartialEq, Eq)]
402pub struct MarkupContent {
403 pub kind: HoverBlockKind,
404 pub value: String,
405}
406
407#[derive(Debug, Clone)]
408pub struct LocationLink {
409 pub origin: Option<Location>,
410 pub target: Location,
411}
412
413#[derive(Debug)]
414pub struct DocumentHighlight {
415 pub range: Range<language::Anchor>,
416 pub kind: DocumentHighlightKind,
417}
418
419#[derive(Clone, Debug)]
420pub struct Symbol {
421 pub language_server_name: LanguageServerName,
422 pub source_worktree_id: WorktreeId,
423 pub path: ProjectPath,
424 pub label: CodeLabel,
425 pub name: String,
426 pub kind: lsp::SymbolKind,
427 pub range: Range<Unclipped<PointUtf16>>,
428 pub signature: [u8; 32],
429}
430
431#[derive(Clone, Debug, PartialEq)]
432pub struct HoverBlock {
433 pub text: String,
434 pub kind: HoverBlockKind,
435}
436
437#[derive(Clone, Debug, PartialEq, Eq)]
438pub enum HoverBlockKind {
439 PlainText,
440 Markdown,
441 Code { language: String },
442}
443
444#[derive(Debug)]
445pub struct Hover {
446 pub contents: Vec<HoverBlock>,
447 pub range: Option<Range<language::Anchor>>,
448 pub language: Option<Arc<Language>>,
449}
450
451impl Hover {
452 pub fn is_empty(&self) -> bool {
453 self.contents.iter().all(|block| block.text.is_empty())
454 }
455}
456
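/// The result of an operation that may edit multiple buffers: each affected buffer mapped to
/// the transaction that was applied to it.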
457#[derive(Default)]
458pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
459
460impl DiagnosticSummary {
461 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
462 let mut this = Self {
463 error_count: 0,
464 warning_count: 0,
465 };
466
467 for entry in diagnostics {
468 if entry.diagnostic.is_primary {
469 match entry.diagnostic.severity {
470 DiagnosticSeverity::ERROR => this.error_count += 1,
471 DiagnosticSeverity::WARNING => this.warning_count += 1,
472 _ => {}
473 }
474 }
475 }
476
477 this
478 }
479
480 pub fn is_empty(&self) -> bool {
481 self.error_count == 0 && self.warning_count == 0
482 }
483
484 pub fn to_proto(
485 &self,
486 language_server_id: LanguageServerId,
487 path: &Path,
488 ) -> proto::DiagnosticSummary {
489 proto::DiagnosticSummary {
490 path: path.to_string_lossy().to_string(),
491 language_server_id: language_server_id.0 as u64,
492 error_count: self.error_count as u32,
493 warning_count: self.warning_count as u32,
494 }
495 }
496}
497
498#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
499pub struct ProjectEntryId(usize);
500
501impl ProjectEntryId {
502 pub const MAX: Self = Self(usize::MAX);
503
504 pub fn new(counter: &AtomicUsize) -> Self {
505 Self(counter.fetch_add(1, SeqCst))
506 }
507
508 pub fn from_proto(id: u64) -> Self {
509 Self(id as usize)
510 }
511
512 pub fn to_proto(&self) -> u64 {
513 self.0 as u64
514 }
515
516 pub fn to_usize(&self) -> usize {
517 self.0
518 }
519}
520
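/// What initiated a format operation: saving the buffer or an explicit, manual request.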
521#[derive(Debug, Clone, Copy, PartialEq, Eq)]
522pub enum FormatTrigger {
523 Save,
524 Manual,
525}
526
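/// Gives language server adapters access to the project and an HTTP client, for example when
/// downloading language server binaries.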
527struct ProjectLspAdapterDelegate {
528 project: Model<Project>,
529 http_client: Arc<dyn HttpClient>,
530}
531
532// Currently, formatting operations are represented differently depending on
533// whether they come from a language server or an external command.
534enum FormatOperation {
535 Lsp(Vec<(Range<Anchor>, String)>),
536 External(Diff),
537 Prettier(Diff),
538}
539
540impl FormatTrigger {
541 fn from_proto(value: i32) -> FormatTrigger {
542 match value {
543 0 => FormatTrigger::Save,
544 1 => FormatTrigger::Manual,
545 _ => FormatTrigger::Save,
546 }
547 }
548}
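
/// A candidate to search in a project-wide search: either an already-open buffer or a path on
/// disk within one of the project's worktrees.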
549#[derive(Clone, Debug, PartialEq)]
550enum SearchMatchCandidate {
551 OpenBuffer {
552 buffer: Model<Buffer>,
        // This might be an unnamed file with no representation on the filesystem
554 path: Option<Arc<Path>>,
555 },
556 Path {
557 worktree_id: WorktreeId,
558 is_ignored: bool,
559 path: Arc<Path>,
560 },
561}
562
563type SearchMatchCandidateIndex = usize;
564impl SearchMatchCandidate {
565 fn path(&self) -> Option<Arc<Path>> {
566 match self {
567 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
568 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
569 }
570 }
571}
572
573impl Project {
574 pub fn init_settings(cx: &mut AppContext) {
575 ProjectSettings::register(cx);
576 }
577
578 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
579 Self::init_settings(cx);
580
581 client.add_model_message_handler(Self::handle_add_collaborator);
582 client.add_model_message_handler(Self::handle_update_project_collaborator);
583 client.add_model_message_handler(Self::handle_remove_collaborator);
584 client.add_model_message_handler(Self::handle_buffer_reloaded);
585 client.add_model_message_handler(Self::handle_buffer_saved);
586 client.add_model_message_handler(Self::handle_start_language_server);
587 client.add_model_message_handler(Self::handle_update_language_server);
588 client.add_model_message_handler(Self::handle_update_project);
589 client.add_model_message_handler(Self::handle_unshare_project);
590 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
591 client.add_model_message_handler(Self::handle_update_buffer_file);
592 client.add_model_request_handler(Self::handle_update_buffer);
593 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
594 client.add_model_message_handler(Self::handle_update_worktree);
595 client.add_model_message_handler(Self::handle_update_worktree_settings);
596 client.add_model_request_handler(Self::handle_create_project_entry);
597 client.add_model_request_handler(Self::handle_rename_project_entry);
598 client.add_model_request_handler(Self::handle_copy_project_entry);
599 client.add_model_request_handler(Self::handle_delete_project_entry);
600 client.add_model_request_handler(Self::handle_expand_project_entry);
601 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
602 client.add_model_request_handler(Self::handle_apply_code_action);
603 client.add_model_request_handler(Self::handle_on_type_formatting);
604 client.add_model_request_handler(Self::handle_inlay_hints);
605 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
606 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
607 client.add_model_request_handler(Self::handle_reload_buffers);
608 client.add_model_request_handler(Self::handle_synchronize_buffers);
609 client.add_model_request_handler(Self::handle_format_buffers);
610 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
611 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
612 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
613 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
614 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
615 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
616 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
617 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
618 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
619 client.add_model_request_handler(Self::handle_search_project);
620 client.add_model_request_handler(Self::handle_get_project_symbols);
621 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
622 client.add_model_request_handler(Self::handle_open_buffer_by_id);
623 client.add_model_request_handler(Self::handle_open_buffer_by_path);
624 client.add_model_request_handler(Self::handle_save_buffer);
625 client.add_model_message_handler(Self::handle_update_diff_base);
626 }
627
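    /// Creates a new project that operates on the local filesystem.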
628 pub fn local(
629 client: Arc<Client>,
630 node: Arc<dyn NodeRuntime>,
631 user_store: Model<UserStore>,
632 languages: Arc<LanguageRegistry>,
633 fs: Arc<dyn Fs>,
634 cx: &mut AppContext,
635 ) -> Model<Self> {
636 cx.build_model(|cx: &mut ModelContext<Self>| {
637 let (tx, rx) = mpsc::unbounded();
638 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
639 .detach();
640 let copilot_lsp_subscription =
641 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
642 Self {
643 worktrees: Default::default(),
644 buffer_ordered_messages_tx: tx,
645 collaborators: Default::default(),
646 next_buffer_id: 0,
647 opened_buffers: Default::default(),
648 shared_buffers: Default::default(),
649 incomplete_remote_buffers: Default::default(),
650 loading_buffers_by_path: Default::default(),
651 loading_local_worktrees: Default::default(),
652 local_buffer_ids_by_path: Default::default(),
653 local_buffer_ids_by_entry_id: Default::default(),
654 buffer_snapshots: Default::default(),
655 join_project_response_message_id: 0,
656 client_state: None,
657 opened_buffer: watch::channel(),
658 client_subscriptions: Vec::new(),
659 _subscriptions: vec![
660 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
661 cx.on_release(Self::release),
662 cx.on_app_quit(Self::shutdown_language_servers),
663 ],
664 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
665 _maintain_workspace_config: Self::maintain_workspace_config(cx),
666 active_entry: None,
667 languages,
668 client,
669 user_store,
670 fs,
671 next_entry_id: Default::default(),
672 next_diagnostic_group_id: Default::default(),
673 supplementary_language_servers: HashMap::default(),
674 language_servers: Default::default(),
675 language_server_ids: Default::default(),
676 language_server_statuses: Default::default(),
677 last_workspace_edits_by_language_server: Default::default(),
678 buffers_being_formatted: Default::default(),
679 buffers_needing_diff: Default::default(),
680 git_diff_debouncer: DelayedDebounced::new(),
681 nonce: StdRng::from_entropy().gen(),
682 terminals: Terminals {
683 local_handles: Vec::new(),
684 },
685 copilot_lsp_subscription,
686 copilot_log_subscription: None,
687 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
688 node: Some(node),
689 default_prettier: DefaultPrettier::default(),
690 prettiers_per_worktree: HashMap::default(),
691 prettier_instances: HashMap::default(),
692 }
693 })
694 }
695
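    /// Joins a project hosted by another user, building its worktrees, collaborators, and
    /// language server statuses from the host's `JoinProject` response.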
696 pub async fn remote(
697 remote_id: u64,
698 client: Arc<Client>,
699 user_store: Model<UserStore>,
700 languages: Arc<LanguageRegistry>,
701 fs: Arc<dyn Fs>,
702 mut cx: AsyncAppContext,
703 ) -> Result<Model<Self>> {
704 client.authenticate_and_connect(true, &cx).await?;
705
706 let subscription = client.subscribe_to_entity(remote_id)?;
707 let response = client
708 .request_envelope(proto::JoinProject {
709 project_id: remote_id,
710 })
711 .await?;
712 let this = cx.build_model(|cx| {
713 let replica_id = response.payload.replica_id as ReplicaId;
714
715 let mut worktrees = Vec::new();
716 for worktree in response.payload.worktrees {
717 let worktree =
718 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
719 worktrees.push(worktree);
720 }
721
722 let (tx, rx) = mpsc::unbounded();
723 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
724 .detach();
725 let copilot_lsp_subscription =
726 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
727 let mut this = Self {
728 worktrees: Vec::new(),
729 buffer_ordered_messages_tx: tx,
730 loading_buffers_by_path: Default::default(),
731 next_buffer_id: 0,
732 opened_buffer: watch::channel(),
733 shared_buffers: Default::default(),
734 incomplete_remote_buffers: Default::default(),
735 loading_local_worktrees: Default::default(),
736 local_buffer_ids_by_path: Default::default(),
737 local_buffer_ids_by_entry_id: Default::default(),
738 active_entry: None,
739 collaborators: Default::default(),
740 join_project_response_message_id: response.message_id,
741 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
742 _maintain_workspace_config: Self::maintain_workspace_config(cx),
743 languages,
744 user_store: user_store.clone(),
745 fs,
746 next_entry_id: Default::default(),
747 next_diagnostic_group_id: Default::default(),
748 client_subscriptions: Default::default(),
749 _subscriptions: vec![
750 cx.on_release(Self::release),
751 cx.on_app_quit(Self::shutdown_language_servers),
752 ],
753 client: client.clone(),
754 client_state: Some(ProjectClientState::Remote {
755 sharing_has_stopped: false,
756 remote_id,
757 replica_id,
758 }),
759 supplementary_language_servers: HashMap::default(),
760 language_servers: Default::default(),
761 language_server_ids: Default::default(),
762 language_server_statuses: response
763 .payload
764 .language_servers
765 .into_iter()
766 .map(|server| {
767 (
768 LanguageServerId(server.id as usize),
769 LanguageServerStatus {
770 name: server.name,
771 pending_work: Default::default(),
772 has_pending_diagnostic_updates: false,
773 progress_tokens: Default::default(),
774 },
775 )
776 })
777 .collect(),
778 last_workspace_edits_by_language_server: Default::default(),
779 opened_buffers: Default::default(),
780 buffers_being_formatted: Default::default(),
781 buffers_needing_diff: Default::default(),
782 git_diff_debouncer: DelayedDebounced::new(),
783 buffer_snapshots: Default::default(),
784 nonce: StdRng::from_entropy().gen(),
785 terminals: Terminals {
786 local_handles: Vec::new(),
787 },
788 copilot_lsp_subscription,
789 copilot_log_subscription: None,
790 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
791 node: None,
792 default_prettier: DefaultPrettier::default(),
793 prettiers_per_worktree: HashMap::default(),
794 prettier_instances: HashMap::default(),
795 };
796 for worktree in worktrees {
797 let _ = this.add_worktree(&worktree, cx);
798 }
799 this
800 })?;
801 let subscription = subscription.set_model(&this, &mut cx);
802
803 let user_ids = response
804 .payload
805 .collaborators
806 .iter()
807 .map(|peer| peer.user_id)
808 .collect();
809 user_store
810 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
811 .await?;
812
813 this.update(&mut cx, |this, cx| {
814 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
815 this.client_subscriptions.push(subscription);
816 anyhow::Ok(())
817 })??;
818
819 Ok(this)
820 }
821
822 fn release(&mut self, cx: &mut AppContext) {
823 match &self.client_state {
824 Some(ProjectClientState::Local { .. }) => {
825 let _ = self.unshare_internal(cx);
826 }
827 Some(ProjectClientState::Remote { remote_id, .. }) => {
828 let _ = self.client.send(proto::LeaveProject {
829 project_id: *remote_id,
830 });
831 self.disconnected_from_host_internal(cx);
832 }
833 _ => {}
834 }
835 }
836
837 fn shutdown_language_servers(
838 &mut self,
839 _cx: &mut ModelContext<Self>,
840 ) -> impl Future<Output = ()> {
841 let shutdown_futures = self
842 .language_servers
843 .drain()
844 .map(|(_, server_state)| async {
845 use LanguageServerState::*;
846 match server_state {
847 Running { server, .. } => server.shutdown()?.await,
848 Starting(task) => task.await?.shutdown()?.await,
849 }
850 })
851 .collect::<Vec<_>>();
852
853 async move {
854 futures::future::join_all(shutdown_futures).await;
855 }
856 }
857
858 #[cfg(any(test, feature = "test-support"))]
859 pub async fn test(
860 fs: Arc<dyn Fs>,
861 root_paths: impl IntoIterator<Item = &Path>,
862 cx: &mut gpui::TestAppContext,
863 ) -> Model<Project> {
864 let mut languages = LanguageRegistry::test();
865 languages.set_executor(cx.executor());
866 let http_client = util::http::FakeHttpClient::with_404_response();
867 let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
868 let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
869 let project = cx.update(|cx| {
870 Project::local(
871 client,
872 node_runtime::FakeNodeRuntime::new(),
873 user_store,
874 Arc::new(languages),
875 fs,
876 cx,
877 )
878 });
879 for path in root_paths {
880 let (tree, _) = project
881 .update(cx, |project, cx| {
882 project.find_or_create_local_worktree(path, true, cx)
883 })
884 .await
885 .unwrap();
886 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
887 .await;
888 }
889 project
890 }
891
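    /// Reconciles language servers and prettier instances with the current settings: stops
    /// servers for languages whose language server support was disabled, restarts servers whose
    /// LSP settings changed, and starts servers for newly enabled languages.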
892 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
893 let mut language_servers_to_start = Vec::new();
894 let mut language_formatters_to_check = Vec::new();
895 for buffer in self.opened_buffers.values() {
896 if let Some(buffer) = buffer.upgrade() {
897 let buffer = buffer.read(cx);
898 let buffer_file = File::from_dyn(buffer.file());
899 let buffer_language = buffer.language();
900 let settings = language_settings(buffer_language, buffer.file(), cx);
901 if let Some(language) = buffer_language {
902 if settings.enable_language_server {
903 if let Some(file) = buffer_file {
904 language_servers_to_start
905 .push((file.worktree.clone(), Arc::clone(language)));
906 }
907 }
908 language_formatters_to_check.push((
909 buffer_file.map(|f| f.worktree_id(cx)),
910 Arc::clone(language),
911 settings.clone(),
912 ));
913 }
914 }
915 }
916
917 let mut language_servers_to_stop = Vec::new();
918 let mut language_servers_to_restart = Vec::new();
919 let languages = self.languages.to_vec();
920
921 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
922 let current_lsp_settings = &self.current_lsp_settings;
923 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
924 let language = languages.iter().find_map(|l| {
925 let adapter = l
926 .lsp_adapters()
927 .iter()
928 .find(|adapter| &adapter.name == started_lsp_name)?;
929 Some((l, adapter))
930 });
931 if let Some((language, adapter)) = language {
932 let worktree = self.worktree_for_id(*worktree_id, cx);
933 let file = worktree.as_ref().and_then(|tree| {
934 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
935 });
936 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
937 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
938 } else if let Some(worktree) = worktree {
939 let server_name = &adapter.name.0;
940 match (
941 current_lsp_settings.get(server_name),
942 new_lsp_settings.get(server_name),
943 ) {
944 (None, None) => {}
945 (Some(_), None) | (None, Some(_)) => {
946 language_servers_to_restart.push((worktree, Arc::clone(language)));
947 }
948 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
949 if current_lsp_settings != new_lsp_settings {
950 language_servers_to_restart.push((worktree, Arc::clone(language)));
951 }
952 }
953 }
954 }
955 }
956 }
957 self.current_lsp_settings = new_lsp_settings;
958
959 // Stop all newly-disabled language servers.
960 for (worktree_id, adapter_name) in language_servers_to_stop {
961 self.stop_language_server(worktree_id, adapter_name, cx)
962 .detach();
963 }
964
965 let mut prettier_plugins_by_worktree = HashMap::default();
966 for (worktree, language, settings) in language_formatters_to_check {
967 if let Some(plugins) =
968 prettier_support::prettier_plugins_for_language(&language, &settings)
969 {
970 prettier_plugins_by_worktree
971 .entry(worktree)
972 .or_insert_with(|| HashSet::default())
973 .extend(plugins);
974 }
975 }
976 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
977 self.install_default_prettier(worktree, prettier_plugins, cx);
978 }
979
980 // Start all the newly-enabled language servers.
981 for (worktree, language) in language_servers_to_start {
982 let worktree_path = worktree.read(cx).abs_path();
983 self.start_language_servers(&worktree, worktree_path, language, cx);
984 }
985
986 // Restart all language servers with changed initialization options.
987 for (worktree, language) in language_servers_to_restart {
988 self.restart_language_servers(worktree, language, cx);
989 }
990
991 if self.copilot_lsp_subscription.is_none() {
992 if let Some(copilot) = Copilot::global(cx) {
993 for buffer in self.opened_buffers.values() {
994 if let Some(buffer) = buffer.upgrade() {
995 self.register_buffer_with_copilot(&buffer, cx);
996 }
997 }
998 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
999 }
1000 }
1001
1002 cx.notify();
1003 }
1004
1005 pub fn buffer_for_id(&self, remote_id: u64) -> Option<Model<Buffer>> {
1006 self.opened_buffers
1007 .get(&remote_id)
1008 .and_then(|buffer| buffer.upgrade())
1009 }
1010
1011 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1012 &self.languages
1013 }
1014
1015 pub fn client(&self) -> Arc<Client> {
1016 self.client.clone()
1017 }
1018
1019 pub fn user_store(&self) -> Model<UserStore> {
1020 self.user_store.clone()
1021 }
1022
1023 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1024 self.opened_buffers
1025 .values()
1026 .filter_map(|b| b.upgrade())
1027 .collect()
1028 }
1029
1030 #[cfg(any(test, feature = "test-support"))]
1031 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1032 let path = path.into();
1033 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1034 self.opened_buffers.iter().any(|(_, buffer)| {
1035 if let Some(buffer) = buffer.upgrade() {
1036 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1037 if file.worktree == worktree && file.path() == &path.path {
1038 return true;
1039 }
1040 }
1041 }
1042 false
1043 })
1044 } else {
1045 false
1046 }
1047 }
1048
1049 pub fn fs(&self) -> &Arc<dyn Fs> {
1050 &self.fs
1051 }
1052
1053 pub fn remote_id(&self) -> Option<u64> {
1054 match self.client_state.as_ref()? {
1055 ProjectClientState::Local { remote_id, .. }
1056 | ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
1057 }
1058 }
1059
1060 pub fn replica_id(&self) -> ReplicaId {
1061 match &self.client_state {
1062 Some(ProjectClientState::Remote { replica_id, .. }) => *replica_id,
1063 _ => 0,
1064 }
1065 }
1066
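    /// Notifies observers, and collaborators when the project is shared, that project metadata
    /// such as the set of worktrees has changed.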
1067 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1068 if let Some(ProjectClientState::Local { updates_tx, .. }) = &mut self.client_state {
1069 updates_tx
1070 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1071 .ok();
1072 }
1073 cx.notify();
1074 }
1075
1076 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1077 &self.collaborators
1078 }
1079
1080 pub fn host(&self) -> Option<&Collaborator> {
1081 self.collaborators.values().find(|c| c.replica_id == 0)
1082 }
1083
    /// Collects all worktrees, including ones that don't appear in the project panel.
1085 pub fn worktrees<'a>(&'a self) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1086 self.worktrees
1087 .iter()
1088 .filter_map(move |worktree| worktree.upgrade())
1089 }
1090
    /// Collects all user-visible worktrees, the ones that appear in the project panel.
1092 pub fn visible_worktrees<'a>(
1093 &'a self,
1094 cx: &'a AppContext,
1095 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1096 self.worktrees.iter().filter_map(|worktree| {
1097 worktree.upgrade().and_then(|worktree| {
1098 if worktree.read(cx).is_visible() {
1099 Some(worktree)
1100 } else {
1101 None
1102 }
1103 })
1104 })
1105 }
1106
1107 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1108 self.visible_worktrees(cx)
1109 .map(|tree| tree.read(cx).root_name())
1110 }
1111
1112 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1113 self.worktrees()
1114 .find(|worktree| worktree.read(cx).id() == id)
1115 }
1116
1117 pub fn worktree_for_entry(
1118 &self,
1119 entry_id: ProjectEntryId,
1120 cx: &AppContext,
1121 ) -> Option<Model<Worktree>> {
1122 self.worktrees()
1123 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1124 }
1125
1126 pub fn worktree_id_for_entry(
1127 &self,
1128 entry_id: ProjectEntryId,
1129 cx: &AppContext,
1130 ) -> Option<WorktreeId> {
1131 self.worktree_for_entry(entry_id, cx)
1132 .map(|worktree| worktree.read(cx).id())
1133 }
1134
1135 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
1136 paths.iter().all(|path| self.contains_path(path, cx))
1137 }
1138
1139 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
1140 for worktree in self.worktrees() {
1141 let worktree = worktree.read(cx).as_local();
1142 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
1143 return true;
1144 }
1145 }
1146 false
1147 }
1148
1149 pub fn create_entry(
1150 &mut self,
1151 project_path: impl Into<ProjectPath>,
1152 is_directory: bool,
1153 cx: &mut ModelContext<Self>,
1154 ) -> Option<Task<Result<Entry>>> {
1155 let project_path = project_path.into();
1156 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
1157 if self.is_local() {
1158 Some(worktree.update(cx, |worktree, cx| {
1159 worktree
1160 .as_local_mut()
1161 .unwrap()
1162 .create_entry(project_path.path, is_directory, cx)
1163 }))
1164 } else {
1165 let client = self.client.clone();
1166 let project_id = self.remote_id().unwrap();
1167 Some(cx.spawn(move |_, mut cx| async move {
1168 let response = client
1169 .request(proto::CreateProjectEntry {
1170 worktree_id: project_path.worktree_id.to_proto(),
1171 project_id,
1172 path: project_path.path.to_string_lossy().into(),
1173 is_directory,
1174 })
1175 .await?;
1176 let entry = response
1177 .entry
1178 .ok_or_else(|| anyhow!("missing entry in response"))?;
1179 worktree
1180 .update(&mut cx, |worktree, cx| {
1181 worktree.as_remote_mut().unwrap().insert_entry(
1182 entry,
1183 response.worktree_scan_id as usize,
1184 cx,
1185 )
1186 })?
1187 .await
1188 }))
1189 }
1190 }
1191
1192 pub fn copy_entry(
1193 &mut self,
1194 entry_id: ProjectEntryId,
1195 new_path: impl Into<Arc<Path>>,
1196 cx: &mut ModelContext<Self>,
1197 ) -> Option<Task<Result<Entry>>> {
1198 let worktree = self.worktree_for_entry(entry_id, cx)?;
1199 let new_path = new_path.into();
1200 if self.is_local() {
1201 worktree.update(cx, |worktree, cx| {
1202 worktree
1203 .as_local_mut()
1204 .unwrap()
1205 .copy_entry(entry_id, new_path, cx)
1206 })
1207 } else {
1208 let client = self.client.clone();
1209 let project_id = self.remote_id().unwrap();
1210
1211 Some(cx.spawn(move |_, mut cx| async move {
1212 let response = client
1213 .request(proto::CopyProjectEntry {
1214 project_id,
1215 entry_id: entry_id.to_proto(),
1216 new_path: new_path.to_string_lossy().into(),
1217 })
1218 .await?;
1219 let entry = response
1220 .entry
1221 .ok_or_else(|| anyhow!("missing entry in response"))?;
1222 worktree
1223 .update(&mut cx, |worktree, cx| {
1224 worktree.as_remote_mut().unwrap().insert_entry(
1225 entry,
1226 response.worktree_scan_id as usize,
1227 cx,
1228 )
1229 })?
1230 .await
1231 }))
1232 }
1233 }
1234
1235 pub fn rename_entry(
1236 &mut self,
1237 entry_id: ProjectEntryId,
1238 new_path: impl Into<Arc<Path>>,
1239 cx: &mut ModelContext<Self>,
1240 ) -> Option<Task<Result<Entry>>> {
1241 let worktree = self.worktree_for_entry(entry_id, cx)?;
1242 let new_path = new_path.into();
1243 if self.is_local() {
1244 worktree.update(cx, |worktree, cx| {
1245 worktree
1246 .as_local_mut()
1247 .unwrap()
1248 .rename_entry(entry_id, new_path, cx)
1249 })
1250 } else {
1251 let client = self.client.clone();
1252 let project_id = self.remote_id().unwrap();
1253
1254 Some(cx.spawn(move |_, mut cx| async move {
1255 let response = client
1256 .request(proto::RenameProjectEntry {
1257 project_id,
1258 entry_id: entry_id.to_proto(),
1259 new_path: new_path.to_string_lossy().into(),
1260 })
1261 .await?;
1262 let entry = response
1263 .entry
1264 .ok_or_else(|| anyhow!("missing entry in response"))?;
1265 worktree
1266 .update(&mut cx, |worktree, cx| {
1267 worktree.as_remote_mut().unwrap().insert_entry(
1268 entry,
1269 response.worktree_scan_id as usize,
1270 cx,
1271 )
1272 })?
1273 .await
1274 }))
1275 }
1276 }
1277
1278 pub fn delete_entry(
1279 &mut self,
1280 entry_id: ProjectEntryId,
1281 cx: &mut ModelContext<Self>,
1282 ) -> Option<Task<Result<()>>> {
1283 let worktree = self.worktree_for_entry(entry_id, cx)?;
1284
1285 cx.emit(Event::DeletedEntry(entry_id));
1286
1287 if self.is_local() {
1288 worktree.update(cx, |worktree, cx| {
1289 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1290 })
1291 } else {
1292 let client = self.client.clone();
1293 let project_id = self.remote_id().unwrap();
1294 Some(cx.spawn(move |_, mut cx| async move {
1295 let response = client
1296 .request(proto::DeleteProjectEntry {
1297 project_id,
1298 entry_id: entry_id.to_proto(),
1299 })
1300 .await?;
1301 worktree
1302 .update(&mut cx, move |worktree, cx| {
1303 worktree.as_remote_mut().unwrap().delete_entry(
1304 entry_id,
1305 response.worktree_scan_id as usize,
1306 cx,
1307 )
1308 })?
1309 .await
1310 }))
1311 }
1312 }
1313
1314 pub fn expand_entry(
1315 &mut self,
1316 worktree_id: WorktreeId,
1317 entry_id: ProjectEntryId,
1318 cx: &mut ModelContext<Self>,
1319 ) -> Option<Task<Result<()>>> {
1320 let worktree = self.worktree_for_id(worktree_id, cx)?;
1321 if self.is_local() {
1322 worktree.update(cx, |worktree, cx| {
1323 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1324 })
1325 } else {
1326 let worktree = worktree.downgrade();
1327 let request = self.client.request(proto::ExpandProjectEntry {
1328 project_id: self.remote_id().unwrap(),
1329 entry_id: entry_id.to_proto(),
1330 });
1331 Some(cx.spawn(move |_, mut cx| async move {
1332 let response = request.await?;
1333 if let Some(worktree) = worktree.upgrade() {
1334 worktree
1335 .update(&mut cx, |worktree, _| {
1336 worktree
1337 .as_remote_mut()
1338 .unwrap()
1339 .wait_for_snapshot(response.worktree_scan_id as usize)
1340 })?
1341 .await?;
1342 }
1343 Ok(())
1344 }))
1345 }
1346 }
1347
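    /// Shares the project under the given remote id: upgrades worktree and buffer handles to
    /// strong handles, and spawns a task that streams worktree and buffer updates to
    /// collaborators.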
1348 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1349 if self.client_state.is_some() {
1350 return Err(anyhow!("project was already shared"));
1351 }
1352 self.client_subscriptions.push(
1353 self.client
1354 .subscribe_to_entity(project_id)?
1355 .set_model(&cx.handle(), &mut cx.to_async()),
1356 );
1357
1358 for open_buffer in self.opened_buffers.values_mut() {
1359 match open_buffer {
1360 OpenBuffer::Strong(_) => {}
1361 OpenBuffer::Weak(buffer) => {
1362 if let Some(buffer) = buffer.upgrade() {
1363 *open_buffer = OpenBuffer::Strong(buffer);
1364 }
1365 }
1366 OpenBuffer::Operations(_) => unreachable!(),
1367 }
1368 }
1369
1370 for worktree_handle in self.worktrees.iter_mut() {
1371 match worktree_handle {
1372 WorktreeHandle::Strong(_) => {}
1373 WorktreeHandle::Weak(worktree) => {
1374 if let Some(worktree) = worktree.upgrade() {
1375 *worktree_handle = WorktreeHandle::Strong(worktree);
1376 }
1377 }
1378 }
1379 }
1380
1381 for (server_id, status) in &self.language_server_statuses {
1382 self.client
1383 .send(proto::StartLanguageServer {
1384 project_id,
1385 server: Some(proto::LanguageServer {
1386 id: server_id.0 as u64,
1387 name: status.name.clone(),
1388 }),
1389 })
1390 .log_err();
1391 }
1392
1393 let store = cx.global::<SettingsStore>();
1394 for worktree in self.worktrees() {
1395 let worktree_id = worktree.read(cx).id().to_proto();
1396 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1397 self.client
1398 .send(proto::UpdateWorktreeSettings {
1399 project_id,
1400 worktree_id,
1401 path: path.to_string_lossy().into(),
1402 content: Some(content),
1403 })
1404 .log_err();
1405 }
1406 }
1407
1408 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1409 let client = self.client.clone();
1410 self.client_state = Some(ProjectClientState::Local {
1411 remote_id: project_id,
1412 updates_tx,
1413 _send_updates: cx.spawn(move |this, mut cx| async move {
1414 while let Some(update) = updates_rx.next().await {
1415 match update {
1416 LocalProjectUpdate::WorktreesChanged => {
1417 let worktrees = this.update(&mut cx, |this, _cx| {
1418 this.worktrees().collect::<Vec<_>>()
1419 })?;
1420 let update_project = this
1421 .update(&mut cx, |this, cx| {
1422 this.client.request(proto::UpdateProject {
1423 project_id,
1424 worktrees: this.worktree_metadata_protos(cx),
1425 })
1426 })?
1427 .await;
1428 if update_project.is_ok() {
1429 for worktree in worktrees {
1430 worktree.update(&mut cx, |worktree, cx| {
1431 let worktree = worktree.as_local_mut().unwrap();
1432 worktree.share(project_id, cx).detach_and_log_err(cx)
1433 })?;
1434 }
1435 }
1436 }
1437 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1438 let buffer = this.update(&mut cx, |this, _| {
1439 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1440 let shared_buffers =
1441 this.shared_buffers.entry(peer_id).or_default();
1442 if shared_buffers.insert(buffer_id) {
1443 if let OpenBuffer::Strong(buffer) = buffer {
1444 Some(buffer.clone())
1445 } else {
1446 None
1447 }
1448 } else {
1449 None
1450 }
1451 })?;
1452
1453 let Some(buffer) = buffer else { continue };
1454 let operations =
1455 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1456 let operations = operations.await;
1457 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1458
1459 let initial_state = proto::CreateBufferForPeer {
1460 project_id,
1461 peer_id: Some(peer_id),
1462 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1463 };
1464 if client.send(initial_state).log_err().is_some() {
1465 let client = client.clone();
1466 cx.background_executor()
1467 .spawn(async move {
1468 let mut chunks = split_operations(operations).peekable();
1469 while let Some(chunk) = chunks.next() {
1470 let is_last = chunks.peek().is_none();
1471 client.send(proto::CreateBufferForPeer {
1472 project_id,
1473 peer_id: Some(peer_id),
1474 variant: Some(
1475 proto::create_buffer_for_peer::Variant::Chunk(
1476 proto::BufferChunk {
1477 buffer_id,
1478 operations: chunk,
1479 is_last,
1480 },
1481 ),
1482 ),
1483 })?;
1484 }
1485 anyhow::Ok(())
1486 })
1487 .await
1488 .log_err();
1489 }
1490 }
1491 }
1492 }
1493 Ok(())
1494 }),
1495 });
1496
1497 self.metadata_changed(cx);
1498 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1499 cx.notify();
1500 Ok(())
1501 }
1502
1503 pub fn reshared(
1504 &mut self,
1505 message: proto::ResharedProject,
1506 cx: &mut ModelContext<Self>,
1507 ) -> Result<()> {
1508 self.shared_buffers.clear();
1509 self.set_collaborators_from_proto(message.collaborators, cx)?;
1510 self.metadata_changed(cx);
1511 Ok(())
1512 }
1513
1514 pub fn rejoined(
1515 &mut self,
1516 message: proto::RejoinedProject,
1517 message_id: u32,
1518 cx: &mut ModelContext<Self>,
1519 ) -> Result<()> {
1520 cx.update_global::<SettingsStore, _>(|store, cx| {
1521 for worktree in &self.worktrees {
1522 store
1523 .clear_local_settings(worktree.handle_id(), cx)
1524 .log_err();
1525 }
1526 });
1527
1528 self.join_project_response_message_id = message_id;
1529 self.set_worktrees_from_proto(message.worktrees, cx)?;
1530 self.set_collaborators_from_proto(message.collaborators, cx)?;
1531 self.language_server_statuses = message
1532 .language_servers
1533 .into_iter()
1534 .map(|server| {
1535 (
1536 LanguageServerId(server.id as usize),
1537 LanguageServerStatus {
1538 name: server.name,
1539 pending_work: Default::default(),
1540 has_pending_diagnostic_updates: false,
1541 progress_tokens: Default::default(),
1542 },
1543 )
1544 })
1545 .collect();
1546 self.buffer_ordered_messages_tx
1547 .unbounded_send(BufferOrderedMessage::Resync)
1548 .unwrap();
1549 cx.notify();
1550 Ok(())
1551 }
1552
1553 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1554 self.unshare_internal(cx)?;
1555 self.metadata_changed(cx);
1556 cx.notify();
1557 Ok(())
1558 }
1559
1560 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1561 if self.is_remote() {
1562 return Err(anyhow!("attempted to unshare a remote project"));
1563 }
1564
1565 if let Some(ProjectClientState::Local { remote_id, .. }) = self.client_state.take() {
1566 self.collaborators.clear();
1567 self.shared_buffers.clear();
1568 self.client_subscriptions.clear();
1569
1570 for worktree_handle in self.worktrees.iter_mut() {
1571 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1572 let is_visible = worktree.update(cx, |worktree, _| {
1573 worktree.as_local_mut().unwrap().unshare();
1574 worktree.is_visible()
1575 });
1576 if !is_visible {
1577 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1578 }
1579 }
1580 }
1581
1582 for open_buffer in self.opened_buffers.values_mut() {
1583 // Wake up any tasks waiting for peers' edits to this buffer.
1584 if let Some(buffer) = open_buffer.upgrade() {
1585 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1586 }
1587
1588 if let OpenBuffer::Strong(buffer) = open_buffer {
1589 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1590 }
1591 }
1592
1593 self.client.send(proto::UnshareProject {
1594 project_id: remote_id,
1595 })?;
1596
1597 Ok(())
1598 } else {
1599 Err(anyhow!("attempted to unshare an unshared project"))
1600 }
1601 }
1602
1603 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1604 self.disconnected_from_host_internal(cx);
1605 cx.emit(Event::DisconnectedFromHost);
1606 cx.notify();
1607 }
1608
1609 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1610 if let Some(ProjectClientState::Remote {
1611 sharing_has_stopped,
1612 ..
1613 }) = &mut self.client_state
1614 {
1615 *sharing_has_stopped = true;
1616
1617 self.collaborators.clear();
1618
1619 for worktree in &self.worktrees {
1620 if let Some(worktree) = worktree.upgrade() {
1621 worktree.update(cx, |worktree, _| {
1622 if let Some(worktree) = worktree.as_remote_mut() {
1623 worktree.disconnected_from_host();
1624 }
1625 });
1626 }
1627 }
1628
1629 for open_buffer in self.opened_buffers.values_mut() {
1630 // Wake up any tasks waiting for peers' edits to this buffer.
1631 if let Some(buffer) = open_buffer.upgrade() {
1632 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1633 }
1634
1635 if let OpenBuffer::Strong(buffer) = open_buffer {
1636 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1637 }
1638 }
1639
1640 // Wake up all futures currently waiting on a buffer to get opened,
1641 // to give them a chance to fail now that we've disconnected.
1642 *self.opened_buffer.0.borrow_mut() = ();
1643 }
1644 }
1645
1646 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1647 cx.emit(Event::Closed);
1648 }
1649
1650 pub fn is_read_only(&self) -> bool {
1651 match &self.client_state {
1652 Some(ProjectClientState::Remote {
1653 sharing_has_stopped,
1654 ..
1655 }) => *sharing_has_stopped,
1656 _ => false,
1657 }
1658 }
1659
1660 pub fn is_local(&self) -> bool {
1661 match &self.client_state {
1662 Some(ProjectClientState::Remote { .. }) => false,
1663 _ => true,
1664 }
1665 }
1666
1667 pub fn is_remote(&self) -> bool {
1668 !self.is_local()
1669 }
1670
1671 pub fn create_buffer(
1672 &mut self,
1673 text: &str,
1674 language: Option<Arc<Language>>,
1675 cx: &mut ModelContext<Self>,
1676 ) -> Result<Model<Buffer>> {
1677 if self.is_remote() {
1678 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1679 }
1680 let id = post_inc(&mut self.next_buffer_id);
1681 let buffer = cx.build_model(|cx| {
1682 Buffer::new(self.replica_id(), id, text)
1683 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1684 });
1685 self.register_buffer(&buffer, cx)?;
1686 Ok(buffer)
1687 }
1688
1689 pub fn open_path(
1690 &mut self,
1691 path: impl Into<ProjectPath>,
1692 cx: &mut ModelContext<Self>,
1693 ) -> Task<Result<(ProjectEntryId, AnyModel)>> {
1694 let project_path = path.into();
1695 let task = self.open_buffer(project_path.clone(), cx);
1696 cx.spawn(move |_, mut cx| async move {
1697 let buffer = task.await?;
1698 let project_entry_id = buffer
1699 .update(&mut cx, |buffer, cx| {
1700 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1701 })?
1702 .with_context(|| format!("no project entry for {project_path:?}"))?;
1703
1704 let buffer: &AnyModel = &buffer;
1705 Ok((project_entry_id, buffer.clone()))
1706 })
1707 }
1708
1709 pub fn open_local_buffer(
1710 &mut self,
1711 abs_path: impl AsRef<Path>,
1712 cx: &mut ModelContext<Self>,
1713 ) -> Task<Result<Model<Buffer>>> {
1714 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1715 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1716 } else {
1717 Task::ready(Err(anyhow!("no such path")))
1718 }
1719 }
1720
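    /// Opens a buffer for the given project path, returning any existing open buffer for that
    /// path and de-duplicating concurrent loads of the same path.
    ///
    /// A sketch of typical usage (illustrative only, not a doc test):
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     project.open_buffer((worktree_id, Path::new("src/main.rs")), cx)
    /// });
    /// let buffer = open_task.await?;
    /// ```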
1721 pub fn open_buffer(
1722 &mut self,
1723 path: impl Into<ProjectPath>,
1724 cx: &mut ModelContext<Self>,
1725 ) -> Task<Result<Model<Buffer>>> {
1726 let project_path = path.into();
1727 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1728 worktree
1729 } else {
1730 return Task::ready(Err(anyhow!("no such worktree")));
1731 };
1732
1733 // If there is already a buffer for the given path, then return it.
1734 let existing_buffer = self.get_open_buffer(&project_path, cx);
1735 if let Some(existing_buffer) = existing_buffer {
1736 return Task::ready(Ok(existing_buffer));
1737 }
1738
1739 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1740 // If the given path is already being loaded, then wait for that existing
1741 // task to complete and return the same buffer.
1742 hash_map::Entry::Occupied(e) => e.get().clone(),
1743
1744 // Otherwise, record the fact that this path is now being loaded.
1745 hash_map::Entry::Vacant(entry) => {
1746 let (mut tx, rx) = postage::watch::channel();
1747 entry.insert(rx.clone());
1748
1749 let load_buffer = if worktree.read(cx).is_local() {
1750 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1751 } else {
1752 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1753 };
1754
1755 let project_path = project_path.clone();
1756 cx.spawn(move |this, mut cx| async move {
1757 let load_result = load_buffer.await;
1758 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1759 // Record the fact that the buffer is no longer loading.
1760 this.loading_buffers_by_path.remove(&project_path);
1761 let buffer = load_result.map_err(Arc::new)?;
1762 Ok(buffer)
1763 })?);
1764 anyhow::Ok(())
1765 })
1766 .detach();
1767 rx
1768 }
1769 };
1770
1771 cx.background_executor().spawn(async move {
1772 wait_for_loading_buffer(loading_watch)
1773 .await
1774 .map_err(|error| anyhow!("{project_path:?} opening failure: {error:#}"))
1775 })
1776 }
1777
1778 fn open_local_buffer_internal(
1779 &mut self,
1780 path: &Arc<Path>,
1781 worktree: &Model<Worktree>,
1782 cx: &mut ModelContext<Self>,
1783 ) -> Task<Result<Model<Buffer>>> {
1784 let buffer_id = post_inc(&mut self.next_buffer_id);
1785 let load_buffer = worktree.update(cx, |worktree, cx| {
1786 let worktree = worktree.as_local_mut().unwrap();
1787 worktree.load_buffer(buffer_id, path, cx)
1788 });
1789 cx.spawn(move |this, mut cx| async move {
1790 let buffer = load_buffer.await?;
1791 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1792 Ok(buffer)
1793 })
1794 }
1795
1796 fn open_remote_buffer_internal(
1797 &mut self,
1798 path: &Arc<Path>,
1799 worktree: &Model<Worktree>,
1800 cx: &mut ModelContext<Self>,
1801 ) -> Task<Result<Model<Buffer>>> {
1802 let rpc = self.client.clone();
1803 let project_id = self.remote_id().unwrap();
1804 let remote_worktree_id = worktree.read(cx).id();
1805 let path = path.clone();
1806 let path_string = path.to_string_lossy().to_string();
1807 cx.spawn(move |this, mut cx| async move {
1808 let response = rpc
1809 .request(proto::OpenBufferByPath {
1810 project_id,
1811 worktree_id: remote_worktree_id.to_proto(),
1812 path: path_string,
1813 })
1814 .await?;
1815 this.update(&mut cx, |this, cx| {
1816 this.wait_for_remote_buffer(response.buffer_id, cx)
1817 })?
1818 .await
1819 })
1820 }
1821
    /// `LanguageServerName` is owned, rather than borrowed, because it is inserted into a map.
1823 pub fn open_local_buffer_via_lsp(
1824 &mut self,
1825 abs_path: lsp::Url,
1826 language_server_id: LanguageServerId,
1827 language_server_name: LanguageServerName,
1828 cx: &mut ModelContext<Self>,
1829 ) -> Task<Result<Model<Buffer>>> {
1830 cx.spawn(move |this, mut cx| async move {
1831 let abs_path = abs_path
1832 .to_file_path()
1833 .map_err(|_| anyhow!("can't convert URI to path"))?;
1834 let (worktree, relative_path) = if let Some(result) =
1835 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1836 {
1837 result
1838 } else {
1839 let worktree = this
1840 .update(&mut cx, |this, cx| {
1841 this.create_local_worktree(&abs_path, false, cx)
1842 })?
1843 .await?;
1844 this.update(&mut cx, |this, cx| {
1845 this.language_server_ids.insert(
1846 (worktree.read(cx).id(), language_server_name),
1847 language_server_id,
1848 );
1849 })
1850 .ok();
1851 (worktree, PathBuf::new())
1852 };
1853
1854 let project_path = ProjectPath {
1855 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1856 path: relative_path.into(),
1857 };
1858 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1859 .await
1860 })
1861 }
1862
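    /// Returns an already-open buffer by its id or, for remote projects, requests it
    /// from the host. Local projects fail immediately if the id is unknown.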
1863 pub fn open_buffer_by_id(
1864 &mut self,
1865 id: u64,
1866 cx: &mut ModelContext<Self>,
1867 ) -> Task<Result<Model<Buffer>>> {
1868 if let Some(buffer) = self.buffer_for_id(id) {
1869 Task::ready(Ok(buffer))
1870 } else if self.is_local() {
1871 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1872 } else if let Some(project_id) = self.remote_id() {
1873 let request = self
1874 .client
1875 .request(proto::OpenBufferById { project_id, id });
1876 cx.spawn(move |this, mut cx| async move {
1877 let buffer_id = request.await?.buffer_id;
1878 this.update(&mut cx, |this, cx| {
1879 this.wait_for_remote_buffer(buffer_id, cx)
1880 })?
1881 .await
1882 })
1883 } else {
1884 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1885 }
1886 }
1887
1888 pub fn save_buffers(
1889 &self,
1890 buffers: HashSet<Model<Buffer>>,
1891 cx: &mut ModelContext<Self>,
1892 ) -> Task<Result<()>> {
1893 cx.spawn(move |this, mut cx| async move {
1894 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1895 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1896 .ok()
1897 });
1898 try_join_all(save_tasks).await?;
1899 Ok(())
1900 })
1901 }
1902
1903 pub fn save_buffer(
1904 &self,
1905 buffer: Model<Buffer>,
1906 cx: &mut ModelContext<Self>,
1907 ) -> Task<Result<()>> {
1908 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1909 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1910 };
1911 let worktree = file.worktree.clone();
1912 let path = file.path.clone();
1913 worktree.update(cx, |worktree, cx| match worktree {
1914 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1915 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1916 })
1917 }
1918
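    /// Saves `buffer` to a new path, creating a local worktree for that path if needed.
    /// The buffer is unregistered from its old language servers before the save and
    /// re-registered (with language re-detection) afterwards.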
1919 pub fn save_buffer_as(
1920 &mut self,
1921 buffer: Model<Buffer>,
1922 abs_path: PathBuf,
1923 cx: &mut ModelContext<Self>,
1924 ) -> Task<Result<()>> {
1925 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1926 let old_file = File::from_dyn(buffer.read(cx).file())
1927 .filter(|f| f.is_local())
1928 .cloned();
1929 cx.spawn(move |this, mut cx| async move {
1930 if let Some(old_file) = &old_file {
1931 this.update(&mut cx, |this, cx| {
1932 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1933 })?;
1934 }
1935 let (worktree, path) = worktree_task.await?;
1936 worktree
1937 .update(&mut cx, |worktree, cx| match worktree {
1938 Worktree::Local(worktree) => {
1939 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1940 }
1941                 Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1942 })?
1943 .await?;
1944
1945 this.update(&mut cx, |this, cx| {
1946 this.detect_language_for_buffer(&buffer, cx);
1947 this.register_buffer_with_language_servers(&buffer, cx);
1948 })?;
1949 Ok(())
1950 })
1951 }
1952
1953 pub fn get_open_buffer(
1954 &mut self,
1955 path: &ProjectPath,
1956 cx: &mut ModelContext<Self>,
1957 ) -> Option<Model<Buffer>> {
1958 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1959 self.opened_buffers.values().find_map(|buffer| {
1960 let buffer = buffer.upgrade()?;
1961 let file = File::from_dyn(buffer.read(cx).file())?;
1962 if file.worktree == worktree && file.path() == &path.path {
1963 Some(buffer)
1964 } else {
1965 None
1966 }
1967 })
1968 }
1969
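    /// Wires a newly opened buffer into the project: tracks it in `opened_buffers`
    /// (strongly when the project is shared or remote, weakly otherwise), applies any
    /// operations that were queued for it before it was opened, subscribes to its
    /// events, and registers it with language servers and Copilot.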
1970 fn register_buffer(
1971 &mut self,
1972 buffer: &Model<Buffer>,
1973 cx: &mut ModelContext<Self>,
1974 ) -> Result<()> {
1975 self.request_buffer_diff_recalculation(buffer, cx);
1976 buffer.update(cx, |buffer, _| {
1977 buffer.set_language_registry(self.languages.clone())
1978 });
1979
1980 let remote_id = buffer.read(cx).remote_id();
1981 let is_remote = self.is_remote();
1982 let open_buffer = if is_remote || self.is_shared() {
1983 OpenBuffer::Strong(buffer.clone())
1984 } else {
1985 OpenBuffer::Weak(buffer.downgrade())
1986 };
1987
1988 match self.opened_buffers.entry(remote_id) {
1989 hash_map::Entry::Vacant(entry) => {
1990 entry.insert(open_buffer);
1991 }
1992 hash_map::Entry::Occupied(mut entry) => {
1993 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1994 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
1995 } else if entry.get().upgrade().is_some() {
1996 if is_remote {
1997 return Ok(());
1998 } else {
1999 debug_panic!("buffer {} was already registered", remote_id);
2000 Err(anyhow!("buffer {} was already registered", remote_id))?;
2001 }
2002 }
2003 entry.insert(open_buffer);
2004 }
2005 }
2006 cx.subscribe(buffer, |this, buffer, event, cx| {
2007 this.on_buffer_event(buffer, event, cx);
2008 })
2009 .detach();
2010
2011 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2012 if file.is_local {
2013 self.local_buffer_ids_by_path.insert(
2014 ProjectPath {
2015 worktree_id: file.worktree_id(cx),
2016 path: file.path.clone(),
2017 },
2018 remote_id,
2019 );
2020
2021 self.local_buffer_ids_by_entry_id
2022 .insert(file.entry_id, remote_id);
2023 }
2024 }
2025
2026 self.detect_language_for_buffer(buffer, cx);
2027 self.register_buffer_with_language_servers(buffer, cx);
2028 self.register_buffer_with_copilot(buffer, cx);
2029 cx.observe_release(buffer, |this, buffer, cx| {
2030 if let Some(file) = File::from_dyn(buffer.file()) {
2031 if file.is_local() {
2032 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2033 for server in this.language_servers_for_buffer(buffer, cx) {
2034 server
2035 .1
2036 .notify::<lsp::notification::DidCloseTextDocument>(
2037 lsp::DidCloseTextDocumentParams {
2038 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2039 },
2040 )
2041 .log_err();
2042 }
2043 }
2044 }
2045 })
2046 .detach();
2047
2048 *self.opened_buffer.0.borrow_mut() = ();
2049 Ok(())
2050 }
2051
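    /// Announces a local buffer to every running language server for its language:
    /// restores any diagnostics stored on the worktree, sends `textDocument/didOpen`,
    /// seeds the buffer's completion triggers, and records an initial LSP snapshot.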
2052 fn register_buffer_with_language_servers(
2053 &mut self,
2054 buffer_handle: &Model<Buffer>,
2055 cx: &mut ModelContext<Self>,
2056 ) {
2057 let buffer = buffer_handle.read(cx);
2058 let buffer_id = buffer.remote_id();
2059
2060 if let Some(file) = File::from_dyn(buffer.file()) {
2061 if !file.is_local() {
2062 return;
2063 }
2064
2065 let abs_path = file.abs_path(cx);
2066 let uri = lsp::Url::from_file_path(&abs_path)
2067 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2068 let initial_snapshot = buffer.text_snapshot();
2069 let language = buffer.language().cloned();
2070 let worktree_id = file.worktree_id(cx);
2071
2072 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2073 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2074 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2075 .log_err();
2076 }
2077 }
2078
2079 if let Some(language) = language {
2080 for adapter in language.lsp_adapters() {
2081 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2082 let server = self
2083 .language_server_ids
2084 .get(&(worktree_id, adapter.name.clone()))
2085 .and_then(|id| self.language_servers.get(id))
2086 .and_then(|server_state| {
2087 if let LanguageServerState::Running { server, .. } = server_state {
2088 Some(server.clone())
2089 } else {
2090 None
2091 }
2092 });
2093 let server = match server {
2094 Some(server) => server,
2095 None => continue,
2096 };
2097
2098 server
2099 .notify::<lsp::notification::DidOpenTextDocument>(
2100 lsp::DidOpenTextDocumentParams {
2101 text_document: lsp::TextDocumentItem::new(
2102 uri.clone(),
2103 language_id.unwrap_or_default(),
2104 0,
2105 initial_snapshot.text(),
2106 ),
2107 },
2108 )
2109 .log_err();
2110
2111 buffer_handle.update(cx, |buffer, cx| {
2112 buffer.set_completion_triggers(
2113 server
2114 .capabilities()
2115 .completion_provider
2116 .as_ref()
2117 .and_then(|provider| provider.trigger_characters.clone())
2118 .unwrap_or_default(),
2119 cx,
2120 );
2121 });
2122
2123 let snapshot = LspBufferSnapshot {
2124 version: 0,
2125 snapshot: initial_snapshot.clone(),
2126 };
2127 self.buffer_snapshots
2128 .entry(buffer_id)
2129 .or_default()
2130 .insert(server.server_id(), vec![snapshot]);
2131 }
2132 }
2133 }
2134 }
2135
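    /// Clears the buffer's diagnostics and LSP snapshots and notifies its language
    /// servers with `textDocument/didClose`, using the buffer's previous file path.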
2136 fn unregister_buffer_from_language_servers(
2137 &mut self,
2138 buffer: &Model<Buffer>,
2139 old_file: &File,
2140 cx: &mut ModelContext<Self>,
2141 ) {
2142 let old_path = match old_file.as_local() {
2143 Some(local) => local.abs_path(cx),
2144 None => return,
2145 };
2146
2147 buffer.update(cx, |buffer, cx| {
2148 let worktree_id = old_file.worktree_id(cx);
2149 let ids = &self.language_server_ids;
2150
2151 let language = buffer.language().cloned();
2152 let adapters = language.iter().flat_map(|language| language.lsp_adapters());
2153 for &server_id in adapters.flat_map(|a| ids.get(&(worktree_id, a.name.clone()))) {
2154 buffer.update_diagnostics(server_id, Default::default(), cx);
2155 }
2156
2157 self.buffer_snapshots.remove(&buffer.remote_id());
2158 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2159 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2160 language_server
2161 .notify::<lsp::notification::DidCloseTextDocument>(
2162 lsp::DidCloseTextDocumentParams {
2163 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2164 },
2165 )
2166 .log_err();
2167 }
2168 });
2169 }
2170
2171 fn register_buffer_with_copilot(
2172 &self,
2173 buffer_handle: &Model<Buffer>,
2174 cx: &mut ModelContext<Self>,
2175 ) {
2176 if let Some(copilot) = Copilot::global(cx) {
2177 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2178 }
2179 }
2180
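    /// Drains `rx` in batches, forwarding buffer operations and language server
    /// updates to the remote peer in order. Pending operations are flushed before
    /// each language server update, and a failed flush on a guest marks the project
    /// as needing a resync with the host.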
2181 async fn send_buffer_ordered_messages(
2182 this: WeakModel<Self>,
2183 rx: UnboundedReceiver<BufferOrderedMessage>,
2184 mut cx: AsyncAppContext,
2185 ) -> Result<()> {
2186 const MAX_BATCH_SIZE: usize = 128;
2187
2188 let mut operations_by_buffer_id = HashMap::default();
2189 async fn flush_operations(
2190 this: &WeakModel<Project>,
2191 operations_by_buffer_id: &mut HashMap<u64, Vec<proto::Operation>>,
2192 needs_resync_with_host: &mut bool,
2193 is_local: bool,
2194 cx: &mut AsyncAppContext,
2195 ) -> Result<()> {
2196 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2197 let request = this.update(cx, |this, _| {
2198 let project_id = this.remote_id()?;
2199 Some(this.client.request(proto::UpdateBuffer {
2200 buffer_id,
2201 project_id,
2202 operations,
2203 }))
2204 })?;
2205 if let Some(request) = request {
2206 if request.await.is_err() && !is_local {
2207 *needs_resync_with_host = true;
2208 break;
2209 }
2210 }
2211 }
2212 Ok(())
2213 }
2214
2215 let mut needs_resync_with_host = false;
2216 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2217
2218 while let Some(changes) = changes.next().await {
2219 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2220
2221 for change in changes {
2222 match change {
2223 BufferOrderedMessage::Operation {
2224 buffer_id,
2225 operation,
2226 } => {
2227 if needs_resync_with_host {
2228 continue;
2229 }
2230
2231 operations_by_buffer_id
2232 .entry(buffer_id)
2233 .or_insert(Vec::new())
2234 .push(operation);
2235 }
2236
2237 BufferOrderedMessage::Resync => {
2238 operations_by_buffer_id.clear();
2239 if this
2240 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2241 .await
2242 .is_ok()
2243 {
2244 needs_resync_with_host = false;
2245 }
2246 }
2247
2248 BufferOrderedMessage::LanguageServerUpdate {
2249 language_server_id,
2250 message,
2251 } => {
2252 flush_operations(
2253 &this,
2254 &mut operations_by_buffer_id,
2255 &mut needs_resync_with_host,
2256 is_local,
2257 &mut cx,
2258 )
2259 .await?;
2260
2261 this.update(&mut cx, |this, _| {
2262 if let Some(project_id) = this.remote_id() {
2263 this.client
2264 .send(proto::UpdateLanguageServer {
2265 project_id,
2266 language_server_id: language_server_id.0 as u64,
2267 variant: Some(message),
2268 })
2269 .log_err();
2270 }
2271 })?;
2272 }
2273 }
2274 }
2275
2276 flush_operations(
2277 &this,
2278 &mut operations_by_buffer_id,
2279 &mut needs_resync_with_host,
2280 is_local,
2281 &mut cx,
2282 )
2283 .await?;
2284 }
2285
2286 Ok(())
2287 }
2288
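    /// Reacts to buffer events: queues operations for the remote peer, forwards edits
    /// and saves to language servers as `didChange`/`didSave` notifications, schedules
    /// git diff recalculation, and keeps the path and entry-id indices of local
    /// buffers up to date.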
2289 fn on_buffer_event(
2290 &mut self,
2291 buffer: Model<Buffer>,
2292 event: &BufferEvent,
2293 cx: &mut ModelContext<Self>,
2294 ) -> Option<()> {
2295 if matches!(
2296 event,
2297 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2298 ) {
2299 self.request_buffer_diff_recalculation(&buffer, cx);
2300 }
2301
2302 match event {
2303 BufferEvent::Operation(operation) => {
2304 self.buffer_ordered_messages_tx
2305 .unbounded_send(BufferOrderedMessage::Operation {
2306 buffer_id: buffer.read(cx).remote_id(),
2307 operation: language::proto::serialize_operation(operation),
2308 })
2309 .ok();
2310 }
2311
2312 BufferEvent::Edited { .. } => {
2313 let buffer = buffer.read(cx);
2314 let file = File::from_dyn(buffer.file())?;
2315 let abs_path = file.as_local()?.abs_path(cx);
2316 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2317 let next_snapshot = buffer.text_snapshot();
2318
2319 let language_servers: Vec<_> = self
2320 .language_servers_for_buffer(buffer, cx)
2321 .map(|i| i.1.clone())
2322 .collect();
2323
2324 for language_server in language_servers {
2325 let language_server = language_server.clone();
2326
2327 let buffer_snapshots = self
2328 .buffer_snapshots
2329 .get_mut(&buffer.remote_id())
2330 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2331 let previous_snapshot = buffer_snapshots.last()?;
2332
2333 let build_incremental_change = || {
2334 buffer
2335 .edits_since::<(PointUtf16, usize)>(
2336 previous_snapshot.snapshot.version(),
2337 )
2338 .map(|edit| {
2339 let edit_start = edit.new.start.0;
2340 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2341 let new_text = next_snapshot
2342 .text_for_range(edit.new.start.1..edit.new.end.1)
2343 .collect();
2344 lsp::TextDocumentContentChangeEvent {
2345 range: Some(lsp::Range::new(
2346 point_to_lsp(edit_start),
2347 point_to_lsp(edit_end),
2348 )),
2349 range_length: None,
2350 text: new_text,
2351 }
2352 })
2353 .collect()
2354 };
2355
2356 let document_sync_kind = language_server
2357 .capabilities()
2358 .text_document_sync
2359 .as_ref()
2360 .and_then(|sync| match sync {
2361 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2362 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2363 });
2364
2365 let content_changes: Vec<_> = match document_sync_kind {
2366 Some(lsp::TextDocumentSyncKind::FULL) => {
2367 vec![lsp::TextDocumentContentChangeEvent {
2368 range: None,
2369 range_length: None,
2370 text: next_snapshot.text(),
2371 }]
2372 }
2373 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2374 _ => {
2375 #[cfg(any(test, feature = "test-support"))]
2376 {
2377 build_incremental_change()
2378 }
2379
2380 #[cfg(not(any(test, feature = "test-support")))]
2381 {
2382 continue;
2383 }
2384 }
2385 };
2386
2387 let next_version = previous_snapshot.version + 1;
2388
2389 buffer_snapshots.push(LspBufferSnapshot {
2390 version: next_version,
2391 snapshot: next_snapshot.clone(),
2392 });
2393
2394 language_server
2395 .notify::<lsp::notification::DidChangeTextDocument>(
2396 lsp::DidChangeTextDocumentParams {
2397 text_document: lsp::VersionedTextDocumentIdentifier::new(
2398 uri.clone(),
2399 next_version,
2400 ),
2401 content_changes,
2402 },
2403 )
2404 .log_err();
2405 }
2406 }
2407
2408 BufferEvent::Saved => {
2409 let file = File::from_dyn(buffer.read(cx).file())?;
2410 let worktree_id = file.worktree_id(cx);
2411 let abs_path = file.as_local()?.abs_path(cx);
2412 let text_document = lsp::TextDocumentIdentifier {
2413 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2414 };
2415
2416 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2417 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2418
2419 server
2420 .notify::<lsp::notification::DidSaveTextDocument>(
2421 lsp::DidSaveTextDocumentParams {
2422 text_document: text_document.clone(),
2423 text,
2424 },
2425 )
2426 .log_err();
2427 }
2428
2429 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2430 for language_server_id in language_server_ids {
2431 if let Some(LanguageServerState::Running {
2432 adapter,
2433 simulate_disk_based_diagnostics_completion,
2434 ..
2435 }) = self.language_servers.get_mut(&language_server_id)
2436 {
2437 // After saving a buffer using a language server that doesn't provide
2438 // a disk-based progress token, kick off a timer that will reset every
2439 // time the buffer is saved. If the timer eventually fires, simulate
2440 // disk-based diagnostics being finished so that other pieces of UI
2441 // (e.g., project diagnostics view, diagnostic status bar) can update.
2442 // We don't emit an event right away because the language server might take
2443 // some time to publish diagnostics.
2444 if adapter.disk_based_diagnostics_progress_token.is_none() {
2445 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2446 Duration::from_secs(1);
2447
2448 let task = cx.spawn(move |this, mut cx| async move {
2449 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2450 if let Some(this) = this.upgrade() {
2451 this.update(&mut cx, |this, cx| {
2452 this.disk_based_diagnostics_finished(
2453 language_server_id,
2454 cx,
2455 );
2456 this.buffer_ordered_messages_tx
2457 .unbounded_send(
2458 BufferOrderedMessage::LanguageServerUpdate {
2459 language_server_id,
2460                                             message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2461 },
2462 )
2463 .ok();
2464 }).ok();
2465 }
2466 });
2467 *simulate_disk_based_diagnostics_completion = Some(task);
2468 }
2469 }
2470 }
2471 }
2472 BufferEvent::FileHandleChanged => {
2473 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2474 return None;
2475 };
2476
2477 match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
2478 Some(_) => {
2479 return None;
2480 }
2481 None => {
2482 let remote_id = buffer.read(cx).remote_id();
2483 self.local_buffer_ids_by_entry_id
2484 .insert(file.entry_id, remote_id);
2485
2486 self.local_buffer_ids_by_path.insert(
2487 ProjectPath {
2488 worktree_id: file.worktree_id(cx),
2489 path: file.path.clone(),
2490 },
2491 remote_id,
2492 );
2493 }
2494 }
2495 }
2496 _ => {}
2497 }
2498
2499 None
2500 }
2501
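    /// Marks a buffer as needing a git diff refresh. When `git.gutter_debounce` is
    /// set, recalculation is debounced by at least 50ms; otherwise it is deferred via
    /// `cx.defer` on the first insertion.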
2502 fn request_buffer_diff_recalculation(
2503 &mut self,
2504 buffer: &Model<Buffer>,
2505 cx: &mut ModelContext<Self>,
2506 ) {
2507 self.buffers_needing_diff.insert(buffer.downgrade());
2508 let first_insertion = self.buffers_needing_diff.len() == 1;
2509
2510 let settings = ProjectSettings::get_global(cx);
2511 let delay = if let Some(delay) = settings.git.gutter_debounce {
2512 delay
2513 } else {
2514 if first_insertion {
2515 let this = cx.weak_model();
2516 cx.defer(move |cx| {
2517 if let Some(this) = this.upgrade() {
2518 this.update(cx, |this, cx| {
2519 this.recalculate_buffer_diffs(cx).detach();
2520 });
2521 }
2522 });
2523 }
2524 return;
2525 };
2526
2527 const MIN_DELAY: u64 = 50;
2528 let delay = delay.max(MIN_DELAY);
2529 let duration = Duration::from_millis(delay);
2530
2531 self.git_diff_debouncer
2532 .fire_new(duration, cx, move |this, cx| {
2533 this.recalculate_buffer_diffs(cx)
2534 });
2535 }
2536
2537 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2538 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2539 cx.spawn(move |this, mut cx| async move {
2540 let tasks: Vec<_> = buffers
2541 .iter()
2542 .filter_map(|buffer| {
2543 let buffer = buffer.upgrade()?;
2544 buffer
2545 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2546 .ok()
2547 .flatten()
2548 })
2549 .collect();
2550
2551 futures::future::join_all(tasks).await;
2552
2553 this.update(&mut cx, |this, cx| {
2554 if !this.buffers_needing_diff.is_empty() {
2555 this.recalculate_buffer_diffs(cx).detach();
2556 } else {
2557 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2558 for buffer in buffers {
2559 if let Some(buffer) = buffer.upgrade() {
2560 buffer.update(cx, |_, cx| cx.notify());
2561 }
2562 }
2563 }
2564 })
2565 .ok();
2566 })
2567 }
2568
2569 fn language_servers_for_worktree(
2570 &self,
2571 worktree_id: WorktreeId,
2572 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2573 self.language_server_ids
2574 .iter()
2575 .filter_map(move |((language_server_worktree_id, _), id)| {
2576 if *language_server_worktree_id == worktree_id {
2577 if let Some(LanguageServerState::Running {
2578 adapter,
2579 language,
2580 server,
2581 ..
2582 }) = self.language_servers.get(id)
2583 {
2584 return Some((adapter, language, server));
2585 }
2586 }
2587 None
2588 })
2589 }
2590
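    /// Watches the language registry and, whenever languages change, re-detects
    /// languages for plain-text buffers and reparses buffers containing unknown
    /// injections. A registry reload first detaches all open buffers from their
    /// languages and language servers.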
2591 fn maintain_buffer_languages(
2592 languages: Arc<LanguageRegistry>,
2593 cx: &mut ModelContext<Project>,
2594 ) -> Task<()> {
2595 let mut subscription = languages.subscribe();
2596 let mut prev_reload_count = languages.reload_count();
2597 cx.spawn(move |project, mut cx| async move {
2598 while let Some(()) = subscription.next().await {
2599 if let Some(project) = project.upgrade() {
2600 // If the language registry has been reloaded, then remove and
2601 // re-assign the languages on all open buffers.
2602 let reload_count = languages.reload_count();
2603 if reload_count > prev_reload_count {
2604 prev_reload_count = reload_count;
2605 project
2606 .update(&mut cx, |this, cx| {
2607 let buffers = this
2608 .opened_buffers
2609 .values()
2610 .filter_map(|b| b.upgrade())
2611 .collect::<Vec<_>>();
2612 for buffer in buffers {
2613 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2614 {
2615 this.unregister_buffer_from_language_servers(
2616 &buffer, &f, cx,
2617 );
2618 buffer
2619 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2620 }
2621 }
2622 })
2623 .ok();
2624 }
2625
2626 project
2627 .update(&mut cx, |project, cx| {
2628 let mut plain_text_buffers = Vec::new();
2629 let mut buffers_with_unknown_injections = Vec::new();
2630 for buffer in project.opened_buffers.values() {
2631 if let Some(handle) = buffer.upgrade() {
2632 let buffer = &handle.read(cx);
2633 if buffer.language().is_none()
2634 || buffer.language() == Some(&*language::PLAIN_TEXT)
2635 {
2636 plain_text_buffers.push(handle);
2637 } else if buffer.contains_unknown_injections() {
2638 buffers_with_unknown_injections.push(handle);
2639 }
2640 }
2641 }
2642
2643 for buffer in plain_text_buffers {
2644 project.detect_language_for_buffer(&buffer, cx);
2645 project.register_buffer_with_language_servers(&buffer, cx);
2646 }
2647
2648 for buffer in buffers_with_unknown_injections {
2649 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2650 }
2651 })
2652 .ok();
2653 }
2654 }
2655 })
2656 }
2657
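    /// Re-sends `workspace/didChangeConfiguration` to every running language server
    /// whenever the global settings change.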
2658 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2659 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2660 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2661
2662 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2663 *settings_changed_tx.borrow_mut() = ();
2664 });
2665
2666 cx.spawn(move |this, mut cx| async move {
2667 while let Some(_) = settings_changed_rx.next().await {
2668 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2669 this.language_servers
2670 .values()
2671 .filter_map(|state| match state {
2672 LanguageServerState::Starting(_) => None,
2673 LanguageServerState::Running {
2674 adapter, server, ..
2675 } => Some((adapter.clone(), server.clone())),
2676 })
2677 .collect()
2678 })?;
2679
2680 for (adapter, server) in servers {
2681 let workspace_config = cx
2682 .update(|cx| adapter.workspace_configuration(server.root_path(), cx))?
2683 .await;
2684 server
2685 .notify::<lsp::notification::DidChangeConfiguration>(
2686 lsp::DidChangeConfigurationParams {
2687 settings: workspace_config.clone(),
2688 },
2689 )
2690 .ok();
2691 }
2692 }
2693
2694 drop(settings_observation);
2695 anyhow::Ok(())
2696 })
2697 }
2698
2699 fn detect_language_for_buffer(
2700 &mut self,
2701 buffer_handle: &Model<Buffer>,
2702 cx: &mut ModelContext<Self>,
2703 ) -> Option<()> {
2704 // If the buffer has a language, set it and start the language server if we haven't already.
2705 let buffer = buffer_handle.read(cx);
2706 let full_path = buffer.file()?.full_path(cx);
2707 let content = buffer.as_rope();
2708 let new_language = self
2709 .languages
2710 .language_for_file(&full_path, Some(content))
2711 .now_or_never()?
2712 .ok()?;
2713 self.set_language_for_buffer(buffer_handle, new_language, cx);
2714 None
2715 }
2716
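    /// Assigns a language to a buffer, installs the default prettier plugins for
    /// that language if required by its settings, and starts the corresponding
    /// language servers for the buffer's local worktree.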
2717 pub fn set_language_for_buffer(
2718 &mut self,
2719 buffer: &Model<Buffer>,
2720 new_language: Arc<Language>,
2721 cx: &mut ModelContext<Self>,
2722 ) {
2723 buffer.update(cx, |buffer, cx| {
2724 if buffer.language().map_or(true, |old_language| {
2725 !Arc::ptr_eq(old_language, &new_language)
2726 }) {
2727 buffer.set_language(Some(new_language.clone()), cx);
2728 }
2729 });
2730
2731 let buffer_file = buffer.read(cx).file().cloned();
2732 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2733 let buffer_file = File::from_dyn(buffer_file.as_ref());
2734 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2735 if let Some(prettier_plugins) =
2736 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2737 {
2738 self.install_default_prettier(worktree, prettier_plugins, cx);
2739 };
2740 if let Some(file) = buffer_file {
2741 let worktree = file.worktree.clone();
2742 if let Some(tree) = worktree.read(cx).as_local() {
2743 self.start_language_servers(&worktree, tree.abs_path().clone(), new_language, cx);
2744 }
2745 }
2746 }
2747
2748 fn start_language_servers(
2749 &mut self,
2750 worktree: &Model<Worktree>,
2751 worktree_path: Arc<Path>,
2752 language: Arc<Language>,
2753 cx: &mut ModelContext<Self>,
2754 ) {
2755 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2756 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2757 if !settings.enable_language_server {
2758 return;
2759 }
2760
2761 let worktree_id = worktree.read(cx).id();
2762 for adapter in language.lsp_adapters() {
2763 self.start_language_server(
2764 worktree_id,
2765 worktree_path.clone(),
2766 adapter.clone(),
2767 language.clone(),
2768 cx,
2769 );
2770 }
2771 }
2772
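    /// Starts a single language server for a worktree, unless one is already
    /// registered for the (worktree, adapter) key or the adapter has exceeded its
    /// reinstall limit. Initialization options from project settings are merged over
    /// the adapter's defaults, and a startup failure triggers a reinstall check.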
2773 fn start_language_server(
2774 &mut self,
2775 worktree_id: WorktreeId,
2776 worktree_path: Arc<Path>,
2777 adapter: Arc<CachedLspAdapter>,
2778 language: Arc<Language>,
2779 cx: &mut ModelContext<Self>,
2780 ) {
2781 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2782 return;
2783 }
2784
2785 let key = (worktree_id, adapter.name.clone());
2786 if self.language_server_ids.contains_key(&key) {
2787 return;
2788 }
2789
2790 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2791 let pending_server = match self.languages.create_pending_language_server(
2792 stderr_capture.clone(),
2793 language.clone(),
2794 adapter.clone(),
2795 Arc::clone(&worktree_path),
2796 ProjectLspAdapterDelegate::new(self, cx),
2797 cx,
2798 ) {
2799 Some(pending_server) => pending_server,
2800 None => return,
2801 };
2802
2803 let project_settings = ProjectSettings::get_global(cx);
2804 let lsp = project_settings.lsp.get(&adapter.name.0);
2805         let override_options = lsp.and_then(|s| s.initialization_options.clone());
2806
2807 let mut initialization_options = adapter.initialization_options.clone();
2808 match (&mut initialization_options, override_options) {
2809 (Some(initialization_options), Some(override_options)) => {
2810 merge_json_value_into(override_options, initialization_options);
2811 }
2812 (None, override_options) => initialization_options = override_options,
2813 _ => {}
2814 }
2815
2816 let server_id = pending_server.server_id;
2817 let container_dir = pending_server.container_dir.clone();
2818 let state = LanguageServerState::Starting({
2819 let adapter = adapter.clone();
2820 let server_name = adapter.name.0.clone();
2821 let language = language.clone();
2822 let key = key.clone();
2823
2824 cx.spawn(move |this, mut cx| async move {
2825 let result = Self::setup_and_insert_language_server(
2826 this.clone(),
2827 &worktree_path,
2828 initialization_options,
2829 pending_server,
2830 adapter.clone(),
2831 language.clone(),
2832 server_id,
2833 key,
2834 &mut cx,
2835 )
2836 .await;
2837
2838 match result {
2839 Ok(server) => {
2840 stderr_capture.lock().take();
2841 server
2842 }
2843
2844 Err(err) => {
2845 log::error!("failed to start language server {server_name:?}: {err}");
2846 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2847
2848 let this = this.upgrade()?;
2849 let container_dir = container_dir?;
2850
2851 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2852 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2853 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2854 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2855 return None;
2856 }
2857
2858 let installation_test_binary = adapter
2859 .installation_test_binary(container_dir.to_path_buf())
2860 .await;
2861
2862 this.update(&mut cx, |_, cx| {
2863 Self::check_errored_server(
2864 language,
2865 adapter,
2866 server_id,
2867 installation_test_binary,
2868 cx,
2869 )
2870 })
2871 .ok();
2872
2873 None
2874 }
2875 }
2876 })
2877 });
2878
2879 self.language_servers.insert(server_id, state);
2880 self.language_server_ids.insert(key, server_id);
2881 }
2882
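    /// Shuts down an errored language server, deletes its container directory, and
    /// restarts it for every worktree in the project.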
2883 fn reinstall_language_server(
2884 &mut self,
2885 language: Arc<Language>,
2886 adapter: Arc<CachedLspAdapter>,
2887 server_id: LanguageServerId,
2888 cx: &mut ModelContext<Self>,
2889 ) -> Option<Task<()>> {
2890 log::info!("beginning to reinstall server");
2891
2892 let existing_server = match self.language_servers.remove(&server_id) {
2893 Some(LanguageServerState::Running { server, .. }) => Some(server),
2894 _ => None,
2895 };
2896
2897 for worktree in &self.worktrees {
2898 if let Some(worktree) = worktree.upgrade() {
2899 let key = (worktree.read(cx).id(), adapter.name.clone());
2900 self.language_server_ids.remove(&key);
2901 }
2902 }
2903
2904 Some(cx.spawn(move |this, mut cx| async move {
2905 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2906 log::info!("shutting down existing server");
2907 task.await;
2908 }
2909
2910             // TODO: This is race-safe with regard to preventing new instances from
2911             // starting while deleting, but existing instances in other projects will
2912             // be left in a broken state.
2913 let Some(task) = this
2914 .update(&mut cx, |this, cx| {
2915 this.languages.delete_server_container(adapter.clone(), cx)
2916 })
2917 .log_err()
2918 else {
2919 return;
2920 };
2921 task.await;
2922
2923 this.update(&mut cx, |this, mut cx| {
2924 let worktrees = this.worktrees.clone();
2925 for worktree in worktrees {
2926 let worktree = match worktree.upgrade() {
2927 Some(worktree) => worktree.read(cx),
2928 None => continue,
2929 };
2930 let worktree_id = worktree.id();
2931 let root_path = worktree.abs_path();
2932
2933 this.start_language_server(
2934 worktree_id,
2935 root_path,
2936 adapter.clone(),
2937 language.clone(),
2938 &mut cx,
2939 );
2940 }
2941 })
2942 .ok();
2943 }))
2944 }
2945
2946 async fn setup_and_insert_language_server(
2947 this: WeakModel<Self>,
2948 worktree_path: &Path,
2949 initialization_options: Option<serde_json::Value>,
2950 pending_server: PendingLanguageServer,
2951 adapter: Arc<CachedLspAdapter>,
2952 language: Arc<Language>,
2953 server_id: LanguageServerId,
2954 key: (WorktreeId, LanguageServerName),
2955 cx: &mut AsyncAppContext,
2956 ) -> Result<Option<Arc<LanguageServer>>> {
2957 let language_server = Self::setup_pending_language_server(
2958 this.clone(),
2959 initialization_options,
2960 pending_server,
2961 worktree_path,
2962 adapter.clone(),
2963 server_id,
2964 cx,
2965 )
2966 .await?;
2967
2968 let this = match this.upgrade() {
2969 Some(this) => this,
2970 None => return Err(anyhow!("failed to upgrade project handle")),
2971 };
2972
2973 this.update(cx, |this, cx| {
2974 this.insert_newly_running_language_server(
2975 language,
2976 adapter,
2977 language_server.clone(),
2978 server_id,
2979 key,
2980 cx,
2981 )
2982 })??;
2983
2984 Ok(Some(language_server))
2985 }
2986
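    /// Finishes starting a language server: installs handlers for diagnostics,
    /// workspace configuration, progress, capability registration, workspace edits,
    /// and inlay hint refreshes, then initializes the server and pushes the initial
    /// workspace configuration.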
2987 async fn setup_pending_language_server(
2988 this: WeakModel<Self>,
2989 initialization_options: Option<serde_json::Value>,
2990 pending_server: PendingLanguageServer,
2991 worktree_path: &Path,
2992 adapter: Arc<CachedLspAdapter>,
2993 server_id: LanguageServerId,
2994 cx: &mut AsyncAppContext,
2995 ) -> Result<Arc<LanguageServer>> {
2996 let workspace_config = cx
2997 .update(|cx| adapter.workspace_configuration(worktree_path, cx))?
2998 .await;
2999 let language_server = pending_server.task.await?;
3000
3001 language_server
3002 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3003 let adapter = adapter.clone();
3004 let this = this.clone();
3005 move |mut params, mut cx| {
3006 let adapter = adapter.clone();
3007 if let Some(this) = this.upgrade() {
3008 adapter.process_diagnostics(&mut params);
3009 this.update(&mut cx, |this, cx| {
3010 this.update_diagnostics(
3011 server_id,
3012 params,
3013 &adapter.disk_based_diagnostic_sources,
3014 cx,
3015 )
3016 .log_err();
3017 })
3018 .ok();
3019 }
3020 }
3021 })
3022 .detach();
3023
3024 language_server
3025 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3026 let adapter = adapter.clone();
3027 let worktree_path = worktree_path.to_path_buf();
3028 move |params, cx| {
3029 let adapter = adapter.clone();
3030 let worktree_path = worktree_path.clone();
3031 async move {
3032 let workspace_config = cx
3033 .update(|cx| adapter.workspace_configuration(&worktree_path, cx))?
3034 .await;
3035 Ok(params
3036 .items
3037 .into_iter()
3038 .map(|item| {
3039 if let Some(section) = &item.section {
3040 workspace_config
3041 .get(section)
3042 .cloned()
3043 .unwrap_or(serde_json::Value::Null)
3044 } else {
3045 workspace_config.clone()
3046 }
3047 })
3048 .collect())
3049 }
3050 }
3051 })
3052 .detach();
3053
3054 // Even though we don't have handling for these requests, respond to them to
3055 // avoid stalling any language server like `gopls` which waits for a response
3056 // to these requests when initializing.
3057 language_server
3058 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3059 let this = this.clone();
3060 move |params, mut cx| {
3061 let this = this.clone();
3062 async move {
3063 this.update(&mut cx, |this, _| {
3064 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3065 {
3066 if let lsp::NumberOrString::String(token) = params.token {
3067 status.progress_tokens.insert(token);
3068 }
3069 }
3070 })?;
3071
3072 Ok(())
3073 }
3074 }
3075 })
3076 .detach();
3077
3078 language_server
3079 .on_request::<lsp::request::RegisterCapability, _, _>({
3080 let this = this.clone();
3081 move |params, mut cx| {
3082 let this = this.clone();
3083 async move {
3084 for reg in params.registrations {
3085 if reg.method == "workspace/didChangeWatchedFiles" {
3086 if let Some(options) = reg.register_options {
3087 let options = serde_json::from_value(options)?;
3088 this.update(&mut cx, |this, cx| {
3089 this.on_lsp_did_change_watched_files(
3090 server_id, options, cx,
3091 );
3092 })?;
3093 }
3094 }
3095 }
3096 Ok(())
3097 }
3098 }
3099 })
3100 .detach();
3101
3102 language_server
3103 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3104 let adapter = adapter.clone();
3105 let this = this.clone();
3106 move |params, cx| {
3107 Self::on_lsp_workspace_edit(
3108 this.clone(),
3109 params,
3110 server_id,
3111 adapter.clone(),
3112 cx,
3113 )
3114 }
3115 })
3116 .detach();
3117
3118 language_server
3119 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3120 let this = this.clone();
3121 move |(), mut cx| {
3122 let this = this.clone();
3123 async move {
3124 this.update(&mut cx, |project, cx| {
3125 cx.emit(Event::RefreshInlayHints);
3126 project.remote_id().map(|project_id| {
3127 project.client.send(proto::RefreshInlayHints { project_id })
3128 })
3129 })?
3130 .transpose()?;
3131 Ok(())
3132 }
3133 }
3134 })
3135 .detach();
3136
3137 let disk_based_diagnostics_progress_token =
3138 adapter.disk_based_diagnostics_progress_token.clone();
3139
3140 language_server
3141 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3142 if let Some(this) = this.upgrade() {
3143 this.update(&mut cx, |this, cx| {
3144 this.on_lsp_progress(
3145 params,
3146 server_id,
3147 disk_based_diagnostics_progress_token.clone(),
3148 cx,
3149 );
3150 })
3151 .ok();
3152 }
3153 })
3154 .detach();
3155
3156 let language_server = language_server.initialize(initialization_options).await?;
3157
3158 language_server
3159 .notify::<lsp::notification::DidChangeConfiguration>(
3160 lsp::DidChangeConfigurationParams {
3161 settings: workspace_config,
3162 },
3163 )
3164 .ok();
3165
3166 Ok(language_server)
3167 }
3168
3169 fn insert_newly_running_language_server(
3170 &mut self,
3171 language: Arc<Language>,
3172 adapter: Arc<CachedLspAdapter>,
3173 language_server: Arc<LanguageServer>,
3174 server_id: LanguageServerId,
3175 key: (WorktreeId, LanguageServerName),
3176 cx: &mut ModelContext<Self>,
3177 ) -> Result<()> {
3178         // If the language server for this key doesn't match the server id, don't store the
3179         // server. This causes it to be dropped, killing the process.
3180 if self
3181 .language_server_ids
3182 .get(&key)
3183 .map(|id| id != &server_id)
3184 .unwrap_or(false)
3185 {
3186 return Ok(());
3187 }
3188
3189         // Update the language_servers collection with the Running variant of LanguageServerState,
3190         // indicating that the server is up and running.
3191 self.language_servers.insert(
3192 server_id,
3193 LanguageServerState::Running {
3194 adapter: adapter.clone(),
3195 language: language.clone(),
3196 watched_paths: Default::default(),
3197 server: language_server.clone(),
3198 simulate_disk_based_diagnostics_completion: None,
3199 },
3200 );
3201
3202 self.language_server_statuses.insert(
3203 server_id,
3204 LanguageServerStatus {
3205 name: language_server.name().to_string(),
3206 pending_work: Default::default(),
3207 has_pending_diagnostic_updates: false,
3208 progress_tokens: Default::default(),
3209 },
3210 );
3211
3212 cx.emit(Event::LanguageServerAdded(server_id));
3213
3214 if let Some(project_id) = self.remote_id() {
3215 self.client.send(proto::StartLanguageServer {
3216 project_id,
3217 server: Some(proto::LanguageServer {
3218 id: server_id.0 as u64,
3219 name: language_server.name().to_string(),
3220 }),
3221 })?;
3222 }
3223
3224 // Tell the language server about every open buffer in the worktree that matches the language.
3225 for buffer in self.opened_buffers.values() {
3226 if let Some(buffer_handle) = buffer.upgrade() {
3227 let buffer = buffer_handle.read(cx);
3228 let file = match File::from_dyn(buffer.file()) {
3229 Some(file) => file,
3230 None => continue,
3231 };
3232 let language = match buffer.language() {
3233 Some(language) => language,
3234 None => continue,
3235 };
3236
3237 if file.worktree.read(cx).id() != key.0
3238 || !language.lsp_adapters().iter().any(|a| a.name == key.1)
3239 {
3240 continue;
3241 }
3242
3243 let file = match file.as_local() {
3244 Some(file) => file,
3245 None => continue,
3246 };
3247
3248 let versions = self
3249 .buffer_snapshots
3250 .entry(buffer.remote_id())
3251 .or_default()
3252 .entry(server_id)
3253 .or_insert_with(|| {
3254 vec![LspBufferSnapshot {
3255 version: 0,
3256 snapshot: buffer.text_snapshot(),
3257 }]
3258 });
3259
3260 let snapshot = versions.last().unwrap();
3261 let version = snapshot.version;
3262 let initial_snapshot = &snapshot.snapshot;
3263 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3264 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3265 lsp::DidOpenTextDocumentParams {
3266 text_document: lsp::TextDocumentItem::new(
3267 uri,
3268 adapter
3269 .language_ids
3270 .get(language.name().as_ref())
3271 .cloned()
3272 .unwrap_or_default(),
3273 version,
3274 initial_snapshot.text(),
3275 ),
3276 },
3277 )?;
3278
3279 buffer_handle.update(cx, |buffer, cx| {
3280 buffer.set_completion_triggers(
3281 language_server
3282 .capabilities()
3283 .completion_provider
3284 .as_ref()
3285 .and_then(|provider| provider.trigger_characters.clone())
3286 .unwrap_or_default(),
3287 cx,
3288 )
3289 });
3290 }
3291 }
3292
3293 cx.notify();
3294 Ok(())
3295 }
3296
3297     // Returns the root path of the stopped server, along with all of the worktrees
3298     // that no longer have a language server as a result.
3299 fn stop_language_server(
3300 &mut self,
3301 worktree_id: WorktreeId,
3302 adapter_name: LanguageServerName,
3303 cx: &mut ModelContext<Self>,
3304 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
3305 let key = (worktree_id, adapter_name);
3306 if let Some(server_id) = self.language_server_ids.remove(&key) {
3307 log::info!("stopping language server {}", key.1 .0);
3308
3309 // Remove other entries for this language server as well
3310 let mut orphaned_worktrees = vec![worktree_id];
3311 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3312 for other_key in other_keys {
3313 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3314 self.language_server_ids.remove(&other_key);
3315 orphaned_worktrees.push(other_key.0);
3316 }
3317 }
3318
3319 for buffer in self.opened_buffers.values() {
3320 if let Some(buffer) = buffer.upgrade() {
3321 buffer.update(cx, |buffer, cx| {
3322 buffer.update_diagnostics(server_id, Default::default(), cx);
3323 });
3324 }
3325 }
3326 for worktree in &self.worktrees {
3327 if let Some(worktree) = worktree.upgrade() {
3328 worktree.update(cx, |worktree, cx| {
3329 if let Some(worktree) = worktree.as_local_mut() {
3330 worktree.clear_diagnostics_for_language_server(server_id, cx);
3331 }
3332 });
3333 }
3334 }
3335
3336 self.language_server_statuses.remove(&server_id);
3337 cx.notify();
3338
3339 let server_state = self.language_servers.remove(&server_id);
3340 cx.emit(Event::LanguageServerRemoved(server_id));
3341 cx.spawn(move |this, mut cx| async move {
3342 let mut root_path = None;
3343
3344 let server = match server_state {
3345 Some(LanguageServerState::Starting(task)) => task.await,
3346 Some(LanguageServerState::Running { server, .. }) => Some(server),
3347 None => None,
3348 };
3349
3350 if let Some(server) = server {
3351 root_path = Some(server.root_path().clone());
3352 if let Some(shutdown) = server.shutdown() {
3353 shutdown.await;
3354 }
3355 }
3356
3357 if let Some(this) = this.upgrade() {
3358 this.update(&mut cx, |this, cx| {
3359 this.language_server_statuses.remove(&server_id);
3360 cx.notify();
3361 })
3362 .ok();
3363 }
3364
3365 (root_path, orphaned_worktrees)
3366 })
3367 } else {
3368 Task::ready((None, Vec::new()))
3369 }
3370 }
3371
3372 pub fn restart_language_servers_for_buffers(
3373 &mut self,
3374 buffers: impl IntoIterator<Item = Model<Buffer>>,
3375 cx: &mut ModelContext<Self>,
3376 ) -> Option<()> {
3377 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3378 .into_iter()
3379 .filter_map(|buffer| {
3380 let buffer = buffer.read(cx);
3381 let file = File::from_dyn(buffer.file())?;
3382 let full_path = file.full_path(cx);
3383 let language = self
3384 .languages
3385 .language_for_file(&full_path, Some(buffer.as_rope()))
3386 .now_or_never()?
3387 .ok()?;
3388 Some((file.worktree.clone(), language))
3389 })
3390 .collect();
3391 for (worktree, language) in language_server_lookup_info {
3392 self.restart_language_servers(worktree, language, cx);
3393 }
3394
3395 None
3396 }
3397
3398 // TODO This will break in the case where the adapter's root paths and worktrees are not equal
3399 fn restart_language_servers(
3400 &mut self,
3401 worktree: Model<Worktree>,
3402 language: Arc<Language>,
3403 cx: &mut ModelContext<Self>,
3404 ) {
3405 let worktree_id = worktree.read(cx).id();
3406 let fallback_path = worktree.read(cx).abs_path();
3407
3408 let mut stops = Vec::new();
3409 for adapter in language.lsp_adapters() {
3410 stops.push(self.stop_language_server(worktree_id, adapter.name.clone(), cx));
3411 }
3412
3413 if stops.is_empty() {
3414 return;
3415 }
3416 let mut stops = stops.into_iter();
3417
3418 cx.spawn(move |this, mut cx| async move {
3419 let (original_root_path, mut orphaned_worktrees) = stops.next().unwrap().await;
3420 for stop in stops {
3421 let (_, worktrees) = stop.await;
3422 orphaned_worktrees.extend_from_slice(&worktrees);
3423 }
3424
3425 let this = match this.upgrade() {
3426 Some(this) => this,
3427 None => return,
3428 };
3429
3430 this.update(&mut cx, |this, cx| {
3431                 // Attempt to restart using the original server path. Fall back to the
3432                 // passed-in path if we could not retrieve the root path.
3433 let root_path = original_root_path
3434 .map(|path_buf| Arc::from(path_buf.as_path()))
3435 .unwrap_or(fallback_path);
3436
3437 this.start_language_servers(&worktree, root_path, language.clone(), cx);
3438
3439 // Lookup new server ids and set them for each of the orphaned worktrees
3440 for adapter in language.lsp_adapters() {
3441 if let Some(new_server_id) = this
3442 .language_server_ids
3443 .get(&(worktree_id, adapter.name.clone()))
3444 .cloned()
3445 {
3446 for &orphaned_worktree in &orphaned_worktrees {
3447 this.language_server_ids
3448 .insert((orphaned_worktree, adapter.name.clone()), new_server_id);
3449 }
3450 }
3451 }
3452 })
3453 .ok();
3454 })
3455 .detach();
3456 }
3457
3458 fn check_errored_server(
3459 language: Arc<Language>,
3460 adapter: Arc<CachedLspAdapter>,
3461 server_id: LanguageServerId,
3462 installation_test_binary: Option<LanguageServerBinary>,
3463 cx: &mut ModelContext<Self>,
3464 ) {
3465 if !adapter.can_be_reinstalled() {
3466 log::info!(
3467 "Validation check requested for {:?} but it cannot be reinstalled",
3468 adapter.name.0
3469 );
3470 return;
3471 }
3472
3473 cx.spawn(move |this, mut cx| async move {
3474 log::info!("About to spawn test binary");
3475
3476             // A lack of a test binary counts as a failure.
3477 let process = installation_test_binary.and_then(|binary| {
3478 smol::process::Command::new(&binary.path)
3479 .current_dir(&binary.path)
3480 .args(binary.arguments)
3481 .stdin(Stdio::piped())
3482 .stdout(Stdio::piped())
3483 .stderr(Stdio::inherit())
3484 .kill_on_drop(true)
3485 .spawn()
3486 .ok()
3487 });
3488
3489 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3490 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3491
3492 let mut errored = false;
3493 if let Some(mut process) = process {
3494 futures::select! {
3495 status = process.status().fuse() => match status {
3496 Ok(status) => errored = !status.success(),
3497 Err(_) => errored = true,
3498 },
3499
3500 _ = timeout => {
3501                     log::info!("test binary timed out, this counts as a success");
3502 _ = process.kill();
3503 }
3504 }
3505 } else {
3506 log::warn!("test binary failed to launch");
3507 errored = true;
3508 }
3509
3510 if errored {
3511 log::warn!("test binary check failed");
3512 let task = this
3513 .update(&mut cx, move |this, mut cx| {
3514 this.reinstall_language_server(language, adapter, server_id, &mut cx)
3515 })
3516 .ok()
3517 .flatten();
3518
3519 if let Some(task) = task {
3520 task.await;
3521 }
3522 }
3523 })
3524 .detach();
3525 }
3526
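    /// Translates `$/progress` notifications into project-level work status. Numeric
    /// tokens and tokens the server never registered are ignored; tokens matching the
    /// adapter's disk-based diagnostics token toggle the pending diagnostics state,
    /// while all other tokens update per-server pending work and are forwarded to
    /// remote collaborators.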
3527 fn on_lsp_progress(
3528 &mut self,
3529 progress: lsp::ProgressParams,
3530 language_server_id: LanguageServerId,
3531 disk_based_diagnostics_progress_token: Option<String>,
3532 cx: &mut ModelContext<Self>,
3533 ) {
3534 let token = match progress.token {
3535 lsp::NumberOrString::String(token) => token,
3536 lsp::NumberOrString::Number(token) => {
3537 log::info!("skipping numeric progress token {}", token);
3538 return;
3539 }
3540 };
3541 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3542 let language_server_status =
3543 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3544 status
3545 } else {
3546 return;
3547 };
3548
3549 if !language_server_status.progress_tokens.contains(&token) {
3550 return;
3551 }
3552
3553 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3554 .as_ref()
3555 .map_or(false, |disk_based_token| {
3556 token.starts_with(disk_based_token)
3557 });
3558
3559 match progress {
3560 lsp::WorkDoneProgress::Begin(report) => {
3561 if is_disk_based_diagnostics_progress {
3562 language_server_status.has_pending_diagnostic_updates = true;
3563 self.disk_based_diagnostics_started(language_server_id, cx);
3564 self.buffer_ordered_messages_tx
3565 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3566 language_server_id,
3567 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3568 })
3569 .ok();
3570 } else {
3571 self.on_lsp_work_start(
3572 language_server_id,
3573 token.clone(),
3574 LanguageServerProgress {
3575 message: report.message.clone(),
3576 percentage: report.percentage.map(|p| p as usize),
3577 last_update_at: Instant::now(),
3578 },
3579 cx,
3580 );
3581 self.buffer_ordered_messages_tx
3582 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3583 language_server_id,
3584 message: proto::update_language_server::Variant::WorkStart(
3585 proto::LspWorkStart {
3586 token,
3587 message: report.message,
3588 percentage: report.percentage.map(|p| p as u32),
3589 },
3590 ),
3591 })
3592 .ok();
3593 }
3594 }
3595 lsp::WorkDoneProgress::Report(report) => {
3596 if !is_disk_based_diagnostics_progress {
3597 self.on_lsp_work_progress(
3598 language_server_id,
3599 token.clone(),
3600 LanguageServerProgress {
3601 message: report.message.clone(),
3602 percentage: report.percentage.map(|p| p as usize),
3603 last_update_at: Instant::now(),
3604 },
3605 cx,
3606 );
3607 self.buffer_ordered_messages_tx
3608 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3609 language_server_id,
3610 message: proto::update_language_server::Variant::WorkProgress(
3611 proto::LspWorkProgress {
3612 token,
3613 message: report.message,
3614 percentage: report.percentage.map(|p| p as u32),
3615 },
3616 ),
3617 })
3618 .ok();
3619 }
3620 }
3621 lsp::WorkDoneProgress::End(_) => {
3622 language_server_status.progress_tokens.remove(&token);
3623
3624 if is_disk_based_diagnostics_progress {
3625 language_server_status.has_pending_diagnostic_updates = false;
3626 self.disk_based_diagnostics_finished(language_server_id, cx);
3627 self.buffer_ordered_messages_tx
3628 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3629 language_server_id,
3630 message:
3631 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3632 Default::default(),
3633 ),
3634 })
3635 .ok();
3636 } else {
3637 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3638 self.buffer_ordered_messages_tx
3639 .unbounded_send(BufferOrderedMessage::LanguageServerUpdate {
3640 language_server_id,
3641 message: proto::update_language_server::Variant::WorkEnd(
3642 proto::LspWorkEnd { token },
3643 ),
3644 })
3645 .ok();
3646 }
3647 }
3648 }
3649 }
3650
3651 fn on_lsp_work_start(
3652 &mut self,
3653 language_server_id: LanguageServerId,
3654 token: String,
3655 progress: LanguageServerProgress,
3656 cx: &mut ModelContext<Self>,
3657 ) {
3658 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3659 status.pending_work.insert(token, progress);
3660 cx.notify();
3661 }
3662 }
3663
3664 fn on_lsp_work_progress(
3665 &mut self,
3666 language_server_id: LanguageServerId,
3667 token: String,
3668 progress: LanguageServerProgress,
3669 cx: &mut ModelContext<Self>,
3670 ) {
3671 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3672 let entry = status
3673 .pending_work
3674 .entry(token)
3675 .or_insert(LanguageServerProgress {
3676 message: Default::default(),
3677 percentage: Default::default(),
3678 last_update_at: progress.last_update_at,
3679 });
3680 if progress.message.is_some() {
3681 entry.message = progress.message;
3682 }
3683 if progress.percentage.is_some() {
3684 entry.percentage = progress.percentage;
3685 }
3686 entry.last_update_at = progress.last_update_at;
3687 cx.notify();
3688 }
3689 }
3690
3691 fn on_lsp_work_end(
3692 &mut self,
3693 language_server_id: LanguageServerId,
3694 token: String,
3695 cx: &mut ModelContext<Self>,
3696 ) {
3697 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3698 cx.emit(Event::RefreshInlayHints);
3699 status.pending_work.remove(&token);
3700 cx.notify();
3701 }
3702 }
3703
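    /// Handles a `workspace/didChangeWatchedFiles` registration by converting the
    /// requested glob patterns into per-worktree glob sets and expanding the worktree
    /// scan to cover each pattern's literal prefix.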
3704 fn on_lsp_did_change_watched_files(
3705 &mut self,
3706 language_server_id: LanguageServerId,
3707 params: DidChangeWatchedFilesRegistrationOptions,
3708 cx: &mut ModelContext<Self>,
3709 ) {
3710 if let Some(LanguageServerState::Running { watched_paths, .. }) =
3711 self.language_servers.get_mut(&language_server_id)
3712 {
3713 let mut builders = HashMap::default();
3714 for watcher in params.watchers {
3715 for worktree in &self.worktrees {
3716 if let Some(worktree) = worktree.upgrade() {
3717 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3718 if let Some(abs_path) = tree.abs_path().to_str() {
3719 let relative_glob_pattern = match &watcher.glob_pattern {
3720 lsp::GlobPattern::String(s) => s
3721 .strip_prefix(abs_path)
3722 .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR)),
3723 lsp::GlobPattern::Relative(rp) => {
3724 let base_uri = match &rp.base_uri {
3725 lsp::OneOf::Left(workspace_folder) => {
3726 &workspace_folder.uri
3727 }
3728 lsp::OneOf::Right(base_uri) => base_uri,
3729 };
3730 base_uri.to_file_path().ok().and_then(|file_path| {
3731 (file_path.to_str() == Some(abs_path))
3732 .then_some(rp.pattern.as_str())
3733 })
3734 }
3735 };
3736 if let Some(relative_glob_pattern) = relative_glob_pattern {
3737 let literal_prefix =
3738 glob_literal_prefix(&relative_glob_pattern);
3739 tree.as_local_mut()
3740 .unwrap()
3741 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3742 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3743 builders
3744 .entry(tree.id())
3745 .or_insert_with(|| GlobSetBuilder::new())
3746 .add(glob);
3747 }
3748 return true;
3749 }
3750 }
3751 false
3752 });
3753 if glob_is_inside_worktree {
3754 break;
3755 }
3756 }
3757 }
3758 }
3759
3760 watched_paths.clear();
3761 for (worktree_id, builder) in builders {
3762 if let Ok(globset) = builder.build() {
3763 watched_paths.insert(worktree_id, globset);
3764 }
3765 }
3766
3767 cx.notify();
3768 }
3769 }
3770
3771 async fn on_lsp_workspace_edit(
3772 this: WeakModel<Self>,
3773 params: lsp::ApplyWorkspaceEditParams,
3774 server_id: LanguageServerId,
3775 adapter: Arc<CachedLspAdapter>,
3776 mut cx: AsyncAppContext,
3777 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3778 let this = this
3779 .upgrade()
3780             .ok_or_else(|| anyhow!("project closed"))?;
3781 let language_server = this
3782 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3783 .ok_or_else(|| anyhow!("language server not found"))?;
3784 let transaction = Self::deserialize_workspace_edit(
3785 this.clone(),
3786 params.edit,
3787 true,
3788 adapter.clone(),
3789 language_server.clone(),
3790 &mut cx,
3791 )
3792 .await
3793 .log_err();
3794 this.update(&mut cx, |this, _| {
3795 if let Some(transaction) = transaction {
3796 this.last_workspace_edits_by_language_server
3797 .insert(server_id, transaction);
3798 }
3799 })?;
3800 Ok(lsp::ApplyWorkspaceEditResponse {
3801 applied: true,
3802 failed_change: None,
3803 failure_reason: None,
3804 })
3805 }
3806
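/// Returns an iterator over the statuses of all language servers known to this project.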
3807 pub fn language_server_statuses(
3808 &self,
3809 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
3810 self.language_server_statuses.values()
3811 }
3812
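/// Processes a `textDocument/publishDiagnostics` notification: groups each primary
/// diagnostic with its related information, marks entries coming from disk-based sources,
/// and forwards the resulting entries to the affected worktree and buffer.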
3813 pub fn update_diagnostics(
3814 &mut self,
3815 language_server_id: LanguageServerId,
3816 mut params: lsp::PublishDiagnosticsParams,
3817 disk_based_sources: &[String],
3818 cx: &mut ModelContext<Self>,
3819 ) -> Result<()> {
3820 let abs_path = params
3821 .uri
3822 .to_file_path()
3823 .map_err(|_| anyhow!("URI is not a file"))?;
3824 let mut diagnostics = Vec::default();
3825 let mut primary_diagnostic_group_ids = HashMap::default();
3826 let mut sources_by_group_id = HashMap::default();
3827 let mut supporting_diagnostics = HashMap::default();
3828
3829 // Ensure that primary diagnostics are always the most severe
3830 params.diagnostics.sort_by_key(|item| item.severity);
3831
3832 for diagnostic in &params.diagnostics {
3833 let source = diagnostic.source.as_ref();
3834 let code = diagnostic.code.as_ref().map(|code| match code {
3835 lsp::NumberOrString::Number(code) => code.to_string(),
3836 lsp::NumberOrString::String(code) => code.clone(),
3837 });
3838 let range = range_from_lsp(diagnostic.range);
3839 let is_supporting = diagnostic
3840 .related_information
3841 .as_ref()
3842 .map_or(false, |infos| {
3843 infos.iter().any(|info| {
3844 primary_diagnostic_group_ids.contains_key(&(
3845 source,
3846 code.clone(),
3847 range_from_lsp(info.location.range),
3848 ))
3849 })
3850 });
3851
3852 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
3853 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
3854 });
3855
3856 if is_supporting {
3857 supporting_diagnostics.insert(
3858 (source, code.clone(), range),
3859 (diagnostic.severity, is_unnecessary),
3860 );
3861 } else {
3862 let group_id = post_inc(&mut self.next_diagnostic_group_id);
3863 let is_disk_based =
3864 source.map_or(false, |source| disk_based_sources.contains(source));
3865
3866 sources_by_group_id.insert(group_id, source);
3867 primary_diagnostic_group_ids
3868 .insert((source, code.clone(), range.clone()), group_id);
3869
3870 diagnostics.push(DiagnosticEntry {
3871 range,
3872 diagnostic: Diagnostic {
3873 source: diagnostic.source.clone(),
3874 code: code.clone(),
3875 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
3876 message: diagnostic.message.clone(),
3877 group_id,
3878 is_primary: true,
3879 is_valid: true,
3880 is_disk_based,
3881 is_unnecessary,
3882 },
3883 });
3884 if let Some(infos) = &diagnostic.related_information {
3885 for info in infos {
3886 if info.location.uri == params.uri && !info.message.is_empty() {
3887 let range = range_from_lsp(info.location.range);
3888 diagnostics.push(DiagnosticEntry {
3889 range,
3890 diagnostic: Diagnostic {
3891 source: diagnostic.source.clone(),
3892 code: code.clone(),
3893 severity: DiagnosticSeverity::INFORMATION,
3894 message: info.message.clone(),
3895 group_id,
3896 is_primary: false,
3897 is_valid: true,
3898 is_disk_based,
3899 is_unnecessary: false,
3900 },
3901 });
3902 }
3903 }
3904 }
3905 }
3906 }
3907
3908 for entry in &mut diagnostics {
3909 let diagnostic = &mut entry.diagnostic;
3910 if !diagnostic.is_primary {
3911 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
3912 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
3913 source,
3914 diagnostic.code.clone(),
3915 entry.range.clone(),
3916 )) {
3917 if let Some(severity) = severity {
3918 diagnostic.severity = severity;
3919 }
3920 diagnostic.is_unnecessary = is_unnecessary;
3921 }
3922 }
3923 }
3924
3925 self.update_diagnostic_entries(
3926 language_server_id,
3927 abs_path,
3928 params.version,
3929 diagnostics,
3930 cx,
3931 )?;
3932 Ok(())
3933 }
3934
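/// Stores the given diagnostics on the local worktree containing `abs_path` and updates any
/// open buffer for that path, emitting `Event::DiagnosticsUpdated` when the worktree reports
/// the diagnostics as changed.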
3935 pub fn update_diagnostic_entries(
3936 &mut self,
3937 server_id: LanguageServerId,
3938 abs_path: PathBuf,
3939 version: Option<i32>,
3940 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3941 cx: &mut ModelContext<Project>,
3942 ) -> Result<(), anyhow::Error> {
3943 let (worktree, relative_path) = self
3944 .find_local_worktree(&abs_path, cx)
3945 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
3946
3947 let project_path = ProjectPath {
3948 worktree_id: worktree.read(cx).id(),
3949 path: relative_path.into(),
3950 };
3951
3952 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
3953 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
3954 }
3955
3956 let updated = worktree.update(cx, |worktree, cx| {
3957 worktree
3958 .as_local_mut()
3959 .ok_or_else(|| anyhow!("not a local worktree"))?
3960 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
3961 })?;
3962 if updated {
3963 cx.emit(Event::DiagnosticsUpdated {
3964 language_server_id: server_id,
3965 path: project_path,
3966 });
3967 }
3968 Ok(())
3969 }
3970
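/// Applies diagnostics to a buffer using the snapshot that corresponds to the reported LSP
/// version: disk-based diagnostics are translated through unsaved edits, ranges are clipped
/// (empty ranges are expanded by one character), and the result becomes the buffer's
/// diagnostic set for `server_id`.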
3971 fn update_buffer_diagnostics(
3972 &mut self,
3973 buffer: &Model<Buffer>,
3974 server_id: LanguageServerId,
3975 version: Option<i32>,
3976 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
3977 cx: &mut ModelContext<Self>,
3978 ) -> Result<()> {
3979 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
3980 Ordering::Equal
3981 .then_with(|| b.is_primary.cmp(&a.is_primary))
3982 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
3983 .then_with(|| a.severity.cmp(&b.severity))
3984 .then_with(|| a.message.cmp(&b.message))
3985 }
3986
3987 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
3988
3989 diagnostics.sort_unstable_by(|a, b| {
3990 Ordering::Equal
3991 .then_with(|| a.range.start.cmp(&b.range.start))
3992 .then_with(|| b.range.end.cmp(&a.range.end))
3993 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
3994 });
3995
3996 let mut sanitized_diagnostics = Vec::new();
3997 let edits_since_save = Patch::new(
3998 snapshot
3999 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4000 .collect(),
4001 );
4002 for entry in diagnostics {
4003 let start;
4004 let end;
4005 if entry.diagnostic.is_disk_based {
4006 // Some diagnostics are based on files on disk instead of buffers'
4007 // current contents. Adjust these diagnostics' ranges to reflect
4008 // any unsaved edits.
4009 start = edits_since_save.old_to_new(entry.range.start);
4010 end = edits_since_save.old_to_new(entry.range.end);
4011 } else {
4012 start = entry.range.start;
4013 end = entry.range.end;
4014 }
4015
4016 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4017 ..snapshot.clip_point_utf16(end, Bias::Right);
4018
4019 // Expand empty ranges by one codepoint
4020 if range.start == range.end {
4021 // This will go to the next boundary when the range is clipped
4022 range.end.column += 1;
4023 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4024 if range.start == range.end && range.end.column > 0 {
4025 range.start.column -= 1;
4026 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Left);
4027 }
4028 }
4029
4030 sanitized_diagnostics.push(DiagnosticEntry {
4031 range,
4032 diagnostic: entry.diagnostic,
4033 });
4034 }
4035 drop(edits_since_save);
4036
4037 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4038 buffer.update(cx, |buffer, cx| {
4039 buffer.update_diagnostics(server_id, set, cx)
4040 });
4041 Ok(())
4042 }
4043
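/// Reloads the given buffers from disk, skipping buffers that aren't dirty. Local buffers
/// are reloaded directly; buffers belonging to a remote project are reloaded via a request
/// to the host. Returns the combined transaction for all reloaded buffers.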
4044 pub fn reload_buffers(
4045 &self,
4046 buffers: HashSet<Model<Buffer>>,
4047 push_to_history: bool,
4048 cx: &mut ModelContext<Self>,
4049 ) -> Task<Result<ProjectTransaction>> {
4050 let mut local_buffers = Vec::new();
4051 let mut remote_buffers = None;
4052 for buffer_handle in buffers {
4053 let buffer = buffer_handle.read(cx);
4054 if buffer.is_dirty() {
4055 if let Some(file) = File::from_dyn(buffer.file()) {
4056 if file.is_local() {
4057 local_buffers.push(buffer_handle);
4058 } else {
4059 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4060 }
4061 }
4062 }
4063 }
4064
4065 let remote_buffers = self.remote_id().zip(remote_buffers);
4066 let client = self.client.clone();
4067
4068 cx.spawn(move |this, mut cx| async move {
4069 let mut project_transaction = ProjectTransaction::default();
4070
4071 if let Some((project_id, remote_buffers)) = remote_buffers {
4072 let response = client
4073 .request(proto::ReloadBuffers {
4074 project_id,
4075 buffer_ids: remote_buffers
4076 .iter()
4077 .filter_map(|buffer| {
4078 buffer.update(&mut cx, |buffer, _| buffer.remote_id()).ok()
4079 })
4080 .collect(),
4081 })
4082 .await?
4083 .transaction
4084 .ok_or_else(|| anyhow!("missing transaction"))?;
4085 project_transaction = this
4086 .update(&mut cx, |this, cx| {
4087 this.deserialize_project_transaction(response, push_to_history, cx)
4088 })?
4089 .await?;
4090 }
4091
4092 for buffer in local_buffers {
4093 let transaction = buffer
4094 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4095 .await?;
4096 buffer.update(&mut cx, |buffer, cx| {
4097 if let Some(transaction) = transaction {
4098 if !push_to_history {
4099 buffer.forget_transaction(transaction.id);
4100 }
4101 project_transaction.0.insert(cx.handle(), transaction);
4102 }
4103 })?;
4104 }
4105
4106 Ok(project_transaction)
4107 })
4108 }
4109
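/// Formats the given buffers according to their language settings. Locally, each buffer
/// first gets its whitespace fixed (trailing whitespace, final newline) and is then formatted
/// via the language server, an external command, or prettier, with both steps grouped into a
/// single undo transaction. For remote projects the request is forwarded to the host.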
4110 pub fn format(
4111 &mut self,
4112 buffers: HashSet<Model<Buffer>>,
4113 push_to_history: bool,
4114 trigger: FormatTrigger,
4115 cx: &mut ModelContext<Project>,
4116 ) -> Task<anyhow::Result<ProjectTransaction>> {
4117 if self.is_local() {
4118 let mut buffers_with_paths_and_servers = buffers
4119 .into_iter()
4120 .filter_map(|buffer_handle| {
4121 let buffer = buffer_handle.read(cx);
4122 let file = File::from_dyn(buffer.file())?;
4123 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4124 let server = self
4125 .primary_language_server_for_buffer(buffer, cx)
4126 .map(|s| s.1.clone());
4127 Some((buffer_handle, buffer_abs_path, server))
4128 })
4129 .collect::<Vec<_>>();
4130
4131 cx.spawn(move |project, mut cx| async move {
4132 // Do not allow multiple concurrent formatting requests for the
4133 // same buffer.
4134 project.update(&mut cx, |this, cx| {
4135 buffers_with_paths_and_servers.retain(|(buffer, _, _)| {
4136 this.buffers_being_formatted
4137 .insert(buffer.read(cx).remote_id())
4138 });
4139 })?;
4140
4141 let _cleanup = defer({
4142 let this = project.clone();
4143 let mut cx = cx.clone();
4144 let buffers = &buffers_with_paths_and_servers;
4145 move || {
4146 this.update(&mut cx, |this, cx| {
4147 for (buffer, _, _) in buffers {
4148 this.buffers_being_formatted
4149 .remove(&buffer.read(cx).remote_id());
4150 }
4151 })
4152 .ok();
4153 }
4154 });
4155
4156 let mut project_transaction = ProjectTransaction::default();
4157 for (buffer, buffer_abs_path, language_server) in &buffers_with_paths_and_servers {
4158 let settings = buffer.update(&mut cx, |buffer, cx| {
4159 language_settings(buffer.language(), buffer.file(), cx).clone()
4160 })?;
4161
4162 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4163 let ensure_final_newline = settings.ensure_final_newline_on_save;
4164 let tab_size = settings.tab_size;
4165
4166 // First, format buffer's whitespace according to the settings.
4167 let trailing_whitespace_diff = if remove_trailing_whitespace {
4168 Some(
4169 buffer
4170 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4171 .await,
4172 )
4173 } else {
4174 None
4175 };
4176 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4177 buffer.finalize_last_transaction();
4178 buffer.start_transaction();
4179 if let Some(diff) = trailing_whitespace_diff {
4180 buffer.apply_diff(diff, cx);
4181 }
4182 if ensure_final_newline {
4183 buffer.ensure_final_newline(cx);
4184 }
4185 buffer.end_transaction(cx)
4186 })?;
4187
4188 // Apply language-specific formatting using either a language server
4189 // or external command.
4190 let mut format_operation = None;
4191 match (&settings.formatter, &settings.format_on_save) {
4192 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4193
4194 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4195 | (_, FormatOnSave::LanguageServer) => {
4196 if let Some((language_server, buffer_abs_path)) =
4197 language_server.as_ref().zip(buffer_abs_path.as_ref())
4198 {
4199 format_operation = Some(FormatOperation::Lsp(
4200 Self::format_via_lsp(
4201 &project,
4202 &buffer,
4203 buffer_abs_path,
4204 &language_server,
4205 tab_size,
4206 &mut cx,
4207 )
4208 .await
4209 .context("failed to format via language server")?,
4210 ));
4211 }
4212 }
4213
4214 (
4215 Formatter::External { command, arguments },
4216 FormatOnSave::On | FormatOnSave::Off,
4217 )
4218 | (_, FormatOnSave::External { command, arguments }) => {
4219 if let Some(buffer_abs_path) = buffer_abs_path {
4220 format_operation = Self::format_via_external_command(
4221 buffer,
4222 buffer_abs_path,
4223 &command,
4224 &arguments,
4225 &mut cx,
4226 )
4227 .await
4228 .context(format!(
4229 "failed to format via external command {:?}",
4230 command
4231 ))?
4232 .map(FormatOperation::External);
4233 }
4234 }
4235 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4236 if let Some(new_operation) =
4237 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4238 .await
4239 {
4240 format_operation = Some(new_operation);
4241 } else if let Some((language_server, buffer_abs_path)) =
4242 language_server.as_ref().zip(buffer_abs_path.as_ref())
4243 {
4244 format_operation = Some(FormatOperation::Lsp(
4245 Self::format_via_lsp(
4246 &project,
4247 &buffer,
4248 buffer_abs_path,
4249 &language_server,
4250 tab_size,
4251 &mut cx,
4252 )
4253 .await
4254 .context("failed to format via language server")?,
4255 ));
4256 }
4257 }
4258 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4259 if let Some(new_operation) =
4260 prettier_support::format_with_prettier(&project, buffer, &mut cx)
4261 .await
4262 {
4263 format_operation = Some(new_operation);
4264 }
4265 }
4266 };
4267
4268 buffer.update(&mut cx, |b, cx| {
4269 // If the buffer had its whitespace formatted and was edited while the language-specific
4270 // formatting was being computed, avoid applying the language-specific formatting, because
4271 // it can't be grouped with the whitespace formatting in the undo history.
4272 if let Some(transaction_id) = whitespace_transaction_id {
4273 if b.peek_undo_stack()
4274 .map_or(true, |e| e.transaction_id() != transaction_id)
4275 {
4276 format_operation.take();
4277 }
4278 }
4279
4280 // Apply any language-specific formatting, and group the two formatting operations
4281 // in the buffer's undo history.
4282 if let Some(operation) = format_operation {
4283 match operation {
4284 FormatOperation::Lsp(edits) => {
4285 b.edit(edits, None, cx);
4286 }
4287 FormatOperation::External(diff) => {
4288 b.apply_diff(diff, cx);
4289 }
4290 FormatOperation::Prettier(diff) => {
4291 b.apply_diff(diff, cx);
4292 }
4293 }
4294
4295 if let Some(transaction_id) = whitespace_transaction_id {
4296 b.group_until_transaction(transaction_id);
4297 }
4298 }
4299
4300 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4301 if !push_to_history {
4302 b.forget_transaction(transaction.id);
4303 }
4304 project_transaction.0.insert(buffer.clone(), transaction);
4305 }
4306 })?;
4307 }
4308
4309 Ok(project_transaction)
4310 })
4311 } else {
4312 let remote_id = self.remote_id();
4313 let client = self.client.clone();
4314 cx.spawn(move |this, mut cx| async move {
4315 let mut project_transaction = ProjectTransaction::default();
4316 if let Some(project_id) = remote_id {
4317 let response = client
4318 .request(proto::FormatBuffers {
4319 project_id,
4320 trigger: trigger as i32,
4321 buffer_ids: buffers
4322 .iter()
4323 .map(|buffer| {
4324 buffer.update(&mut cx, |buffer, _| buffer.remote_id())
4325 })
4326 .collect::<Result<_>>()?,
4327 })
4328 .await?
4329 .transaction
4330 .ok_or_else(|| anyhow!("missing transaction"))?;
4331 project_transaction = this
4332 .update(&mut cx, |this, cx| {
4333 this.deserialize_project_transaction(response, push_to_history, cx)
4334 })?
4335 .await?;
4336 }
4337 Ok(project_transaction)
4338 })
4339 }
4340 }
4341
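/// Requests document formatting from the given language server, falling back to a
/// range-formatting request over the entire buffer when only range formatting is supported.
/// Returns the resulting edits as buffer anchors.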
4342 async fn format_via_lsp(
4343 this: &WeakModel<Self>,
4344 buffer: &Model<Buffer>,
4345 abs_path: &Path,
4346 language_server: &Arc<LanguageServer>,
4347 tab_size: NonZeroU32,
4348 cx: &mut AsyncAppContext,
4349 ) -> Result<Vec<(Range<Anchor>, String)>> {
4350 let uri = lsp::Url::from_file_path(abs_path)
4351 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4352 let text_document = lsp::TextDocumentIdentifier::new(uri);
4353 let capabilities = &language_server.capabilities();
4354
4355 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4356 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4357
4358 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4359 language_server
4360 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4361 text_document,
4362 options: lsp_command::lsp_formatting_options(tab_size.get()),
4363 work_done_progress_params: Default::default(),
4364 })
4365 .await?
4366 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4367 let buffer_start = lsp::Position::new(0, 0);
4368 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4369
4370 language_server
4371 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4372 text_document,
4373 range: lsp::Range::new(buffer_start, buffer_end),
4374 options: lsp_command::lsp_formatting_options(tab_size.get()),
4375 work_done_progress_params: Default::default(),
4376 })
4377 .await?
4378 } else {
4379 None
4380 };
4381
4382 if let Some(lsp_edits) = lsp_edits {
4383 this.update(cx, |this, cx| {
4384 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4385 })?
4386 .await
4387 } else {
4388 Ok(Vec::new())
4389 }
4390 }
4391
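/// Formats a buffer by piping its contents to an external command (substituting
/// `{buffer_path}` in the arguments) and diffing the command's stdout against the buffer.
/// Returns `None` when the buffer has no local working directory to run the command in.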
4392 async fn format_via_external_command(
4393 buffer: &Model<Buffer>,
4394 buffer_abs_path: &Path,
4395 command: &str,
4396 arguments: &[String],
4397 cx: &mut AsyncAppContext,
4398 ) -> Result<Option<Diff>> {
4399 let working_dir_path = buffer.update(cx, |buffer, cx| {
4400 let file = File::from_dyn(buffer.file())?;
4401 let worktree = file.worktree.read(cx).as_local()?;
4402 let mut worktree_path = worktree.abs_path().to_path_buf();
4403 if worktree.root_entry()?.is_file() {
4404 worktree_path.pop();
4405 }
4406 Some(worktree_path)
4407 })?;
4408
4409 if let Some(working_dir_path) = working_dir_path {
4410 let mut child =
4411 smol::process::Command::new(command)
4412 .args(arguments.iter().map(|arg| {
4413 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4414 }))
4415 .current_dir(&working_dir_path)
4416 .stdin(smol::process::Stdio::piped())
4417 .stdout(smol::process::Stdio::piped())
4418 .stderr(smol::process::Stdio::piped())
4419 .spawn()?;
4420 let stdin = child
4421 .stdin
4422 .as_mut()
4423 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4424 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4425 for chunk in text.chunks() {
4426 stdin.write_all(chunk.as_bytes()).await?;
4427 }
4428 stdin.flush().await?;
4429
4430 let output = child.output().await?;
4431 if !output.status.success() {
4432 return Err(anyhow!(
4433 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4434 output.status.code(),
4435 String::from_utf8_lossy(&output.stdout),
4436 String::from_utf8_lossy(&output.stderr),
4437 ));
4438 }
4439
4440 let stdout = String::from_utf8(output.stdout)?;
4441 Ok(Some(
4442 buffer
4443 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4444 .await,
4445 ))
4446 } else {
4447 Ok(None)
4448 }
4449 }
4450
4451 pub fn definition<T: ToPointUtf16>(
4452 &self,
4453 buffer: &Model<Buffer>,
4454 position: T,
4455 cx: &mut ModelContext<Self>,
4456 ) -> Task<Result<Vec<LocationLink>>> {
4457 let position = position.to_point_utf16(buffer.read(cx));
4458 self.request_lsp(
4459 buffer.clone(),
4460 LanguageServerToQuery::Primary,
4461 GetDefinition { position },
4462 cx,
4463 )
4464 }
4465
4466 pub fn type_definition<T: ToPointUtf16>(
4467 &self,
4468 buffer: &Model<Buffer>,
4469 position: T,
4470 cx: &mut ModelContext<Self>,
4471 ) -> Task<Result<Vec<LocationLink>>> {
4472 let position = position.to_point_utf16(buffer.read(cx));
4473 self.request_lsp(
4474 buffer.clone(),
4475 LanguageServerToQuery::Primary,
4476 GetTypeDefinition { position },
4477 cx,
4478 )
4479 }
4480
4481 pub fn references<T: ToPointUtf16>(
4482 &self,
4483 buffer: &Model<Buffer>,
4484 position: T,
4485 cx: &mut ModelContext<Self>,
4486 ) -> Task<Result<Vec<Location>>> {
4487 let position = position.to_point_utf16(buffer.read(cx));
4488 self.request_lsp(
4489 buffer.clone(),
4490 LanguageServerToQuery::Primary,
4491 GetReferences { position },
4492 cx,
4493 )
4494 }
4495
4496 pub fn document_highlights<T: ToPointUtf16>(
4497 &self,
4498 buffer: &Model<Buffer>,
4499 position: T,
4500 cx: &mut ModelContext<Self>,
4501 ) -> Task<Result<Vec<DocumentHighlight>>> {
4502 let position = position.to_point_utf16(buffer.read(cx));
4503 self.request_lsp(
4504 buffer.clone(),
4505 LanguageServerToQuery::Primary,
4506 GetDocumentHighlights { position },
4507 cx,
4508 )
4509 }
4510
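/// Queries every running language server for workspace symbols matching `query`, resolving
/// each result to a project path, language-specific label, and source worktree. For remote
/// projects, the query is forwarded to the host instead.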
4511 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4512 if self.is_local() {
4513 let mut requests = Vec::new();
4514 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4515 let worktree_id = *worktree_id;
4516 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4517 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4518 Some(worktree) => worktree,
4519 None => continue,
4520 };
4521 let worktree_abs_path = worktree.abs_path().clone();
4522
4523 let (adapter, language, server) = match self.language_servers.get(server_id) {
4524 Some(LanguageServerState::Running {
4525 adapter,
4526 language,
4527 server,
4528 ..
4529 }) => (adapter.clone(), language.clone(), server),
4530
4531 _ => continue,
4532 };
4533
4534 requests.push(
4535 server
4536 .request::<lsp::request::WorkspaceSymbolRequest>(
4537 lsp::WorkspaceSymbolParams {
4538 query: query.to_string(),
4539 ..Default::default()
4540 },
4541 )
4542 .log_err()
4543 .map(move |response| {
4544 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4545 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4546 flat_responses.into_iter().map(|lsp_symbol| {
4547 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4548 }).collect::<Vec<_>>()
4549 }
4550 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4551 nested_responses.into_iter().filter_map(|lsp_symbol| {
4552 let location = match lsp_symbol.location {
4553 OneOf::Left(location) => location,
4554 OneOf::Right(_) => {
4555 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4556 return None
4557 }
4558 };
4559 Some((lsp_symbol.name, lsp_symbol.kind, location))
4560 }).collect::<Vec<_>>()
4561 }
4562 }).unwrap_or_default();
4563
4564 (
4565 adapter,
4566 language,
4567 worktree_id,
4568 worktree_abs_path,
4569 lsp_symbols,
4570 )
4571 }),
4572 );
4573 }
4574
4575 cx.spawn(move |this, mut cx| async move {
4576 let responses = futures::future::join_all(requests).await;
4577 let this = match this.upgrade() {
4578 Some(this) => this,
4579 None => return Ok(Vec::new()),
4580 };
4581
4582 let symbols = this.update(&mut cx, |this, cx| {
4583 let mut symbols = Vec::new();
4584 for (
4585 adapter,
4586 adapter_language,
4587 source_worktree_id,
4588 worktree_abs_path,
4589 lsp_symbols,
4590 ) in responses
4591 {
4592 symbols.extend(lsp_symbols.into_iter().filter_map(
4593 |(symbol_name, symbol_kind, symbol_location)| {
4594 let abs_path = symbol_location.uri.to_file_path().ok()?;
4595 let mut worktree_id = source_worktree_id;
4596 let path;
4597 if let Some((worktree, rel_path)) =
4598 this.find_local_worktree(&abs_path, cx)
4599 {
4600 worktree_id = worktree.read(cx).id();
4601 path = rel_path;
4602 } else {
4603 path = relativize_path(&worktree_abs_path, &abs_path);
4604 }
4605
4606 let project_path = ProjectPath {
4607 worktree_id,
4608 path: path.into(),
4609 };
4610 let signature = this.symbol_signature(&project_path);
4611 let adapter_language = adapter_language.clone();
4612 let language = this
4613 .languages
4614 .language_for_file(&project_path.path, None)
4615 .unwrap_or_else(move |_| adapter_language);
4616 let language_server_name = adapter.name.clone();
4617 Some(async move {
4618 let language = language.await;
4619 let label =
4620 language.label_for_symbol(&symbol_name, symbol_kind).await;
4621
4622 Symbol {
4623 language_server_name,
4624 source_worktree_id,
4625 path: project_path,
4626 label: label.unwrap_or_else(|| {
4627 CodeLabel::plain(symbol_name.clone(), None)
4628 }),
4629 kind: symbol_kind,
4630 name: symbol_name,
4631 range: range_from_lsp(symbol_location.range),
4632 signature,
4633 }
4634 })
4635 },
4636 ));
4637 }
4638
4639 symbols
4640 })?;
4641
4642 Ok(futures::future::join_all(symbols).await)
4643 })
4644 } else if let Some(project_id) = self.remote_id() {
4645 let request = self.client.request(proto::GetProjectSymbols {
4646 project_id,
4647 query: query.to_string(),
4648 });
4649 cx.spawn(move |this, mut cx| async move {
4650 let response = request.await?;
4651 let mut symbols = Vec::new();
4652 if let Some(this) = this.upgrade() {
4653 let new_symbols = this.update(&mut cx, |this, _| {
4654 response
4655 .symbols
4656 .into_iter()
4657 .map(|symbol| this.deserialize_symbol(symbol))
4658 .collect::<Vec<_>>()
4659 })?;
4660 symbols = futures::future::join_all(new_symbols)
4661 .await
4662 .into_iter()
4663 .filter_map(|symbol| symbol.log_err())
4664 .collect::<Vec<_>>();
4665 }
4666 Ok(symbols)
4667 })
4668 } else {
4669 Task::ready(Ok(Default::default()))
4670 }
4671 }
4672
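/// Opens the buffer containing the given symbol, using the language server that reported it
/// for local projects or a request to the host for remote projects.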
4673 pub fn open_buffer_for_symbol(
4674 &mut self,
4675 symbol: &Symbol,
4676 cx: &mut ModelContext<Self>,
4677 ) -> Task<Result<Model<Buffer>>> {
4678 if self.is_local() {
4679 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
4680 symbol.source_worktree_id,
4681 symbol.language_server_name.clone(),
4682 )) {
4683 *id
4684 } else {
4685 return Task::ready(Err(anyhow!(
4686 "language server for worktree and language not found"
4687 )));
4688 };
4689
4690 let worktree_abs_path = if let Some(worktree_abs_path) = self
4691 .worktree_for_id(symbol.path.worktree_id, cx)
4692 .and_then(|worktree| worktree.read(cx).as_local())
4693 .map(|local_worktree| local_worktree.abs_path())
4694 {
4695 worktree_abs_path
4696 } else {
4697 return Task::ready(Err(anyhow!("worktree not found for symbol")));
4698 };
4699 let symbol_abs_path = worktree_abs_path.join(&symbol.path.path);
4700 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
4701 uri
4702 } else {
4703 return Task::ready(Err(anyhow!("invalid symbol path")));
4704 };
4705
4706 self.open_local_buffer_via_lsp(
4707 symbol_uri,
4708 language_server_id,
4709 symbol.language_server_name.clone(),
4710 cx,
4711 )
4712 } else if let Some(project_id) = self.remote_id() {
4713 let request = self.client.request(proto::OpenBufferForSymbol {
4714 project_id,
4715 symbol: Some(serialize_symbol(symbol)),
4716 });
4717 cx.spawn(move |this, mut cx| async move {
4718 let response = request.await?;
4719 this.update(&mut cx, |this, cx| {
4720 this.wait_for_remote_buffer(response.buffer_id, cx)
4721 })?
4722 .await
4723 })
4724 } else {
4725 Task::ready(Err(anyhow!("project does not have a remote id")))
4726 }
4727 }
4728
4729 pub fn hover<T: ToPointUtf16>(
4730 &self,
4731 buffer: &Model<Buffer>,
4732 position: T,
4733 cx: &mut ModelContext<Self>,
4734 ) -> Task<Result<Option<Hover>>> {
4735 let position = position.to_point_utf16(buffer.read(cx));
4736 self.request_lsp(
4737 buffer.clone(),
4738 LanguageServerToQuery::Primary,
4739 GetHover { position },
4740 cx,
4741 )
4742 }
4743
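/// Requests completions at the given position from every language server attached to the
/// buffer that advertises a completion provider and is enabled for the language scope at
/// that position. Remote projects forward the request to the host.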
4744 pub fn completions<T: ToOffset + ToPointUtf16>(
4745 &self,
4746 buffer: &Model<Buffer>,
4747 position: T,
4748 cx: &mut ModelContext<Self>,
4749 ) -> Task<Result<Vec<Completion>>> {
4750 let position = position.to_point_utf16(buffer.read(cx));
4751 if self.is_local() {
4752 let snapshot = buffer.read(cx).snapshot();
4753 let offset = position.to_offset(&snapshot);
4754 let scope = snapshot.language_scope_at(offset);
4755
4756 let server_ids: Vec<_> = self
4757 .language_servers_for_buffer(buffer.read(cx), cx)
4758 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
4759 .filter(|(adapter, _)| {
4760 scope
4761 .as_ref()
4762 .map(|scope| scope.language_allowed(&adapter.name))
4763 .unwrap_or(true)
4764 })
4765 .map(|(_, server)| server.server_id())
4766 .collect();
4767
4768 let buffer = buffer.clone();
4769 cx.spawn(move |this, mut cx| async move {
4770 let mut tasks = Vec::with_capacity(server_ids.len());
4771 this.update(&mut cx, |this, cx| {
4772 for server_id in server_ids {
4773 tasks.push(this.request_lsp(
4774 buffer.clone(),
4775 LanguageServerToQuery::Other(server_id),
4776 GetCompletions { position },
4777 cx,
4778 ));
4779 }
4780 })?;
4781
4782 let mut completions = Vec::new();
4783 for task in tasks {
4784 if let Ok(new_completions) = task.await {
4785 completions.extend_from_slice(&new_completions);
4786 }
4787 }
4788
4789 Ok(completions)
4790 })
4791 } else if let Some(project_id) = self.remote_id() {
4792 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
4793 } else {
4794 Task::ready(Ok(Default::default()))
4795 }
4796 }
4797
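/// Applies a completion item's additional text edits, resolving the item first when the
/// server supports completion resolution. Edits that overlap the primary completion edit
/// are skipped, and the resulting transaction is returned (and optionally pushed to the
/// buffer's undo history).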
4798 pub fn apply_additional_edits_for_completion(
4799 &self,
4800 buffer_handle: Model<Buffer>,
4801 completion: Completion,
4802 push_to_history: bool,
4803 cx: &mut ModelContext<Self>,
4804 ) -> Task<Result<Option<Transaction>>> {
4805 let buffer = buffer_handle.read(cx);
4806 let buffer_id = buffer.remote_id();
4807
4808 if self.is_local() {
4809 let server_id = completion.server_id;
4810 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
4811 Some((_, server)) => server.clone(),
4812 _ => return Task::ready(Ok(Default::default())),
4813 };
4814
4815 cx.spawn(move |this, mut cx| async move {
4816 let can_resolve = lang_server
4817 .capabilities()
4818 .completion_provider
4819 .as_ref()
4820 .and_then(|options| options.resolve_provider)
4821 .unwrap_or(false);
4822 let additional_text_edits = if can_resolve {
4823 lang_server
4824 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
4825 .await?
4826 .additional_text_edits
4827 } else {
4828 completion.lsp_completion.additional_text_edits
4829 };
4830 if let Some(edits) = additional_text_edits {
4831 let edits = this
4832 .update(&mut cx, |this, cx| {
4833 this.edits_from_lsp(
4834 &buffer_handle,
4835 edits,
4836 lang_server.server_id(),
4837 None,
4838 cx,
4839 )
4840 })?
4841 .await?;
4842
4843 buffer_handle.update(&mut cx, |buffer, cx| {
4844 buffer.finalize_last_transaction();
4845 buffer.start_transaction();
4846
4847 for (range, text) in edits {
4848 let primary = &completion.old_range;
4849 let start_within = primary.start.cmp(&range.start, buffer).is_le()
4850 && primary.end.cmp(&range.start, buffer).is_ge();
4851 let end_within = range.start.cmp(&primary.end, buffer).is_le()
4852 && range.end.cmp(&primary.end, buffer).is_ge();
4853
4854 // Skip additional edits which overlap with the primary completion edit
4855 // https://github.com/zed-industries/zed/pull/1871
4856 if !start_within && !end_within {
4857 buffer.edit([(range, text)], None, cx);
4858 }
4859 }
4860
4861 let transaction = if buffer.end_transaction(cx).is_some() {
4862 let transaction = buffer.finalize_last_transaction().unwrap().clone();
4863 if !push_to_history {
4864 buffer.forget_transaction(transaction.id);
4865 }
4866 Some(transaction)
4867 } else {
4868 None
4869 };
4870 Ok(transaction)
4871 })?
4872 } else {
4873 Ok(None)
4874 }
4875 })
4876 } else if let Some(project_id) = self.remote_id() {
4877 let client = self.client.clone();
4878 cx.spawn(move |_, mut cx| async move {
4879 let response = client
4880 .request(proto::ApplyCompletionAdditionalEdits {
4881 project_id,
4882 buffer_id,
4883 completion: Some(language::proto::serialize_completion(&completion)),
4884 })
4885 .await?;
4886
4887 if let Some(transaction) = response.transaction {
4888 let transaction = language::proto::deserialize_transaction(transaction)?;
4889 buffer_handle
4890 .update(&mut cx, |buffer, _| {
4891 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4892 })?
4893 .await?;
4894 if push_to_history {
4895 buffer_handle.update(&mut cx, |buffer, _| {
4896 buffer.push_transaction(transaction.clone(), Instant::now());
4897 })?;
4898 }
4899 Ok(Some(transaction))
4900 } else {
4901 Ok(None)
4902 }
4903 })
4904 } else {
4905 Task::ready(Err(anyhow!("project does not have a remote id")))
4906 }
4907 }
4908
4909 pub fn code_actions<T: Clone + ToOffset>(
4910 &self,
4911 buffer_handle: &Model<Buffer>,
4912 range: Range<T>,
4913 cx: &mut ModelContext<Self>,
4914 ) -> Task<Result<Vec<CodeAction>>> {
4915 let buffer = buffer_handle.read(cx);
4916 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
4917 self.request_lsp(
4918 buffer_handle.clone(),
4919 LanguageServerToQuery::Primary,
4920 GetCodeActions { range },
4921 cx,
4922 )
4923 }
4924
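/// Applies a code action: the action is resolved (or re-requested) if needed, then either
/// its workspace edit is applied directly or its command is executed and any workspace
/// edits produced by the command are returned as the project transaction.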
4925 pub fn apply_code_action(
4926 &self,
4927 buffer_handle: Model<Buffer>,
4928 mut action: CodeAction,
4929 push_to_history: bool,
4930 cx: &mut ModelContext<Self>,
4931 ) -> Task<Result<ProjectTransaction>> {
4932 if self.is_local() {
4933 let buffer = buffer_handle.read(cx);
4934 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
4935 self.language_server_for_buffer(buffer, action.server_id, cx)
4936 {
4937 (adapter.clone(), server.clone())
4938 } else {
4939 return Task::ready(Ok(Default::default()));
4940 };
4941 let range = action.range.to_point_utf16(buffer);
4942
4943 cx.spawn(move |this, mut cx| async move {
4944 if let Some(lsp_range) = action
4945 .lsp_action
4946 .data
4947 .as_mut()
4948 .and_then(|d| d.get_mut("codeActionParams"))
4949 .and_then(|d| d.get_mut("range"))
4950 {
4951 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
4952 action.lsp_action = lang_server
4953 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
4954 .await?;
4955 } else {
4956 let actions = this
4957 .update(&mut cx, |this, cx| {
4958 this.code_actions(&buffer_handle, action.range, cx)
4959 })?
4960 .await?;
4961 action.lsp_action = actions
4962 .into_iter()
4963 .find(|a| a.lsp_action.title == action.lsp_action.title)
4964 .ok_or_else(|| anyhow!("code action is outdated"))?
4965 .lsp_action;
4966 }
4967
4968 if let Some(edit) = action.lsp_action.edit {
4969 if edit.changes.is_some() || edit.document_changes.is_some() {
4970 return Self::deserialize_workspace_edit(
4971 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
4972 edit,
4973 push_to_history,
4974 lsp_adapter.clone(),
4975 lang_server.clone(),
4976 &mut cx,
4977 )
4978 .await;
4979 }
4980 }
4981
4982 if let Some(command) = action.lsp_action.command {
4983 this.update(&mut cx, |this, _| {
4984 this.last_workspace_edits_by_language_server
4985 .remove(&lang_server.server_id());
4986 })?;
4987
4988 let result = lang_server
4989 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
4990 command: command.command,
4991 arguments: command.arguments.unwrap_or_default(),
4992 ..Default::default()
4993 })
4994 .await;
4995
4996 if let Err(err) = result {
4997 // TODO: LSP ERROR
4998 return Err(err);
4999 }
5000
5001 return Ok(this.update(&mut cx, |this, _| {
5002 this.last_workspace_edits_by_language_server
5003 .remove(&lang_server.server_id())
5004 .unwrap_or_default()
5005 })?);
5006 }
5007
5008 Ok(ProjectTransaction::default())
5009 })
5010 } else if let Some(project_id) = self.remote_id() {
5011 let client = self.client.clone();
5012 let request = proto::ApplyCodeAction {
5013 project_id,
5014 buffer_id: buffer_handle.read(cx).remote_id(),
5015 action: Some(language::proto::serialize_code_action(&action)),
5016 };
5017 cx.spawn(move |this, mut cx| async move {
5018 let response = client
5019 .request(request)
5020 .await?
5021 .transaction
5022 .ok_or_else(|| anyhow!("missing transaction"))?;
5023 this.update(&mut cx, |this, cx| {
5024 this.deserialize_project_transaction(response, push_to_history, cx)
5025 })?
5026 .await
5027 })
5028 } else {
5029 Task::ready(Err(anyhow!("project does not have a remote id")))
5030 }
5031 }
5032
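/// Runs on-type formatting at the given position once the edit carrying the trigger
/// character has been applied, ensuring only one formatting request per buffer is in flight
/// at a time. Remote projects forward the request to the host.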
5033 fn apply_on_type_formatting(
5034 &self,
5035 buffer: Model<Buffer>,
5036 position: Anchor,
5037 trigger: String,
5038 cx: &mut ModelContext<Self>,
5039 ) -> Task<Result<Option<Transaction>>> {
5040 if self.is_local() {
5041 cx.spawn(move |this, mut cx| async move {
5042 // Do not allow multiple concurrent formatting requests for the
5043 // same buffer.
5044 this.update(&mut cx, |this, cx| {
5045 this.buffers_being_formatted
5046 .insert(buffer.read(cx).remote_id())
5047 })?;
5048
5049 let _cleanup = defer({
5050 let this = this.clone();
5051 let mut cx = cx.clone();
5052 let closure_buffer = buffer.clone();
5053 move || {
5054 this.update(&mut cx, |this, cx| {
5055 this.buffers_being_formatted
5056 .remove(&closure_buffer.read(cx).remote_id());
5057 })
5058 .ok();
5059 }
5060 });
5061
5062 buffer
5063 .update(&mut cx, |buffer, _| {
5064 buffer.wait_for_edits(Some(position.timestamp))
5065 })?
5066 .await?;
5067 this.update(&mut cx, |this, cx| {
5068 let position = position.to_point_utf16(buffer.read(cx));
5069 this.on_type_format(buffer, position, trigger, false, cx)
5070 })?
5071 .await
5072 })
5073 } else if let Some(project_id) = self.remote_id() {
5074 let client = self.client.clone();
5075 let request = proto::OnTypeFormatting {
5076 project_id,
5077 buffer_id: buffer.read(cx).remote_id(),
5078 position: Some(serialize_anchor(&position)),
5079 trigger,
5080 version: serialize_version(&buffer.read(cx).version()),
5081 };
5082 cx.spawn(move |_, _| async move {
5083 client
5084 .request(request)
5085 .await?
5086 .transaction
5087 .map(language::proto::deserialize_transaction)
5088 .transpose()
5089 })
5090 } else {
5091 Task::ready(Err(anyhow!("project does not have a remote id")))
5092 }
5093 }
5094
5095 async fn deserialize_edits(
5096 this: Model<Self>,
5097 buffer_to_edit: Model<Buffer>,
5098 edits: Vec<lsp::TextEdit>,
5099 push_to_history: bool,
5100 _: Arc<CachedLspAdapter>,
5101 language_server: Arc<LanguageServer>,
5102 cx: &mut AsyncAppContext,
5103 ) -> Result<Option<Transaction>> {
5104 let edits = this
5105 .update(cx, |this, cx| {
5106 this.edits_from_lsp(
5107 &buffer_to_edit,
5108 edits,
5109 language_server.server_id(),
5110 None,
5111 cx,
5112 )
5113 })?
5114 .await?;
5115
5116 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5117 buffer.finalize_last_transaction();
5118 buffer.start_transaction();
5119 for (range, text) in edits {
5120 buffer.edit([(range, text)], None, cx);
5121 }
5122
5123 if buffer.end_transaction(cx).is_some() {
5124 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5125 if !push_to_history {
5126 buffer.forget_transaction(transaction.id);
5127 }
5128 Some(transaction)
5129 } else {
5130 None
5131 }
5132 })?;
5133
5134 Ok(transaction)
5135 }
5136
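/// Applies an LSP workspace edit: resource operations (create, rename, delete) are performed
/// on the filesystem, text edits are applied to buffers opened via their LSP URIs, and the
/// per-buffer transactions are collected into a single `ProjectTransaction`.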
5137 async fn deserialize_workspace_edit(
5138 this: Model<Self>,
5139 edit: lsp::WorkspaceEdit,
5140 push_to_history: bool,
5141 lsp_adapter: Arc<CachedLspAdapter>,
5142 language_server: Arc<LanguageServer>,
5143 cx: &mut AsyncAppContext,
5144 ) -> Result<ProjectTransaction> {
5145 let fs = this.update(cx, |this, _| this.fs.clone())?;
5146 let mut operations = Vec::new();
5147 if let Some(document_changes) = edit.document_changes {
5148 match document_changes {
5149 lsp::DocumentChanges::Edits(edits) => {
5150 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5151 }
5152 lsp::DocumentChanges::Operations(ops) => operations = ops,
5153 }
5154 } else if let Some(changes) = edit.changes {
5155 operations.extend(changes.into_iter().map(|(uri, edits)| {
5156 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5157 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5158 uri,
5159 version: None,
5160 },
5161 edits: edits.into_iter().map(OneOf::Left).collect(),
5162 })
5163 }));
5164 }
5165
5166 let mut project_transaction = ProjectTransaction::default();
5167 for operation in operations {
5168 match operation {
5169 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5170 let abs_path = op
5171 .uri
5172 .to_file_path()
5173 .map_err(|_| anyhow!("can't convert URI to path"))?;
5174
5175 if let Some(parent_path) = abs_path.parent() {
5176 fs.create_dir(parent_path).await?;
5177 }
5178 if abs_path.ends_with("/") {
5179 fs.create_dir(&abs_path).await?;
5180 } else {
5181 fs.create_file(
5182 &abs_path,
5183 op.options
5184 .map(|options| fs::CreateOptions {
5185 overwrite: options.overwrite.unwrap_or(false),
5186 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5187 })
5188 .unwrap_or_default(),
5189 )
5190 .await?;
5191 }
5192 }
5193
5194 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5195 let source_abs_path = op
5196 .old_uri
5197 .to_file_path()
5198 .map_err(|_| anyhow!("can't convert URI to path"))?;
5199 let target_abs_path = op
5200 .new_uri
5201 .to_file_path()
5202 .map_err(|_| anyhow!("can't convert URI to path"))?;
5203 fs.rename(
5204 &source_abs_path,
5205 &target_abs_path,
5206 op.options
5207 .map(|options| fs::RenameOptions {
5208 overwrite: options.overwrite.unwrap_or(false),
5209 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5210 })
5211 .unwrap_or_default(),
5212 )
5213 .await?;
5214 }
5215
5216 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5217 let abs_path = op
5218 .uri
5219 .to_file_path()
5220 .map_err(|_| anyhow!("can't convert URI to path"))?;
5221 let options = op
5222 .options
5223 .map(|options| fs::RemoveOptions {
5224 recursive: options.recursive.unwrap_or(false),
5225 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5226 })
5227 .unwrap_or_default();
5228 if abs_path.ends_with("/") {
5229 fs.remove_dir(&abs_path, options).await?;
5230 } else {
5231 fs.remove_file(&abs_path, options).await?;
5232 }
5233 }
5234
5235 lsp::DocumentChangeOperation::Edit(op) => {
5236 let buffer_to_edit = this
5237 .update(cx, |this, cx| {
5238 this.open_local_buffer_via_lsp(
5239 op.text_document.uri,
5240 language_server.server_id(),
5241 lsp_adapter.name.clone(),
5242 cx,
5243 )
5244 })?
5245 .await?;
5246
5247 let edits = this
5248 .update(cx, |this, cx| {
5249 let edits = op.edits.into_iter().map(|edit| match edit {
5250 OneOf::Left(edit) => edit,
5251 OneOf::Right(edit) => edit.text_edit,
5252 });
5253 this.edits_from_lsp(
5254 &buffer_to_edit,
5255 edits,
5256 language_server.server_id(),
5257 op.text_document.version,
5258 cx,
5259 )
5260 })?
5261 .await?;
5262
5263 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5264 buffer.finalize_last_transaction();
5265 buffer.start_transaction();
5266 for (range, text) in edits {
5267 buffer.edit([(range, text)], None, cx);
5268 }
5269 let transaction = if buffer.end_transaction(cx).is_some() {
5270 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5271 if !push_to_history {
5272 buffer.forget_transaction(transaction.id);
5273 }
5274 Some(transaction)
5275 } else {
5276 None
5277 };
5278
5279 transaction
5280 })?;
5281 if let Some(transaction) = transaction {
5282 project_transaction.0.insert(buffer_to_edit, transaction);
5283 }
5284 }
5285 }
5286 }
5287
5288 Ok(project_transaction)
5289 }
5290
5291 pub fn prepare_rename<T: ToPointUtf16>(
5292 &self,
5293 buffer: Model<Buffer>,
5294 position: T,
5295 cx: &mut ModelContext<Self>,
5296 ) -> Task<Result<Option<Range<Anchor>>>> {
5297 let position = position.to_point_utf16(buffer.read(cx));
5298 self.request_lsp(
5299 buffer,
5300 LanguageServerToQuery::Primary,
5301 PrepareRename { position },
5302 cx,
5303 )
5304 }
5305
5306 pub fn perform_rename<T: ToPointUtf16>(
5307 &self,
5308 buffer: Model<Buffer>,
5309 position: T,
5310 new_name: String,
5311 push_to_history: bool,
5312 cx: &mut ModelContext<Self>,
5313 ) -> Task<Result<ProjectTransaction>> {
5314 let position = position.to_point_utf16(buffer.read(cx));
5315 self.request_lsp(
5316 buffer,
5317 LanguageServerToQuery::Primary,
5318 PerformRename {
5319 position,
5320 new_name,
5321 push_to_history,
5322 },
5323 cx,
5324 )
5325 }
5326
5327 pub fn on_type_format<T: ToPointUtf16>(
5328 &self,
5329 buffer: Model<Buffer>,
5330 position: T,
5331 trigger: String,
5332 push_to_history: bool,
5333 cx: &mut ModelContext<Self>,
5334 ) -> Task<Result<Option<Transaction>>> {
5335 let (position, tab_size) = buffer.update(cx, |buffer, cx| {
5336 let position = position.to_point_utf16(buffer);
5337 (
5338 position,
5339 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx)
5340 .tab_size,
5341 )
5342 });
5343 self.request_lsp(
5344 buffer.clone(),
5345 LanguageServerToQuery::Primary,
5346 OnTypeFormatting {
5347 position,
5348 trigger,
5349 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5350 push_to_history,
5351 },
5352 cx,
5353 )
5354 }
5355
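/// Requests inlay hints for the given range. Locally this waits for the range's anchors to
/// be applied and then queries the primary language server; remotely the request is
/// forwarded to the host and the response converted back into project inlay hints.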
5356 pub fn inlay_hints<T: ToOffset>(
5357 &self,
5358 buffer_handle: Model<Buffer>,
5359 range: Range<T>,
5360 cx: &mut ModelContext<Self>,
5361 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5362 let buffer = buffer_handle.read(cx);
5363 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5364 let range_start = range.start;
5365 let range_end = range.end;
5366 let buffer_id = buffer.remote_id();
5367 let buffer_version = buffer.version().clone();
5368 let lsp_request = InlayHints { range };
5369
5370 if self.is_local() {
5371 let lsp_request_task = self.request_lsp(
5372 buffer_handle.clone(),
5373 LanguageServerToQuery::Primary,
5374 lsp_request,
5375 cx,
5376 );
5377 cx.spawn(move |_, mut cx| async move {
5378 buffer_handle
5379 .update(&mut cx, |buffer, _| {
5380 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5381 })?
5382 .await
5383 .context("waiting for inlay hint request range edits")?;
5384 lsp_request_task.await.context("inlay hints LSP request")
5385 })
5386 } else if let Some(project_id) = self.remote_id() {
5387 let client = self.client.clone();
5388 let request = proto::InlayHints {
5389 project_id,
5390 buffer_id,
5391 start: Some(serialize_anchor(&range_start)),
5392 end: Some(serialize_anchor(&range_end)),
5393 version: serialize_version(&buffer_version),
5394 };
5395 cx.spawn(move |project, cx| async move {
5396 let response = client
5397 .request(request)
5398 .await
5399 .context("inlay hints proto request")?;
5400 let hints_request_result = LspCommand::response_from_proto(
5401 lsp_request,
5402 response,
5403 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5404 buffer_handle.clone(),
5405 cx,
5406 )
5407 .await;
5408
5409 hints_request_result.context("inlay hints proto response conversion")
5410 })
5411 } else {
5412 Task::ready(Err(anyhow!("project does not have a remote id")))
5413 }
5414 }
5415
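/// Resolves an inlay hint's lazily-computed fields via `inlayHint/resolve` when the owning
/// language server supports it; otherwise the hint is returned unchanged. Remote projects
/// forward the request to the host.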
5416 pub fn resolve_inlay_hint(
5417 &self,
5418 hint: InlayHint,
5419 buffer_handle: Model<Buffer>,
5420 server_id: LanguageServerId,
5421 cx: &mut ModelContext<Self>,
5422 ) -> Task<anyhow::Result<InlayHint>> {
5423 if self.is_local() {
5424 let buffer = buffer_handle.read(cx);
5425 let (_, lang_server) = if let Some((adapter, server)) =
5426 self.language_server_for_buffer(buffer, server_id, cx)
5427 {
5428 (adapter.clone(), server.clone())
5429 } else {
5430 return Task::ready(Ok(hint));
5431 };
5432 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
5433 return Task::ready(Ok(hint));
5434 }
5435
5436 let buffer_snapshot = buffer.snapshot();
5437 cx.spawn(move |_, mut cx| async move {
5438 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
5439 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
5440 );
5441 let resolved_hint = resolve_task
5442 .await
5443 .context("inlay hint resolve LSP request")?;
5444 let resolved_hint = InlayHints::lsp_to_project_hint(
5445 resolved_hint,
5446 &buffer_handle,
5447 server_id,
5448 ResolveState::Resolved,
5449 false,
5450 &mut cx,
5451 )
5452 .await?;
5453 Ok(resolved_hint)
5454 })
5455 } else if let Some(project_id) = self.remote_id() {
5456 let client = self.client.clone();
5457 let request = proto::ResolveInlayHint {
5458 project_id,
5459 buffer_id: buffer_handle.read(cx).remote_id(),
5460 language_server_id: server_id.0 as u64,
5461 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
5462 };
5463 cx.spawn(move |_, _| async move {
5464 let response = client
5465 .request(request)
5466 .await
5467 .context("inlay hints proto request")?;
5468 match response.hint {
5469 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
5470 .context("inlay hints proto resolve response conversion"),
5471 None => Ok(hint),
5472 }
5473 })
5474 } else {
5475 Task::ready(Err(anyhow!("project does not have a remote id")))
5476 }
5477 }
5478
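/// Searches the project for the given query. Local projects delegate to `search_local`;
/// remote projects forward the query to the host and stream the matching buffers and ranges
/// back over the returned channel.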
5479 #[allow(clippy::type_complexity)]
5480 pub fn search(
5481 &self,
5482 query: SearchQuery,
5483 cx: &mut ModelContext<Self>,
5484 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5485 if self.is_local() {
5486 self.search_local(query, cx)
5487 } else if let Some(project_id) = self.remote_id() {
5488 let (tx, rx) = smol::channel::unbounded();
5489 let request = self.client.request(query.to_proto(project_id));
5490 cx.spawn(move |this, mut cx| async move {
5491 let response = request.await?;
5492 let mut result = HashMap::default();
5493 for location in response.locations {
5494 let target_buffer = this
5495 .update(&mut cx, |this, cx| {
5496 this.wait_for_remote_buffer(location.buffer_id, cx)
5497 })?
5498 .await?;
5499 let start = location
5500 .start
5501 .and_then(deserialize_anchor)
5502 .ok_or_else(|| anyhow!("missing target start"))?;
5503 let end = location
5504 .end
5505 .and_then(deserialize_anchor)
5506 .ok_or_else(|| anyhow!("missing target end"))?;
5507 result
5508 .entry(target_buffer)
5509 .or_insert(Vec::new())
5510 .push(start..end)
5511 }
5512 for (buffer, ranges) in result {
5513 let _ = tx.send((buffer, ranges)).await;
5514 }
5515 Result::<(), anyhow::Error>::Ok(())
5516 })
5517 .detach_and_log_err(cx);
5518 rx
5519 } else {
5520 unimplemented!();
5521 }
5522 }
5523
5524 pub fn search_local(
5525 &self,
5526 query: SearchQuery,
5527 cx: &mut ModelContext<Self>,
5528 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
5529 // Local search is split into several phases.
5530 // TL;DR: we do two passes; an initial pass picks the files that contain at least one match,
5531 // and a second pass finds the positions of all the matches within those candidate files.
5532 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
5533 //
5534 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
5535 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
5536 //
5537 // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
5538 // Then, we go through the worktrees and check for files that match the predicate. If a file has an opened version, we skip scanning
5539 // the FS version of that file altogether - after all, what we have in memory is more up-to-date than what's on the FS.
5540 // 2. At this point, we have a list of all potentially matching buffers/files.
5541 // We sort that list by buffer path - this list is retained for later use.
5542 // We ensure that all buffers are now opened and available in project.
5543 // 3. We run a scan over all the candidate buffers on multiple background threads.
5544 // We cannot assume that there will even be a match - while at least one match
5545 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
5546 // There is also an auxiliary background thread responsible for result gathering.
5547 // This is where the sorted list of buffers comes into play to maintain sorted order; whenever this background thread receives a notification (buffer has/doesn't have matches),
5548 // it stores that status. It reports matches in sorted order, though it accepts them in unsorted order as well.
5549 // As soon as the match info on next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
5550 // entry - which might already be available thanks to out-of-order processing.
5551 //
5552 // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
5553 // This however would mean that project search (the main user of this function) would have to do the sorting itself, on the go.
5554 // This isn't as straightforward as running an insertion sort sadly, and it would also mean having to maintain the match index
5555 // in the face of a constantly updating list of sorted matches.
5556 // Meanwhile, this implementation offers index stability, since the matches are already reported in a sorted order.
5557 let snapshots = self
5558 .visible_worktrees(cx)
5559 .filter_map(|tree| {
5560 let tree = tree.read(cx).as_local()?;
5561 Some(tree.snapshot())
5562 })
5563 .collect::<Vec<_>>();
5564
5565 let background = cx.background_executor().clone();
5566 let path_count: usize = snapshots
5567 .iter()
5568 .map(|s| {
5569 if query.include_ignored() {
5570 s.file_count()
5571 } else {
5572 s.visible_file_count()
5573 }
5574 })
5575 .sum();
5576 if path_count == 0 {
5577 let (_, rx) = smol::channel::bounded(1024);
5578 return rx;
5579 }
5580 let workers = background.num_cpus().min(path_count);
5581 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
5582 let mut unnamed_files = vec![];
5583 let opened_buffers = self
5584 .opened_buffers
5585 .iter()
5586 .filter_map(|(_, b)| {
5587 let buffer = b.upgrade()?;
5588 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
5589 let is_ignored = buffer
5590 .project_path(cx)
5591 .and_then(|path| self.entry_for_path(&path, cx))
5592 .map_or(false, |entry| entry.is_ignored);
5593 (is_ignored, buffer.snapshot())
5594 });
5595 if is_ignored && !query.include_ignored() {
5596 return None;
5597 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
5598 Some((path.clone(), (buffer, snapshot)))
5599 } else {
5600 unnamed_files.push(buffer);
5601 None
5602 }
5603 })
5604 .collect();
5605 cx.background_executor()
5606 .spawn(Self::background_search(
5607 unnamed_files,
5608 opened_buffers,
5609 cx.background_executor().clone(),
5610 self.fs.clone(),
5611 workers,
5612 query.clone(),
5613 path_count,
5614 snapshots,
5615 matching_paths_tx,
5616 ))
5617 .detach();
5618
5619 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
5620 let background = cx.background_executor().clone();
5621 let (result_tx, result_rx) = smol::channel::bounded(1024);
5622 cx.background_executor()
5623 .spawn(async move {
5624 let Ok(buffers) = buffers.await else {
5625 return;
5626 };
5627
5628 let buffers_len = buffers.len();
5629 if buffers_len == 0 {
5630 return;
5631 }
5632 let query = &query;
5633 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
5634 background
5635 .scoped(|scope| {
5636 #[derive(Clone)]
5637 struct FinishedStatus {
5638 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
5639 buffer_index: SearchMatchCandidateIndex,
5640 }
5641
5642 for _ in 0..workers {
5643 let finished_tx = finished_tx.clone();
5644 let mut buffers_rx = buffers_rx.clone();
5645 scope.spawn(async move {
5646 while let Some((entry, buffer_index)) = buffers_rx.next().await {
5647 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
5648 {
5649 if query.file_matches(
5650 snapshot.file().map(|file| file.path().as_ref()),
5651 ) {
5652 query
5653 .search(&snapshot, None)
5654 .await
5655 .iter()
5656 .map(|range| {
5657 snapshot.anchor_before(range.start)
5658 ..snapshot.anchor_after(range.end)
5659 })
5660 .collect()
5661 } else {
5662 Vec::new()
5663 }
5664 } else {
5665 Vec::new()
5666 };
5667
5668 let status = if !buffer_matches.is_empty() {
5669 let entry = if let Some((buffer, _)) = entry.as_ref() {
5670 Some((buffer.clone(), buffer_matches))
5671 } else {
5672 None
5673 };
5674 FinishedStatus {
5675 entry,
5676 buffer_index,
5677 }
5678 } else {
5679 FinishedStatus {
5680 entry: None,
5681 buffer_index,
5682 }
5683 };
5684 if finished_tx.send(status).await.is_err() {
5685 break;
5686 }
5687 }
5688 });
5689 }
5690 // Report sorted matches
5691 scope.spawn(async move {
5692 let mut current_index = 0;
5693 let mut scratch = vec![None; buffers_len];
5694 while let Some(status) = finished_rx.next().await {
5695 debug_assert!(
5696 scratch[status.buffer_index].is_none(),
5697 "Got match status of position {} twice",
5698 status.buffer_index
5699 );
5700 let index = status.buffer_index;
5701 scratch[index] = Some(status);
5702 while current_index < buffers_len {
5703 let Some(current_entry) = scratch[current_index].take() else {
5704 // We intentionally **do not** increment `current_index` here. When the next element arrives
5705 // from `finished_rx`, we will inspect the same position again, hoping that it is Some(_)
5706 // by then.
5707 break;
5708 };
5709 if let Some(entry) = current_entry.entry {
5710 result_tx.send(entry).await.log_err();
5711 }
5712 current_index += 1;
5713 }
5714 if current_index == buffers_len {
5715 break;
5716 }
5717 }
5718 });
5719 })
5720 .await;
5721 })
5722 .detach();
5723 result_rx
5724 }
5725
5726 /// Pick the paths that might contain a match for the given search query.
5727 async fn background_search(
5728 unnamed_buffers: Vec<Model<Buffer>>,
5729 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
5730 executor: BackgroundExecutor,
5731 fs: Arc<dyn Fs>,
5732 workers: usize,
5733 query: SearchQuery,
5734 path_count: usize,
5735 snapshots: Vec<LocalSnapshot>,
5736 matching_paths_tx: Sender<SearchMatchCandidate>,
5737 ) {
5738 let fs = &fs;
5739 let query = &query;
5740 let matching_paths_tx = &matching_paths_tx;
5741 let snapshots = &snapshots;
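// Ceiling division: spread the candidate paths evenly across the workers so every path is covered.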
5742 let paths_per_worker = (path_count + workers - 1) / workers;
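// Every open buffer (named or unnamed) is reported as a candidate up front; the worktree scan
// below skips paths that already have an open buffer.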
5743 for buffer in unnamed_buffers {
5744 matching_paths_tx
5745 .send(SearchMatchCandidate::OpenBuffer {
5746 buffer: buffer.clone(),
5747 path: None,
5748 })
5749 .await
5750 .log_err();
5751 }
5752 for (path, (buffer, _)) in opened_buffers.iter() {
5753 matching_paths_tx
5754 .send(SearchMatchCandidate::OpenBuffer {
5755 buffer: buffer.clone(),
5756 path: Some(path.clone()),
5757 })
5758 .await
5759 .log_err();
5760 }
5761 executor
5762 .scoped(|scope| {
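// A shared semaphore bounds how many tasks (the per-range workers and the ignored-path tasks
// spawned below) touch the filesystem at once.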
5763 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
5764
5765 for worker_ix in 0..workers {
5766 let worker_start_ix = worker_ix * paths_per_worker;
5767 let worker_end_ix = worker_start_ix + paths_per_worker;
5768 let opened_buffers = opened_buffers.clone();
5769 let limiter = Arc::clone(&max_concurrent_workers);
5770 scope.spawn(async move {
5771 let _guard = limiter.acquire().await;
5772 let mut snapshot_start_ix = 0;
5773 let mut abs_path = PathBuf::new();
5774 for snapshot in snapshots {
5775 let snapshot_end_ix = snapshot_start_ix
5776 + if query.include_ignored() {
5777 snapshot.file_count()
5778 } else {
5779 snapshot.visible_file_count()
5780 };
5781 if worker_end_ix <= snapshot_start_ix {
5782 break;
5783 } else if worker_start_ix > snapshot_end_ix {
5784 snapshot_start_ix = snapshot_end_ix;
5785 continue;
5786 } else {
5787 let start_in_snapshot =
5788 worker_start_ix.saturating_sub(snapshot_start_ix);
5789 let end_in_snapshot =
5790 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
5791
5792 for entry in snapshot
5793 .files(query.include_ignored(), start_in_snapshot)
5794 .take(end_in_snapshot - start_in_snapshot)
5795 {
5796 if matching_paths_tx.is_closed() {
5797 break;
5798 }
5799 if opened_buffers.contains_key(&entry.path) {
5800 continue;
5801 }
5802 let matches = if query.file_matches(Some(&entry.path)) {
5803 abs_path.clear();
5804 abs_path.push(&snapshot.abs_path());
5805 abs_path.push(&entry.path);
5806 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
5807 {
5808 query.detect(file).unwrap_or(false)
5809 } else {
5810 false
5811 }
5812 } else {
5813 false
5814 };
5815
5816 if matches {
5817 let project_path = SearchMatchCandidate::Path {
5818 worktree_id: snapshot.id(),
5819 path: entry.path.clone(),
5820 is_ignored: entry.is_ignored,
5821 };
5822 if matching_paths_tx.send(project_path).await.is_err() {
5823 break;
5824 }
5825 }
5826 }
5827
5828 snapshot_start_ix = snapshot_end_ix;
5829 }
5830 }
5831 });
5832 }
5833
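// Ignored directories may not be fully indexed in the worktree snapshot, so walk them directly
// on the filesystem, descending into subdirectories as they are discovered.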
5834 if query.include_ignored() {
5835 for snapshot in snapshots {
5836 for ignored_entry in snapshot
5837 .entries(query.include_ignored())
5838 .filter(|e| e.is_ignored)
5839 {
5840 let limiter = Arc::clone(&max_concurrent_workers);
5841 scope.spawn(async move {
5842 let _guard = limiter.acquire().await;
5843 let mut ignored_paths_to_process =
5844 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
5845 while let Some(ignored_abs_path) =
5846 ignored_paths_to_process.pop_front()
5847 {
5848 if !query.file_matches(Some(&ignored_abs_path))
5849 || snapshot.is_path_excluded(&ignored_abs_path)
5850 {
5851 continue;
5852 }
5853 if let Some(fs_metadata) = fs
5854 .metadata(&ignored_abs_path)
5855 .await
5856 .with_context(|| {
5857 format!("fetching fs metadata for {ignored_abs_path:?}")
5858 })
5859 .log_err()
5860 .flatten()
5861 {
5862 if fs_metadata.is_dir {
5863 if let Some(mut subfiles) = fs
5864 .read_dir(&ignored_abs_path)
5865 .await
5866 .with_context(|| {
5867 format!(
5868 "listing ignored path {ignored_abs_path:?}"
5869 )
5870 })
5871 .log_err()
5872 {
5873 while let Some(subfile) = subfiles.next().await {
5874 if let Some(subfile) = subfile.log_err() {
5875 ignored_paths_to_process.push_back(subfile);
5876 }
5877 }
5878 }
5879 } else if !fs_metadata.is_symlink {
5880 let matches = if let Some(file) = fs
5881 .open_sync(&ignored_abs_path)
5882 .await
5883 .with_context(|| {
5884 format!(
5885 "Opening ignored path {ignored_abs_path:?}"
5886 )
5887 })
5888 .log_err()
5889 {
5890 query.detect(file).unwrap_or(false)
5891 } else {
5892 false
5893 };
5894 if matches {
5895 let project_path = SearchMatchCandidate::Path {
5896 worktree_id: snapshot.id(),
5897 path: Arc::from(
5898 ignored_abs_path
5899 .strip_prefix(snapshot.abs_path())
5900 .expect(
5901 "scanning worktree-related files",
5902 ),
5903 ),
5904 is_ignored: true,
5905 };
5906 if matching_paths_tx
5907 .send(project_path)
5908 .await
5909 .is_err()
5910 {
5911 return;
5912 }
5913 }
5914 }
5915 }
5916 }
5917 });
5918 }
5919 }
5920 }
5921 })
5922 .await;
5923 }
5924
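/// Sends an `LspCommand` to a locally running language server, or forwards it to the host over
/// RPC when this project is remote. Yields a default response when no matching server is
/// available or the server lacks the required capability.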
5925 fn request_lsp<R: LspCommand>(
5926 &self,
5927 buffer_handle: Model<Buffer>,
5928 server: LanguageServerToQuery,
5929 request: R,
5930 cx: &mut ModelContext<Self>,
5931 ) -> Task<Result<R::Response>>
5932 where
5933 <R::LspRequest as lsp::request::Request>::Result: Send,
5934 <R::LspRequest as lsp::request::Request>::Params: Send,
5935 {
5936 let buffer = buffer_handle.read(cx);
5937 if self.is_local() {
5938 let language_server = match server {
5939 LanguageServerToQuery::Primary => {
5940 match self.primary_language_server_for_buffer(buffer, cx) {
5941 Some((_, server)) => Some(Arc::clone(server)),
5942 None => return Task::ready(Ok(Default::default())),
5943 }
5944 }
5945 LanguageServerToQuery::Other(id) => self
5946 .language_server_for_buffer(buffer, id, cx)
5947 .map(|(_, server)| Arc::clone(server)),
5948 };
5949 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
5950 if let (Some(file), Some(language_server)) = (file, language_server) {
5951 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
5952 return cx.spawn(move |this, cx| async move {
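// If the server doesn't advertise support for this request, return a default response
// instead of sending it.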
5953 if !request.check_capabilities(language_server.capabilities()) {
5954 return Ok(Default::default());
5955 }
5956
5957 let result = language_server.request::<R::LspRequest>(lsp_params).await;
5958 let response = match result {
5959 Ok(response) => response,
5960
5961 Err(err) => {
5962 log::warn!(
5963 "Generic lsp request to {} failed: {}",
5964 language_server.name(),
5965 err
5966 );
5967 return Err(err);
5968 }
5969 };
5970
5971 request
5972 .response_from_lsp(
5973 response,
5974 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
5975 buffer_handle,
5976 language_server.server_id(),
5977 cx,
5978 )
5979 .await
5980 });
5981 }
5982 } else if let Some(project_id) = self.remote_id() {
5983 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
5984 }
5985
5986 Task::ready(Ok(Default::default()))
5987 }
5988
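/// Forwards an `LspCommand` to the host of this remote project over RPC and converts the host's
/// response back into the command's result type.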
5989 fn send_lsp_proto_request<R: LspCommand>(
5990 &self,
5991 buffer: Model<Buffer>,
5992 project_id: u64,
5993 request: R,
5994 cx: &mut ModelContext<'_, Project>,
5995 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
5996 let rpc = self.client.clone();
5997 let message = request.to_proto(project_id, buffer.read(cx));
5998 cx.spawn(move |this, mut cx| async move {
5999 // Ensure the project is still alive by the time the task
6000 // is scheduled.
6001 this.upgrade().context("project dropped")?;
6002 let response = rpc.request(message).await?;
6003 let this = this.upgrade().context("project dropped")?;
6004 if this.update(&mut cx, |this, _| this.is_read_only())? {
6005 Err(anyhow!("disconnected before completing request"))
6006 } else {
6007 request
6008 .response_from_proto(response, this, buffer, cx)
6009 .await
6010 }
6011 })
6012 }
6013
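/// Drains `matching_paths_rx`, sorts the candidates by path (non-ignored paths first, ignored
/// paths last), and opens a buffer for each one in the background. Returns a oneshot receiver
/// yielding the full sorted candidate list, plus a channel yielding each opened buffer (with its
/// snapshot) tagged with its index in that sorted order.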
6014 fn sort_candidates_and_open_buffers(
6015 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6016 cx: &mut ModelContext<Self>,
6017 ) -> (
6018 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6019 Receiver<(
6020 Option<(Model<Buffer>, BufferSnapshot)>,
6021 SearchMatchCandidateIndex,
6022 )>,
6023 ) {
6024 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6025 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6026 cx.spawn(move |this, cx| async move {
6027 let mut buffers = Vec::new();
6028 let mut ignored_buffers = Vec::new();
6029 while let Some(entry) = matching_paths_rx.next().await {
6030 if matches!(
6031 entry,
6032 SearchMatchCandidate::Path {
6033 is_ignored: true,
6034 ..
6035 }
6036 ) {
6037 ignored_buffers.push(entry);
6038 } else {
6039 buffers.push(entry);
6040 }
6041 }
6042 buffers.sort_by_key(|candidate| candidate.path());
6043 ignored_buffers.sort_by_key(|candidate| candidate.path());
6044 buffers.extend(ignored_buffers);
6045 let matching_paths = buffers.clone();
6046 let _ = sorted_buffers_tx.send(buffers);
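// For each candidate in sorted order, open its buffer (reusing any already-open buffer) and
// send it, tagged with its index, to the search workers.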
6047 for (index, candidate) in matching_paths.into_iter().enumerate() {
6048 if buffers_tx.is_closed() {
6049 break;
6050 }
6051 let this = this.clone();
6052 let buffers_tx = buffers_tx.clone();
6053 cx.spawn(move |mut cx| async move {
6054 let buffer = match candidate {
6055 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6056 SearchMatchCandidate::Path {
6057 worktree_id, path, ..
6058 } => this
6059 .update(&mut cx, |this, cx| {
6060 this.open_buffer((worktree_id, path), cx)
6061 })?
6062 .await
6063 .log_err(),
6064 };
6065 if let Some(buffer) = buffer {
6066 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6067 buffers_tx
6068 .send((Some((buffer, snapshot)), index))
6069 .await
6070 .log_err();
6071 } else {
6072 buffers_tx.send((None, index)).await.log_err();
6073 }
6074
6075 Ok::<_, anyhow::Error>(())
6076 })
6077 .detach();
6078 }
6079 })
6080 .detach();
6081 (sorted_buffers_rx, buffers_rx)
6082 }
6083
6084 pub fn find_or_create_local_worktree(
6085 &mut self,
6086 abs_path: impl AsRef<Path>,
6087 visible: bool,
6088 cx: &mut ModelContext<Self>,
6089 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6090 let abs_path = abs_path.as_ref();
6091 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6092 Task::ready(Ok((tree, relative_path)))
6093 } else {
6094 let worktree = self.create_local_worktree(abs_path, visible, cx);
6095 cx.background_executor()
6096 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6097 }
6098 }
6099
6100 pub fn find_local_worktree(
6101 &self,
6102 abs_path: &Path,
6103 cx: &AppContext,
6104 ) -> Option<(Model<Worktree>, PathBuf)> {
6105 for tree in &self.worktrees {
6106 if let Some(tree) = tree.upgrade() {
6107 if let Some(relative_path) = tree
6108 .read(cx)
6109 .as_local()
6110 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6111 {
6112 return Some((tree.clone(), relative_path.into()));
6113 }
6114 }
6115 }
6116 None
6117 }
6118
6119 pub fn is_shared(&self) -> bool {
6120 match &self.client_state {
6121 Some(ProjectClientState::Local { .. }) => true,
6122 _ => false,
6123 }
6124 }
6125
6126 fn create_local_worktree(
6127 &mut self,
6128 abs_path: impl AsRef<Path>,
6129 visible: bool,
6130 cx: &mut ModelContext<Self>,
6131 ) -> Task<Result<Model<Worktree>>> {
6132 let fs = self.fs.clone();
6133 let client = self.client.clone();
6134 let next_entry_id = self.next_entry_id.clone();
6135 let path: Arc<Path> = abs_path.as_ref().into();
6136 let task = self
6137 .loading_local_worktrees
6138 .entry(path.clone())
6139 .or_insert_with(|| {
6140 cx.spawn(move |project, mut cx| {
6141 async move {
6142 let worktree = Worktree::local(
6143 client.clone(),
6144 path.clone(),
6145 visible,
6146 fs,
6147 next_entry_id,
6148 &mut cx,
6149 )
6150 .await;
6151
6152 project.update(&mut cx, |project, _| {
6153 project.loading_local_worktrees.remove(&path);
6154 })?;
6155
6156 let worktree = worktree?;
6157 project
6158 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6159 Ok(worktree)
6160 }
6161 .map_err(Arc::new)
6162 })
6163 .shared()
6164 })
6165 .clone();
6166 cx.background_executor().spawn(async move {
6167 match task.await {
6168 Ok(worktree) => Ok(worktree),
6169 Err(err) => Err(anyhow!("{}", err)),
6170 }
6171 })
6172 }
6173
6174 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6175 self.worktrees.retain(|worktree| {
6176 if let Some(worktree) = worktree.upgrade() {
6177 let id = worktree.read(cx).id();
6178 if id == id_to_remove {
6179 cx.emit(Event::WorktreeRemoved(id));
6180 false
6181 } else {
6182 true
6183 }
6184 } else {
6185 false
6186 }
6187 });
6188 self.metadata_changed(cx);
6189 }
6190
6191 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6192 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6193 if worktree.read(cx).is_local() {
6194 cx.subscribe(worktree, |this, worktree, event, cx| match event {
6195 worktree::Event::UpdatedEntries(changes) => {
6196 this.update_local_worktree_buffers(&worktree, changes, cx);
6197 this.update_local_worktree_language_servers(&worktree, changes, cx);
6198 this.update_local_worktree_settings(&worktree, changes, cx);
6199 this.update_prettier_settings(&worktree, changes, cx);
6200 cx.emit(Event::WorktreeUpdatedEntries(
6201 worktree.read(cx).id(),
6202 changes.clone(),
6203 ));
6204 }
6205 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6206 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6207 }
6208 })
6209 .detach();
6210 }
6211
6212 let push_strong_handle = {
6213 let worktree = worktree.read(cx);
6214 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6215 };
6216 if push_strong_handle {
6217 self.worktrees
6218 .push(WorktreeHandle::Strong(worktree.clone()));
6219 } else {
6220 self.worktrees
6221 .push(WorktreeHandle::Weak(worktree.downgrade()));
6222 }
6223
6224 let handle_id = worktree.entity_id();
6225 cx.observe_release(worktree, move |this, worktree, cx| {
6226 let _ = this.remove_worktree(worktree.id(), cx);
6227 cx.update_global::<SettingsStore, _>(|store, cx| {
6228 store
6229 .clear_local_settings(handle_id.as_u64() as usize, cx)
6230 .log_err()
6231 });
6232 })
6233 .detach();
6234
6235 cx.emit(Event::WorktreeAdded);
6236 self.metadata_changed(cx);
6237 }
6238
6239 fn update_local_worktree_buffers(
6240 &mut self,
6241 worktree_handle: &Model<Worktree>,
6242 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6243 cx: &mut ModelContext<Self>,
6244 ) {
6245 let snapshot = worktree_handle.read(cx).snapshot();
6246
6247 let mut renamed_buffers = Vec::new();
6248 for (path, entry_id, _) in changes {
6249 let worktree_id = worktree_handle.read(cx).id();
6250 let project_path = ProjectPath {
6251 worktree_id,
6252 path: path.clone(),
6253 };
6254
6255 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6256 Some(&buffer_id) => buffer_id,
6257 None => match self.local_buffer_ids_by_path.get(&project_path) {
6258 Some(&buffer_id) => buffer_id,
6259 None => {
6260 continue;
6261 }
6262 },
6263 };
6264
6265 let open_buffer = self.opened_buffers.get(&buffer_id);
6266 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6267 buffer
6268 } else {
6269 self.opened_buffers.remove(&buffer_id);
6270 self.local_buffer_ids_by_path.remove(&project_path);
6271 self.local_buffer_ids_by_entry_id.remove(entry_id);
6272 continue;
6273 };
6274
6275 buffer.update(cx, |buffer, cx| {
6276 if let Some(old_file) = File::from_dyn(buffer.file()) {
6277 if old_file.worktree != *worktree_handle {
6278 return;
6279 }
6280
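// Work out the buffer's new file: prefer the entry with the same id, fall back to an entry at
// the same path, and otherwise mark the file as deleted.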
6281 let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
6282 File {
6283 is_local: true,
6284 entry_id: entry.id,
6285 mtime: entry.mtime,
6286 path: entry.path.clone(),
6287 worktree: worktree_handle.clone(),
6288 is_deleted: false,
6289 }
6290 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6291 File {
6292 is_local: true,
6293 entry_id: entry.id,
6294 mtime: entry.mtime,
6295 path: entry.path.clone(),
6296 worktree: worktree_handle.clone(),
6297 is_deleted: false,
6298 }
6299 } else {
6300 File {
6301 is_local: true,
6302 entry_id: old_file.entry_id,
6303 path: old_file.path().clone(),
6304 mtime: old_file.mtime(),
6305 worktree: worktree_handle.clone(),
6306 is_deleted: true,
6307 }
6308 };
6309
6310 let old_path = old_file.abs_path(cx);
6311 if new_file.abs_path(cx) != old_path {
6312 renamed_buffers.push((cx.handle(), old_file.clone()));
6313 self.local_buffer_ids_by_path.remove(&project_path);
6314 self.local_buffer_ids_by_path.insert(
6315 ProjectPath {
6316 worktree_id,
6317 path: path.clone(),
6318 },
6319 buffer_id,
6320 );
6321 }
6322
6323 if new_file.entry_id != *entry_id {
6324 self.local_buffer_ids_by_entry_id.remove(entry_id);
6325 self.local_buffer_ids_by_entry_id
6326 .insert(new_file.entry_id, buffer_id);
6327 }
6328
6329 if new_file != *old_file {
6330 if let Some(project_id) = self.remote_id() {
6331 self.client
6332 .send(proto::UpdateBufferFile {
6333 project_id,
6334 buffer_id: buffer_id as u64,
6335 file: Some(new_file.to_proto()),
6336 })
6337 .log_err();
6338 }
6339
6340 buffer.file_updated(Arc::new(new_file), cx);
6341 }
6342 }
6343 });
6344 }
6345
6346 for (buffer, old_file) in renamed_buffers {
6347 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6348 self.detect_language_for_buffer(&buffer, cx);
6349 self.register_buffer_with_language_servers(&buffer, cx);
6350 }
6351 }
6352
6353 fn update_local_worktree_language_servers(
6354 &mut self,
6355 worktree_handle: &Model<Worktree>,
6356 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6357 cx: &mut ModelContext<Self>,
6358 ) {
6359 if changes.is_empty() {
6360 return;
6361 }
6362
6363 let worktree_id = worktree_handle.read(cx).id();
6364 let mut language_server_ids = self
6365 .language_server_ids
6366 .iter()
6367 .filter_map(|((server_worktree_id, _), server_id)| {
6368 (*server_worktree_id == worktree_id).then_some(*server_id)
6369 })
6370 .collect::<Vec<_>>();
6371 language_server_ids.sort();
6372 language_server_ids.dedup();
6373
6374 let abs_path = worktree_handle.read(cx).abs_path();
6375 for server_id in &language_server_ids {
6376 if let Some(LanguageServerState::Running {
6377 server,
6378 watched_paths,
6379 ..
6380 }) = self.language_servers.get(server_id)
6381 {
6382 if let Some(watched_paths) = watched_paths.get(&worktree_id) {
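// Convert the worktree changes into LSP file events, keeping only paths that match the
// globs this server registered to watch.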
6383 let params = lsp::DidChangeWatchedFilesParams {
6384 changes: changes
6385 .iter()
6386 .filter_map(|(path, _, change)| {
6387 if !watched_paths.is_match(&path) {
6388 return None;
6389 }
6390 let typ = match change {
6391 PathChange::Loaded => return None,
6392 PathChange::Added => lsp::FileChangeType::CREATED,
6393 PathChange::Removed => lsp::FileChangeType::DELETED,
6394 PathChange::Updated => lsp::FileChangeType::CHANGED,
6395 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
6396 };
6397 Some(lsp::FileEvent {
6398 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
6399 typ,
6400 })
6401 })
6402 .collect(),
6403 };
6404
6405 if !params.changes.is_empty() {
6406 server
6407 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
6408 .log_err();
6409 }
6410 }
6411 }
6412 }
6413 }
6414
6415 fn update_local_worktree_buffers_git_repos(
6416 &mut self,
6417 worktree_handle: Model<Worktree>,
6418 changed_repos: &UpdatedGitRepositoriesSet,
6419 cx: &mut ModelContext<Self>,
6420 ) {
6421 debug_assert!(worktree_handle.read(cx).is_local());
6422
6423 // Identify the loading buffers whose containing repository has changed.
6424 let future_buffers = self
6425 .loading_buffers_by_path
6426 .iter()
6427 .filter_map(|(project_path, receiver)| {
6428 if project_path.worktree_id != worktree_handle.read(cx).id() {
6429 return None;
6430 }
6431 let path = &project_path.path;
6432 changed_repos
6433 .iter()
6434 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6435 let receiver = receiver.clone();
6436 let path = path.clone();
6437 Some(async move {
6438 wait_for_loading_buffer(receiver)
6439 .await
6440 .ok()
6441 .map(|buffer| (buffer, path))
6442 })
6443 })
6444 .collect::<FuturesUnordered<_>>();
6445
6446 // Identify the current buffers whose containing repository has changed.
6447 let current_buffers = self
6448 .opened_buffers
6449 .values()
6450 .filter_map(|buffer| {
6451 let buffer = buffer.upgrade()?;
6452 let file = File::from_dyn(buffer.read(cx).file())?;
6453 if file.worktree != worktree_handle {
6454 return None;
6455 }
6456 let path = file.path();
6457 changed_repos
6458 .iter()
6459 .find(|(work_dir, _)| path.starts_with(work_dir))?;
6460 Some((buffer, path.clone()))
6461 })
6462 .collect::<Vec<_>>();
6463
6464 if future_buffers.len() + current_buffers.len() == 0 {
6465 return;
6466 }
6467
6468 let remote_id = self.remote_id();
6469 let client = self.client.clone();
6470 cx.spawn(move |_, mut cx| async move {
6471 // Wait for all of the buffers to load.
6472 let future_buffers = future_buffers.collect::<Vec<_>>().await;
6473
6474 // Reload the diff base for every buffer whose containing git repository has changed.
6475 let snapshot =
6476 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
6477 let diff_bases_by_buffer = cx
6478 .background_executor()
6479 .spawn(async move {
6480 future_buffers
6481 .into_iter()
6482 .filter_map(|e| e)
6483 .chain(current_buffers)
6484 .filter_map(|(buffer, path)| {
6485 let (work_directory, repo) =
6486 snapshot.repository_and_work_directory_for_path(&path)?;
6487 let repo = snapshot.get_local_repo(&repo)?;
6488 let relative_path = path.strip_prefix(&work_directory).ok()?;
6489 let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
6490 Some((buffer, base_text))
6491 })
6492 .collect::<Vec<_>>()
6493 })
6494 .await;
6495
6496 // Assign the new diff bases on all of the buffers.
6497 for (buffer, diff_base) in diff_bases_by_buffer {
6498 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
6499 buffer.set_diff_base(diff_base.clone(), cx);
6500 buffer.remote_id()
6501 })?;
6502 if let Some(project_id) = remote_id {
6503 client
6504 .send(proto::UpdateDiffBase {
6505 project_id,
6506 buffer_id,
6507 diff_base,
6508 })
6509 .log_err();
6510 }
6511 }
6512
6513 anyhow::Ok(())
6514 })
6515 .detach();
6516 }
6517
6518 fn update_local_worktree_settings(
6519 &mut self,
6520 worktree: &Model<Worktree>,
6521 changes: &UpdatedEntriesSet,
6522 cx: &mut ModelContext<Self>,
6523 ) {
6524 let project_id = self.remote_id();
6525 let worktree_id = worktree.entity_id();
6526 let worktree = worktree.read(cx).as_local().unwrap();
6527 let remote_worktree_id = worktree.id();
6528
6529 let mut settings_contents = Vec::new();
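// Collect the new contents of each changed local settings file; they are applied to the
// settings store (and forwarded to collaborators) below.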
6530 for (path, _, change) in changes.iter() {
6531 if path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
6532 let settings_dir = Arc::from(
6533 path.ancestors()
6534 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
6535 .unwrap(),
6536 );
6537 let fs = self.fs.clone();
6538 let removed = *change == PathChange::Removed;
6539 let abs_path = worktree.absolutize(path);
6540 settings_contents.push(async move {
6541 (settings_dir, (!removed).then_some(fs.load(&abs_path).await))
6542 });
6543 }
6544 }
6545
6546 if settings_contents.is_empty() {
6547 return;
6548 }
6549
6550 let client = self.client.clone();
6551 cx.spawn(move |_, cx| async move {
6552 let settings_contents: Vec<(Arc<Path>, _)> =
6553 futures::future::join_all(settings_contents).await;
6554 cx.update(|cx| {
6555 cx.update_global::<SettingsStore, _>(|store, cx| {
6556 for (directory, file_content) in settings_contents {
6557 let file_content = file_content.and_then(|content| content.log_err());
6558 store
6559 .set_local_settings(
6560 worktree_id.as_u64() as usize,
6561 directory.clone(),
6562 file_content.as_ref().map(String::as_str),
6563 cx,
6564 )
6565 .log_err();
6566 if let Some(remote_id) = project_id {
6567 client
6568 .send(proto::UpdateWorktreeSettings {
6569 project_id: remote_id,
6570 worktree_id: remote_worktree_id.to_proto(),
6571 path: directory.to_string_lossy().into_owned(),
6572 content: file_content,
6573 })
6574 .log_err();
6575 }
6576 }
6577 });
6578 })
6579 .ok();
6580 })
6581 .detach();
6582 }
6583
6584 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
6585 let new_active_entry = entry.and_then(|project_path| {
6586 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
6587 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
6588 Some(entry.id)
6589 });
6590 if new_active_entry != self.active_entry {
6591 self.active_entry = new_active_entry;
6592 cx.emit(Event::ActiveEntryChanged(new_active_entry));
6593 }
6594 }
6595
6596 pub fn language_servers_running_disk_based_diagnostics(
6597 &self,
6598 ) -> impl Iterator<Item = LanguageServerId> + '_ {
6599 self.language_server_statuses
6600 .iter()
6601 .filter_map(|(id, status)| {
6602 if status.has_pending_diagnostic_updates {
6603 Some(*id)
6604 } else {
6605 None
6606 }
6607 })
6608 }
6609
6610 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
6611 let mut summary = DiagnosticSummary::default();
6612 for (_, _, path_summary) in
6613 self.diagnostic_summaries(include_ignored, cx)
6614 .filter(|(path, _, _)| {
6615 let is_ignored = self.entry_for_path(&path, cx).map(|entry| entry.is_ignored);
6616 include_ignored || is_ignored == Some(false)
6617 })
6618 {
6619 summary.error_count += path_summary.error_count;
6620 summary.warning_count += path_summary.warning_count;
6621 }
6622 summary
6623 }
6624
6625 pub fn diagnostic_summaries<'a>(
6626 &'a self,
6627 include_ignored: bool,
6628 cx: &'a AppContext,
6629 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
6630 self.visible_worktrees(cx)
6631 .flat_map(move |worktree| {
6632 let worktree = worktree.read(cx);
6633 let worktree_id = worktree.id();
6634 worktree
6635 .diagnostic_summaries()
6636 .map(move |(path, server_id, summary)| {
6637 (ProjectPath { worktree_id, path }, server_id, summary)
6638 })
6639 })
6640 .filter(move |(path, _, _)| {
6641 let is_ignored = self.entry_for_path(&path, cx).map(|entry| entry.is_ignored);
6642 include_ignored || is_ignored == Some(false)
6643 })
6644 }
6645
6646 pub fn disk_based_diagnostics_started(
6647 &mut self,
6648 language_server_id: LanguageServerId,
6649 cx: &mut ModelContext<Self>,
6650 ) {
6651 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
6652 }
6653
6654 pub fn disk_based_diagnostics_finished(
6655 &mut self,
6656 language_server_id: LanguageServerId,
6657 cx: &mut ModelContext<Self>,
6658 ) {
6659 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
6660 }
6661
6662 pub fn active_entry(&self) -> Option<ProjectEntryId> {
6663 self.active_entry
6664 }
6665
6666 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
6667 self.worktree_for_id(path.worktree_id, cx)?
6668 .read(cx)
6669 .entry_for_path(&path.path)
6670 .cloned()
6671 }
6672
6673 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
6674 let worktree = self.worktree_for_entry(entry_id, cx)?;
6675 let worktree = worktree.read(cx);
6676 let worktree_id = worktree.id();
6677 let path = worktree.entry_for_id(entry_id)?.path.clone();
6678 Some(ProjectPath { worktree_id, path })
6679 }
6680
6681 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
6682 let workspace_root = self
6683 .worktree_for_id(project_path.worktree_id, cx)?
6684 .read(cx)
6685 .abs_path();
6686 let project_path = project_path.path.as_ref();
6687
6688 Some(if project_path == Path::new("") {
6689 workspace_root.to_path_buf()
6690 } else {
6691 workspace_root.join(project_path)
6692 })
6693 }
6694
6695 // RPC message handlers
6696
6697 async fn handle_unshare_project(
6698 this: Model<Self>,
6699 _: TypedEnvelope<proto::UnshareProject>,
6700 _: Arc<Client>,
6701 mut cx: AsyncAppContext,
6702 ) -> Result<()> {
6703 this.update(&mut cx, |this, cx| {
6704 if this.is_local() {
6705 this.unshare(cx)?;
6706 } else {
6707 this.disconnected_from_host(cx);
6708 }
6709 Ok(())
6710 })?
6711 }
6712
6713 async fn handle_add_collaborator(
6714 this: Model<Self>,
6715 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
6716 _: Arc<Client>,
6717 mut cx: AsyncAppContext,
6718 ) -> Result<()> {
6719 let collaborator = envelope
6720 .payload
6721 .collaborator
6722 .take()
6723 .ok_or_else(|| anyhow!("empty collaborator"))?;
6724
6725 let collaborator = Collaborator::from_proto(collaborator)?;
6726 this.update(&mut cx, |this, cx| {
6727 this.shared_buffers.remove(&collaborator.peer_id);
6728 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
6729 this.collaborators
6730 .insert(collaborator.peer_id, collaborator);
6731 cx.notify();
6732 })?;
6733
6734 Ok(())
6735 }
6736
6737 async fn handle_update_project_collaborator(
6738 this: Model<Self>,
6739 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
6740 _: Arc<Client>,
6741 mut cx: AsyncAppContext,
6742 ) -> Result<()> {
6743 let old_peer_id = envelope
6744 .payload
6745 .old_peer_id
6746 .ok_or_else(|| anyhow!("missing old peer id"))?;
6747 let new_peer_id = envelope
6748 .payload
6749 .new_peer_id
6750 .ok_or_else(|| anyhow!("missing new peer id"))?;
6751 this.update(&mut cx, |this, cx| {
6752 let collaborator = this
6753 .collaborators
6754 .remove(&old_peer_id)
6755 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
6756 let is_host = collaborator.replica_id == 0;
6757 this.collaborators.insert(new_peer_id, collaborator);
6758
6759 let buffers = this.shared_buffers.remove(&old_peer_id);
6760 log::info!(
6761 "peer {} became {}. moving buffers {:?}",
6762 old_peer_id,
6763 new_peer_id,
6764 &buffers
6765 );
6766 if let Some(buffers) = buffers {
6767 this.shared_buffers.insert(new_peer_id, buffers);
6768 }
6769
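// When the host rejoins under a new peer id, discard buffers held only as queued
// operations and request a full buffer resync.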
6770 if is_host {
6771 this.opened_buffers
6772 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
6773 this.buffer_ordered_messages_tx
6774 .unbounded_send(BufferOrderedMessage::Resync)
6775 .unwrap();
6776 }
6777
6778 cx.emit(Event::CollaboratorUpdated {
6779 old_peer_id,
6780 new_peer_id,
6781 });
6782 cx.notify();
6783 Ok(())
6784 })?
6785 }
6786
6787 async fn handle_remove_collaborator(
6788 this: Model<Self>,
6789 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
6790 _: Arc<Client>,
6791 mut cx: AsyncAppContext,
6792 ) -> Result<()> {
6793 this.update(&mut cx, |this, cx| {
6794 let peer_id = envelope
6795 .payload
6796 .peer_id
6797 .ok_or_else(|| anyhow!("invalid peer id"))?;
6798 let replica_id = this
6799 .collaborators
6800 .remove(&peer_id)
6801 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
6802 .replica_id;
6803 for buffer in this.opened_buffers.values() {
6804 if let Some(buffer) = buffer.upgrade() {
6805 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
6806 }
6807 }
6808 this.shared_buffers.remove(&peer_id);
6809
6810 cx.emit(Event::CollaboratorLeft(peer_id));
6811 cx.notify();
6812 Ok(())
6813 })?
6814 }
6815
6816 async fn handle_update_project(
6817 this: Model<Self>,
6818 envelope: TypedEnvelope<proto::UpdateProject>,
6819 _: Arc<Client>,
6820 mut cx: AsyncAppContext,
6821 ) -> Result<()> {
6822 this.update(&mut cx, |this, cx| {
6823 // Ignore messages that were sent before we received the response to joining the project.
6824 if envelope.message_id > this.join_project_response_message_id {
6825 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
6826 }
6827 Ok(())
6828 })?
6829 }
6830
6831 async fn handle_update_worktree(
6832 this: Model<Self>,
6833 envelope: TypedEnvelope<proto::UpdateWorktree>,
6834 _: Arc<Client>,
6835 mut cx: AsyncAppContext,
6836 ) -> Result<()> {
6837 this.update(&mut cx, |this, cx| {
6838 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6839 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6840 worktree.update(cx, |worktree, _| {
6841 let worktree = worktree.as_remote_mut().unwrap();
6842 worktree.update_from_remote(envelope.payload);
6843 });
6844 }
6845 Ok(())
6846 })?
6847 }
6848
6849 async fn handle_update_worktree_settings(
6850 this: Model<Self>,
6851 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
6852 _: Arc<Client>,
6853 mut cx: AsyncAppContext,
6854 ) -> Result<()> {
6855 this.update(&mut cx, |this, cx| {
6856 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6857 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
6858 cx.update_global::<SettingsStore, _>(|store, cx| {
6859 store
6860 .set_local_settings(
6861 worktree.entity_id().as_u64() as usize,
6862 PathBuf::from(&envelope.payload.path).into(),
6863 envelope.payload.content.as_ref().map(String::as_str),
6864 cx,
6865 )
6866 .log_err();
6867 });
6868 }
6869 Ok(())
6870 })?
6871 }
6872
6873 async fn handle_create_project_entry(
6874 this: Model<Self>,
6875 envelope: TypedEnvelope<proto::CreateProjectEntry>,
6876 _: Arc<Client>,
6877 mut cx: AsyncAppContext,
6878 ) -> Result<proto::ProjectEntryResponse> {
6879 let worktree = this.update(&mut cx, |this, cx| {
6880 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
6881 this.worktree_for_id(worktree_id, cx)
6882 .ok_or_else(|| anyhow!("worktree not found"))
6883 })??;
6884 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6885 let entry = worktree
6886 .update(&mut cx, |worktree, cx| {
6887 let worktree = worktree.as_local_mut().unwrap();
6888 let path = PathBuf::from(envelope.payload.path);
6889 worktree.create_entry(path, envelope.payload.is_directory, cx)
6890 })?
6891 .await?;
6892 Ok(proto::ProjectEntryResponse {
6893 entry: Some((&entry).into()),
6894 worktree_scan_id: worktree_scan_id as u64,
6895 })
6896 }
6897
6898 async fn handle_rename_project_entry(
6899 this: Model<Self>,
6900 envelope: TypedEnvelope<proto::RenameProjectEntry>,
6901 _: Arc<Client>,
6902 mut cx: AsyncAppContext,
6903 ) -> Result<proto::ProjectEntryResponse> {
6904 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6905 let worktree = this.update(&mut cx, |this, cx| {
6906 this.worktree_for_entry(entry_id, cx)
6907 .ok_or_else(|| anyhow!("worktree not found"))
6908 })??;
6909 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6910 let entry = worktree
6911 .update(&mut cx, |worktree, cx| {
6912 let new_path = PathBuf::from(envelope.payload.new_path);
6913 worktree
6914 .as_local_mut()
6915 .unwrap()
6916 .rename_entry(entry_id, new_path, cx)
6917 .ok_or_else(|| anyhow!("invalid entry"))
6918 })??
6919 .await?;
6920 Ok(proto::ProjectEntryResponse {
6921 entry: Some((&entry).into()),
6922 worktree_scan_id: worktree_scan_id as u64,
6923 })
6924 }
6925
6926 async fn handle_copy_project_entry(
6927 this: Model<Self>,
6928 envelope: TypedEnvelope<proto::CopyProjectEntry>,
6929 _: Arc<Client>,
6930 mut cx: AsyncAppContext,
6931 ) -> Result<proto::ProjectEntryResponse> {
6932 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6933 let worktree = this.update(&mut cx, |this, cx| {
6934 this.worktree_for_entry(entry_id, cx)
6935 .ok_or_else(|| anyhow!("worktree not found"))
6936 })??;
6937 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6938 let entry = worktree
6939 .update(&mut cx, |worktree, cx| {
6940 let new_path = PathBuf::from(envelope.payload.new_path);
6941 worktree
6942 .as_local_mut()
6943 .unwrap()
6944 .copy_entry(entry_id, new_path, cx)
6945 .ok_or_else(|| anyhow!("invalid entry"))
6946 })??
6947 .await?;
6948 Ok(proto::ProjectEntryResponse {
6949 entry: Some((&entry).into()),
6950 worktree_scan_id: worktree_scan_id as u64,
6951 })
6952 }
6953
6954 async fn handle_delete_project_entry(
6955 this: Model<Self>,
6956 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
6957 _: Arc<Client>,
6958 mut cx: AsyncAppContext,
6959 ) -> Result<proto::ProjectEntryResponse> {
6960 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6961
6962 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
6963
6964 let worktree = this.update(&mut cx, |this, cx| {
6965 this.worktree_for_entry(entry_id, cx)
6966 .ok_or_else(|| anyhow!("worktree not found"))
6967 })??;
6968 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
6969 worktree
6970 .update(&mut cx, |worktree, cx| {
6971 worktree
6972 .as_local_mut()
6973 .unwrap()
6974 .delete_entry(entry_id, cx)
6975 .ok_or_else(|| anyhow!("invalid entry"))
6976 })??
6977 .await?;
6978 Ok(proto::ProjectEntryResponse {
6979 entry: None,
6980 worktree_scan_id: worktree_scan_id as u64,
6981 })
6982 }
6983
6984 async fn handle_expand_project_entry(
6985 this: Model<Self>,
6986 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
6987 _: Arc<Client>,
6988 mut cx: AsyncAppContext,
6989 ) -> Result<proto::ExpandProjectEntryResponse> {
6990 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
6991 let worktree = this
6992 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
6993 .ok_or_else(|| anyhow!("invalid request"))?;
6994 worktree
6995 .update(&mut cx, |worktree, cx| {
6996 worktree
6997 .as_local_mut()
6998 .unwrap()
6999 .expand_entry(entry_id, cx)
7000 .ok_or_else(|| anyhow!("invalid entry"))
7001 })??
7002 .await?;
7003 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7004 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7005 }
7006
7007 async fn handle_update_diagnostic_summary(
7008 this: Model<Self>,
7009 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7010 _: Arc<Client>,
7011 mut cx: AsyncAppContext,
7012 ) -> Result<()> {
7013 this.update(&mut cx, |this, cx| {
7014 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7015 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7016 if let Some(summary) = envelope.payload.summary {
7017 let project_path = ProjectPath {
7018 worktree_id,
7019 path: Path::new(&summary.path).into(),
7020 };
7021 worktree.update(cx, |worktree, _| {
7022 worktree
7023 .as_remote_mut()
7024 .unwrap()
7025 .update_diagnostic_summary(project_path.path.clone(), &summary);
7026 });
7027 cx.emit(Event::DiagnosticsUpdated {
7028 language_server_id: LanguageServerId(summary.language_server_id as usize),
7029 path: project_path,
7030 });
7031 }
7032 }
7033 Ok(())
7034 })?
7035 }
7036
7037 async fn handle_start_language_server(
7038 this: Model<Self>,
7039 envelope: TypedEnvelope<proto::StartLanguageServer>,
7040 _: Arc<Client>,
7041 mut cx: AsyncAppContext,
7042 ) -> Result<()> {
7043 let server = envelope
7044 .payload
7045 .server
7046 .ok_or_else(|| anyhow!("invalid server"))?;
7047 this.update(&mut cx, |this, cx| {
7048 this.language_server_statuses.insert(
7049 LanguageServerId(server.id as usize),
7050 LanguageServerStatus {
7051 name: server.name,
7052 pending_work: Default::default(),
7053 has_pending_diagnostic_updates: false,
7054 progress_tokens: Default::default(),
7055 },
7056 );
7057 cx.notify();
7058 })?;
7059 Ok(())
7060 }
7061
7062 async fn handle_update_language_server(
7063 this: Model<Self>,
7064 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7065 _: Arc<Client>,
7066 mut cx: AsyncAppContext,
7067 ) -> Result<()> {
7068 this.update(&mut cx, |this, cx| {
7069 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7070
7071 match envelope
7072 .payload
7073 .variant
7074 .ok_or_else(|| anyhow!("invalid variant"))?
7075 {
7076 proto::update_language_server::Variant::WorkStart(payload) => {
7077 this.on_lsp_work_start(
7078 language_server_id,
7079 payload.token,
7080 LanguageServerProgress {
7081 message: payload.message,
7082 percentage: payload.percentage.map(|p| p as usize),
7083 last_update_at: Instant::now(),
7084 },
7085 cx,
7086 );
7087 }
7088
7089 proto::update_language_server::Variant::WorkProgress(payload) => {
7090 this.on_lsp_work_progress(
7091 language_server_id,
7092 payload.token,
7093 LanguageServerProgress {
7094 message: payload.message,
7095 percentage: payload.percentage.map(|p| p as usize),
7096 last_update_at: Instant::now(),
7097 },
7098 cx,
7099 );
7100 }
7101
7102 proto::update_language_server::Variant::WorkEnd(payload) => {
7103 this.on_lsp_work_end(language_server_id, payload.token, cx);
7104 }
7105
7106 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7107 this.disk_based_diagnostics_started(language_server_id, cx);
7108 }
7109
7110 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7111 this.disk_based_diagnostics_finished(language_server_id, cx)
7112 }
7113 }
7114
7115 Ok(())
7116 })?
7117 }
7118
7119 async fn handle_update_buffer(
7120 this: Model<Self>,
7121 envelope: TypedEnvelope<proto::UpdateBuffer>,
7122 _: Arc<Client>,
7123 mut cx: AsyncAppContext,
7124 ) -> Result<proto::Ack> {
7125 this.update(&mut cx, |this, cx| {
7126 let payload = envelope.payload.clone();
7127 let buffer_id = payload.buffer_id;
7128 let ops = payload
7129 .operations
7130 .into_iter()
7131 .map(language::proto::deserialize_operation)
7132 .collect::<Result<Vec<_>, _>>()?;
7133 let is_remote = this.is_remote();
7134 match this.opened_buffers.entry(buffer_id) {
7135 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7136 OpenBuffer::Strong(buffer) => {
7137 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7138 }
7139 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7140 OpenBuffer::Weak(_) => {}
7141 },
7142 hash_map::Entry::Vacant(e) => {
7143 assert!(
7144 is_remote,
7145 "received buffer update from {:?}",
7146 envelope.original_sender_id
7147 );
7148 e.insert(OpenBuffer::Operations(ops));
7149 }
7150 }
7151 Ok(proto::Ack {})
7152 })?
7153 }
7154
7155 async fn handle_create_buffer_for_peer(
7156 this: Model<Self>,
7157 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7158 _: Arc<Client>,
7159 mut cx: AsyncAppContext,
7160 ) -> Result<()> {
7161 this.update(&mut cx, |this, cx| {
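// Remote buffers arrive as an initial `State` message followed by `Chunk` messages carrying
// operations; the buffer is registered once the last chunk arrives.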
7162 match envelope
7163 .payload
7164 .variant
7165 .ok_or_else(|| anyhow!("missing variant"))?
7166 {
7167 proto::create_buffer_for_peer::Variant::State(mut state) => {
7168 let mut buffer_file = None;
7169 if let Some(file) = state.file.take() {
7170 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7171 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7172 anyhow!("no worktree found for id {}", file.worktree_id)
7173 })?;
7174 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7175 as Arc<dyn language::File>);
7176 }
7177
7178 let buffer_id = state.id;
7179 let buffer = cx.build_model(|_| {
7180 Buffer::from_proto(this.replica_id(), state, buffer_file).unwrap()
7181 });
7182 this.incomplete_remote_buffers
7183 .insert(buffer_id, Some(buffer));
7184 }
7185 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7186 let buffer = this
7187 .incomplete_remote_buffers
7188 .get(&chunk.buffer_id)
7189 .cloned()
7190 .flatten()
7191 .ok_or_else(|| {
7192 anyhow!(
7193 "received chunk for buffer {} without initial state",
7194 chunk.buffer_id
7195 )
7196 })?;
7197 let operations = chunk
7198 .operations
7199 .into_iter()
7200 .map(language::proto::deserialize_operation)
7201 .collect::<Result<Vec<_>>>()?;
7202 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7203
7204 if chunk.is_last {
7205 this.incomplete_remote_buffers.remove(&chunk.buffer_id);
7206 this.register_buffer(&buffer, cx)?;
7207 }
7208 }
7209 }
7210
7211 Ok(())
7212 })?
7213 }
7214
7215 async fn handle_update_diff_base(
7216 this: Model<Self>,
7217 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7218 _: Arc<Client>,
7219 mut cx: AsyncAppContext,
7220 ) -> Result<()> {
7221 this.update(&mut cx, |this, cx| {
7222 let buffer_id = envelope.payload.buffer_id;
7223 let diff_base = envelope.payload.diff_base;
7224 if let Some(buffer) = this
7225 .opened_buffers
7226 .get_mut(&buffer_id)
7227 .and_then(|b| b.upgrade())
7228 .or_else(|| {
7229 this.incomplete_remote_buffers
7230 .get(&buffer_id)
7231 .cloned()
7232 .flatten()
7233 })
7234 {
7235 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7236 }
7237 Ok(())
7238 })?
7239 }
7240
7241 async fn handle_update_buffer_file(
7242 this: Model<Self>,
7243 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7244 _: Arc<Client>,
7245 mut cx: AsyncAppContext,
7246 ) -> Result<()> {
7247 let buffer_id = envelope.payload.buffer_id;
7248
7249 this.update(&mut cx, |this, cx| {
7250 let payload = envelope.payload.clone();
7251 if let Some(buffer) = this
7252 .opened_buffers
7253 .get(&buffer_id)
7254 .and_then(|b| b.upgrade())
7255 .or_else(|| {
7256 this.incomplete_remote_buffers
7257 .get(&buffer_id)
7258 .cloned()
7259 .flatten()
7260 })
7261 {
7262 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7263 let worktree = this
7264 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7265 .ok_or_else(|| anyhow!("no such worktree"))?;
7266 let file = File::from_proto(file, worktree, cx)?;
7267 buffer.update(cx, |buffer, cx| {
7268 buffer.file_updated(Arc::new(file), cx);
7269 });
7270 this.detect_language_for_buffer(&buffer, cx);
7271 }
7272 Ok(())
7273 })?
7274 }
7275
7276 async fn handle_save_buffer(
7277 this: Model<Self>,
7278 envelope: TypedEnvelope<proto::SaveBuffer>,
7279 _: Arc<Client>,
7280 mut cx: AsyncAppContext,
7281 ) -> Result<proto::BufferSaved> {
7282 let buffer_id = envelope.payload.buffer_id;
7283 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7284 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7285 let buffer = this
7286 .opened_buffers
7287 .get(&buffer_id)
7288 .and_then(|buffer| buffer.upgrade())
7289 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7290 anyhow::Ok((project_id, buffer))
7291 })??;
7292 buffer
7293 .update(&mut cx, |buffer, _| {
7294 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7295 })?
7296 .await?;
7297 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
7298
7299 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
7300 .await?;
7301 Ok(buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
7302 project_id,
7303 buffer_id,
7304 version: serialize_version(buffer.saved_version()),
7305 mtime: Some(buffer.saved_mtime().into()),
7306 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
7307 })?)
7308 }
7309
7310 async fn handle_reload_buffers(
7311 this: Model<Self>,
7312 envelope: TypedEnvelope<proto::ReloadBuffers>,
7313 _: Arc<Client>,
7314 mut cx: AsyncAppContext,
7315 ) -> Result<proto::ReloadBuffersResponse> {
7316 let sender_id = envelope.original_sender_id()?;
7317 let reload = this.update(&mut cx, |this, cx| {
7318 let mut buffers = HashSet::default();
7319 for buffer_id in &envelope.payload.buffer_ids {
7320 buffers.insert(
7321 this.opened_buffers
7322 .get(buffer_id)
7323 .and_then(|buffer| buffer.upgrade())
7324 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7325 );
7326 }
7327 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
7328 })??;
7329
7330 let project_transaction = reload.await?;
7331 let project_transaction = this.update(&mut cx, |this, cx| {
7332 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7333 })?;
7334 Ok(proto::ReloadBuffersResponse {
7335 transaction: Some(project_transaction),
7336 })
7337 }
7338
7339 async fn handle_synchronize_buffers(
7340 this: Model<Self>,
7341 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
7342 _: Arc<Client>,
7343 mut cx: AsyncAppContext,
7344 ) -> Result<proto::SynchronizeBuffersResponse> {
7345 let project_id = envelope.payload.project_id;
7346 let mut response = proto::SynchronizeBuffersResponse {
7347 buffers: Default::default(),
7348 };
7349
7350 this.update(&mut cx, |this, cx| {
7351 let Some(guest_id) = envelope.original_sender_id else {
7352 error!("missing original_sender_id on SynchronizeBuffers request");
7353 return;
7354 };
7355
7356 this.shared_buffers.entry(guest_id).or_default().clear();
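// For each buffer the guest reports, mark it as shared again, reply with our current version,
// resend its file, diff-base, and saved-state metadata, and stream any operations the guest is
// missing in the background.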
7357 for buffer in envelope.payload.buffers {
7358 let buffer_id = buffer.id;
7359 let remote_version = language::proto::deserialize_version(&buffer.version);
7360 if let Some(buffer) = this.buffer_for_id(buffer_id) {
7361 this.shared_buffers
7362 .entry(guest_id)
7363 .or_default()
7364 .insert(buffer_id);
7365
7366 let buffer = buffer.read(cx);
7367 response.buffers.push(proto::BufferVersion {
7368 id: buffer_id,
7369 version: language::proto::serialize_version(&buffer.version),
7370 });
7371
7372 let operations = buffer.serialize_ops(Some(remote_version), cx);
7373 let client = this.client.clone();
7374 if let Some(file) = buffer.file() {
7375 client
7376 .send(proto::UpdateBufferFile {
7377 project_id,
7378 buffer_id: buffer_id as u64,
7379 file: Some(file.to_proto()),
7380 })
7381 .log_err();
7382 }
7383
7384 client
7385 .send(proto::UpdateDiffBase {
7386 project_id,
7387 buffer_id: buffer_id as u64,
7388 diff_base: buffer.diff_base().map(Into::into),
7389 })
7390 .log_err();
7391
7392 client
7393 .send(proto::BufferReloaded {
7394 project_id,
7395 buffer_id,
7396 version: language::proto::serialize_version(buffer.saved_version()),
7397 mtime: Some(buffer.saved_mtime().into()),
7398 fingerprint: language::proto::serialize_fingerprint(
7399 buffer.saved_version_fingerprint(),
7400 ),
7401 line_ending: language::proto::serialize_line_ending(
7402 buffer.line_ending(),
7403 ) as i32,
7404 })
7405 .log_err();
7406
7407 cx.background_executor()
7408 .spawn(
7409 async move {
7410 let operations = operations.await;
7411 for chunk in split_operations(operations) {
7412 client
7413 .request(proto::UpdateBuffer {
7414 project_id,
7415 buffer_id,
7416 operations: chunk,
7417 })
7418 .await?;
7419 }
7420 anyhow::Ok(())
7421 }
7422 .log_err(),
7423 )
7424 .detach();
7425 }
7426 }
7427 })?;
7428
7429 Ok(response)
7430 }
7431
7432 async fn handle_format_buffers(
7433 this: Model<Self>,
7434 envelope: TypedEnvelope<proto::FormatBuffers>,
7435 _: Arc<Client>,
7436 mut cx: AsyncAppContext,
7437 ) -> Result<proto::FormatBuffersResponse> {
7438 let sender_id = envelope.original_sender_id()?;
7439 let format = this.update(&mut cx, |this, cx| {
7440 let mut buffers = HashSet::default();
7441 for buffer_id in &envelope.payload.buffer_ids {
7442 buffers.insert(
7443 this.opened_buffers
7444 .get(buffer_id)
7445 .and_then(|buffer| buffer.upgrade())
7446 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
7447 );
7448 }
7449 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
7450 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
7451 })??;
7452
7453 let project_transaction = format.await?;
7454 let project_transaction = this.update(&mut cx, |this, cx| {
7455 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7456 })?;
7457 Ok(proto::FormatBuffersResponse {
7458 transaction: Some(project_transaction),
7459 })
7460 }
7461
7462 async fn handle_apply_additional_edits_for_completion(
7463 this: Model<Self>,
7464 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
7465 _: Arc<Client>,
7466 mut cx: AsyncAppContext,
7467 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
7468 let (buffer, completion) = this.update(&mut cx, |this, cx| {
7469 let buffer = this
7470 .opened_buffers
7471 .get(&envelope.payload.buffer_id)
7472 .and_then(|buffer| buffer.upgrade())
7473 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7474 let language = buffer.read(cx).language();
7475 let completion = language::proto::deserialize_completion(
7476 envelope
7477 .payload
7478 .completion
7479 .ok_or_else(|| anyhow!("invalid completion"))?,
7480 language.cloned(),
7481 );
7482 Ok::<_, anyhow::Error>((buffer, completion))
7483 })??;
7484
7485 let completion = completion.await?;
7486
7487 let apply_additional_edits = this.update(&mut cx, |this, cx| {
7488 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
7489 })?;
7490
7491 Ok(proto::ApplyCompletionAdditionalEditsResponse {
7492 transaction: apply_additional_edits
7493 .await?
7494 .as_ref()
7495 .map(language::proto::serialize_transaction),
7496 })
7497 }
7498
7499 async fn handle_apply_code_action(
7500 this: Model<Self>,
7501 envelope: TypedEnvelope<proto::ApplyCodeAction>,
7502 _: Arc<Client>,
7503 mut cx: AsyncAppContext,
7504 ) -> Result<proto::ApplyCodeActionResponse> {
7505 let sender_id = envelope.original_sender_id()?;
7506 let action = language::proto::deserialize_code_action(
7507 envelope
7508 .payload
7509 .action
7510 .ok_or_else(|| anyhow!("invalid action"))?,
7511 )?;
7512 let apply_code_action = this.update(&mut cx, |this, cx| {
7513 let buffer = this
7514 .opened_buffers
7515 .get(&envelope.payload.buffer_id)
7516 .and_then(|buffer| buffer.upgrade())
7517 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7518 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
7519 })??;
7520
7521 let project_transaction = apply_code_action.await?;
7522 let project_transaction = this.update(&mut cx, |this, cx| {
7523 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
7524 })?;
7525 Ok(proto::ApplyCodeActionResponse {
7526 transaction: Some(project_transaction),
7527 })
7528 }
7529
7530 async fn handle_on_type_formatting(
7531 this: Model<Self>,
7532 envelope: TypedEnvelope<proto::OnTypeFormatting>,
7533 _: Arc<Client>,
7534 mut cx: AsyncAppContext,
7535 ) -> Result<proto::OnTypeFormattingResponse> {
7536 let on_type_formatting = this.update(&mut cx, |this, cx| {
7537 let buffer = this
7538 .opened_buffers
7539 .get(&envelope.payload.buffer_id)
7540 .and_then(|buffer| buffer.upgrade())
7541 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
7542 let position = envelope
7543 .payload
7544 .position
7545 .and_then(deserialize_anchor)
7546 .ok_or_else(|| anyhow!("invalid position"))?;
7547 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
7548 buffer,
7549 position,
7550 envelope.payload.trigger.clone(),
7551 cx,
7552 ))
7553 })??;
7554
7555 let transaction = on_type_formatting
7556 .await?
7557 .as_ref()
7558 .map(language::proto::serialize_transaction);
7559 Ok(proto::OnTypeFormattingResponse { transaction })
7560 }
7561
7562 async fn handle_inlay_hints(
7563 this: Model<Self>,
7564 envelope: TypedEnvelope<proto::InlayHints>,
7565 _: Arc<Client>,
7566 mut cx: AsyncAppContext,
7567 ) -> Result<proto::InlayHintsResponse> {
7568 let sender_id = envelope.original_sender_id()?;
7569 let buffer = this.update(&mut cx, |this, _| {
7570 this.opened_buffers
7571 .get(&envelope.payload.buffer_id)
7572 .and_then(|buffer| buffer.upgrade())
7573 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7574 })??;
7575 let buffer_version = deserialize_version(&envelope.payload.version);
7576
7577 buffer
7578 .update(&mut cx, |buffer, _| {
7579 buffer.wait_for_version(buffer_version.clone())
7580 })?
7581 .await
7582 .with_context(|| {
7583 format!(
7584 "waiting for version {:?} for buffer {}",
7585 buffer_version,
7586 buffer.entity_id()
7587 )
7588 })?;
7589
7590 let start = envelope
7591 .payload
7592 .start
7593 .and_then(deserialize_anchor)
7594 .context("missing range start")?;
7595 let end = envelope
7596 .payload
7597 .end
7598 .and_then(deserialize_anchor)
7599 .context("missing range end")?;
7600 let buffer_hints = this
7601 .update(&mut cx, |project, cx| {
7602 project.inlay_hints(buffer, start..end, cx)
7603 })?
7604 .await
7605 .context("inlay hints fetch")?;
7606
7607 Ok(this.update(&mut cx, |project, cx| {
7608 InlayHints::response_to_proto(buffer_hints, project, sender_id, &buffer_version, cx)
7609 })?)
7610 }
7611
7612 async fn handle_resolve_inlay_hint(
7613 this: Model<Self>,
7614 envelope: TypedEnvelope<proto::ResolveInlayHint>,
7615 _: Arc<Client>,
7616 mut cx: AsyncAppContext,
7617 ) -> Result<proto::ResolveInlayHintResponse> {
7618 let proto_hint = envelope
7619 .payload
7620 .hint
            .ok_or_else(|| {
                anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint")
            })?;
7622 let hint = InlayHints::proto_to_project_hint(proto_hint)
7623 .context("resolved proto inlay hint conversion")?;
7624 let buffer = this.update(&mut cx, |this, _cx| {
7625 this.opened_buffers
7626 .get(&envelope.payload.buffer_id)
7627 .and_then(|buffer| buffer.upgrade())
7628 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
7629 })??;
7630 let response_hint = this
7631 .update(&mut cx, |project, cx| {
7632 project.resolve_inlay_hint(
7633 hint,
7634 buffer,
7635 LanguageServerId(envelope.payload.language_server_id as usize),
7636 cx,
7637 )
7638 })?
7639 .await
            .context("inlay hint resolution")?;
7641 Ok(proto::ResolveInlayHintResponse {
7642 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
7643 })
7644 }
7645
7646 async fn handle_refresh_inlay_hints(
7647 this: Model<Self>,
7648 _: TypedEnvelope<proto::RefreshInlayHints>,
7649 _: Arc<Client>,
7650 mut cx: AsyncAppContext,
7651 ) -> Result<proto::Ack> {
7652 this.update(&mut cx, |_, cx| {
7653 cx.emit(Event::RefreshInlayHints);
7654 })?;
7655 Ok(proto::Ack {})
7656 }
7657
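    // Generic handler for LSP requests forwarded by a peer: looks up the buffer,
    // rebuilds the typed request from its protobuf form, sends it to the primary
    // language server for that buffer, and serializes the response back for the
    // requesting peer.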
7658 async fn handle_lsp_command<T: LspCommand>(
7659 this: Model<Self>,
7660 envelope: TypedEnvelope<T::ProtoRequest>,
7661 _: Arc<Client>,
7662 mut cx: AsyncAppContext,
7663 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
7664 where
7665 <T::LspRequest as lsp::request::Request>::Params: Send,
7666 <T::LspRequest as lsp::request::Request>::Result: Send,
7667 {
7668 let sender_id = envelope.original_sender_id()?;
7669 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
7670 let buffer_handle = this.update(&mut cx, |this, _cx| {
7671 this.opened_buffers
7672 .get(&buffer_id)
7673 .and_then(|buffer| buffer.upgrade())
7674 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7675 })??;
7676 let request = T::from_proto(
7677 envelope.payload,
7678 this.clone(),
7679 buffer_handle.clone(),
7680 cx.clone(),
7681 )
7682 .await?;
7683 let buffer_version = buffer_handle.update(&mut cx, |buffer, _| buffer.version())?;
7684 let response = this
7685 .update(&mut cx, |this, cx| {
7686 this.request_lsp(buffer_handle, LanguageServerToQuery::Primary, request, cx)
7687 })?
7688 .await?;
7689 this.update(&mut cx, |this, cx| {
7690 Ok(T::response_to_proto(
7691 response,
7692 this,
7693 sender_id,
7694 &buffer_version,
7695 cx,
7696 ))
7697 })?
7698 }
7699
7700 async fn handle_get_project_symbols(
7701 this: Model<Self>,
7702 envelope: TypedEnvelope<proto::GetProjectSymbols>,
7703 _: Arc<Client>,
7704 mut cx: AsyncAppContext,
7705 ) -> Result<proto::GetProjectSymbolsResponse> {
7706 let symbols = this
7707 .update(&mut cx, |this, cx| {
7708 this.symbols(&envelope.payload.query, cx)
7709 })?
7710 .await?;
7711
7712 Ok(proto::GetProjectSymbolsResponse {
7713 symbols: symbols.iter().map(serialize_symbol).collect(),
7714 })
7715 }
7716
7717 async fn handle_search_project(
7718 this: Model<Self>,
7719 envelope: TypedEnvelope<proto::SearchProject>,
7720 _: Arc<Client>,
7721 mut cx: AsyncAppContext,
7722 ) -> Result<proto::SearchProjectResponse> {
7723 let peer_id = envelope.original_sender_id()?;
7724 let query = SearchQuery::from_proto(envelope.payload)?;
7725 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
7726
7727 cx.spawn(move |mut cx| async move {
7728 let mut locations = Vec::new();
7729 while let Some((buffer, ranges)) = result.next().await {
7730 for range in ranges {
7731 let start = serialize_anchor(&range.start);
7732 let end = serialize_anchor(&range.end);
7733 let buffer_id = this.update(&mut cx, |this, cx| {
7734 this.create_buffer_for_peer(&buffer, peer_id, cx)
7735 })?;
7736 locations.push(proto::Location {
7737 buffer_id,
7738 start: Some(start),
7739 end: Some(end),
7740 });
7741 }
7742 }
7743 Ok(proto::SearchProjectResponse { locations })
7744 })
7745 .await
7746 }
7747
7748 async fn handle_open_buffer_for_symbol(
7749 this: Model<Self>,
7750 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
7751 _: Arc<Client>,
7752 mut cx: AsyncAppContext,
7753 ) -> Result<proto::OpenBufferForSymbolResponse> {
7754 let peer_id = envelope.original_sender_id()?;
7755 let symbol = envelope
7756 .payload
7757 .symbol
7758 .ok_or_else(|| anyhow!("invalid symbol"))?;
7759 let symbol = this
7760 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
7761 .await?;
7762 let symbol = this.update(&mut cx, |this, _| {
7763 let signature = this.symbol_signature(&symbol.path);
7764 if signature == symbol.signature {
7765 Ok(symbol)
7766 } else {
7767 Err(anyhow!("invalid symbol signature"))
7768 }
7769 })??;
7770 let buffer = this
7771 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
7772 .await?;
7773
7774 Ok(proto::OpenBufferForSymbolResponse {
7775 buffer_id: this.update(&mut cx, |this, cx| {
7776 this.create_buffer_for_peer(&buffer, peer_id, cx)
7777 })?,
7778 })
7779 }
7780
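    // Computes a SHA-256 digest over the symbol's worktree id, path, and this
    // project's nonce. `handle_open_buffer_for_symbol` compares this signature
    // against the one carried by an incoming symbol to reject symbols that did
    // not originate from this project.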
7781 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
7782 let mut hasher = Sha256::new();
7783 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
7784 hasher.update(project_path.path.to_string_lossy().as_bytes());
7785 hasher.update(self.nonce.to_be_bytes());
7786 hasher.finalize().as_slice().try_into().unwrap()
7787 }
7788
7789 async fn handle_open_buffer_by_id(
7790 this: Model<Self>,
7791 envelope: TypedEnvelope<proto::OpenBufferById>,
7792 _: Arc<Client>,
7793 mut cx: AsyncAppContext,
7794 ) -> Result<proto::OpenBufferResponse> {
7795 let peer_id = envelope.original_sender_id()?;
7796 let buffer = this
7797 .update(&mut cx, |this, cx| {
7798 this.open_buffer_by_id(envelope.payload.id, cx)
7799 })?
7800 .await?;
7801 this.update(&mut cx, |this, cx| {
7802 Ok(proto::OpenBufferResponse {
7803 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7804 })
7805 })?
7806 }
7807
7808 async fn handle_open_buffer_by_path(
7809 this: Model<Self>,
7810 envelope: TypedEnvelope<proto::OpenBufferByPath>,
7811 _: Arc<Client>,
7812 mut cx: AsyncAppContext,
7813 ) -> Result<proto::OpenBufferResponse> {
7814 let peer_id = envelope.original_sender_id()?;
7815 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7816 let open_buffer = this.update(&mut cx, |this, cx| {
7817 this.open_buffer(
7818 ProjectPath {
7819 worktree_id,
7820 path: PathBuf::from(envelope.payload.path).into(),
7821 },
7822 cx,
7823 )
7824 })?;
7825
7826 let buffer = open_buffer.await?;
7827 this.update(&mut cx, |this, cx| {
7828 Ok(proto::OpenBufferResponse {
7829 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx),
7830 })
7831 })?
7832 }
7833
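    // Converts a `ProjectTransaction` into its protobuf form, registering each
    // affected buffer with the peer so that the buffer ids in the message are
    // meaningful on the remote side.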
7834 fn serialize_project_transaction_for_peer(
7835 &mut self,
7836 project_transaction: ProjectTransaction,
7837 peer_id: proto::PeerId,
7838 cx: &mut AppContext,
7839 ) -> proto::ProjectTransaction {
7840 let mut serialized_transaction = proto::ProjectTransaction {
7841 buffer_ids: Default::default(),
7842 transactions: Default::default(),
7843 };
7844 for (buffer, transaction) in project_transaction.0 {
7845 serialized_transaction
7846 .buffer_ids
7847 .push(self.create_buffer_for_peer(&buffer, peer_id, cx));
7848 serialized_transaction
7849 .transactions
7850 .push(language::proto::serialize_transaction(&transaction));
7851 }
7852 serialized_transaction
7853 }
7854
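    // The inverse of `serialize_project_transaction_for_peer`: waits for each
    // referenced remote buffer to arrive, deserializes its transaction, waits for
    // the transaction's edits, and optionally pushes the transaction onto the
    // buffer's undo history.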
7855 fn deserialize_project_transaction(
7856 &mut self,
7857 message: proto::ProjectTransaction,
7858 push_to_history: bool,
7859 cx: &mut ModelContext<Self>,
7860 ) -> Task<Result<ProjectTransaction>> {
7861 cx.spawn(move |this, mut cx| async move {
7862 let mut project_transaction = ProjectTransaction::default();
7863 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
7864 {
7865 let buffer = this
7866 .update(&mut cx, |this, cx| {
7867 this.wait_for_remote_buffer(buffer_id, cx)
7868 })?
7869 .await?;
7870 let transaction = language::proto::deserialize_transaction(transaction)?;
7871 project_transaction.0.insert(buffer, transaction);
7872 }
7873
7874 for (buffer, transaction) in &project_transaction.0 {
7875 buffer
7876 .update(&mut cx, |buffer, _| {
7877 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
7878 })?
7879 .await?;
7880
7881 if push_to_history {
7882 buffer.update(&mut cx, |buffer, _| {
7883 buffer.push_transaction(transaction.clone(), Instant::now());
7884 })?;
7885 }
7886 }
7887
7888 Ok(project_transaction)
7889 })
7890 }
7891
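    // Returns the buffer's remote id. If this project has a local client state
    // (it is being shared), also queues a `CreateBufferForPeer` update so the
    // buffer gets sent to the given peer.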
7892 fn create_buffer_for_peer(
7893 &mut self,
7894 buffer: &Model<Buffer>,
7895 peer_id: proto::PeerId,
7896 cx: &mut AppContext,
7897 ) -> u64 {
7898 let buffer_id = buffer.read(cx).remote_id();
7899 if let Some(ProjectClientState::Local { updates_tx, .. }) = &self.client_state {
7900 updates_tx
7901 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
7902 .ok();
7903 }
7904 buffer_id
7905 }
7906
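    // Resolves once the buffer with the given remote id has been opened locally.
    // Registers the id as an incomplete remote buffer and re-checks whenever the
    // `opened_buffer` channel fires, failing if the project is dropped or becomes
    // read-only first.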
7907 fn wait_for_remote_buffer(
7908 &mut self,
7909 id: u64,
7910 cx: &mut ModelContext<Self>,
7911 ) -> Task<Result<Model<Buffer>>> {
7912 let mut opened_buffer_rx = self.opened_buffer.1.clone();
7913
7914 cx.spawn(move |this, mut cx| async move {
7915 let buffer = loop {
7916 let Some(this) = this.upgrade() else {
7917 return Err(anyhow!("project dropped"));
7918 };
7919
7920 let buffer = this.update(&mut cx, |this, _cx| {
7921 this.opened_buffers
7922 .get(&id)
7923 .and_then(|buffer| buffer.upgrade())
7924 })?;
7925
7926 if let Some(buffer) = buffer {
7927 break buffer;
7928 } else if this.update(&mut cx, |this, _| this.is_read_only())? {
7929 return Err(anyhow!("disconnected before buffer {} could be opened", id));
7930 }
7931
7932 this.update(&mut cx, |this, _| {
7933 this.incomplete_remote_buffers.entry(id).or_default();
7934 })?;
7935 drop(this);
7936
7937 opened_buffer_rx
7938 .next()
7939 .await
7940 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
7941 };
7942
7943 Ok(buffer)
7944 })
7945 }
7946
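    // Reconciles buffer state with the host of a remote project: reports the
    // versions of all locally opened buffers, pushes any operations the host's
    // response indicates are missing, and re-requests buffers that are still
    // incomplete. Fails on local projects or when sharing has stopped.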
7947 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
7948 let project_id = match self.client_state.as_ref() {
7949 Some(ProjectClientState::Remote {
7950 sharing_has_stopped,
7951 remote_id,
7952 ..
7953 }) => {
7954 if *sharing_has_stopped {
7955 return Task::ready(Err(anyhow!(
7956 "can't synchronize remote buffers on a readonly project"
7957 )));
7958 } else {
7959 *remote_id
7960 }
7961 }
7962 Some(ProjectClientState::Local { .. }) | None => {
7963 return Task::ready(Err(anyhow!(
7964 "can't synchronize remote buffers on a local project"
7965 )))
7966 }
7967 };
7968
7969 let client = self.client.clone();
7970 cx.spawn(move |this, mut cx| async move {
7971 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
7972 let buffers = this
7973 .opened_buffers
7974 .iter()
7975 .filter_map(|(id, buffer)| {
7976 let buffer = buffer.upgrade()?;
7977 Some(proto::BufferVersion {
7978 id: *id,
7979 version: language::proto::serialize_version(&buffer.read(cx).version),
7980 })
7981 })
7982 .collect();
7983 let incomplete_buffer_ids = this
7984 .incomplete_remote_buffers
7985 .keys()
7986 .copied()
7987 .collect::<Vec<_>>();
7988
7989 (buffers, incomplete_buffer_ids)
7990 })?;
7991 let response = client
7992 .request(proto::SynchronizeBuffers {
7993 project_id,
7994 buffers,
7995 })
7996 .await?;
7997
7998 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
7999 response
8000 .buffers
8001 .into_iter()
8002 .map(|buffer| {
8003 let client = client.clone();
8004 let buffer_id = buffer.id;
8005 let remote_version = language::proto::deserialize_version(&buffer.version);
8006 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8007 let operations =
8008 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8009 cx.background_executor().spawn(async move {
8010 let operations = operations.await;
8011 for chunk in split_operations(operations) {
8012 client
8013 .request(proto::UpdateBuffer {
8014 project_id,
8015 buffer_id,
8016 operations: chunk,
8017 })
8018 .await?;
8019 }
8020 anyhow::Ok(())
8021 })
8022 } else {
8023 Task::ready(Ok(()))
8024 }
8025 })
8026 .collect::<Vec<_>>()
8027 })?;
8028
            // Any incomplete buffers have open requests waiting. Request that the host
            // creates these buffers for us again to unblock any waiting futures.
8031 for id in incomplete_buffer_ids {
8032 cx.background_executor()
8033 .spawn(client.request(proto::OpenBufferById { project_id, id }))
8034 .detach();
8035 }
8036
8037 futures::future::join_all(send_updates_for_buffers)
8038 .await
8039 .into_iter()
8040 .collect()
8041 })
8042 }
8043
8044 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8045 self.worktrees()
8046 .map(|worktree| {
8047 let worktree = worktree.read(cx);
8048 proto::WorktreeMetadata {
8049 id: worktree.id().to_proto(),
8050 root_name: worktree.root_name().into(),
8051 visible: worktree.is_visible(),
8052 abs_path: worktree.abs_path().to_string_lossy().into(),
8053 }
8054 })
8055 .collect()
8056 }
8057
8058 fn set_worktrees_from_proto(
8059 &mut self,
8060 worktrees: Vec<proto::WorktreeMetadata>,
8061 cx: &mut ModelContext<Project>,
8062 ) -> Result<()> {
8063 let replica_id = self.replica_id();
8064 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8065
8066 let mut old_worktrees_by_id = self
8067 .worktrees
8068 .drain(..)
8069 .filter_map(|worktree| {
8070 let worktree = worktree.upgrade()?;
8071 Some((worktree.read(cx).id(), worktree))
8072 })
8073 .collect::<HashMap<_, _>>();
8074
8075 for worktree in worktrees {
8076 if let Some(old_worktree) =
8077 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8078 {
8079 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8080 } else {
8081 let worktree =
8082 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8083 let _ = self.add_worktree(&worktree, cx);
8084 }
8085 }
8086
8087 self.metadata_changed(cx);
8088 for id in old_worktrees_by_id.keys() {
8089 cx.emit(Event::WorktreeRemoved(*id));
8090 }
8091
8092 Ok(())
8093 }
8094
8095 fn set_collaborators_from_proto(
8096 &mut self,
8097 messages: Vec<proto::Collaborator>,
8098 cx: &mut ModelContext<Self>,
8099 ) -> Result<()> {
8100 let mut collaborators = HashMap::default();
8101 for message in messages {
8102 let collaborator = Collaborator::from_proto(message)?;
8103 collaborators.insert(collaborator.peer_id, collaborator);
8104 }
8105 for old_peer_id in self.collaborators.keys() {
8106 if !collaborators.contains_key(old_peer_id) {
8107 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8108 }
8109 }
8110 self.collaborators = collaborators;
8111 Ok(())
8112 }
8113
8114 fn deserialize_symbol(
8115 &self,
8116 serialized_symbol: proto::Symbol,
8117 ) -> impl Future<Output = Result<Symbol>> {
8118 let languages = self.languages.clone();
8119 async move {
8120 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8121 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8122 let start = serialized_symbol
8123 .start
8124 .ok_or_else(|| anyhow!("invalid start"))?;
8125 let end = serialized_symbol
8126 .end
8127 .ok_or_else(|| anyhow!("invalid end"))?;
8128 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8129 let path = ProjectPath {
8130 worktree_id,
8131 path: PathBuf::from(serialized_symbol.path).into(),
8132 };
8133 let language = languages
8134 .language_for_file(&path.path, None)
8135 .await
8136 .log_err();
8137 Ok(Symbol {
8138 language_server_name: LanguageServerName(
8139 serialized_symbol.language_server_name.into(),
8140 ),
8141 source_worktree_id,
8142 path,
8143 label: {
8144 match language {
8145 Some(language) => {
8146 language
8147 .label_for_symbol(&serialized_symbol.name, kind)
8148 .await
8149 }
8150 None => None,
8151 }
8152 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8153 },
8154
8155 name: serialized_symbol.name,
8156 range: Unclipped(PointUtf16::new(start.row, start.column))
8157 ..Unclipped(PointUtf16::new(end.row, end.column)),
8158 kind,
8159 signature: serialized_symbol
8160 .signature
8161 .try_into()
8162 .map_err(|_| anyhow!("invalid signature"))?,
8163 })
8164 }
8165 }
8166
8167 async fn handle_buffer_saved(
8168 this: Model<Self>,
8169 envelope: TypedEnvelope<proto::BufferSaved>,
8170 _: Arc<Client>,
8171 mut cx: AsyncAppContext,
8172 ) -> Result<()> {
8173 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8174 let version = deserialize_version(&envelope.payload.version);
8175 let mtime = envelope
8176 .payload
8177 .mtime
8178 .ok_or_else(|| anyhow!("missing mtime"))?
8179 .into();
8180
8181 this.update(&mut cx, |this, cx| {
8182 let buffer = this
8183 .opened_buffers
8184 .get(&envelope.payload.buffer_id)
8185 .and_then(|buffer| buffer.upgrade())
8186 .or_else(|| {
8187 this.incomplete_remote_buffers
8188 .get(&envelope.payload.buffer_id)
8189 .and_then(|b| b.clone())
8190 });
8191 if let Some(buffer) = buffer {
8192 buffer.update(cx, |buffer, cx| {
8193 buffer.did_save(version, fingerprint, mtime, cx);
8194 });
8195 }
8196 Ok(())
8197 })?
8198 }
8199
8200 async fn handle_buffer_reloaded(
8201 this: Model<Self>,
8202 envelope: TypedEnvelope<proto::BufferReloaded>,
8203 _: Arc<Client>,
8204 mut cx: AsyncAppContext,
8205 ) -> Result<()> {
8206 let payload = envelope.payload;
8207 let version = deserialize_version(&payload.version);
8208 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
8209 let line_ending = deserialize_line_ending(
8210 proto::LineEnding::from_i32(payload.line_ending)
8211 .ok_or_else(|| anyhow!("missing line ending"))?,
8212 );
8213 let mtime = payload
8214 .mtime
8215 .ok_or_else(|| anyhow!("missing mtime"))?
8216 .into();
8217 this.update(&mut cx, |this, cx| {
8218 let buffer = this
8219 .opened_buffers
8220 .get(&payload.buffer_id)
8221 .and_then(|buffer| buffer.upgrade())
8222 .or_else(|| {
8223 this.incomplete_remote_buffers
8224 .get(&payload.buffer_id)
8225 .cloned()
8226 .flatten()
8227 });
8228 if let Some(buffer) = buffer {
8229 buffer.update(cx, |buffer, cx| {
8230 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
8231 });
8232 }
8233 Ok(())
8234 })?
8235 }
8236
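    // Converts a set of LSP text edits into anchored edits against the buffer
    // snapshot the language server was referring to. Adjacent (or newline-
    // separated) edits are merged, and multiline replacements are diffed against
    // the old text so anchors in unchanged regions keep their positions.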
8237 #[allow(clippy::type_complexity)]
8238 fn edits_from_lsp(
8239 &mut self,
8240 buffer: &Model<Buffer>,
8241 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
8242 server_id: LanguageServerId,
8243 version: Option<i32>,
8244 cx: &mut ModelContext<Self>,
8245 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
8246 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
8247 cx.background_executor().spawn(async move {
8248 let snapshot = snapshot?;
8249 let mut lsp_edits = lsp_edits
8250 .into_iter()
8251 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
8252 .collect::<Vec<_>>();
8253 lsp_edits.sort_by_key(|(range, _)| range.start);
8254
8255 let mut lsp_edits = lsp_edits.into_iter().peekable();
8256 let mut edits = Vec::new();
8257 while let Some((range, mut new_text)) = lsp_edits.next() {
8258 // Clip invalid ranges provided by the language server.
8259 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
8260 ..snapshot.clip_point_utf16(range.end, Bias::Left);
8261
8262 // Combine any LSP edits that are adjacent.
8263 //
8264 // Also, combine LSP edits that are separated from each other by only
8265 // a newline. This is important because for some code actions,
8266 // Rust-analyzer rewrites the entire buffer via a series of edits that
8267 // are separated by unchanged newline characters.
8268 //
8269 // In order for the diffing logic below to work properly, any edits that
8270 // cancel each other out must be combined into one.
8271 while let Some((next_range, next_text)) = lsp_edits.peek() {
8272 if next_range.start.0 > range.end {
8273 if next_range.start.0.row > range.end.row + 1
8274 || next_range.start.0.column > 0
8275 || snapshot.clip_point_utf16(
8276 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
8277 Bias::Left,
8278 ) > range.end
8279 {
8280 break;
8281 }
8282 new_text.push('\n');
8283 }
8284 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
8285 new_text.push_str(next_text);
8286 lsp_edits.next();
8287 }
8288
8289 // For multiline edits, perform a diff of the old and new text so that
8290 // we can identify the changes more precisely, preserving the locations
8291 // of any anchors positioned in the unchanged regions.
8292 if range.end.row > range.start.row {
8293 let mut offset = range.start.to_offset(&snapshot);
8294 let old_text = snapshot.text_for_range(range).collect::<String>();
8295
8296 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
8297 let mut moved_since_edit = true;
8298 for change in diff.iter_all_changes() {
8299 let tag = change.tag();
8300 let value = change.value();
8301 match tag {
8302 ChangeTag::Equal => {
8303 offset += value.len();
8304 moved_since_edit = true;
8305 }
8306 ChangeTag::Delete => {
8307 let start = snapshot.anchor_after(offset);
8308 let end = snapshot.anchor_before(offset + value.len());
8309 if moved_since_edit {
8310 edits.push((start..end, String::new()));
8311 } else {
8312 edits.last_mut().unwrap().0.end = end;
8313 }
8314 offset += value.len();
8315 moved_since_edit = false;
8316 }
8317 ChangeTag::Insert => {
8318 if moved_since_edit {
8319 let anchor = snapshot.anchor_after(offset);
8320 edits.push((anchor..anchor, value.to_string()));
8321 } else {
8322 edits.last_mut().unwrap().1.push_str(value);
8323 }
8324 moved_since_edit = false;
8325 }
8326 }
8327 }
8328 } else if range.end == range.start {
8329 let anchor = snapshot.anchor_after(range.start);
8330 edits.push((anchor..anchor, new_text));
8331 } else {
8332 let edit_start = snapshot.anchor_after(range.start);
8333 let edit_end = snapshot.anchor_before(range.end);
8334 edits.push((edit_start..edit_end, new_text));
8335 }
8336 }
8337
8338 Ok(edits)
8339 })
8340 }
8341
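    // Returns the buffer snapshot corresponding to the document version reported
    // by a language server, falling back to the current text when no version is
    // given. Snapshots more than `OLD_VERSIONS_TO_RETAIN` versions behind the
    // requested one are pruned as a side effect.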
8342 fn buffer_snapshot_for_lsp_version(
8343 &mut self,
8344 buffer: &Model<Buffer>,
8345 server_id: LanguageServerId,
8346 version: Option<i32>,
8347 cx: &AppContext,
8348 ) -> Result<TextBufferSnapshot> {
8349 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
8350
8351 if let Some(version) = version {
8352 let buffer_id = buffer.read(cx).remote_id();
8353 let snapshots = self
8354 .buffer_snapshots
8355 .get_mut(&buffer_id)
8356 .and_then(|m| m.get_mut(&server_id))
8357 .ok_or_else(|| {
8358 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
8359 })?;
8360
8361 let found_snapshot = snapshots
8362 .binary_search_by_key(&version, |e| e.version)
8363 .map(|ix| snapshots[ix].snapshot.clone())
8364 .map_err(|_| {
8365 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
8366 })?;
8367
8368 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
8369 Ok(found_snapshot)
8370 } else {
8371 Ok((buffer.read(cx)).text_snapshot())
8372 }
8373 }
8374
8375 pub fn language_servers(
8376 &self,
8377 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
8378 self.language_server_ids
8379 .iter()
8380 .map(|((worktree_id, server_name), server_id)| {
8381 (*server_id, server_name.clone(), *worktree_id)
8382 })
8383 }
8384
8385 pub fn supplementary_language_servers(
8386 &self,
8387 ) -> impl '_
8388 + Iterator<
8389 Item = (
8390 &LanguageServerId,
8391 &(LanguageServerName, Arc<LanguageServer>),
8392 ),
8393 > {
8394 self.supplementary_language_servers.iter()
8395 }
8396
8397 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
8398 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
8399 Some(server.clone())
8400 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
8401 Some(Arc::clone(server))
8402 } else {
8403 None
8404 }
8405 }
8406
8407 pub fn language_servers_for_buffer(
8408 &self,
8409 buffer: &Buffer,
8410 cx: &AppContext,
8411 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8412 self.language_server_ids_for_buffer(buffer, cx)
8413 .into_iter()
8414 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
8415 LanguageServerState::Running {
8416 adapter, server, ..
8417 } => Some((adapter, server)),
8418 _ => None,
8419 })
8420 }
8421
8422 fn primary_language_server_for_buffer(
8423 &self,
8424 buffer: &Buffer,
8425 cx: &AppContext,
8426 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8427 self.language_servers_for_buffer(buffer, cx).next()
8428 }
8429
8430 pub fn language_server_for_buffer(
8431 &self,
8432 buffer: &Buffer,
8433 server_id: LanguageServerId,
8434 cx: &AppContext,
8435 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
8436 self.language_servers_for_buffer(buffer, cx)
8437 .find(|(_, s)| s.server_id() == server_id)
8438 }
8439
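    // Returns the ids of all language servers registered for the buffer's
    // language within its worktree, in the order the language's LSP adapters are
    // declared.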
8440 fn language_server_ids_for_buffer(
8441 &self,
8442 buffer: &Buffer,
8443 cx: &AppContext,
8444 ) -> Vec<LanguageServerId> {
8445 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
8446 let worktree_id = file.worktree_id(cx);
8447 language
8448 .lsp_adapters()
8449 .iter()
8450 .flat_map(|adapter| {
8451 let key = (worktree_id, adapter.name.clone());
8452 self.language_server_ids.get(&key).copied()
8453 })
8454 .collect()
8455 } else {
8456 Vec::new()
8457 }
8458 }
8459}
8460
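// Subscribes to Copilot events so that, when its language server starts, it is
// registered as a supplementary language server and its log messages are
// forwarded as `LanguageServerLog` events.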
8461fn subscribe_for_copilot_events(
8462 copilot: &Model<Copilot>,
8463 cx: &mut ModelContext<'_, Project>,
8464) -> gpui::Subscription {
8465 cx.subscribe(
8466 copilot,
8467 |project, copilot, copilot_event, cx| match copilot_event {
8468 copilot::Event::CopilotLanguageServerStarted => {
8469 match copilot.read(cx).language_server() {
8470 Some((name, copilot_server)) => {
                        // Another event may try to re-add a server that was already added and subscribed to; avoid doing it again.
8472 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
8473 let new_server_id = copilot_server.server_id();
8474 let weak_project = cx.weak_model();
8475 let copilot_log_subscription = copilot_server
8476 .on_notification::<copilot::request::LogMessage, _>(
8477 move |params, mut cx| {
8478 weak_project.update(&mut cx, |_, cx| {
8479 cx.emit(Event::LanguageServerLog(
8480 new_server_id,
8481 params.message,
8482 ));
8483 }).ok();
8484 },
8485 );
8486 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
8487 project.copilot_log_subscription = Some(copilot_log_subscription);
8488 cx.emit(Event::LanguageServerAdded(new_server_id));
8489 }
8490 }
8491 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
8492 }
8493 }
8494 },
8495 )
8496}
8497
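// Returns the leading, separator-delimited portion of `glob` that contains no
// glob metacharacters, e.g. the "src" prefix of "src/**/*.rs".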
fn glob_literal_prefix(glob: &str) -> &str {
8499 let mut literal_end = 0;
8500 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
8501 if part.contains(&['*', '?', '{', '}']) {
8502 break;
8503 } else {
8504 if i > 0 {
                // Account for the separator prior to this part
8506 literal_end += path::MAIN_SEPARATOR.len_utf8();
8507 }
8508 literal_end += part.len();
8509 }
8510 }
8511 &glob[..literal_end]
8512}
8513
8514impl WorktreeHandle {
8515 pub fn upgrade(&self) -> Option<Model<Worktree>> {
8516 match self {
8517 WorktreeHandle::Strong(handle) => Some(handle.clone()),
8518 WorktreeHandle::Weak(handle) => handle.upgrade(),
8519 }
8520 }
8521
8522 pub fn handle_id(&self) -> usize {
8523 match self {
8524 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
8525 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
8526 }
8527 }
8528}
8529
8530impl OpenBuffer {
8531 pub fn upgrade(&self) -> Option<Model<Buffer>> {
8532 match self {
8533 OpenBuffer::Strong(handle) => Some(handle.clone()),
8534 OpenBuffer::Weak(handle) => handle.upgrade(),
8535 OpenBuffer::Operations(_) => None,
8536 }
8537 }
8538}
8539
8540pub struct PathMatchCandidateSet {
8541 pub snapshot: Snapshot,
8542 pub include_ignored: bool,
8543 pub include_root_name: bool,
8544}
8545
8546impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
8547 type Candidates = PathMatchCandidateSetIter<'a>;
8548
8549 fn id(&self) -> usize {
8550 self.snapshot.id().to_usize()
8551 }
8552
8553 fn len(&self) -> usize {
8554 if self.include_ignored {
8555 self.snapshot.file_count()
8556 } else {
8557 self.snapshot.visible_file_count()
8558 }
8559 }
8560
8561 fn prefix(&self) -> Arc<str> {
8562 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
8563 self.snapshot.root_name().into()
8564 } else if self.include_root_name {
8565 format!("{}/", self.snapshot.root_name()).into()
8566 } else {
8567 "".into()
8568 }
8569 }
8570
8571 fn candidates(&'a self, start: usize) -> Self::Candidates {
8572 PathMatchCandidateSetIter {
8573 traversal: self.snapshot.files(self.include_ignored, start),
8574 }
8575 }
8576}
8577
8578pub struct PathMatchCandidateSetIter<'a> {
8579 traversal: Traversal<'a>,
8580}
8581
8582impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
8583 type Item = fuzzy::PathMatchCandidate<'a>;
8584
8585 fn next(&mut self) -> Option<Self::Item> {
8586 self.traversal.next().map(|entry| {
8587 if let EntryKind::File(char_bag) = entry.kind {
8588 fuzzy::PathMatchCandidate {
8589 path: &entry.path,
8590 char_bag,
8591 }
8592 } else {
8593 unreachable!()
8594 }
8595 })
8596 }
8597}
8598
8599impl EventEmitter<Event> for Project {}
8600
8601impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
8602 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
8603 Self {
8604 worktree_id,
8605 path: path.as_ref().into(),
8606 }
8607 }
8608}
8609
8610impl ProjectLspAdapterDelegate {
8611 fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
8612 Arc::new(Self {
8613 project: cx.handle(),
8614 http_client: project.client.http_client(),
8615 })
8616 }
8617}
8618
8619impl LspAdapterDelegate for ProjectLspAdapterDelegate {
8620 fn show_notification(&self, message: &str, cx: &mut AppContext) {
8621 self.project
8622 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
8623 }
8624
8625 fn http_client(&self) -> Arc<dyn HttpClient> {
8626 self.http_client.clone()
8627 }
8628}
8629
8630fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
8631 proto::Symbol {
8632 language_server_name: symbol.language_server_name.0.to_string(),
8633 source_worktree_id: symbol.source_worktree_id.to_proto(),
8634 worktree_id: symbol.path.worktree_id.to_proto(),
8635 path: symbol.path.path.to_string_lossy().to_string(),
8636 name: symbol.name.clone(),
8637 kind: unsafe { mem::transmute(symbol.kind) },
8638 start: Some(proto::PointUtf16 {
8639 row: symbol.range.start.0.row,
8640 column: symbol.range.start.0.column,
8641 }),
8642 end: Some(proto::PointUtf16 {
8643 row: symbol.range.end.0.row,
8644 column: symbol.range.end.0.column,
8645 }),
8646 signature: symbol.signature.to_vec(),
8647 }
8648}
8649
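// Computes `path` relative to `base`, walking both component lists in lockstep
// and emitting `..` components wherever the two paths diverge.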
8650fn relativize_path(base: &Path, path: &Path) -> PathBuf {
8651 let mut path_components = path.components();
8652 let mut base_components = base.components();
8653 let mut components: Vec<Component> = Vec::new();
8654 loop {
8655 match (path_components.next(), base_components.next()) {
8656 (None, None) => break,
8657 (Some(a), None) => {
8658 components.push(a);
8659 components.extend(path_components.by_ref());
8660 break;
8661 }
8662 (None, _) => components.push(Component::ParentDir),
8663 (Some(a), Some(b)) if components.is_empty() && a == b => (),
8664 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
8665 (Some(a), Some(_)) => {
8666 components.push(Component::ParentDir);
8667 for _ in base_components {
8668 components.push(Component::ParentDir);
8669 }
8670 components.push(a);
8671 components.extend(path_components.by_ref());
8672 break;
8673 }
8674 }
8675 }
8676 components.iter().map(|c| c.as_os_str()).collect()
8677}
8678
8679impl Item for Buffer {
8680 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
8681 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
8682 }
8683
8684 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
8685 File::from_dyn(self.file()).map(|file| ProjectPath {
8686 worktree_id: file.worktree_id(cx),
8687 path: file.path().clone(),
8688 })
8689 }
8690}
8691
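// Waits on the buffer's loading watch channel until the in-flight load either
// produces a buffer or fails.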
8692async fn wait_for_loading_buffer(
8693 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
8694) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
8695 loop {
8696 if let Some(result) = receiver.borrow().as_ref() {
8697 match result {
8698 Ok(buffer) => return Ok(buffer.to_owned()),
8699 Err(e) => return Err(e.to_owned()),
8700 }
8701 }
8702 receiver.next().await;
8703 }
8704}
8705
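// Returns whether the language server's save capabilities request that the full
// document text be included in `textDocument/didSave` notifications.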
8706fn include_text(server: &lsp::LanguageServer) -> bool {
8707 server
8708 .capabilities()
8709 .text_document_sync
8710 .as_ref()
8711 .and_then(|sync| match sync {
8712 lsp::TextDocumentSyncCapability::Kind(_) => None,
8713 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
8714 })
8715 .and_then(|save_options| match save_options {
8716 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
8717 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
8718 })
8719 .unwrap_or(false)
8720}