1pub mod debounced_delay;
2pub mod lsp_command;
3pub mod lsp_ext_command;
4mod prettier_support;
5pub mod project_settings;
6pub mod search;
7mod task_inventory;
8pub mod terminals;
9
10#[cfg(test)]
11mod project_tests;
12pub mod search_history;
13
14use anyhow::{anyhow, bail, Context as _, Result};
15use async_trait::async_trait;
16use client::{
17 proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore,
18};
19use clock::ReplicaId;
20use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
21use copilot::Copilot;
22use debounced_delay::DebouncedDelay;
23use fs::repository::GitRepository;
24use futures::{
25 channel::{
26 mpsc::{self, UnboundedReceiver},
27 oneshot,
28 },
29 future::{try_join_all, Shared},
30 select,
31 stream::FuturesUnordered,
32 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
33};
34use git::blame::Blame;
35use globset::{Glob, GlobSet, GlobSetBuilder};
36use gpui::{
37 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, BorrowAppContext, Context, Entity,
38 EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel,
39};
40use itertools::Itertools;
41use language::{
42 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
43 markdown, point_to_lsp,
44 proto::{
45 deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
46 serialize_version, split_operations,
47 },
48 range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeAction,
49 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
50 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
51 LspAdapterDelegate, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
52 ToOffset, ToPointUtf16, Transaction, Unclipped,
53};
54use log::error;
55use lsp::{
56 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
57 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId,
58 MessageActionItem, OneOf, ServerHealthStatus, ServerStatus,
59};
60use lsp_command::*;
61use node_runtime::NodeRuntime;
62use parking_lot::{Mutex, RwLock};
63use postage::watch;
64use prettier_support::{DefaultPrettier, PrettierInstance};
65use project_settings::{LspSettings, ProjectSettings};
66use rand::prelude::*;
67use search_history::SearchHistory;
68use worktree::LocalSnapshot;
69
70use rpc::{ErrorCode, ErrorExt as _};
71use search::SearchQuery;
72use serde::Serialize;
73use settings::{watch_config_file, Settings, SettingsLocation, SettingsStore};
74use sha2::{Digest, Sha256};
75use similar::{ChangeTag, TextDiff};
76use smol::channel::{Receiver, Sender};
77use smol::lock::Semaphore;
78use std::{
79 cmp::{self, Ordering},
80 convert::TryInto,
81 env,
82 ffi::OsStr,
83 hash::Hash,
84 io, mem,
85 num::NonZeroU32,
86 ops::Range,
87 path::{self, Component, Path, PathBuf},
88 process::Stdio,
89 str::{self, FromStr},
90 sync::{
91 atomic::{AtomicUsize, Ordering::SeqCst},
92 Arc,
93 },
94 time::{Duration, Instant},
95};
96use task::static_source::{StaticSource, TrackedFile};
97use terminals::Terminals;
98use text::{Anchor, BufferId, RopeFingerprint};
99use util::{
100 debug_panic, defer,
101 http::{HttpClient, Url},
102 maybe, merge_json_value_into,
103 paths::{
104 LOCAL_SETTINGS_RELATIVE_PATH, LOCAL_TASKS_RELATIVE_PATH, LOCAL_VSCODE_TASKS_RELATIVE_PATH,
105 },
106 post_inc, ResultExt, TryFutureExt as _,
107};
108use worktree::{Snapshot, Traversal};
109
110pub use fs::*;
111pub use language::Location;
112#[cfg(any(test, feature = "test-support"))]
113pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
114#[cfg(feature = "test-support")]
115pub use task_inventory::test_inventory::*;
116pub use task_inventory::{Inventory, TaskSourceKind};
117pub use worktree::{
118 DiagnosticSummary, Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId,
119 RepositoryEntry, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId,
120 WorktreeSettings, FS_WATCH_LATENCY,
121};
122
123const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
124const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
125const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
126pub const SERVER_PROGRESS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);
127
128const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500;
129
130pub trait Item {
131 fn try_open(
132 project: &Model<Project>,
133 path: &ProjectPath,
134 cx: &mut AppContext,
135 ) -> Option<Task<Result<Model<Self>>>>
136 where
137 Self: Sized;
138 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
139 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
140}
141
142#[derive(Clone)]
143pub enum OpenedBufferEvent {
144 Disconnected,
145 Ok(BufferId),
146 Err(BufferId, Arc<anyhow::Error>),
147}
148
/// A semantics-aware entity spanning one or more [`Worktree`]s and their files.
/// `Project` is responsible for tasks, LSP, and collab queries, keeping worktree state synchronized accordingly.
/// It maps [`Worktree`] entries to its own representation using the [`ProjectEntryId`] and [`ProjectPath`] structs.
///
/// A project can be either local (opened on the same host) or remote (a collab project browsed by multiple remote users).
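///
/// A rough usage sketch (not compiled; the `client`, `node`, `user_store`, `languages`,
/// `fs`, `worktree_id`, and the example path are assumptions standing in for real values):
///
/// ```ignore
/// // Create a local project, then ask it to open a buffer from one of its worktrees.
/// let project = Project::local(client, node, user_store, languages, fs, cx);
/// project.update(cx, |project, cx| {
///     let path = ProjectPath {
///         worktree_id,
///         path: Arc::from(Path::new("src/main.rs")),
///     };
///     project.open_buffer(path, cx).detach_and_log_err(cx);
/// });
/// ```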
154pub struct Project {
155 worktrees: Vec<WorktreeHandle>,
156 active_entry: Option<ProjectEntryId>,
157 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
158 pending_language_server_update: Option<BufferOrderedMessage>,
159 flush_language_server_update: Option<Task<()>>,
160
161 languages: Arc<LanguageRegistry>,
162 supplementary_language_servers:
163 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
164 language_servers: HashMap<LanguageServerId, LanguageServerState>,
165 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
166 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
167 last_formatting_failure: Option<String>,
168 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
169 language_server_watched_paths: HashMap<LanguageServerId, HashMap<WorktreeId, GlobSet>>,
170 client: Arc<client::Client>,
171 next_entry_id: Arc<AtomicUsize>,
172 join_project_response_message_id: u32,
173 next_diagnostic_group_id: usize,
174 user_store: Model<UserStore>,
175 fs: Arc<dyn Fs>,
176 client_state: ProjectClientState,
177 collaborators: HashMap<proto::PeerId, Collaborator>,
178 client_subscriptions: Vec<client::Subscription>,
179 _subscriptions: Vec<gpui::Subscription>,
180 next_buffer_id: BufferId,
181 loading_buffers: HashMap<BufferId, Vec<oneshot::Sender<Result<Model<Buffer>, anyhow::Error>>>>,
182 incomplete_remote_buffers: HashMap<BufferId, Model<Buffer>>,
183 shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
184 #[allow(clippy::type_complexity)]
185 loading_buffers_by_path: HashMap<
186 ProjectPath,
187 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
188 >,
189 #[allow(clippy::type_complexity)]
190 loading_local_worktrees:
191 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
192 opened_buffers: HashMap<BufferId, OpenBuffer>,
193 local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
194 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
195 buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
196 buffers_being_formatted: HashSet<BufferId>,
197 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
198 git_diff_debouncer: DebouncedDelay,
199 nonce: u128,
200 _maintain_buffer_languages: Task<()>,
201 _maintain_workspace_config: Task<Result<()>>,
202 terminals: Terminals,
203 copilot_lsp_subscription: Option<gpui::Subscription>,
204 copilot_log_subscription: Option<lsp::Subscription>,
205 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
206 node: Option<Arc<dyn NodeRuntime>>,
207 default_prettier: DefaultPrettier,
208 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
209 prettier_instances: HashMap<PathBuf, PrettierInstance>,
210 tasks: Model<Inventory>,
211 hosted_project_id: Option<ProjectId>,
212 search_history: SearchHistory,
213}
214
215pub enum LanguageServerToQuery {
216 Primary,
217 Other(LanguageServerId),
218}
219
220struct LspBufferSnapshot {
221 version: i32,
222 snapshot: TextBufferSnapshot,
223}
224
/// A message ordered with respect to buffer operations.
226#[derive(Debug)]
227enum BufferOrderedMessage {
228 Operation {
229 buffer_id: BufferId,
230 operation: proto::Operation,
231 },
232 LanguageServerUpdate {
233 language_server_id: LanguageServerId,
234 message: proto::update_language_server::Variant,
235 },
236 Resync,
237}
238
239enum LocalProjectUpdate {
240 WorktreesChanged,
241 CreateBufferForPeer {
242 peer_id: proto::PeerId,
243 buffer_id: BufferId,
244 },
245}
246
247enum OpenBuffer {
248 Strong(Model<Buffer>),
249 Weak(WeakModel<Buffer>),
250 Operations(Vec<Operation>),
251}
252
253#[derive(Clone)]
254enum WorktreeHandle {
255 Strong(Model<Worktree>),
256 Weak(WeakModel<Worktree>),
257}
258
259#[derive(Debug)]
260enum ProjectClientState {
261 Local,
262 Shared {
263 remote_id: u64,
264 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
265 _send_updates: Task<Result<()>>,
266 },
267 Remote {
268 sharing_has_stopped: bool,
269 capability: Capability,
270 remote_id: u64,
271 replica_id: ReplicaId,
272 },
273}
274
/// A prompt requested by an LSP server.
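/// The project surfaces it to subscribers as [`Event::LanguageServerPrompt`]; whoever renders
/// the prompt picks an action index and calls [`LanguageServerPromptRequest::respond`].
/// A minimal sketch (not compiled; `event` is assumed to be an [`Event`] received in an
/// async context):
///
/// ```ignore
/// if let Event::LanguageServerPrompt(prompt) = event {
///     // Answer with the first offered action, if any; `respond` consumes the request.
///     prompt.respond(0).await;
/// }
/// ```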
276#[derive(Clone, Debug)]
277pub struct LanguageServerPromptRequest {
278 pub level: PromptLevel,
279 pub message: String,
280 pub actions: Vec<MessageActionItem>,
281 pub lsp_name: String,
282 response_channel: Sender<MessageActionItem>,
283}
284
285impl LanguageServerPromptRequest {
286 pub async fn respond(self, index: usize) -> Option<()> {
287 if let Some(response) = self.actions.into_iter().nth(index) {
288 self.response_channel.send(response).await.ok()
289 } else {
290 None
291 }
292 }
293}
294impl PartialEq for LanguageServerPromptRequest {
295 fn eq(&self, other: &Self) -> bool {
296 self.message == other.message && self.actions == other.actions
297 }
298}
299
300#[derive(Clone, Debug, PartialEq)]
301pub enum Event {
302 LanguageServerAdded(LanguageServerId),
303 LanguageServerRemoved(LanguageServerId),
304 LanguageServerLog(LanguageServerId, String),
305 Notification(String),
306 LanguageServerPrompt(LanguageServerPromptRequest),
307 LanguageNotFound(Model<Buffer>),
308 ActiveEntryChanged(Option<ProjectEntryId>),
309 ActivateProjectPanel,
310 WorktreeAdded,
311 WorktreeRemoved(WorktreeId),
312 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
313 WorktreeUpdatedGitRepositories,
314 DiskBasedDiagnosticsStarted {
315 language_server_id: LanguageServerId,
316 },
317 DiskBasedDiagnosticsFinished {
318 language_server_id: LanguageServerId,
319 },
320 DiagnosticsUpdated {
321 path: ProjectPath,
322 language_server_id: LanguageServerId,
323 },
324 RemoteIdChanged(Option<u64>),
325 DisconnectedFromHost,
326 Closed,
327 DeletedEntry(ProjectEntryId),
328 CollaboratorUpdated {
329 old_peer_id: proto::PeerId,
330 new_peer_id: proto::PeerId,
331 },
332 CollaboratorJoined(proto::PeerId),
333 CollaboratorLeft(proto::PeerId),
334 RefreshInlayHints,
335 RevealInProjectPanel(ProjectEntryId),
336}
337
338pub enum LanguageServerState {
339 Starting(Task<Option<Arc<LanguageServer>>>),
340
341 Running {
342 language: Arc<Language>,
343 adapter: Arc<CachedLspAdapter>,
344 server: Arc<LanguageServer>,
345 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
346 },
347}
348
349#[derive(Serialize)]
350pub struct LanguageServerStatus {
351 pub name: String,
352 pub pending_work: BTreeMap<String, LanguageServerProgress>,
353 pub has_pending_diagnostic_updates: bool,
354 progress_tokens: HashSet<String>,
355}
356
357#[derive(Clone, Debug, Serialize)]
358pub struct LanguageServerProgress {
359 pub message: Option<String>,
360 pub percentage: Option<usize>,
361 #[serde(skip_serializing)]
362 pub last_update_at: Instant,
363}
364
365#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
366pub struct ProjectPath {
367 pub worktree_id: WorktreeId,
368 pub path: Arc<Path>,
369}
370
371#[derive(Debug, Clone, PartialEq, Eq)]
372pub struct InlayHint {
373 pub position: language::Anchor,
374 pub label: InlayHintLabel,
375 pub kind: Option<InlayHintKind>,
376 pub padding_left: bool,
377 pub padding_right: bool,
378 pub tooltip: Option<InlayHintTooltip>,
379 pub resolve_state: ResolveState,
380}
381
382#[derive(Debug, Clone, PartialEq, Eq)]
383pub enum ResolveState {
384 Resolved,
385 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
386 Resolving,
387}
388
389impl InlayHint {
390 pub fn text(&self) -> String {
391 match &self.label {
392 InlayHintLabel::String(s) => s.to_owned(),
393 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
394 }
395 }
396}
397
398#[derive(Debug, Clone, PartialEq, Eq)]
399pub enum InlayHintLabel {
400 String(String),
401 LabelParts(Vec<InlayHintLabelPart>),
402}
403
404#[derive(Debug, Clone, PartialEq, Eq)]
405pub struct InlayHintLabelPart {
406 pub value: String,
407 pub tooltip: Option<InlayHintLabelPartTooltip>,
408 pub location: Option<(LanguageServerId, lsp::Location)>,
409}
410
411#[derive(Debug, Clone, PartialEq, Eq)]
412pub enum InlayHintTooltip {
413 String(String),
414 MarkupContent(MarkupContent),
415}
416
417#[derive(Debug, Clone, PartialEq, Eq)]
418pub enum InlayHintLabelPartTooltip {
419 String(String),
420 MarkupContent(MarkupContent),
421}
422
423#[derive(Debug, Clone, PartialEq, Eq)]
424pub struct MarkupContent {
425 pub kind: HoverBlockKind,
426 pub value: String,
427}
428
429#[derive(Debug, Clone)]
430pub struct LocationLink {
431 pub origin: Option<Location>,
432 pub target: Location,
433}
434
435#[derive(Debug)]
436pub struct DocumentHighlight {
437 pub range: Range<language::Anchor>,
438 pub kind: DocumentHighlightKind,
439}
440
441#[derive(Clone, Debug)]
442pub struct Symbol {
443 pub language_server_name: LanguageServerName,
444 pub source_worktree_id: WorktreeId,
445 pub path: ProjectPath,
446 pub label: CodeLabel,
447 pub name: String,
448 pub kind: lsp::SymbolKind,
449 pub range: Range<Unclipped<PointUtf16>>,
450 pub signature: [u8; 32],
451}
452
453#[derive(Clone, Debug, PartialEq)]
454pub struct HoverBlock {
455 pub text: String,
456 pub kind: HoverBlockKind,
457}
458
459#[derive(Clone, Debug, PartialEq, Eq)]
460pub enum HoverBlockKind {
461 PlainText,
462 Markdown,
463 Code { language: String },
464}
465
466#[derive(Debug)]
467pub struct Hover {
468 pub contents: Vec<HoverBlock>,
469 pub range: Option<Range<language::Anchor>>,
470 pub language: Option<Arc<Language>>,
471}
472
473impl Hover {
474 pub fn is_empty(&self) -> bool {
475 self.contents.iter().all(|block| block.text.is_empty())
476 }
477}
478
479#[derive(Default)]
480pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
481
482#[derive(Debug, Clone, Copy, PartialEq, Eq)]
483pub enum FormatTrigger {
484 Save,
485 Manual,
486}
487
488// Currently, formatting operations are represented differently depending on
489// whether they come from a language server or an external command.
490enum FormatOperation {
491 Lsp(Vec<(Range<Anchor>, String)>),
492 External(Diff),
493 Prettier(Diff),
494}
495
496impl FormatTrigger {
497 fn from_proto(value: i32) -> FormatTrigger {
498 match value {
499 0 => FormatTrigger::Save,
500 1 => FormatTrigger::Manual,
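            // Unknown proto values fall back to `Save`.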
501 _ => FormatTrigger::Save,
502 }
503 }
504}
505
506#[derive(Clone, Debug, PartialEq)]
507enum SearchMatchCandidate {
508 OpenBuffer {
509 buffer: Model<Buffer>,
        // This might be an unnamed file without a representation on the filesystem.
511 path: Option<Arc<Path>>,
512 },
513 Path {
514 worktree_id: WorktreeId,
515 is_ignored: bool,
516 path: Arc<Path>,
517 },
518}
519
520impl SearchMatchCandidate {
521 fn path(&self) -> Option<Arc<Path>> {
522 match self {
523 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
524 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
525 }
526 }
527
528 fn is_ignored(&self) -> bool {
529 matches!(
530 self,
531 SearchMatchCandidate::Path {
532 is_ignored: true,
533 ..
534 }
535 )
536 }
537}
538
539pub enum SearchResult {
540 Buffer {
541 buffer: Model<Buffer>,
542 ranges: Vec<Range<Anchor>>,
543 },
544 LimitReached,
545}
546
547impl Project {
548 pub fn init_settings(cx: &mut AppContext) {
549 WorktreeSettings::register(cx);
550 ProjectSettings::register(cx);
551 }
552
553 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
554 Self::init_settings(cx);
555
556 client.add_model_message_handler(Self::handle_add_collaborator);
557 client.add_model_message_handler(Self::handle_update_project_collaborator);
558 client.add_model_message_handler(Self::handle_remove_collaborator);
559 client.add_model_message_handler(Self::handle_buffer_reloaded);
560 client.add_model_message_handler(Self::handle_buffer_saved);
561 client.add_model_message_handler(Self::handle_start_language_server);
562 client.add_model_message_handler(Self::handle_update_language_server);
563 client.add_model_message_handler(Self::handle_update_project);
564 client.add_model_message_handler(Self::handle_unshare_project);
565 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
566 client.add_model_message_handler(Self::handle_update_buffer_file);
567 client.add_model_request_handler(Self::handle_update_buffer);
568 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
569 client.add_model_message_handler(Self::handle_update_worktree);
570 client.add_model_message_handler(Self::handle_update_worktree_settings);
571 client.add_model_request_handler(Self::handle_create_project_entry);
572 client.add_model_request_handler(Self::handle_rename_project_entry);
573 client.add_model_request_handler(Self::handle_copy_project_entry);
574 client.add_model_request_handler(Self::handle_delete_project_entry);
575 client.add_model_request_handler(Self::handle_expand_project_entry);
576 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
577 client.add_model_request_handler(Self::handle_resolve_completion_documentation);
578 client.add_model_request_handler(Self::handle_apply_code_action);
579 client.add_model_request_handler(Self::handle_on_type_formatting);
580 client.add_model_request_handler(Self::handle_inlay_hints);
581 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
582 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
583 client.add_model_request_handler(Self::handle_reload_buffers);
584 client.add_model_request_handler(Self::handle_synchronize_buffers);
585 client.add_model_request_handler(Self::handle_format_buffers);
586 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
587 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
588 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
589 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
590 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
591 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
592 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
593 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
594 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
595 client.add_model_request_handler(Self::handle_search_project);
596 client.add_model_request_handler(Self::handle_get_project_symbols);
597 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
598 client.add_model_request_handler(Self::handle_open_buffer_by_id);
599 client.add_model_request_handler(Self::handle_open_buffer_by_path);
600 client.add_model_request_handler(Self::handle_save_buffer);
601 client.add_model_message_handler(Self::handle_update_diff_base);
602 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
603 client.add_model_request_handler(Self::handle_blame_buffer);
604 }
605
606 pub fn local(
607 client: Arc<Client>,
608 node: Arc<dyn NodeRuntime>,
609 user_store: Model<UserStore>,
610 languages: Arc<LanguageRegistry>,
611 fs: Arc<dyn Fs>,
612 cx: &mut AppContext,
613 ) -> Model<Self> {
614 cx.new_model(|cx: &mut ModelContext<Self>| {
615 let (tx, rx) = mpsc::unbounded();
616 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
617 .detach();
618 let copilot_lsp_subscription =
619 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
620 let tasks = Inventory::new(cx);
621
622 Self {
623 worktrees: Vec::new(),
624 buffer_ordered_messages_tx: tx,
625 flush_language_server_update: None,
626 pending_language_server_update: None,
627 collaborators: Default::default(),
628 next_buffer_id: BufferId::new(1).unwrap(),
629 opened_buffers: Default::default(),
630 shared_buffers: Default::default(),
631 loading_buffers_by_path: Default::default(),
632 loading_local_worktrees: Default::default(),
633 local_buffer_ids_by_path: Default::default(),
634 local_buffer_ids_by_entry_id: Default::default(),
635 buffer_snapshots: Default::default(),
636 join_project_response_message_id: 0,
637 client_state: ProjectClientState::Local,
638 loading_buffers: HashMap::default(),
639 incomplete_remote_buffers: HashMap::default(),
640 client_subscriptions: Vec::new(),
641 _subscriptions: vec![
642 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
643 cx.on_release(Self::release),
644 cx.on_app_quit(Self::shutdown_language_servers),
645 ],
646 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
647 _maintain_workspace_config: Self::maintain_workspace_config(cx),
648 active_entry: None,
649 languages,
650 client,
651 user_store,
652 fs,
653 next_entry_id: Default::default(),
654 next_diagnostic_group_id: Default::default(),
655 supplementary_language_servers: HashMap::default(),
656 language_servers: Default::default(),
657 language_server_ids: HashMap::default(),
658 language_server_statuses: Default::default(),
659 last_formatting_failure: None,
660 last_workspace_edits_by_language_server: Default::default(),
661 language_server_watched_paths: HashMap::default(),
662 buffers_being_formatted: Default::default(),
663 buffers_needing_diff: Default::default(),
664 git_diff_debouncer: DebouncedDelay::new(),
665 nonce: StdRng::from_entropy().gen(),
666 terminals: Terminals {
667 local_handles: Vec::new(),
668 },
669 copilot_lsp_subscription,
670 copilot_log_subscription: None,
671 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
672 node: Some(node),
673 default_prettier: DefaultPrettier::default(),
674 prettiers_per_worktree: HashMap::default(),
675 prettier_instances: HashMap::default(),
676 tasks,
677 hosted_project_id: None,
678 search_history: Self::new_search_history(),
679 }
680 })
681 }
682
683 pub async fn remote(
684 remote_id: u64,
685 client: Arc<Client>,
686 user_store: Model<UserStore>,
687 languages: Arc<LanguageRegistry>,
688 fs: Arc<dyn Fs>,
689 cx: AsyncAppContext,
690 ) -> Result<Model<Self>> {
691 client.authenticate_and_connect(true, &cx).await?;
692
693 let subscription = client.subscribe_to_entity(remote_id)?;
694 let response = client
695 .request_envelope(proto::JoinProject {
696 project_id: remote_id,
697 })
698 .await?;
699 Self::from_join_project_response(
700 response,
701 subscription,
702 client,
703 user_store,
704 languages,
705 fs,
706 cx,
707 )
708 .await
709 }
710 async fn from_join_project_response(
711 response: TypedEnvelope<proto::JoinProjectResponse>,
712 subscription: PendingEntitySubscription<Project>,
713 client: Arc<Client>,
714 user_store: Model<UserStore>,
715 languages: Arc<LanguageRegistry>,
716 fs: Arc<dyn Fs>,
717 mut cx: AsyncAppContext,
718 ) -> Result<Model<Self>> {
719 let remote_id = response.payload.project_id;
720 let role = response.payload.role();
721 let this = cx.new_model(|cx| {
722 let replica_id = response.payload.replica_id as ReplicaId;
723 let tasks = Inventory::new(cx);
            // BIG CAUTION NOTE: The order in which we initialize fields here matters, and it should match what's done in Self::local.
            // Otherwise, you might run into issues where a worktree id on the remote host differs from the one on the local host.
            // That's because a worktree's identifier is its entity id, which should probably be changed.
727 let mut worktrees = Vec::new();
728 for worktree in response.payload.worktrees {
729 let worktree =
730 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
731 worktrees.push(worktree);
732 }
733
734 let (tx, rx) = mpsc::unbounded();
735 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
736 .detach();
737 let copilot_lsp_subscription =
738 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
739 let mut this = Self {
740 worktrees: Vec::new(),
741 buffer_ordered_messages_tx: tx,
742 pending_language_server_update: None,
743 flush_language_server_update: None,
744 loading_buffers_by_path: Default::default(),
745 next_buffer_id: BufferId::new(1).unwrap(),
746 loading_buffers: Default::default(),
747 shared_buffers: Default::default(),
748 incomplete_remote_buffers: Default::default(),
749 loading_local_worktrees: Default::default(),
750 local_buffer_ids_by_path: Default::default(),
751 local_buffer_ids_by_entry_id: Default::default(),
752 active_entry: None,
753 collaborators: Default::default(),
754 join_project_response_message_id: response.message_id,
755 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
756 _maintain_workspace_config: Self::maintain_workspace_config(cx),
757 languages,
758 user_store: user_store.clone(),
759 fs,
760 next_entry_id: Default::default(),
761 next_diagnostic_group_id: Default::default(),
762 client_subscriptions: Default::default(),
763 _subscriptions: vec![
764 cx.on_release(Self::release),
765 cx.on_app_quit(Self::shutdown_language_servers),
766 ],
767 client: client.clone(),
768 client_state: ProjectClientState::Remote {
769 sharing_has_stopped: false,
770 capability: Capability::ReadWrite,
771 remote_id,
772 replica_id,
773 },
774 supplementary_language_servers: HashMap::default(),
775 language_servers: Default::default(),
776 language_server_ids: HashMap::default(),
777 language_server_statuses: response
778 .payload
779 .language_servers
780 .into_iter()
781 .map(|server| {
782 (
783 LanguageServerId(server.id as usize),
784 LanguageServerStatus {
785 name: server.name,
786 pending_work: Default::default(),
787 has_pending_diagnostic_updates: false,
788 progress_tokens: Default::default(),
789 },
790 )
791 })
792 .collect(),
793 last_formatting_failure: None,
794 last_workspace_edits_by_language_server: Default::default(),
795 language_server_watched_paths: HashMap::default(),
796 opened_buffers: Default::default(),
797 buffers_being_formatted: Default::default(),
798 buffers_needing_diff: Default::default(),
799 git_diff_debouncer: DebouncedDelay::new(),
800 buffer_snapshots: Default::default(),
801 nonce: StdRng::from_entropy().gen(),
802 terminals: Terminals {
803 local_handles: Vec::new(),
804 },
805 copilot_lsp_subscription,
806 copilot_log_subscription: None,
807 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
808 node: None,
809 default_prettier: DefaultPrettier::default(),
810 prettiers_per_worktree: HashMap::default(),
811 prettier_instances: HashMap::default(),
812 tasks,
813 hosted_project_id: None,
814 search_history: Self::new_search_history(),
815 };
816 this.set_role(role, cx);
817 for worktree in worktrees {
818 let _ = this.add_worktree(&worktree, cx);
819 }
820 this
821 })?;
822 let subscription = subscription.set_model(&this, &mut cx);
823
824 let user_ids = response
825 .payload
826 .collaborators
827 .iter()
828 .map(|peer| peer.user_id)
829 .collect();
830 user_store
831 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
832 .await?;
833
834 this.update(&mut cx, |this, cx| {
835 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
836 this.client_subscriptions.push(subscription);
837 anyhow::Ok(())
838 })??;
839
840 Ok(this)
841 }
842
843 pub async fn hosted(
844 remote_id: ProjectId,
845 user_store: Model<UserStore>,
846 client: Arc<Client>,
847 languages: Arc<LanguageRegistry>,
848 fs: Arc<dyn Fs>,
849 cx: AsyncAppContext,
850 ) -> Result<Model<Self>> {
851 client.authenticate_and_connect(true, &cx).await?;
852
853 let subscription = client.subscribe_to_entity(remote_id.0)?;
854 let response = client
855 .request_envelope(proto::JoinHostedProject {
856 project_id: remote_id.0,
857 })
858 .await?;
859 Self::from_join_project_response(
860 response,
861 subscription,
862 client,
863 user_store,
864 languages,
865 fs,
866 cx,
867 )
868 .await
869 }
870
871 fn new_search_history() -> SearchHistory {
872 SearchHistory::new(
873 Some(MAX_PROJECT_SEARCH_HISTORY_SIZE),
874 search_history::QueryInsertionBehavior::AlwaysInsert,
875 )
876 }
877
878 fn release(&mut self, cx: &mut AppContext) {
879 match &self.client_state {
880 ProjectClientState::Local => {}
881 ProjectClientState::Shared { .. } => {
882 let _ = self.unshare_internal(cx);
883 }
884 ProjectClientState::Remote { remote_id, .. } => {
885 let _ = self.client.send(proto::LeaveProject {
886 project_id: *remote_id,
887 });
888 self.disconnected_from_host_internal(cx);
889 }
890 }
891 }
892
893 fn shutdown_language_servers(
894 &mut self,
895 _cx: &mut ModelContext<Self>,
896 ) -> impl Future<Output = ()> {
897 let shutdown_futures = self
898 .language_servers
899 .drain()
900 .map(|(_, server_state)| async {
901 use LanguageServerState::*;
902 match server_state {
903 Running { server, .. } => server.shutdown()?.await,
904 Starting(task) => task.await?.shutdown()?.await,
905 }
906 })
907 .collect::<Vec<_>>();
908
909 async move {
910 futures::future::join_all(shutdown_futures).await;
911 }
912 }
913
914 #[cfg(any(test, feature = "test-support"))]
915 pub async fn test(
916 fs: Arc<dyn Fs>,
917 root_paths: impl IntoIterator<Item = &Path>,
918 cx: &mut gpui::TestAppContext,
919 ) -> Model<Project> {
920 use clock::FakeSystemClock;
921
922 let languages = LanguageRegistry::test(cx.executor());
923 let clock = Arc::new(FakeSystemClock::default());
924 let http_client = util::http::FakeHttpClient::with_404_response();
925 let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
926 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
927 let project = cx.update(|cx| {
928 Project::local(
929 client,
930 node_runtime::FakeNodeRuntime::new(),
931 user_store,
932 Arc::new(languages),
933 fs,
934 cx,
935 )
936 });
937 for path in root_paths {
938 let (tree, _) = project
939 .update(cx, |project, cx| {
940 project.find_or_create_local_worktree(path, true, cx)
941 })
942 .await
943 .unwrap();
944 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
945 .await;
946 }
947 project
948 }
949
950 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
951 let mut language_servers_to_start = Vec::new();
952 let mut language_formatters_to_check = Vec::new();
953 for buffer in self.opened_buffers.values() {
954 if let Some(buffer) = buffer.upgrade() {
955 let buffer = buffer.read(cx);
956 let buffer_file = File::from_dyn(buffer.file());
957 let buffer_language = buffer.language();
958 let settings = language_settings(buffer_language, buffer.file(), cx);
959 if let Some(language) = buffer_language {
960 if settings.enable_language_server {
961 if let Some(file) = buffer_file {
962 language_servers_to_start
963 .push((file.worktree.clone(), Arc::clone(language)));
964 }
965 }
966 language_formatters_to_check.push((
967 buffer_file.map(|f| f.worktree_id(cx)),
968 Arc::clone(language),
969 settings.clone(),
970 ));
971 }
972 }
973 }
974
975 let mut language_servers_to_stop = Vec::new();
976 let mut language_servers_to_restart = Vec::new();
977 let languages = self.languages.to_vec();
978
979 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
980 let current_lsp_settings = &self.current_lsp_settings;
981 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
982 let language = languages.iter().find_map(|l| {
983 let adapter = self
984 .languages
985 .lsp_adapters(l)
986 .iter()
987 .find(|adapter| &adapter.name == started_lsp_name)?
988 .clone();
989 Some((l, adapter))
990 });
991 if let Some((language, adapter)) = language {
992 let worktree = self.worktree_for_id(*worktree_id, cx);
993 let file = worktree.as_ref().and_then(|tree| {
994 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
995 });
996 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
997 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
998 } else if let Some(worktree) = worktree {
999 let server_name = &adapter.name.0;
1000 match (
1001 current_lsp_settings.get(server_name),
1002 new_lsp_settings.get(server_name),
1003 ) {
1004 (None, None) => {}
1005 (Some(_), None) | (None, Some(_)) => {
1006 language_servers_to_restart.push((worktree, Arc::clone(language)));
1007 }
1008 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
1009 if current_lsp_settings != new_lsp_settings {
1010 language_servers_to_restart.push((worktree, Arc::clone(language)));
1011 }
1012 }
1013 }
1014 }
1015 }
1016 }
1017 self.current_lsp_settings = new_lsp_settings;
1018
1019 // Stop all newly-disabled language servers.
1020 for (worktree_id, adapter_name) in language_servers_to_stop {
1021 self.stop_language_server(worktree_id, adapter_name, cx)
1022 .detach();
1023 }
1024
1025 let mut prettier_plugins_by_worktree = HashMap::default();
1026 for (worktree, language, settings) in language_formatters_to_check {
1027 if let Some(plugins) =
1028 prettier_support::prettier_plugins_for_language(&language, &settings)
1029 {
1030 prettier_plugins_by_worktree
1031 .entry(worktree)
1032 .or_insert_with(|| HashSet::default())
1033 .extend(plugins.iter().cloned());
1034 }
1035 }
1036 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
1037 self.install_default_prettier(worktree, prettier_plugins.into_iter(), cx);
1038 }
1039
1040 // Start all the newly-enabled language servers.
1041 for (worktree, language) in language_servers_to_start {
1042 self.start_language_servers(&worktree, language, cx);
1043 }
1044
1045 // Restart all language servers with changed initialization options.
1046 for (worktree, language) in language_servers_to_restart {
1047 self.restart_language_servers(worktree, language, cx);
1048 }
1049
1050 if self.copilot_lsp_subscription.is_none() {
1051 if let Some(copilot) = Copilot::global(cx) {
1052 for buffer in self.opened_buffers.values() {
1053 if let Some(buffer) = buffer.upgrade() {
1054 self.register_buffer_with_copilot(&buffer, cx);
1055 }
1056 }
1057 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
1058 }
1059 }
1060
1061 cx.notify();
1062 }
1063
1064 pub fn buffer_for_id(&self, remote_id: BufferId) -> Option<Model<Buffer>> {
1065 self.opened_buffers
1066 .get(&remote_id)
1067 .and_then(|buffer| buffer.upgrade())
1068 }
1069
1070 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1071 &self.languages
1072 }
1073
1074 pub fn client(&self) -> Arc<Client> {
1075 self.client.clone()
1076 }
1077
1078 pub fn user_store(&self) -> Model<UserStore> {
1079 self.user_store.clone()
1080 }
1081
1082 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1083 self.opened_buffers
1084 .values()
1085 .filter_map(|b| b.upgrade())
1086 .collect()
1087 }
1088
1089 #[cfg(any(test, feature = "test-support"))]
1090 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1091 let path = path.into();
1092 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1093 self.opened_buffers.iter().any(|(_, buffer)| {
1094 if let Some(buffer) = buffer.upgrade() {
1095 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1096 if file.worktree == worktree && file.path() == &path.path {
1097 return true;
1098 }
1099 }
1100 }
1101 false
1102 })
1103 } else {
1104 false
1105 }
1106 }
1107
1108 pub fn fs(&self) -> &Arc<dyn Fs> {
1109 &self.fs
1110 }
1111
1112 pub fn remote_id(&self) -> Option<u64> {
1113 match self.client_state {
1114 ProjectClientState::Local => None,
1115 ProjectClientState::Shared { remote_id, .. }
1116 | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
1117 }
1118 }
1119
1120 pub fn hosted_project_id(&self) -> Option<ProjectId> {
1121 self.hosted_project_id
1122 }
1123
1124 pub fn replica_id(&self) -> ReplicaId {
1125 match self.client_state {
1126 ProjectClientState::Remote { replica_id, .. } => replica_id,
1127 _ => 0,
1128 }
1129 }
1130
1131 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1132 if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
1133 updates_tx
1134 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1135 .ok();
1136 }
1137 cx.notify();
1138 }
1139
1140 pub fn task_inventory(&self) -> &Model<Inventory> {
1141 &self.tasks
1142 }
1143
1144 pub fn search_history(&self) -> &SearchHistory {
1145 &self.search_history
1146 }
1147
1148 pub fn search_history_mut(&mut self) -> &mut SearchHistory {
1149 &mut self.search_history
1150 }
1151
1152 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1153 &self.collaborators
1154 }
1155
1156 pub fn host(&self) -> Option<&Collaborator> {
1157 self.collaborators.values().find(|c| c.replica_id == 0)
1158 }
1159
1160 /// Collect all worktrees, including ones that don't appear in the project panel
1161 pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
1162 self.worktrees
1163 .iter()
1164 .filter_map(move |worktree| worktree.upgrade())
1165 }
1166
1167 /// Collect all user-visible worktrees, the ones that appear in the project panel
1168 pub fn visible_worktrees<'a>(
1169 &'a self,
1170 cx: &'a AppContext,
1171 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1172 self.worktrees.iter().filter_map(|worktree| {
1173 worktree.upgrade().and_then(|worktree| {
1174 if worktree.read(cx).is_visible() {
1175 Some(worktree)
1176 } else {
1177 None
1178 }
1179 })
1180 })
1181 }
1182
1183 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1184 self.visible_worktrees(cx)
1185 .map(|tree| tree.read(cx).root_name())
1186 }
1187
1188 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1189 self.worktrees()
1190 .find(|worktree| worktree.read(cx).id() == id)
1191 }
1192
1193 pub fn worktree_for_entry(
1194 &self,
1195 entry_id: ProjectEntryId,
1196 cx: &AppContext,
1197 ) -> Option<Model<Worktree>> {
1198 self.worktrees()
1199 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1200 }
1201
1202 pub fn worktree_id_for_entry(
1203 &self,
1204 entry_id: ProjectEntryId,
1205 cx: &AppContext,
1206 ) -> Option<WorktreeId> {
1207 self.worktree_for_entry(entry_id, cx)
1208 .map(|worktree| worktree.read(cx).id())
1209 }
1210
1211 pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option<bool> {
1212 paths
1213 .iter()
1214 .map(|path| self.visibility_for_path(path, cx))
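            // `Option<bool>` orders as `None < Some(false) < Some(true)`, so a path inside any
            // visible worktree wins; `flatten` collapses the `Option<Option<bool>>` from `max`.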
1215 .max()
1216 .flatten()
1217 }
1218
1219 pub fn visibility_for_path(&self, path: &Path, cx: &AppContext) -> Option<bool> {
1220 self.worktrees()
1221 .filter_map(|worktree| {
1222 let worktree = worktree.read(cx);
1223 worktree
1224 .as_local()?
1225 .contains_abs_path(path)
1226 .then(|| worktree.is_visible())
1227 })
1228 .max()
1229 }
1230
1231 pub fn create_entry(
1232 &mut self,
1233 project_path: impl Into<ProjectPath>,
1234 is_directory: bool,
1235 cx: &mut ModelContext<Self>,
1236 ) -> Task<Result<Option<Entry>>> {
1237 let project_path = project_path.into();
1238 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1239 return Task::ready(Ok(None));
1240 };
1241 if self.is_local() {
1242 worktree.update(cx, |worktree, cx| {
1243 worktree
1244 .as_local_mut()
1245 .unwrap()
1246 .create_entry(project_path.path, is_directory, cx)
1247 })
1248 } else {
1249 let client = self.client.clone();
1250 let project_id = self.remote_id().unwrap();
1251 cx.spawn(move |_, mut cx| async move {
1252 let response = client
1253 .request(proto::CreateProjectEntry {
1254 worktree_id: project_path.worktree_id.to_proto(),
1255 project_id,
1256 path: project_path.path.to_string_lossy().into(),
1257 is_directory,
1258 })
1259 .await?;
1260 match response.entry {
1261 Some(entry) => worktree
1262 .update(&mut cx, |worktree, cx| {
1263 worktree.as_remote_mut().unwrap().insert_entry(
1264 entry,
1265 response.worktree_scan_id as usize,
1266 cx,
1267 )
1268 })?
1269 .await
1270 .map(Some),
1271 None => Ok(None),
1272 }
1273 })
1274 }
1275 }
1276
1277 pub fn copy_entry(
1278 &mut self,
1279 entry_id: ProjectEntryId,
1280 new_path: impl Into<Arc<Path>>,
1281 cx: &mut ModelContext<Self>,
1282 ) -> Task<Result<Option<Entry>>> {
1283 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1284 return Task::ready(Ok(None));
1285 };
1286 let new_path = new_path.into();
1287 if self.is_local() {
1288 worktree.update(cx, |worktree, cx| {
1289 worktree
1290 .as_local_mut()
1291 .unwrap()
1292 .copy_entry(entry_id, new_path, cx)
1293 })
1294 } else {
1295 let client = self.client.clone();
1296 let project_id = self.remote_id().unwrap();
1297
1298 cx.spawn(move |_, mut cx| async move {
1299 let response = client
1300 .request(proto::CopyProjectEntry {
1301 project_id,
1302 entry_id: entry_id.to_proto(),
1303 new_path: new_path.to_string_lossy().into(),
1304 })
1305 .await?;
1306 match response.entry {
1307 Some(entry) => worktree
1308 .update(&mut cx, |worktree, cx| {
1309 worktree.as_remote_mut().unwrap().insert_entry(
1310 entry,
1311 response.worktree_scan_id as usize,
1312 cx,
1313 )
1314 })?
1315 .await
1316 .map(Some),
1317 None => Ok(None),
1318 }
1319 })
1320 }
1321 }
1322
1323 pub fn rename_entry(
1324 &mut self,
1325 entry_id: ProjectEntryId,
1326 new_path: impl Into<Arc<Path>>,
1327 cx: &mut ModelContext<Self>,
1328 ) -> Task<Result<Option<Entry>>> {
1329 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1330 return Task::ready(Ok(None));
1331 };
1332 let new_path = new_path.into();
1333 if self.is_local() {
1334 worktree.update(cx, |worktree, cx| {
1335 worktree
1336 .as_local_mut()
1337 .unwrap()
1338 .rename_entry(entry_id, new_path, cx)
1339 })
1340 } else {
1341 let client = self.client.clone();
1342 let project_id = self.remote_id().unwrap();
1343
1344 cx.spawn(move |_, mut cx| async move {
1345 let response = client
1346 .request(proto::RenameProjectEntry {
1347 project_id,
1348 entry_id: entry_id.to_proto(),
1349 new_path: new_path.to_string_lossy().into(),
1350 })
1351 .await?;
1352 match response.entry {
1353 Some(entry) => worktree
1354 .update(&mut cx, |worktree, cx| {
1355 worktree.as_remote_mut().unwrap().insert_entry(
1356 entry,
1357 response.worktree_scan_id as usize,
1358 cx,
1359 )
1360 })?
1361 .await
1362 .map(Some),
1363 None => Ok(None),
1364 }
1365 })
1366 }
1367 }
1368
1369 pub fn delete_entry(
1370 &mut self,
1371 entry_id: ProjectEntryId,
1372 cx: &mut ModelContext<Self>,
1373 ) -> Option<Task<Result<()>>> {
1374 let worktree = self.worktree_for_entry(entry_id, cx)?;
1375
1376 cx.emit(Event::DeletedEntry(entry_id));
1377
1378 if self.is_local() {
1379 worktree.update(cx, |worktree, cx| {
1380 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1381 })
1382 } else {
1383 let client = self.client.clone();
1384 let project_id = self.remote_id().unwrap();
1385 Some(cx.spawn(move |_, mut cx| async move {
1386 let response = client
1387 .request(proto::DeleteProjectEntry {
1388 project_id,
1389 entry_id: entry_id.to_proto(),
1390 })
1391 .await?;
1392 worktree
1393 .update(&mut cx, move |worktree, cx| {
1394 worktree.as_remote_mut().unwrap().delete_entry(
1395 entry_id,
1396 response.worktree_scan_id as usize,
1397 cx,
1398 )
1399 })?
1400 .await
1401 }))
1402 }
1403 }
1404
1405 pub fn expand_entry(
1406 &mut self,
1407 worktree_id: WorktreeId,
1408 entry_id: ProjectEntryId,
1409 cx: &mut ModelContext<Self>,
1410 ) -> Option<Task<Result<()>>> {
1411 let worktree = self.worktree_for_id(worktree_id, cx)?;
1412 if self.is_local() {
1413 worktree.update(cx, |worktree, cx| {
1414 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1415 })
1416 } else {
1417 let worktree = worktree.downgrade();
1418 let request = self.client.request(proto::ExpandProjectEntry {
1419 project_id: self.remote_id().unwrap(),
1420 entry_id: entry_id.to_proto(),
1421 });
1422 Some(cx.spawn(move |_, mut cx| async move {
1423 let response = request.await?;
1424 if let Some(worktree) = worktree.upgrade() {
1425 worktree
1426 .update(&mut cx, |worktree, _| {
1427 worktree
1428 .as_remote_mut()
1429 .unwrap()
1430 .wait_for_snapshot(response.worktree_scan_id as usize)
1431 })?
1432 .await?;
1433 }
1434 Ok(())
1435 }))
1436 }
1437 }
1438
1439 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1440 if !matches!(self.client_state, ProjectClientState::Local) {
1441 return Err(anyhow!("project was already shared"));
1442 }
1443 self.client_subscriptions.push(
1444 self.client
1445 .subscribe_to_entity(project_id)?
1446 .set_model(&cx.handle(), &mut cx.to_async()),
1447 );
1448
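        // While the project is shared, hold buffers and worktrees with strong handles so they
        // stay alive for remote collaborators.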
1449 for open_buffer in self.opened_buffers.values_mut() {
1450 match open_buffer {
1451 OpenBuffer::Strong(_) => {}
1452 OpenBuffer::Weak(buffer) => {
1453 if let Some(buffer) = buffer.upgrade() {
1454 *open_buffer = OpenBuffer::Strong(buffer);
1455 }
1456 }
1457 OpenBuffer::Operations(_) => unreachable!(),
1458 }
1459 }
1460
1461 for worktree_handle in self.worktrees.iter_mut() {
1462 match worktree_handle {
1463 WorktreeHandle::Strong(_) => {}
1464 WorktreeHandle::Weak(worktree) => {
1465 if let Some(worktree) = worktree.upgrade() {
1466 *worktree_handle = WorktreeHandle::Strong(worktree);
1467 }
1468 }
1469 }
1470 }
1471
1472 for (server_id, status) in &self.language_server_statuses {
1473 self.client
1474 .send(proto::StartLanguageServer {
1475 project_id,
1476 server: Some(proto::LanguageServer {
1477 id: server_id.0 as u64,
1478 name: status.name.clone(),
1479 }),
1480 })
1481 .log_err();
1482 }
1483
1484 let store = cx.global::<SettingsStore>();
1485 for worktree in self.worktrees() {
1486 let worktree_id = worktree.read(cx).id().to_proto();
1487 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1488 self.client
1489 .send(proto::UpdateWorktreeSettings {
1490 project_id,
1491 worktree_id,
1492 path: path.to_string_lossy().into(),
1493 content: Some(content),
1494 })
1495 .log_err();
1496 }
1497 }
1498
1499 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1500 let client = self.client.clone();
1501 self.client_state = ProjectClientState::Shared {
1502 remote_id: project_id,
1503 updates_tx,
1504 _send_updates: cx.spawn(move |this, mut cx| async move {
1505 while let Some(update) = updates_rx.next().await {
1506 match update {
1507 LocalProjectUpdate::WorktreesChanged => {
1508 let worktrees = this.update(&mut cx, |this, _cx| {
1509 this.worktrees().collect::<Vec<_>>()
1510 })?;
1511 let update_project = this
1512 .update(&mut cx, |this, cx| {
1513 this.client.request(proto::UpdateProject {
1514 project_id,
1515 worktrees: this.worktree_metadata_protos(cx),
1516 })
1517 })?
1518 .await;
1519 if update_project.is_ok() {
1520 for worktree in worktrees {
1521 worktree.update(&mut cx, |worktree, cx| {
1522 let worktree = worktree.as_local_mut().unwrap();
1523 worktree.share(project_id, cx).detach_and_log_err(cx)
1524 })?;
1525 }
1526 }
1527 }
1528 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1529 let buffer = this.update(&mut cx, |this, _| {
1530 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1531 let shared_buffers =
1532 this.shared_buffers.entry(peer_id).or_default();
1533 if shared_buffers.insert(buffer_id) {
1534 if let OpenBuffer::Strong(buffer) = buffer {
1535 Some(buffer.clone())
1536 } else {
1537 None
1538 }
1539 } else {
1540 None
1541 }
1542 })?;
1543
1544 let Some(buffer) = buffer else { continue };
1545 let operations =
1546 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1547 let operations = operations.await;
1548 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1549
1550 let initial_state = proto::CreateBufferForPeer {
1551 project_id,
1552 peer_id: Some(peer_id),
1553 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1554 };
1555 if client.send(initial_state).log_err().is_some() {
1556 let client = client.clone();
1557 cx.background_executor()
1558 .spawn(async move {
1559 let mut chunks = split_operations(operations).peekable();
1560 while let Some(chunk) = chunks.next() {
1561 let is_last = chunks.peek().is_none();
1562 client.send(proto::CreateBufferForPeer {
1563 project_id,
1564 peer_id: Some(peer_id),
1565 variant: Some(
1566 proto::create_buffer_for_peer::Variant::Chunk(
1567 proto::BufferChunk {
1568 buffer_id: buffer_id.into(),
1569 operations: chunk,
1570 is_last,
1571 },
1572 ),
1573 ),
1574 })?;
1575 }
1576 anyhow::Ok(())
1577 })
1578 .await
1579 .log_err();
1580 }
1581 }
1582 }
1583 }
1584 Ok(())
1585 }),
1586 };
1587
1588 self.metadata_changed(cx);
1589 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1590 cx.notify();
1591 Ok(())
1592 }
1593
1594 pub fn reshared(
1595 &mut self,
1596 message: proto::ResharedProject,
1597 cx: &mut ModelContext<Self>,
1598 ) -> Result<()> {
1599 self.shared_buffers.clear();
1600 self.set_collaborators_from_proto(message.collaborators, cx)?;
1601 self.metadata_changed(cx);
1602 Ok(())
1603 }
1604
1605 pub fn rejoined(
1606 &mut self,
1607 message: proto::RejoinedProject,
1608 message_id: u32,
1609 cx: &mut ModelContext<Self>,
1610 ) -> Result<()> {
1611 cx.update_global::<SettingsStore, _>(|store, cx| {
1612 for worktree in &self.worktrees {
1613 store
1614 .clear_local_settings(worktree.handle_id(), cx)
1615 .log_err();
1616 }
1617 });
1618
1619 self.join_project_response_message_id = message_id;
1620 self.set_worktrees_from_proto(message.worktrees, cx)?;
1621 self.set_collaborators_from_proto(message.collaborators, cx)?;
1622 self.language_server_statuses = message
1623 .language_servers
1624 .into_iter()
1625 .map(|server| {
1626 (
1627 LanguageServerId(server.id as usize),
1628 LanguageServerStatus {
1629 name: server.name,
1630 pending_work: Default::default(),
1631 has_pending_diagnostic_updates: false,
1632 progress_tokens: Default::default(),
1633 },
1634 )
1635 })
1636 .collect();
1637 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
1638 .unwrap();
1639 cx.notify();
1640 Ok(())
1641 }
1642
1643 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1644 self.unshare_internal(cx)?;
1645 self.metadata_changed(cx);
1646 cx.notify();
1647 Ok(())
1648 }
1649
1650 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1651 if self.is_remote() {
1652 return Err(anyhow!("attempted to unshare a remote project"));
1653 }
1654
1655 if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
1656 self.client_state = ProjectClientState::Local;
1657 self.collaborators.clear();
1658 self.shared_buffers.clear();
1659 self.client_subscriptions.clear();
1660
1661 for worktree_handle in self.worktrees.iter_mut() {
1662 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1663 let is_visible = worktree.update(cx, |worktree, _| {
1664 worktree.as_local_mut().unwrap().unshare();
1665 worktree.is_visible()
1666 });
1667 if !is_visible {
1668 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1669 }
1670 }
1671 }
1672
1673 for open_buffer in self.opened_buffers.values_mut() {
1674 // Wake up any tasks waiting for peers' edits to this buffer.
1675 if let Some(buffer) = open_buffer.upgrade() {
1676 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1677 }
1678
1679 if let OpenBuffer::Strong(buffer) = open_buffer {
1680 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1681 }
1682 }
1683
1684 self.client.send(proto::UnshareProject {
1685 project_id: remote_id,
1686 })?;
1687
1688 Ok(())
1689 } else {
1690 Err(anyhow!("attempted to unshare an unshared project"))
1691 }
1692 }
1693
1694 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1695 self.disconnected_from_host_internal(cx);
1696 cx.emit(Event::DisconnectedFromHost);
1697 cx.notify();
1698 }
1699
1700 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1701 let new_capability =
1702 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1703 Capability::ReadWrite
1704 } else {
1705 Capability::ReadOnly
1706 };
1707 if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
1708 if *capability == new_capability {
1709 return;
1710 }
1711
1712 *capability = new_capability;
1713 for buffer in self.opened_buffers() {
1714 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1715 }
1716 }
1717 }
1718
1719 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1720 if let ProjectClientState::Remote {
1721 sharing_has_stopped,
1722 ..
1723 } = &mut self.client_state
1724 {
1725 *sharing_has_stopped = true;
1726
1727 self.collaborators.clear();
1728
1729 for worktree in &self.worktrees {
1730 if let Some(worktree) = worktree.upgrade() {
1731 worktree.update(cx, |worktree, _| {
1732 if let Some(worktree) = worktree.as_remote_mut() {
1733 worktree.disconnected_from_host();
1734 }
1735 });
1736 }
1737 }
1738
1739 for open_buffer in self.opened_buffers.values_mut() {
1740 // Wake up any tasks waiting for peers' edits to this buffer.
1741 if let Some(buffer) = open_buffer.upgrade() {
1742 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1743 }
1744
1745 if let OpenBuffer::Strong(buffer) = open_buffer {
1746 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1747 }
1748 }
1749
1750 // Wake up all futures currently waiting on a buffer to get opened,
1751 // to give them a chance to fail now that we've disconnected.
1752 self.loading_buffers.clear();
1753 // self.opened_buffer.send(OpenedBufferEvent::Disconnected);
1754 }
1755 }
1756
1757 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1758 cx.emit(Event::Closed);
1759 }
1760
1761 pub fn is_disconnected(&self) -> bool {
1762 match &self.client_state {
1763 ProjectClientState::Remote {
1764 sharing_has_stopped,
1765 ..
1766 } => *sharing_has_stopped,
1767 _ => false,
1768 }
1769 }
1770
1771 pub fn capability(&self) -> Capability {
1772 match &self.client_state {
1773 ProjectClientState::Remote { capability, .. } => *capability,
1774 ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
1775 }
1776 }
1777
1778 pub fn is_read_only(&self) -> bool {
1779 self.is_disconnected() || self.capability() == Capability::ReadOnly
1780 }
1781
1782 pub fn is_local(&self) -> bool {
1783 match &self.client_state {
1784 ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
1785 ProjectClientState::Remote { .. } => false,
1786 }
1787 }
1788
1789 pub fn is_remote(&self) -> bool {
1790 !self.is_local()
1791 }
1792
1793 pub fn create_buffer(
1794 &mut self,
1795 text: &str,
1796 language: Option<Arc<Language>>,
1797 cx: &mut ModelContext<Self>,
1798 ) -> Result<Model<Buffer>> {
1799 if self.is_remote() {
1800 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1801 }
1802 let id = self.next_buffer_id.next();
1803 let buffer = cx.new_model(|cx| {
1804 Buffer::new(self.replica_id(), id, text)
1805 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1806 });
1807 self.register_buffer(&buffer, cx)?;
1808 Ok(buffer)
1809 }
1810
1811 pub fn open_path(
1812 &mut self,
1813 path: ProjectPath,
1814 cx: &mut ModelContext<Self>,
1815 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1816 let task = self.open_buffer(path.clone(), cx);
1817 cx.spawn(move |_, cx| async move {
1818 let buffer = task.await?;
1819 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1820 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1821 })?;
1822
1823 let buffer: &AnyModel = &buffer;
1824 Ok((project_entry_id, buffer.clone()))
1825 })
1826 }
1827
1828 pub fn open_local_buffer(
1829 &mut self,
1830 abs_path: impl AsRef<Path>,
1831 cx: &mut ModelContext<Self>,
1832 ) -> Task<Result<Model<Buffer>>> {
1833 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1834 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1835 } else {
1836 Task::ready(Err(anyhow!("no such path")))
1837 }
1838 }
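/// Opens (or returns an already-open) buffer for the given project path.
/// Concurrent calls for the same path share a single load: the first caller
/// records a watch channel in `loading_buffers_by_path`, and later callers
/// simply await it.
///
/// A rough usage sketch (the `worktree_id` and path below are illustrative
/// assumptions, not values defined in this file):
///
/// ```ignore
/// let task = project.update(cx, |project, cx| {
///     project.open_buffer((worktree_id, PathBuf::from("src/lib.rs")), cx)
/// });
/// let buffer = task.await?;
/// ```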
1839
1840 pub fn open_buffer(
1841 &mut self,
1842 path: impl Into<ProjectPath>,
1843 cx: &mut ModelContext<Self>,
1844 ) -> Task<Result<Model<Buffer>>> {
1845 let project_path = path.into();
1846 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1847 worktree
1848 } else {
1849 return Task::ready(Err(anyhow!("no such worktree")));
1850 };
1851
1852 // If there is already a buffer for the given path, then return it.
1853 let existing_buffer = self.get_open_buffer(&project_path, cx);
1854 if let Some(existing_buffer) = existing_buffer {
1855 return Task::ready(Ok(existing_buffer));
1856 }
1857
1858 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1859 // If the given path is already being loaded, then wait for that existing
1860 // task to complete and return the same buffer.
1861 hash_map::Entry::Occupied(e) => e.get().clone(),
1862
1863 // Otherwise, record the fact that this path is now being loaded.
1864 hash_map::Entry::Vacant(entry) => {
1865 let (mut tx, rx) = postage::watch::channel();
1866 entry.insert(rx.clone());
1867
1868 let project_path = project_path.clone();
1869 let load_buffer = if worktree.read(cx).is_local() {
1870 self.open_local_buffer_internal(project_path.path.clone(), worktree, cx)
1871 } else {
1872 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1873 };
1874
1875 cx.spawn(move |this, mut cx| async move {
1876 let load_result = load_buffer.await;
1877 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1878 // Record the fact that the buffer is no longer loading.
1879 this.loading_buffers_by_path.remove(&project_path);
1880 let buffer = load_result.map_err(Arc::new)?;
1881 Ok(buffer)
1882 })?);
1883 anyhow::Ok(())
1884 })
1885 .detach();
1886 rx
1887 }
1888 };
1889
1890 cx.background_executor().spawn(async move {
1891 wait_for_loading_buffer(loading_watch)
1892 .await
1893 .map_err(|e| e.cloned())
1894 })
1895 }
1896
1897 fn open_local_buffer_internal(
1898 &mut self,
1899 path: Arc<Path>,
1900 worktree: Model<Worktree>,
1901 cx: &mut ModelContext<Self>,
1902 ) -> Task<Result<Model<Buffer>>> {
1903 let buffer_id = self.next_buffer_id.next();
1904 let load_buffer = worktree.update(cx, |worktree, cx| {
1905 let worktree = worktree.as_local_mut().unwrap();
1906 worktree.load_buffer(buffer_id, &path, cx)
1907 });
1908 fn is_not_found_error(error: &anyhow::Error) -> bool {
1909 error
1910 .root_cause()
1911 .downcast_ref::<io::Error>()
1912 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
1913 }
1914 cx.spawn(move |this, mut cx| async move {
1915 let buffer = match load_buffer.await {
1916 Ok(buffer) => Ok(buffer),
1917 Err(error) if is_not_found_error(&error) => {
1918 worktree.update(&mut cx, |worktree, cx| {
1919 let worktree = worktree.as_local_mut().unwrap();
1920 worktree.new_buffer(buffer_id, path, cx)
1921 })
1922 }
1923 Err(e) => Err(e),
1924 }?;
1925 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1926 Ok(buffer)
1927 })
1928 }
1929
1930 fn open_remote_buffer_internal(
1931 &mut self,
1932 path: &Arc<Path>,
1933 worktree: &Model<Worktree>,
1934 cx: &mut ModelContext<Self>,
1935 ) -> Task<Result<Model<Buffer>>> {
1936 let rpc = self.client.clone();
1937 let project_id = self.remote_id().unwrap();
1938 let remote_worktree_id = worktree.read(cx).id();
1939 let path = path.clone();
1940 let path_string = path.to_string_lossy().to_string();
1941 cx.spawn(move |this, mut cx| async move {
1942 let response = rpc
1943 .request(proto::OpenBufferByPath {
1944 project_id,
1945 worktree_id: remote_worktree_id.to_proto(),
1946 path: path_string,
1947 })
1948 .await?;
1949 let buffer_id = BufferId::new(response.buffer_id)?;
1950 this.update(&mut cx, |this, cx| {
1951 this.wait_for_remote_buffer(buffer_id, cx)
1952 })?
1953 .await
1954 })
1955 }
1956
1957 /// LanguageServerName is owned, because it is inserted into a map
1958 pub fn open_local_buffer_via_lsp(
1959 &mut self,
1960 abs_path: lsp::Url,
1961 language_server_id: LanguageServerId,
1962 language_server_name: LanguageServerName,
1963 cx: &mut ModelContext<Self>,
1964 ) -> Task<Result<Model<Buffer>>> {
1965 cx.spawn(move |this, mut cx| async move {
1966 let abs_path = abs_path
1967 .to_file_path()
1968 .map_err(|_| anyhow!("can't convert URI to path"))?;
1969 let (worktree, relative_path) = if let Some(result) =
1970 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1971 {
1972 result
1973 } else {
1974 let worktree = this
1975 .update(&mut cx, |this, cx| {
1976 this.create_local_worktree(&abs_path, false, cx)
1977 })?
1978 .await?;
1979 this.update(&mut cx, |this, cx| {
1980 this.language_server_ids.insert(
1981 (worktree.read(cx).id(), language_server_name),
1982 language_server_id,
1983 );
1984 })
1985 .ok();
1986 (worktree, PathBuf::new())
1987 };
1988
1989 let project_path = ProjectPath {
1990 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1991 path: relative_path.into(),
1992 };
1993 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1994 .await
1995 })
1996 }
1997
1998 pub fn open_buffer_by_id(
1999 &mut self,
2000 id: BufferId,
2001 cx: &mut ModelContext<Self>,
2002 ) -> Task<Result<Model<Buffer>>> {
2003 if let Some(buffer) = self.buffer_for_id(id) {
2004 Task::ready(Ok(buffer))
2005 } else if self.is_local() {
2006 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
2007 } else if let Some(project_id) = self.remote_id() {
2008 let request = self.client.request(proto::OpenBufferById {
2009 project_id,
2010 id: id.into(),
2011 });
2012 cx.spawn(move |this, mut cx| async move {
2013 let buffer_id = BufferId::new(request.await?.buffer_id)?;
2014 this.update(&mut cx, |this, cx| {
2015 this.wait_for_remote_buffer(buffer_id, cx)
2016 })?
2017 .await
2018 })
2019 } else {
2020 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
2021 }
2022 }
2023
2024 pub fn save_buffers(
2025 &self,
2026 buffers: HashSet<Model<Buffer>>,
2027 cx: &mut ModelContext<Self>,
2028 ) -> Task<Result<()>> {
2029 cx.spawn(move |this, mut cx| async move {
2030 let save_tasks = buffers.into_iter().filter_map(|buffer| {
2031 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
2032 .ok()
2033 });
2034 try_join_all(save_tasks).await?;
2035 Ok(())
2036 })
2037 }
2038
2039 pub fn save_buffer(
2040 &self,
2041 buffer: Model<Buffer>,
2042 cx: &mut ModelContext<Self>,
2043 ) -> Task<Result<()>> {
2044 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2045 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
2046 };
2047 let worktree = file.worktree.clone();
2048 let path = file.path.clone();
2049 worktree.update(cx, |worktree, cx| match worktree {
2050 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
2051 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
2052 })
2053 }
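/// Saves a buffer under a new absolute path, creating or reusing a local
/// worktree for that path. The buffer is detached from its old file's language
/// servers first and re-registered (with language re-detection) afterwards.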
2054
2055 pub fn save_buffer_as(
2056 &mut self,
2057 buffer: Model<Buffer>,
2058 abs_path: PathBuf,
2059 cx: &mut ModelContext<Self>,
2060 ) -> Task<Result<()>> {
2061 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
2062 let old_file = File::from_dyn(buffer.read(cx).file())
2063 .filter(|f| f.is_local())
2064 .cloned();
2065 cx.spawn(move |this, mut cx| async move {
2066 if let Some(old_file) = &old_file {
2067 this.update(&mut cx, |this, cx| {
2068 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
2069 })?;
2070 }
2071 let (worktree, path) = worktree_task.await?;
2072 worktree
2073 .update(&mut cx, |worktree, cx| match worktree {
2074 Worktree::Local(worktree) => {
2075 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
2076 }
2077 Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
2078 })?
2079 .await?;
2080
2081 this.update(&mut cx, |this, cx| {
2082 this.detect_language_for_buffer(&buffer, cx);
2083 this.register_buffer_with_language_servers(&buffer, cx);
2084 })?;
2085 Ok(())
2086 })
2087 }
2088
2089 pub fn get_open_buffer(
2090 &mut self,
2091 path: &ProjectPath,
2092 cx: &mut ModelContext<Self>,
2093 ) -> Option<Model<Buffer>> {
2094 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
2095 self.opened_buffers.values().find_map(|buffer| {
2096 let buffer = buffer.upgrade()?;
2097 let file = File::from_dyn(buffer.read(cx).file())?;
2098 if file.worktree == worktree && file.path() == &path.path {
2099 Some(buffer)
2100 } else {
2101 None
2102 }
2103 })
2104 }
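/// Records a newly opened buffer in `opened_buffers`, wiring it up to the
/// language registry, language servers, Copilot, diff recalculation, and
/// buffer event handling. Shared and remote projects keep strong handles so
/// buffers stay alive for collaborators; otherwise only weak handles are kept.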
2105
2106 fn register_buffer(
2107 &mut self,
2108 buffer: &Model<Buffer>,
2109 cx: &mut ModelContext<Self>,
2110 ) -> Result<()> {
2111 self.request_buffer_diff_recalculation(buffer, cx);
2112 buffer.update(cx, |buffer, _| {
2113 buffer.set_language_registry(self.languages.clone())
2114 });
2115
2116 let remote_id = buffer.read(cx).remote_id();
2117 let is_remote = self.is_remote();
2118 let open_buffer = if is_remote || self.is_shared() {
2119 OpenBuffer::Strong(buffer.clone())
2120 } else {
2121 OpenBuffer::Weak(buffer.downgrade())
2122 };
2123
2124 match self.opened_buffers.entry(remote_id) {
2125 hash_map::Entry::Vacant(entry) => {
2126 entry.insert(open_buffer);
2127 }
2128 hash_map::Entry::Occupied(mut entry) => {
2129 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2130 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2131 } else if entry.get().upgrade().is_some() {
2132 if is_remote {
2133 return Ok(());
2134 } else {
2135 debug_panic!("buffer {} was already registered", remote_id);
2136 Err(anyhow!("buffer {} was already registered", remote_id))?;
2137 }
2138 }
2139 entry.insert(open_buffer);
2140 }
2141 }
2142 cx.subscribe(buffer, |this, buffer, event, cx| {
2143 this.on_buffer_event(buffer, event, cx);
2144 })
2145 .detach();
2146
2147 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2148 if file.is_local {
2149 self.local_buffer_ids_by_path.insert(
2150 ProjectPath {
2151 worktree_id: file.worktree_id(cx),
2152 path: file.path.clone(),
2153 },
2154 remote_id,
2155 );
2156
2157 if let Some(entry_id) = file.entry_id {
2158 self.local_buffer_ids_by_entry_id
2159 .insert(entry_id, remote_id);
2160 }
2161 }
2162 }
2163
2164 self.detect_language_for_buffer(buffer, cx);
2165 self.register_buffer_with_language_servers(buffer, cx);
2166 self.register_buffer_with_copilot(buffer, cx);
2167 cx.observe_release(buffer, |this, buffer, cx| {
2168 if let Some(file) = File::from_dyn(buffer.file()) {
2169 if file.is_local() {
2170 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2171 for server in this.language_servers_for_buffer(buffer, cx) {
2172 server
2173 .1
2174 .notify::<lsp::notification::DidCloseTextDocument>(
2175 lsp::DidCloseTextDocumentParams {
2176 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2177 },
2178 )
2179 .log_err();
2180 }
2181 }
2182 }
2183 })
2184 .detach();
2185
2186 if let Some(senders) = self.loading_buffers.remove(&remote_id) {
2187 for sender in senders {
2188 sender.send(Ok(buffer.clone())).ok();
2189 }
2190 }
2191 Ok(())
2192 }
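/// Notifies every running language server that applies to this buffer's
/// language and worktree that the document was opened (`textDocument/didOpen`),
/// seeds any stored diagnostics for the path, sets completion trigger
/// characters on the buffer, and records an initial LSP snapshot at version 0.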
2193
2194 fn register_buffer_with_language_servers(
2195 &mut self,
2196 buffer_handle: &Model<Buffer>,
2197 cx: &mut ModelContext<Self>,
2198 ) {
2199 let buffer = buffer_handle.read(cx);
2200 let buffer_id = buffer.remote_id();
2201
2202 if let Some(file) = File::from_dyn(buffer.file()) {
2203 if !file.is_local() {
2204 return;
2205 }
2206
2207 let abs_path = file.abs_path(cx);
2208 let uri = lsp::Url::from_file_path(&abs_path)
2209 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2210 let initial_snapshot = buffer.text_snapshot();
2211 let language = buffer.language().cloned();
2212 let worktree_id = file.worktree_id(cx);
2213
2214 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2215 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2216 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2217 .log_err();
2218 }
2219 }
2220
2221 if let Some(language) = language {
2222 for adapter in self.languages.lsp_adapters(&language) {
2223 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2224 let server = self
2225 .language_server_ids
2226 .get(&(worktree_id, adapter.name.clone()))
2227 .and_then(|id| self.language_servers.get(id))
2228 .and_then(|server_state| {
2229 if let LanguageServerState::Running { server, .. } = server_state {
2230 Some(server.clone())
2231 } else {
2232 None
2233 }
2234 });
2235 let server = match server {
2236 Some(server) => server,
2237 None => continue,
2238 };
2239
2240 server
2241 .notify::<lsp::notification::DidOpenTextDocument>(
2242 lsp::DidOpenTextDocumentParams {
2243 text_document: lsp::TextDocumentItem::new(
2244 uri.clone(),
2245 language_id.unwrap_or_default(),
2246 0,
2247 initial_snapshot.text(),
2248 ),
2249 },
2250 )
2251 .log_err();
2252
2253 buffer_handle.update(cx, |buffer, cx| {
2254 buffer.set_completion_triggers(
2255 server
2256 .capabilities()
2257 .completion_provider
2258 .as_ref()
2259 .and_then(|provider| provider.trigger_characters.clone())
2260 .unwrap_or_default(),
2261 cx,
2262 );
2263 });
2264
2265 let snapshot = LspBufferSnapshot {
2266 version: 0,
2267 snapshot: initial_snapshot.clone(),
2268 };
2269 self.buffer_snapshots
2270 .entry(buffer_id)
2271 .or_default()
2272 .insert(server.server_id(), vec![snapshot]);
2273 }
2274 }
2275 }
2276 }
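/// The inverse of `register_buffer_with_language_servers`: clears diagnostics
/// for the buffer, drops its LSP snapshots, and sends `textDocument/didClose`
/// to every language server that was tracking the old file.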
2277
2278 fn unregister_buffer_from_language_servers(
2279 &mut self,
2280 buffer: &Model<Buffer>,
2281 old_file: &File,
2282 cx: &mut ModelContext<Self>,
2283 ) {
2284 let old_path = match old_file.as_local() {
2285 Some(local) => local.abs_path(cx),
2286 None => return,
2287 };
2288
2289 buffer.update(cx, |buffer, cx| {
2290 let worktree_id = old_file.worktree_id(cx);
2291 let ids = &self.language_server_ids;
2292
2293 if let Some(language) = buffer.language().cloned() {
2294 for adapter in self.languages.lsp_adapters(&language) {
2295 if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) {
2296 buffer.update_diagnostics(*server_id, Default::default(), cx);
2297 }
2298 }
2299 }
2300
2301 self.buffer_snapshots.remove(&buffer.remote_id());
2302 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2303 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2304 language_server
2305 .notify::<lsp::notification::DidCloseTextDocument>(
2306 lsp::DidCloseTextDocumentParams {
2307 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2308 },
2309 )
2310 .log_err();
2311 }
2312 });
2313 }
2314
2315 fn register_buffer_with_copilot(
2316 &self,
2317 buffer_handle: &Model<Buffer>,
2318 cx: &mut ModelContext<Self>,
2319 ) {
2320 if let Some(copilot) = Copilot::global(cx) {
2321 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2322 }
2323 }
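/// Drains `BufferOrderedMessage`s from the channel in batches, forwarding
/// buffer operations and language-server updates to the remote peer in order.
/// Pending operation batches are flushed before any language-server update is
/// sent, so updates are never sent ahead of the edits they refer to; a failed
/// send on a guest marks the project as needing a resync with the host.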
2324
2325 async fn send_buffer_ordered_messages(
2326 this: WeakModel<Self>,
2327 rx: UnboundedReceiver<BufferOrderedMessage>,
2328 mut cx: AsyncAppContext,
2329 ) -> Result<()> {
2330 const MAX_BATCH_SIZE: usize = 128;
2331
2332 let mut operations_by_buffer_id = HashMap::default();
2333 async fn flush_operations(
2334 this: &WeakModel<Project>,
2335 operations_by_buffer_id: &mut HashMap<BufferId, Vec<proto::Operation>>,
2336 needs_resync_with_host: &mut bool,
2337 is_local: bool,
2338 cx: &mut AsyncAppContext,
2339 ) -> Result<()> {
2340 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2341 let request = this.update(cx, |this, _| {
2342 let project_id = this.remote_id()?;
2343 Some(this.client.request(proto::UpdateBuffer {
2344 buffer_id: buffer_id.into(),
2345 project_id,
2346 operations,
2347 }))
2348 })?;
2349 if let Some(request) = request {
2350 if request.await.is_err() && !is_local {
2351 *needs_resync_with_host = true;
2352 break;
2353 }
2354 }
2355 }
2356 Ok(())
2357 }
2358
2359 let mut needs_resync_with_host = false;
2360 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2361
2362 while let Some(changes) = changes.next().await {
2363 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2364
2365 for change in changes {
2366 match change {
2367 BufferOrderedMessage::Operation {
2368 buffer_id,
2369 operation,
2370 } => {
2371 if needs_resync_with_host {
2372 continue;
2373 }
2374
2375 operations_by_buffer_id
2376 .entry(buffer_id)
2377 .or_insert(Vec::new())
2378 .push(operation);
2379 }
2380
2381 BufferOrderedMessage::Resync => {
2382 operations_by_buffer_id.clear();
2383 if this
2384 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2385 .await
2386 .is_ok()
2387 {
2388 needs_resync_with_host = false;
2389 }
2390 }
2391
2392 BufferOrderedMessage::LanguageServerUpdate {
2393 language_server_id,
2394 message,
2395 } => {
2396 flush_operations(
2397 &this,
2398 &mut operations_by_buffer_id,
2399 &mut needs_resync_with_host,
2400 is_local,
2401 &mut cx,
2402 )
2403 .await?;
2404
2405 this.update(&mut cx, |this, _| {
2406 if let Some(project_id) = this.remote_id() {
2407 this.client
2408 .send(proto::UpdateLanguageServer {
2409 project_id,
2410 language_server_id: language_server_id.0 as u64,
2411 variant: Some(message),
2412 })
2413 .log_err();
2414 }
2415 })?;
2416 }
2417 }
2418 }
2419
2420 flush_operations(
2421 &this,
2422 &mut operations_by_buffer_id,
2423 &mut needs_resync_with_host,
2424 is_local,
2425 &mut cx,
2426 )
2427 .await?;
2428 }
2429
2430 Ok(())
2431 }
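/// Reacts to buffer events: forwards operations to collaborators, sends
/// `didChange`/`didSave` notifications (full or incremental, depending on the
/// server's declared sync kind) to the relevant language servers, and keeps
/// the path and entry-id indexes up to date when a buffer's file handle changes.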
2432
2433 fn on_buffer_event(
2434 &mut self,
2435 buffer: Model<Buffer>,
2436 event: &BufferEvent,
2437 cx: &mut ModelContext<Self>,
2438 ) -> Option<()> {
2439 if matches!(
2440 event,
2441 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2442 ) {
2443 self.request_buffer_diff_recalculation(&buffer, cx);
2444 }
2445
2446 match event {
2447 BufferEvent::Operation(operation) => {
2448 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation {
2449 buffer_id: buffer.read(cx).remote_id(),
2450 operation: language::proto::serialize_operation(operation),
2451 })
2452 .ok();
2453 }
2454
2455 BufferEvent::Edited { .. } => {
2456 let buffer = buffer.read(cx);
2457 let file = File::from_dyn(buffer.file())?;
2458 let abs_path = file.as_local()?.abs_path(cx);
2459 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2460 let next_snapshot = buffer.text_snapshot();
2461
2462 let language_servers: Vec<_> = self
2463 .language_servers_for_buffer(buffer, cx)
2464 .map(|i| i.1.clone())
2465 .collect();
2466
2467 for language_server in language_servers {
2468 let language_server = language_server.clone();
2469
2470 let buffer_snapshots = self
2471 .buffer_snapshots
2472 .get_mut(&buffer.remote_id())
2473 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2474 let previous_snapshot = buffer_snapshots.last()?;
2475
2476 let build_incremental_change = || {
2477 buffer
2478 .edits_since::<(PointUtf16, usize)>(
2479 previous_snapshot.snapshot.version(),
2480 )
2481 .map(|edit| {
2482 let edit_start = edit.new.start.0;
2483 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2484 let new_text = next_snapshot
2485 .text_for_range(edit.new.start.1..edit.new.end.1)
2486 .collect();
2487 lsp::TextDocumentContentChangeEvent {
2488 range: Some(lsp::Range::new(
2489 point_to_lsp(edit_start),
2490 point_to_lsp(edit_end),
2491 )),
2492 range_length: None,
2493 text: new_text,
2494 }
2495 })
2496 .collect()
2497 };
2498
2499 let document_sync_kind = language_server
2500 .capabilities()
2501 .text_document_sync
2502 .as_ref()
2503 .and_then(|sync| match sync {
2504 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2505 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2506 });
2507
2508 let content_changes: Vec<_> = match document_sync_kind {
2509 Some(lsp::TextDocumentSyncKind::FULL) => {
2510 vec![lsp::TextDocumentContentChangeEvent {
2511 range: None,
2512 range_length: None,
2513 text: next_snapshot.text(),
2514 }]
2515 }
2516 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2517 _ => {
2518 #[cfg(any(test, feature = "test-support"))]
2519 {
2520 build_incremental_change()
2521 }
2522
2523 #[cfg(not(any(test, feature = "test-support")))]
2524 {
2525 continue;
2526 }
2527 }
2528 };
2529
2530 let next_version = previous_snapshot.version + 1;
2531
2532 buffer_snapshots.push(LspBufferSnapshot {
2533 version: next_version,
2534 snapshot: next_snapshot.clone(),
2535 });
2536
2537 language_server
2538 .notify::<lsp::notification::DidChangeTextDocument>(
2539 lsp::DidChangeTextDocumentParams {
2540 text_document: lsp::VersionedTextDocumentIdentifier::new(
2541 uri.clone(),
2542 next_version,
2543 ),
2544 content_changes,
2545 },
2546 )
2547 .log_err();
2548 }
2549 }
2550
2551 BufferEvent::Saved => {
2552 let file = File::from_dyn(buffer.read(cx).file())?;
2553 let worktree_id = file.worktree_id(cx);
2554 let abs_path = file.as_local()?.abs_path(cx);
2555 let text_document = lsp::TextDocumentIdentifier {
2556 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2557 };
2558
2559 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2560 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2561
2562 server
2563 .notify::<lsp::notification::DidSaveTextDocument>(
2564 lsp::DidSaveTextDocumentParams {
2565 text_document: text_document.clone(),
2566 text,
2567 },
2568 )
2569 .log_err();
2570 }
2571
2572 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2573 for language_server_id in language_server_ids {
2574 if let Some(LanguageServerState::Running {
2575 adapter,
2576 simulate_disk_based_diagnostics_completion,
2577 ..
2578 }) = self.language_servers.get_mut(&language_server_id)
2579 {
2580 // After saving a buffer using a language server that doesn't provide
2581 // a disk-based progress token, kick off a timer that will reset every
2582 // time the buffer is saved. If the timer eventually fires, simulate
2583 // disk-based diagnostics being finished so that other pieces of UI
2584 // (e.g., project diagnostics view, diagnostic status bar) can update.
2585 // We don't emit an event right away because the language server might take
2586 // some time to publish diagnostics.
2587 if adapter.disk_based_diagnostics_progress_token.is_none() {
2588 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2589 Duration::from_secs(1);
2590
2591 let task = cx.spawn(move |this, mut cx| async move {
2592 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2593 if let Some(this) = this.upgrade() {
2594 this.update(&mut cx, |this, cx| {
2595 this.disk_based_diagnostics_finished(
2596 language_server_id,
2597 cx,
2598 );
2599 this.enqueue_buffer_ordered_message(
2600 BufferOrderedMessage::LanguageServerUpdate {
2601 language_server_id,
2602 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2603 },
2604 )
2605 .ok();
2606 }).ok();
2607 }
2608 });
2609 *simulate_disk_based_diagnostics_completion = Some(task);
2610 }
2611 }
2612 }
2613 }
2614 BufferEvent::FileHandleChanged => {
2615 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2616 return None;
2617 };
2618
2619 let remote_id = buffer.read(cx).remote_id();
2620 if let Some(entry_id) = file.entry_id {
2621 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2622 Some(_) => {
2623 return None;
2624 }
2625 None => {
2626 self.local_buffer_ids_by_entry_id
2627 .insert(entry_id, remote_id);
2628 }
2629 }
2630 };
2631 self.local_buffer_ids_by_path.insert(
2632 ProjectPath {
2633 worktree_id: file.worktree_id(cx),
2634 path: file.path.clone(),
2635 },
2636 remote_id,
2637 );
2638 }
2639 _ => {}
2640 }
2641
2642 None
2643 }
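/// Schedules a git diff recalculation for the buffer. If a gutter debounce is
/// configured it is applied (with a 50ms floor); otherwise the recalculation
/// is deferred to run once after the current effect cycle.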
2644
2645 fn request_buffer_diff_recalculation(
2646 &mut self,
2647 buffer: &Model<Buffer>,
2648 cx: &mut ModelContext<Self>,
2649 ) {
2650 self.buffers_needing_diff.insert(buffer.downgrade());
2651 let first_insertion = self.buffers_needing_diff.len() == 1;
2652
2653 let settings = ProjectSettings::get_global(cx);
2654 let delay = if let Some(delay) = settings.git.gutter_debounce {
2655 delay
2656 } else {
2657 if first_insertion {
2658 let this = cx.weak_model();
2659 cx.defer(move |cx| {
2660 if let Some(this) = this.upgrade() {
2661 this.update(cx, |this, cx| {
2662 this.recalculate_buffer_diffs(cx).detach();
2663 });
2664 }
2665 });
2666 }
2667 return;
2668 };
2669
2670 const MIN_DELAY: u64 = 50;
2671 let delay = delay.max(MIN_DELAY);
2672 let duration = Duration::from_millis(delay);
2673
2674 self.git_diff_debouncer
2675 .fire_new(duration, cx, move |this, cx| {
2676 this.recalculate_buffer_diffs(cx)
2677 });
2678 }
2679
2680 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2681 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2682 cx.spawn(move |this, mut cx| async move {
2683 let tasks: Vec<_> = buffers
2684 .iter()
2685 .filter_map(|buffer| {
2686 let buffer = buffer.upgrade()?;
2687 buffer
2688 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2689 .ok()
2690 .flatten()
2691 })
2692 .collect();
2693
2694 futures::future::join_all(tasks).await;
2695
2696 this.update(&mut cx, |this, cx| {
2697 if !this.buffers_needing_diff.is_empty() {
2698 this.recalculate_buffer_diffs(cx).detach();
2699 } else {
2700 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2701 for buffer in buffers {
2702 if let Some(buffer) = buffer.upgrade() {
2703 buffer.update(cx, |_, cx| cx.notify());
2704 }
2705 }
2706 }
2707 })
2708 .ok();
2709 })
2710 }
2711
2712 fn language_servers_for_worktree(
2713 &self,
2714 worktree_id: WorktreeId,
2715 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2716 self.language_server_ids
2717 .iter()
2718 .filter_map(move |((language_server_worktree_id, _), id)| {
2719 if *language_server_worktree_id == worktree_id {
2720 if let Some(LanguageServerState::Running {
2721 adapter,
2722 language,
2723 server,
2724 ..
2725 }) = self.language_servers.get(id)
2726 {
2727 return Some((adapter, language, server));
2728 }
2729 }
2730 None
2731 })
2732 }
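/// Long-running task that listens for language registry changes. When the
/// registry is reloaded, languages are cleared and re-detected on all open
/// buffers; newly available languages are assigned to plain-text buffers, and
/// buffers with unknown injections are reparsed.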
2733
2734 fn maintain_buffer_languages(
2735 languages: Arc<LanguageRegistry>,
2736 cx: &mut ModelContext<Project>,
2737 ) -> Task<()> {
2738 let mut subscription = languages.subscribe();
2739 let mut prev_reload_count = languages.reload_count();
2740 cx.spawn(move |project, mut cx| async move {
2741 while let Some(()) = subscription.next().await {
2742 if let Some(project) = project.upgrade() {
2743 // If the language registry has been reloaded, then remove and
2744 // re-assign the languages on all open buffers.
2745 let reload_count = languages.reload_count();
2746 if reload_count > prev_reload_count {
2747 prev_reload_count = reload_count;
2748 project
2749 .update(&mut cx, |this, cx| {
2750 let buffers = this
2751 .opened_buffers
2752 .values()
2753 .filter_map(|b| b.upgrade())
2754 .collect::<Vec<_>>();
2755 for buffer in buffers {
2756 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2757 {
2758 this.unregister_buffer_from_language_servers(
2759 &buffer, &f, cx,
2760 );
2761 buffer
2762 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2763 }
2764 }
2765 })
2766 .ok();
2767 }
2768
2769 project
2770 .update(&mut cx, |project, cx| {
2771 let mut plain_text_buffers = Vec::new();
2772 let mut buffers_with_unknown_injections = Vec::new();
2773 for buffer in project.opened_buffers.values() {
2774 if let Some(handle) = buffer.upgrade() {
2775 let buffer = &handle.read(cx);
2776 if buffer.language().is_none()
2777 || buffer.language() == Some(&*language::PLAIN_TEXT)
2778 {
2779 plain_text_buffers.push(handle);
2780 } else if buffer.contains_unknown_injections() {
2781 buffers_with_unknown_injections.push(handle);
2782 }
2783 }
2784 }
2785
2786 for buffer in plain_text_buffers {
2787 project.detect_language_for_buffer(&buffer, cx);
2788 project.register_buffer_with_language_servers(&buffer, cx);
2789 }
2790
2791 for buffer in buffers_with_unknown_injections {
2792 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2793 }
2794 })
2795 .ok();
2796 }
2797 }
2798 })
2799 }
2800
2801 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2802 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2803 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2804
2805 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2806 *settings_changed_tx.borrow_mut() = ();
2807 });
2808
2809 cx.spawn(move |this, mut cx| async move {
2810 while let Some(()) = settings_changed_rx.next().await {
2811 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2812 this.language_servers
2813 .values()
2814 .filter_map(|state| match state {
2815 LanguageServerState::Starting(_) => None,
2816 LanguageServerState::Running {
2817 adapter, server, ..
2818 } => Some((adapter.clone(), server.clone())),
2819 })
2820 .collect()
2821 })?;
2822
2823 for (adapter, server) in servers {
2824 let settings =
2825 cx.update(|cx| adapter.workspace_configuration(server.root_path(), cx))?;
2826
2827 server
2828 .notify::<lsp::notification::DidChangeConfiguration>(
2829 lsp::DidChangeConfigurationParams { settings },
2830 )
2831 .ok();
2832 }
2833 }
2834
2835 drop(settings_observation);
2836 anyhow::Ok(())
2837 })
2838 }
2839
2840 fn detect_language_for_buffer(
2841 &mut self,
2842 buffer_handle: &Model<Buffer>,
2843 cx: &mut ModelContext<Self>,
2844 ) {
2845 // If the buffer has a language, set it and start the language server if we haven't already.
2846 let buffer = buffer_handle.read(cx);
2847 let Some(file) = buffer.file() else {
2848 return;
2849 };
2850 let content = buffer.as_rope();
2851 let Some(new_language_result) = self
2852 .languages
2853 .language_for_file(file, Some(content), cx)
2854 .now_or_never()
2855 else {
2856 return;
2857 };
2858
2859 match new_language_result {
2860 Err(e) => {
2861 if e.is::<language::LanguageNotFound>() {
2862 cx.emit(Event::LanguageNotFound(buffer_handle.clone()))
2863 }
2864 }
2865 Ok(new_language) => {
2866 self.set_language_for_buffer(buffer_handle, new_language, cx);
2867 }
2868 };
2869 }
2870
2871 pub fn set_language_for_buffer(
2872 &mut self,
2873 buffer: &Model<Buffer>,
2874 new_language: Arc<Language>,
2875 cx: &mut ModelContext<Self>,
2876 ) {
2877 buffer.update(cx, |buffer, cx| {
2878 if buffer.language().map_or(true, |old_language| {
2879 !Arc::ptr_eq(old_language, &new_language)
2880 }) {
2881 buffer.set_language(Some(new_language.clone()), cx);
2882 }
2883 });
2884
2885 let buffer_file = buffer.read(cx).file().cloned();
2886 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2887 let buffer_file = File::from_dyn(buffer_file.as_ref());
2888 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2889 if let Some(prettier_plugins) =
2890 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2891 {
2892 self.install_default_prettier(worktree, prettier_plugins.iter().cloned(), cx);
2893 };
2894 if let Some(file) = buffer_file {
2895 let worktree = file.worktree.clone();
2896 if worktree.read(cx).is_local() {
2897 self.start_language_servers(&worktree, new_language, cx);
2898 }
2899 }
2900 }
2901
2902 fn start_language_servers(
2903 &mut self,
2904 worktree: &Model<Worktree>,
2905 language: Arc<Language>,
2906 cx: &mut ModelContext<Self>,
2907 ) {
2908 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2909 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2910 if !settings.enable_language_server {
2911 return;
2912 }
2913
2914 for adapter in self.languages.clone().lsp_adapters(&language) {
2915 self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
2916 }
2917 }
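/// Starts a single language server for the given worktree and adapter, unless
/// one is already registered for that (worktree, server name) key or the
/// adapter has exceeded its reinstall attempt limit. The server is inserted in
/// a `Starting` state; if startup fails, the reinstall counter is bumped and,
/// after a debounce, `check_errored_server` validates the installation.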
2918
2919 fn start_language_server(
2920 &mut self,
2921 worktree_handle: &Model<Worktree>,
2922 adapter: Arc<CachedLspAdapter>,
2923 language: Arc<Language>,
2924 cx: &mut ModelContext<Self>,
2925 ) {
2926 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2927 return;
2928 }
2929
2930 let worktree = worktree_handle.read(cx);
2931 let worktree_id = worktree.id();
2932 let worktree_path = worktree.abs_path();
2933 let key = (worktree_id, adapter.name.clone());
2934 if self.language_server_ids.contains_key(&key) {
2935 return;
2936 }
2937
2938 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2939 let pending_server = match self.languages.create_pending_language_server(
2940 stderr_capture.clone(),
2941 language.clone(),
2942 adapter.clone(),
2943 Arc::clone(&worktree_path),
2944 ProjectLspAdapterDelegate::new(self, worktree_handle, cx),
2945 cx,
2946 ) {
2947 Some(pending_server) => pending_server,
2948 None => return,
2949 };
2950
2951 let project_settings = ProjectSettings::get(
2952 Some(SettingsLocation {
2953 worktree_id: worktree_id.to_proto() as usize,
2954 path: Path::new(""),
2955 }),
2956 cx,
2957 );
2958 let lsp = project_settings.lsp.get(&adapter.name.0);
2959 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2960
2961 let server_id = pending_server.server_id;
2962 let container_dir = pending_server.container_dir.clone();
2963 let state = LanguageServerState::Starting({
2964 let adapter = adapter.clone();
2965 let server_name = adapter.name.0.clone();
2966 let language = language.clone();
2967 let key = key.clone();
2968
2969 cx.spawn(move |this, mut cx| async move {
2970 let result = Self::setup_and_insert_language_server(
2971 this.clone(),
2972 &worktree_path,
2973 override_options,
2974 pending_server,
2975 adapter.clone(),
2976 language.clone(),
2977 server_id,
2978 key,
2979 &mut cx,
2980 )
2981 .await;
2982
2983 match result {
2984 Ok(server) => {
2985 stderr_capture.lock().take();
2986 server
2987 }
2988
2989 Err(err) => {
2990 log::error!("failed to start language server {server_name:?}: {err}");
2991 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2992
2993 let this = this.upgrade()?;
2994 let container_dir = container_dir?;
2995
2996 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2997 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2998 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2999 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
3000 return None;
3001 }
3002
3003 log::info!(
3004 "retrying installation of language server {server_name:?} in {}s",
3005 SERVER_REINSTALL_DEBOUNCE_TIMEOUT.as_secs()
3006 );
3007 cx.background_executor()
3008 .timer(SERVER_REINSTALL_DEBOUNCE_TIMEOUT)
3009 .await;
3010
3011 let installation_test_binary = adapter
3012 .installation_test_binary(container_dir.to_path_buf())
3013 .await;
3014
3015 this.update(&mut cx, |_, cx| {
3016 Self::check_errored_server(
3017 language,
3018 adapter,
3019 server_id,
3020 installation_test_binary,
3021 cx,
3022 )
3023 })
3024 .ok();
3025
3026 None
3027 }
3028 }
3029 })
3030 });
3031
3032 self.language_servers.insert(server_id, state);
3033 self.language_server_ids.insert(key, server_id);
3034 }
3035
3036 fn reinstall_language_server(
3037 &mut self,
3038 language: Arc<Language>,
3039 adapter: Arc<CachedLspAdapter>,
3040 server_id: LanguageServerId,
3041 cx: &mut ModelContext<Self>,
3042 ) -> Option<Task<()>> {
3043 log::info!("beginning to reinstall server");
3044
3045 let existing_server = match self.language_servers.remove(&server_id) {
3046 Some(LanguageServerState::Running { server, .. }) => Some(server),
3047 _ => None,
3048 };
3049
3050 for worktree in &self.worktrees {
3051 if let Some(worktree) = worktree.upgrade() {
3052 let key = (worktree.read(cx).id(), adapter.name.clone());
3053 self.language_server_ids.remove(&key);
3054 }
3055 }
3056
3057 Some(cx.spawn(move |this, mut cx| async move {
3058 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
3059 log::info!("shutting down existing server");
3060 task.await;
3061 }
3062
3063 // TODO: This is race-safe with regards to preventing new instances from
3064 // starting while deleting, but existing instances in other projects are going
3065 // to be very confused and messed up
3066 let Some(task) = this
3067 .update(&mut cx, |this, cx| {
3068 this.languages.delete_server_container(adapter.clone(), cx)
3069 })
3070 .log_err()
3071 else {
3072 return;
3073 };
3074 task.await;
3075
3076 this.update(&mut cx, |this, cx| {
3077 let worktrees = this.worktrees.clone();
3078 for worktree in worktrees {
3079 if let Some(worktree) = worktree.upgrade() {
3080 this.start_language_server(
3081 &worktree,
3082 adapter.clone(),
3083 language.clone(),
3084 cx,
3085 );
3086 }
3087 }
3088 })
3089 .ok();
3090 }))
3091 }
3092
3093 #[allow(clippy::too_many_arguments)]
3094 async fn setup_and_insert_language_server(
3095 this: WeakModel<Self>,
3096 worktree_path: &Path,
3097 override_initialization_options: Option<serde_json::Value>,
3098 pending_server: PendingLanguageServer,
3099 adapter: Arc<CachedLspAdapter>,
3100 language: Arc<Language>,
3101 server_id: LanguageServerId,
3102 key: (WorktreeId, LanguageServerName),
3103 cx: &mut AsyncAppContext,
3104 ) -> Result<Option<Arc<LanguageServer>>> {
3105 let language_server = Self::setup_pending_language_server(
3106 this.clone(),
3107 override_initialization_options,
3108 pending_server,
3109 worktree_path,
3110 adapter.clone(),
3111 server_id,
3112 cx,
3113 )
3114 .await?;
3115
3116 let this = match this.upgrade() {
3117 Some(this) => this,
3118 None => return Err(anyhow!("failed to upgrade project handle")),
3119 };
3120
3121 this.update(cx, |this, cx| {
3122 this.insert_newly_running_language_server(
3123 language,
3124 adapter,
3125 language_server.clone(),
3126 server_id,
3127 key,
3128 cx,
3129 )
3130 })??;
3131
3132 Ok(Some(language_server))
3133 }
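/// Finishes launching a pending language server: registers handlers for
/// diagnostics, workspace configuration, progress, capability registration,
/// workspace edits, and user-facing prompts; merges any user-provided
/// initialization options over the adapter defaults; then initializes the
/// server and pushes the initial `didChangeConfiguration` notification.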
3134
3135 async fn setup_pending_language_server(
3136 this: WeakModel<Self>,
3137 override_options: Option<serde_json::Value>,
3138 pending_server: PendingLanguageServer,
3139 worktree_path: &Path,
3140 adapter: Arc<CachedLspAdapter>,
3141 server_id: LanguageServerId,
3142 cx: &mut AsyncAppContext,
3143 ) -> Result<Arc<LanguageServer>> {
3144 let workspace_config =
3145 cx.update(|cx| adapter.workspace_configuration(worktree_path, cx))?;
3146 let (language_server, mut initialization_options) = pending_server.task.await?;
3147
3148 let name = language_server.name();
3149 language_server
3150 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3151 let adapter = adapter.clone();
3152 let this = this.clone();
3153 move |mut params, mut cx| {
3154 let adapter = adapter.clone();
3155 if let Some(this) = this.upgrade() {
3156 adapter.process_diagnostics(&mut params);
3157 this.update(&mut cx, |this, cx| {
3158 this.update_diagnostics(
3159 server_id,
3160 params,
3161 &adapter.disk_based_diagnostic_sources,
3162 cx,
3163 )
3164 .log_err();
3165 })
3166 .ok();
3167 }
3168 }
3169 })
3170 .detach();
3171
3172 language_server
3173 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3174 let adapter = adapter.clone();
3175 let worktree_path = worktree_path.to_path_buf();
3176 move |params, cx| {
3177 let adapter = adapter.clone();
3178 let worktree_path = worktree_path.clone();
3179 async move {
3180 let workspace_config =
3181 cx.update(|cx| adapter.workspace_configuration(&worktree_path, cx))?;
3182 Ok(params
3183 .items
3184 .into_iter()
3185 .map(|item| {
3186 if let Some(section) = &item.section {
3187 workspace_config
3188 .get(section)
3189 .cloned()
3190 .unwrap_or(serde_json::Value::Null)
3191 } else {
3192 workspace_config.clone()
3193 }
3194 })
3195 .collect())
3196 }
3197 }
3198 })
3199 .detach();
3200
3201 // Even though we don't have handling for these requests, respond to them to
3202 // avoid stalling any language server like `gopls` which waits for a response
3203 // to these requests when initializing.
3204 language_server
3205 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3206 let this = this.clone();
3207 move |params, mut cx| {
3208 let this = this.clone();
3209 async move {
3210 this.update(&mut cx, |this, _| {
3211 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3212 {
3213 if let lsp::NumberOrString::String(token) = params.token {
3214 status.progress_tokens.insert(token);
3215 }
3216 }
3217 })?;
3218
3219 Ok(())
3220 }
3221 }
3222 })
3223 .detach();
3224
3225 language_server
3226 .on_request::<lsp::request::RegisterCapability, _, _>({
3227 let this = this.clone();
3228 move |params, mut cx| {
3229 let this = this.clone();
3230 async move {
3231 for reg in params.registrations {
3232 if reg.method == "workspace/didChangeWatchedFiles" {
3233 if let Some(options) = reg.register_options {
3234 let options = serde_json::from_value(options)?;
3235 this.update(&mut cx, |this, cx| {
3236 this.on_lsp_did_change_watched_files(
3237 server_id, options, cx,
3238 );
3239 })?;
3240 }
3241 }
3242 }
3243 Ok(())
3244 }
3245 }
3246 })
3247 .detach();
3248
3249 language_server
3250 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3251 let adapter = adapter.clone();
3252 let this = this.clone();
3253 move |params, cx| {
3254 Self::on_lsp_workspace_edit(
3255 this.clone(),
3256 params,
3257 server_id,
3258 adapter.clone(),
3259 cx,
3260 )
3261 }
3262 })
3263 .detach();
3264
3265 language_server
3266 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3267 let this = this.clone();
3268 move |(), mut cx| {
3269 let this = this.clone();
3270 async move {
3271 this.update(&mut cx, |project, cx| {
3272 cx.emit(Event::RefreshInlayHints);
3273 project.remote_id().map(|project_id| {
3274 project.client.send(proto::RefreshInlayHints { project_id })
3275 })
3276 })?
3277 .transpose()?;
3278 Ok(())
3279 }
3280 }
3281 })
3282 .detach();
3283
3284 language_server
3285 .on_request::<lsp::request::ShowMessageRequest, _, _>({
3286 let this = this.clone();
3287 let name = name.to_string();
3288 move |params, mut cx| {
3289 let this = this.clone();
3290 let name = name.to_string();
3291 async move {
3292 if let Some(actions) = params.actions {
3293 let (tx, mut rx) = smol::channel::bounded(1);
3294 let request = LanguageServerPromptRequest {
3295 level: match params.typ {
3296 lsp::MessageType::ERROR => PromptLevel::Critical,
3297 lsp::MessageType::WARNING => PromptLevel::Warning,
3298 _ => PromptLevel::Info,
3299 },
3300 message: params.message,
3301 actions,
3302 response_channel: tx,
3303 lsp_name: name.clone(),
3304 };
3305
3306 if let Ok(_) = this.update(&mut cx, |_, cx| {
3307 cx.emit(Event::LanguageServerPrompt(request));
3308 }) {
3309 let response = rx.next().await;
3310
3311 Ok(response)
3312 } else {
3313 Ok(None)
3314 }
3315 } else {
3316 Ok(None)
3317 }
3318 }
3319 }
3320 })
3321 .detach();
3322
3323 let disk_based_diagnostics_progress_token =
3324 adapter.disk_based_diagnostics_progress_token.clone();
3325
3326 language_server
3327 .on_notification::<ServerStatus, _>({
3328 let this = this.clone();
3329 let name = name.to_string();
3330 move |params, mut cx| {
3331 let this = this.clone();
3332 let name = name.to_string();
3333 if let Some(ref message) = params.message {
3334 let message = message.trim();
3335 if !message.is_empty() {
3336 let formatted_message = format!(
3337 "Language server {name} (id {server_id}) status update: {message}"
3338 );
3339 match params.health {
3340 ServerHealthStatus::Ok => log::info!("{}", formatted_message),
3341 ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
3342 ServerHealthStatus::Error => {
3343 log::error!("{}", formatted_message);
3344 let (tx, _rx) = smol::channel::bounded(1);
3345 let request = LanguageServerPromptRequest {
3346 level: PromptLevel::Critical,
3347 message: params.message.unwrap_or_default(),
3348 actions: Vec::new(),
3349 response_channel: tx,
3350 lsp_name: name.clone(),
3351 };
3352 let _ = this
3353 .update(&mut cx, |_, cx| {
3354 cx.emit(Event::LanguageServerPrompt(request));
3355 })
3356 .ok();
3357 }
3358 ServerHealthStatus::Other(status) => {
3359 log::info!(
3360 "Unknown server health: {status}\n{formatted_message}"
3361 )
3362 }
3363 }
3364 }
3365 }
3366 }
3367 })
3368 .detach();
3369
3370 language_server
3371 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3372 if let Some(this) = this.upgrade() {
3373 this.update(&mut cx, |this, cx| {
3374 this.on_lsp_progress(
3375 params,
3376 server_id,
3377 disk_based_diagnostics_progress_token.clone(),
3378 cx,
3379 );
3380 })
3381 .ok();
3382 }
3383 })
3384 .detach();
3385
3386 match (&mut initialization_options, override_options) {
3387 (Some(initialization_options), Some(override_options)) => {
3388 merge_json_value_into(override_options, initialization_options);
3389 }
3390 (None, override_options) => initialization_options = override_options,
3391 _ => {}
3392 }
3393 let language_server = cx
3394 .update(|cx| language_server.initialize(initialization_options, cx))?
3395 .await?;
3396
3397 language_server
3398 .notify::<lsp::notification::DidChangeConfiguration>(
3399 lsp::DidChangeConfigurationParams {
3400 settings: workspace_config,
3401 },
3402 )
3403 .ok();
3404
3405 Ok(language_server)
3406 }
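/// Transitions a freshly initialized server into the `Running` state, records
/// its status, notifies collaborators, and replays `textDocument/didOpen`
/// (plus completion trigger characters) for every already-open buffer in the
/// worktree that this server handles.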
3407
3408 fn insert_newly_running_language_server(
3409 &mut self,
3410 language: Arc<Language>,
3411 adapter: Arc<CachedLspAdapter>,
3412 language_server: Arc<LanguageServer>,
3413 server_id: LanguageServerId,
3414 key: (WorktreeId, LanguageServerName),
3415 cx: &mut ModelContext<Self>,
3416 ) -> Result<()> {
3417 // If the language server for this key doesn't match the server id, don't store the
3418 // server. This causes it to be dropped, killing the process.
3419 if self
3420 .language_server_ids
3421 .get(&key)
3422 .map(|id| id != &server_id)
3423 .unwrap_or(false)
3424 {
3425 return Ok(());
3426 }
3427
3428 // Update language_servers collection with Running variant of LanguageServerState
3429 // indicating that the server is up and running and ready
3430 self.language_servers.insert(
3431 server_id,
3432 LanguageServerState::Running {
3433 adapter: adapter.clone(),
3434 language: language.clone(),
3435 server: language_server.clone(),
3436 simulate_disk_based_diagnostics_completion: None,
3437 },
3438 );
3439
3440 self.language_server_statuses.insert(
3441 server_id,
3442 LanguageServerStatus {
3443 name: language_server.name().to_string(),
3444 pending_work: Default::default(),
3445 has_pending_diagnostic_updates: false,
3446 progress_tokens: Default::default(),
3447 },
3448 );
3449
3450 cx.emit(Event::LanguageServerAdded(server_id));
3451
3452 if let Some(project_id) = self.remote_id() {
3453 self.client.send(proto::StartLanguageServer {
3454 project_id,
3455 server: Some(proto::LanguageServer {
3456 id: server_id.0 as u64,
3457 name: language_server.name().to_string(),
3458 }),
3459 })?;
3460 }
3461
3462 // Tell the language server about every open buffer in the worktree that matches the language.
3463 for buffer in self.opened_buffers.values() {
3464 if let Some(buffer_handle) = buffer.upgrade() {
3465 let buffer = buffer_handle.read(cx);
3466 let file = match File::from_dyn(buffer.file()) {
3467 Some(file) => file,
3468 None => continue,
3469 };
3470 let language = match buffer.language() {
3471 Some(language) => language,
3472 None => continue,
3473 };
3474
3475 if file.worktree.read(cx).id() != key.0
3476 || !self
3477 .languages
3478 .lsp_adapters(&language)
3479 .iter()
3480 .any(|a| a.name == key.1)
3481 {
3482 continue;
3483 }
3484
3485 let file = match file.as_local() {
3486 Some(file) => file,
3487 None => continue,
3488 };
3489
3490 let versions = self
3491 .buffer_snapshots
3492 .entry(buffer.remote_id())
3493 .or_default()
3494 .entry(server_id)
3495 .or_insert_with(|| {
3496 vec![LspBufferSnapshot {
3497 version: 0,
3498 snapshot: buffer.text_snapshot(),
3499 }]
3500 });
3501
3502 let snapshot = versions.last().unwrap();
3503 let version = snapshot.version;
3504 let initial_snapshot = &snapshot.snapshot;
3505 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3506 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3507 lsp::DidOpenTextDocumentParams {
3508 text_document: lsp::TextDocumentItem::new(
3509 uri,
3510 adapter
3511 .language_ids
3512 .get(language.name().as_ref())
3513 .cloned()
3514 .unwrap_or_default(),
3515 version,
3516 initial_snapshot.text(),
3517 ),
3518 },
3519 )?;
3520
3521 buffer_handle.update(cx, |buffer, cx| {
3522 buffer.set_completion_triggers(
3523 language_server
3524 .capabilities()
3525 .completion_provider
3526 .as_ref()
3527 .and_then(|provider| provider.trigger_characters.clone())
3528 .unwrap_or_default(),
3529 cx,
3530 )
3531 });
3532 }
3533 }
3534
3535 cx.notify();
3536 Ok(())
3537 }
3538
3539 // Stops the language server associated with the given worktree and adapter, returning the ids
3540 // of all worktrees that no longer have a language server as a result.
3541 fn stop_language_server(
3542 &mut self,
3543 worktree_id: WorktreeId,
3544 adapter_name: LanguageServerName,
3545 cx: &mut ModelContext<Self>,
3546 ) -> Task<Vec<WorktreeId>> {
3547 let key = (worktree_id, adapter_name);
3548 if let Some(server_id) = self.language_server_ids.remove(&key) {
3549 let name = key.1 .0;
3550 log::info!("stopping language server {name}");
3551
3552 // Remove other entries for this language server as well
3553 let mut orphaned_worktrees = vec![worktree_id];
3554 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3555 for other_key in other_keys {
3556 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3557 self.language_server_ids.remove(&other_key);
3558 orphaned_worktrees.push(other_key.0);
3559 }
3560 }
3561
3562 for buffer in self.opened_buffers.values() {
3563 if let Some(buffer) = buffer.upgrade() {
3564 buffer.update(cx, |buffer, cx| {
3565 buffer.update_diagnostics(server_id, Default::default(), cx);
3566 });
3567 }
3568 }
3569 for worktree in &self.worktrees {
3570 if let Some(worktree) = worktree.upgrade() {
3571 worktree.update(cx, |worktree, cx| {
3572 if let Some(worktree) = worktree.as_local_mut() {
3573 worktree.clear_diagnostics_for_language_server(server_id, cx);
3574 }
3575 });
3576 }
3577 }
3578
3579 self.language_server_watched_paths.remove(&server_id);
3580 self.language_server_statuses.remove(&server_id);
3581 cx.notify();
3582
3583 let server_state = self.language_servers.remove(&server_id);
3584 cx.emit(Event::LanguageServerRemoved(server_id));
3585 cx.spawn(move |_, cx| async move {
3586 Self::shutdown_language_server(server_state, name, cx).await;
3587 orphaned_worktrees
3588 })
3589 } else {
3590 Task::ready(Vec::new())
3591 }
3592 }
3593
3594 async fn shutdown_language_server(
3595 server_state: Option<LanguageServerState>,
3596 name: Arc<str>,
3597 cx: AsyncAppContext,
3598 ) {
3599 let server = match server_state {
3600 Some(LanguageServerState::Starting(task)) => {
3601 let mut timer = cx
3602 .background_executor()
3603 .timer(SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT)
3604 .fuse();
3605
3606 select! {
3607 server = task.fuse() => server,
3608 _ = timer => {
3609 log::info!(
3610 "timeout waiting for language server {} to finish launching before stopping",
3611 name
3612 );
3613 None
3614 },
3615 }
3616 }
3617
3618 Some(LanguageServerState::Running { server, .. }) => Some(server),
3619
3620 None => None,
3621 };
3622
3623 if let Some(server) = server {
3624 if let Some(shutdown) = server.shutdown() {
3625 shutdown.await;
3626 }
3627 }
3628 }
3629
3630 pub fn restart_language_servers_for_buffers(
3631 &mut self,
3632 buffers: impl IntoIterator<Item = Model<Buffer>>,
3633 cx: &mut ModelContext<Self>,
3634 ) -> Option<()> {
3635 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3636 .into_iter()
3637 .filter_map(|buffer| {
3638 let buffer = buffer.read(cx);
3639 let file = buffer.file()?;
3640 let worktree = File::from_dyn(Some(file))?.worktree.clone();
3641 let language = self
3642 .languages
3643 .language_for_file(file, Some(buffer.as_rope()), cx)
3644 .now_or_never()?
3645 .ok()?;
3646 Some((worktree, language))
3647 })
3648 .collect();
3649 for (worktree, language) in language_server_lookup_info {
3650 self.restart_language_servers(worktree, language, cx);
3651 }
3652
3653 None
3654 }
3655
3656 fn restart_language_servers(
3657 &mut self,
3658 worktree: Model<Worktree>,
3659 language: Arc<Language>,
3660 cx: &mut ModelContext<Self>,
3661 ) {
3662 let worktree_id = worktree.read(cx).id();
3663
3664 let stop_tasks = self
3665 .languages
3666 .clone()
3667 .lsp_adapters(&language)
3668 .iter()
3669 .map(|adapter| {
3670 let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
3671 (stop_task, adapter.name.clone())
3672 })
3673 .collect::<Vec<_>>();
3674 if stop_tasks.is_empty() {
3675 return;
3676 }
3677
3678 cx.spawn(move |this, mut cx| async move {
3679 // For each stopped language server, record all of the worktrees with which
3680 // it was associated.
3681 let mut affected_worktrees = Vec::new();
3682 for (stop_task, language_server_name) in stop_tasks {
3683 for affected_worktree_id in stop_task.await {
3684 affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
3685 }
3686 }
3687
3688 this.update(&mut cx, |this, cx| {
3689 // Restart the language servers for the given worktree.
3690 this.start_language_servers(&worktree, language.clone(), cx);
3691
3692 // Lookup new server ids and set them for each of the orphaned worktrees
3693 for (affected_worktree_id, language_server_name) in affected_worktrees {
3694 if let Some(new_server_id) = this
3695 .language_server_ids
3696 .get(&(worktree_id, language_server_name.clone()))
3697 .cloned()
3698 {
3699 this.language_server_ids
3700 .insert((affected_worktree_id, language_server_name), new_server_id);
3701 }
3702 }
3703 })
3704 .ok();
3705 })
3706 .detach();
3707 }
3708
3709 fn check_errored_server(
3710 language: Arc<Language>,
3711 adapter: Arc<CachedLspAdapter>,
3712 server_id: LanguageServerId,
3713 installation_test_binary: Option<LanguageServerBinary>,
3714 cx: &mut ModelContext<Self>,
3715 ) {
3716 if !adapter.can_be_reinstalled() {
3717 log::info!(
3718 "Validation check requested for {:?} but it cannot be reinstalled",
3719 adapter.name.0
3720 );
3721 return;
3722 }
3723
3724 cx.spawn(move |this, mut cx| async move {
3725 log::info!("About to spawn test binary");
3726
3727 // A lack of test binary counts as a failure
3728 let process = installation_test_binary.and_then(|binary| {
3729 smol::process::Command::new(&binary.path)
3730 .current_dir(&binary.path)
3731 .args(binary.arguments)
3732 .stdin(Stdio::piped())
3733 .stdout(Stdio::piped())
3734 .stderr(Stdio::inherit())
3735 .kill_on_drop(true)
3736 .spawn()
3737 .ok()
3738 });
3739
3740 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3741 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3742
3743 let mut errored = false;
3744 if let Some(mut process) = process {
3745 futures::select! {
3746 status = process.status().fuse() => match status {
3747 Ok(status) => errored = !status.success(),
3748 Err(_) => errored = true,
3749 },
3750
3751 _ = timeout => {
3752 log::info!("test binary timed out; this counts as a success");
3753 _ = process.kill();
3754 }
3755 }
3756 } else {
3757 log::warn!("test binary failed to launch");
3758 errored = true;
3759 }
3760
3761 if errored {
3762 log::warn!("test binary check failed");
3763 let task = this
3764 .update(&mut cx, move |this, cx| {
3765 this.reinstall_language_server(language, adapter, server_id, cx)
3766 })
3767 .ok()
3768 .flatten();
3769
3770 if let Some(task) = task {
3771 task.await;
3772 }
3773 }
3774 })
3775 .detach();
3776 }
3777
3778 fn enqueue_language_server_progress(
3779 &mut self,
3780 message: BufferOrderedMessage,
3781 cx: &mut ModelContext<Self>,
3782 ) {
3783 self.pending_language_server_update.replace(message);
3784 self.flush_language_server_update.get_or_insert_with(|| {
3785 cx.spawn(|this, mut cx| async move {
3786 cx.background_executor()
3787 .timer(SERVER_PROGRESS_DEBOUNCE_TIMEOUT)
3788 .await;
3789 this.update(&mut cx, |this, _| {
3790 this.flush_language_server_update.take();
3791 if let Some(update) = this.pending_language_server_update.take() {
3792 this.enqueue_buffer_ordered_message(update).ok();
3793 }
3794 })
3795 .ok();
3796 })
3797 });
3798 }
3799
3800 fn enqueue_buffer_ordered_message(&mut self, message: BufferOrderedMessage) -> Result<()> {
3801 if let Some(pending_message) = self.pending_language_server_update.take() {
3802 self.flush_language_server_update.take();
3803 self.buffer_ordered_messages_tx
3804 .unbounded_send(pending_message)
3805 .map_err(|e| anyhow!(e))?;
3806 }
3807 self.buffer_ordered_messages_tx
3808 .unbounded_send(message)
3809 .map_err(|e| anyhow!(e))
3810 }
3811
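    /// Handles `$/progress` notifications from a language server, translating them
    /// into work start/progress/end events. Progress whose token matches the
    /// adapter's disk-based diagnostics token is reported as diagnostic activity.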
3812 fn on_lsp_progress(
3813 &mut self,
3814 progress: lsp::ProgressParams,
3815 language_server_id: LanguageServerId,
3816 disk_based_diagnostics_progress_token: Option<String>,
3817 cx: &mut ModelContext<Self>,
3818 ) {
3819 let token = match progress.token {
3820 lsp::NumberOrString::String(token) => token,
3821 lsp::NumberOrString::Number(token) => {
3822 log::info!("skipping numeric progress token {}", token);
3823 return;
3824 }
3825 };
3826 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3827 let language_server_status =
3828 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3829 status
3830 } else {
3831 return;
3832 };
3833
3834 if !language_server_status.progress_tokens.contains(&token) {
3835 return;
3836 }
3837
3838 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3839 .as_ref()
3840 .map_or(false, |disk_based_token| {
3841 token.starts_with(disk_based_token)
3842 });
3843
3844 match progress {
3845 lsp::WorkDoneProgress::Begin(report) => {
3846 if is_disk_based_diagnostics_progress {
3847 language_server_status.has_pending_diagnostic_updates = true;
3848 self.disk_based_diagnostics_started(language_server_id, cx);
3849 self.enqueue_buffer_ordered_message(BufferOrderedMessage::LanguageServerUpdate {
3850 language_server_id,
3851 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3852 })
3853 .ok();
3854 } else {
3855 self.on_lsp_work_start(
3856 language_server_id,
3857 token.clone(),
3858 LanguageServerProgress {
3859 message: report.message.clone(),
3860 percentage: report.percentage.map(|p| p as usize),
3861 last_update_at: Instant::now(),
3862 },
3863 cx,
3864 );
3865 self.enqueue_buffer_ordered_message(
3866 BufferOrderedMessage::LanguageServerUpdate {
3867 language_server_id,
3868 message: proto::update_language_server::Variant::WorkStart(
3869 proto::LspWorkStart {
3870 token,
3871 message: report.message,
3872 percentage: report.percentage,
3873 },
3874 ),
3875 },
3876 )
3877 .ok();
3878 }
3879 }
3880 lsp::WorkDoneProgress::Report(report) => {
3881 if !is_disk_based_diagnostics_progress {
3882 self.on_lsp_work_progress(
3883 language_server_id,
3884 token.clone(),
3885 LanguageServerProgress {
3886 message: report.message.clone(),
3887 percentage: report.percentage.map(|p| p as usize),
3888 last_update_at: Instant::now(),
3889 },
3890 cx,
3891 );
3892 self.enqueue_language_server_progress(
3893 BufferOrderedMessage::LanguageServerUpdate {
3894 language_server_id,
3895 message: proto::update_language_server::Variant::WorkProgress(
3896 proto::LspWorkProgress {
3897 token,
3898 message: report.message,
3899 percentage: report.percentage,
3900 },
3901 ),
3902 },
3903 cx,
3904 );
3905 }
3906 }
3907 lsp::WorkDoneProgress::End(_) => {
3908 language_server_status.progress_tokens.remove(&token);
3909
3910 if is_disk_based_diagnostics_progress {
3911 language_server_status.has_pending_diagnostic_updates = false;
3912 self.disk_based_diagnostics_finished(language_server_id, cx);
3913 self.enqueue_buffer_ordered_message(
3914 BufferOrderedMessage::LanguageServerUpdate {
3915 language_server_id,
3916 message:
3917 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3918 Default::default(),
3919 ),
3920 },
3921 )
3922 .ok();
3923 } else {
3924 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3925 self.enqueue_buffer_ordered_message(
3926 BufferOrderedMessage::LanguageServerUpdate {
3927 language_server_id,
3928 message: proto::update_language_server::Variant::WorkEnd(
3929 proto::LspWorkEnd { token },
3930 ),
3931 },
3932 )
3933 .ok();
3934 }
3935 }
3936 }
3937 }
3938
3939 fn on_lsp_work_start(
3940 &mut self,
3941 language_server_id: LanguageServerId,
3942 token: String,
3943 progress: LanguageServerProgress,
3944 cx: &mut ModelContext<Self>,
3945 ) {
3946 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3947 status.pending_work.insert(token, progress);
3948 cx.notify();
3949 }
3950 }
3951
3952 fn on_lsp_work_progress(
3953 &mut self,
3954 language_server_id: LanguageServerId,
3955 token: String,
3956 progress: LanguageServerProgress,
3957 cx: &mut ModelContext<Self>,
3958 ) {
3959 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3960 let entry = status
3961 .pending_work
3962 .entry(token)
3963 .or_insert(LanguageServerProgress {
3964 message: Default::default(),
3965 percentage: Default::default(),
3966 last_update_at: progress.last_update_at,
3967 });
3968 if progress.message.is_some() {
3969 entry.message = progress.message;
3970 }
3971 if progress.percentage.is_some() {
3972 entry.percentage = progress.percentage;
3973 }
3974 entry.last_update_at = progress.last_update_at;
3975 cx.notify();
3976 }
3977 }
3978
3979 fn on_lsp_work_end(
3980 &mut self,
3981 language_server_id: LanguageServerId,
3982 token: String,
3983 cx: &mut ModelContext<Self>,
3984 ) {
3985 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3986 cx.emit(Event::RefreshInlayHints);
3987 status.pending_work.remove(&token);
3988 cx.notify();
3989 }
3990 }
3991
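    /// Registers the file watchers requested by a language server, converting each
    /// watcher glob into a per-worktree `GlobSet` and scheduling a scan of the
    /// glob's literal path prefix.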
3992 fn on_lsp_did_change_watched_files(
3993 &mut self,
3994 language_server_id: LanguageServerId,
3995 params: DidChangeWatchedFilesRegistrationOptions,
3996 cx: &mut ModelContext<Self>,
3997 ) {
3998 let watched_paths = self
3999 .language_server_watched_paths
4000 .entry(language_server_id)
4001 .or_default();
4002
4003 let mut builders = HashMap::default();
4004 for watcher in params.watchers {
4005 for worktree in &self.worktrees {
4006 if let Some(worktree) = worktree.upgrade() {
4007 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
4008 if let Some(abs_path) = tree.abs_path().to_str() {
4009 let relative_glob_pattern = match &watcher.glob_pattern {
4010 lsp::GlobPattern::String(s) => Some(
4011 s.strip_prefix(abs_path)
4012 .unwrap_or(s)
4013 .strip_prefix(std::path::MAIN_SEPARATOR)
4014 .unwrap_or(s),
4015 ),
4016 lsp::GlobPattern::Relative(rp) => {
4017 let base_uri = match &rp.base_uri {
4018 lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
4019 lsp::OneOf::Right(base_uri) => base_uri,
4020 };
4021 base_uri.to_file_path().ok().and_then(|file_path| {
4022 (file_path.to_str() == Some(abs_path))
4023 .then_some(rp.pattern.as_str())
4024 })
4025 }
4026 };
4027 if let Some(relative_glob_pattern) = relative_glob_pattern {
4028 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
4029 tree.as_local_mut()
4030 .unwrap()
4031 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
4032 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
4033 builders
4034 .entry(tree.id())
                                        .or_insert_with(GlobSetBuilder::new)
4036 .add(glob);
4037 }
4038 return true;
4039 }
4040 }
4041 false
4042 });
4043 if glob_is_inside_worktree {
4044 break;
4045 }
4046 }
4047 }
4048 }
4049
4050 watched_paths.clear();
4051 for (worktree_id, builder) in builders {
4052 if let Ok(globset) = builder.build() {
4053 watched_paths.insert(worktree_id, globset);
4054 }
4055 }
4056
4057 cx.notify();
4058 }
4059
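    /// Applies a `workspace/applyEdit` request from a language server and records
    /// the resulting transaction so that a subsequent command can pick it up.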
4060 async fn on_lsp_workspace_edit(
4061 this: WeakModel<Self>,
4062 params: lsp::ApplyWorkspaceEditParams,
4063 server_id: LanguageServerId,
4064 adapter: Arc<CachedLspAdapter>,
4065 mut cx: AsyncAppContext,
4066 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
4067 let this = this
4068 .upgrade()
            .ok_or_else(|| anyhow!("project closed"))?;
4070 let language_server = this
4071 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
4072 .ok_or_else(|| anyhow!("language server not found"))?;
4073 let transaction = Self::deserialize_workspace_edit(
4074 this.clone(),
4075 params.edit,
4076 true,
4077 adapter.clone(),
4078 language_server.clone(),
4079 &mut cx,
4080 )
4081 .await
4082 .log_err();
4083 this.update(&mut cx, |this, _| {
4084 if let Some(transaction) = transaction {
4085 this.last_workspace_edits_by_language_server
4086 .insert(server_id, transaction);
4087 }
4088 })?;
4089 Ok(lsp::ApplyWorkspaceEditResponse {
4090 applied: true,
4091 failed_change: None,
4092 failure_reason: None,
4093 })
4094 }
4095
4096 pub fn language_server_statuses(
4097 &self,
4098 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
4099 self.language_server_statuses.values()
4100 }
4101
4102 pub fn last_formatting_failure(&self) -> Option<&str> {
4103 self.last_formatting_failure.as_deref()
4104 }
4105
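    /// Converts an LSP `textDocument/publishDiagnostics` notification into grouped
    /// diagnostic entries (primary diagnostics plus their related information) and
    /// applies them to the affected path.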
4106 pub fn update_diagnostics(
4107 &mut self,
4108 language_server_id: LanguageServerId,
4109 mut params: lsp::PublishDiagnosticsParams,
4110 disk_based_sources: &[String],
4111 cx: &mut ModelContext<Self>,
4112 ) -> Result<()> {
4113 let abs_path = params
4114 .uri
4115 .to_file_path()
4116 .map_err(|_| anyhow!("URI is not a file"))?;
4117 let mut diagnostics = Vec::default();
4118 let mut primary_diagnostic_group_ids = HashMap::default();
4119 let mut sources_by_group_id = HashMap::default();
4120 let mut supporting_diagnostics = HashMap::default();
4121
4122 // Ensure that primary diagnostics are always the most severe
4123 params.diagnostics.sort_by_key(|item| item.severity);
4124
        for diagnostic in &params.diagnostics {
4126 let source = diagnostic.source.as_ref();
4127 let code = diagnostic.code.as_ref().map(|code| match code {
4128 lsp::NumberOrString::Number(code) => code.to_string(),
4129 lsp::NumberOrString::String(code) => code.clone(),
4130 });
4131 let range = range_from_lsp(diagnostic.range);
4132 let is_supporting = diagnostic
4133 .related_information
4134 .as_ref()
4135 .map_or(false, |infos| {
4136 infos.iter().any(|info| {
4137 primary_diagnostic_group_ids.contains_key(&(
4138 source,
4139 code.clone(),
4140 range_from_lsp(info.location.range),
4141 ))
4142 })
4143 });
4144
4145 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
4146 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
4147 });
4148
4149 if is_supporting {
4150 supporting_diagnostics.insert(
4151 (source, code.clone(), range),
4152 (diagnostic.severity, is_unnecessary),
4153 );
4154 } else {
4155 let group_id = post_inc(&mut self.next_diagnostic_group_id);
4156 let is_disk_based =
4157 source.map_or(false, |source| disk_based_sources.contains(source));
4158
4159 sources_by_group_id.insert(group_id, source);
4160 primary_diagnostic_group_ids
4161 .insert((source, code.clone(), range.clone()), group_id);
4162
4163 diagnostics.push(DiagnosticEntry {
4164 range,
4165 diagnostic: Diagnostic {
4166 source: diagnostic.source.clone(),
4167 code: code.clone(),
4168 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
4169 message: diagnostic.message.trim().to_string(),
4170 group_id,
4171 is_primary: true,
4172 is_disk_based,
4173 is_unnecessary,
4174 },
4175 });
4176 if let Some(infos) = &diagnostic.related_information {
4177 for info in infos {
4178 if info.location.uri == params.uri && !info.message.is_empty() {
4179 let range = range_from_lsp(info.location.range);
4180 diagnostics.push(DiagnosticEntry {
4181 range,
4182 diagnostic: Diagnostic {
4183 source: diagnostic.source.clone(),
4184 code: code.clone(),
4185 severity: DiagnosticSeverity::INFORMATION,
4186 message: info.message.trim().to_string(),
4187 group_id,
4188 is_primary: false,
4189 is_disk_based,
4190 is_unnecessary: false,
4191 },
4192 });
4193 }
4194 }
4195 }
4196 }
4197 }
4198
4199 for entry in &mut diagnostics {
4200 let diagnostic = &mut entry.diagnostic;
4201 if !diagnostic.is_primary {
4202 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
4203 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
4204 source,
4205 diagnostic.code.clone(),
4206 entry.range.clone(),
4207 )) {
4208 if let Some(severity) = severity {
4209 diagnostic.severity = severity;
4210 }
4211 diagnostic.is_unnecessary = is_unnecessary;
4212 }
4213 }
4214 }
4215
4216 self.update_diagnostic_entries(
4217 language_server_id,
4218 abs_path,
4219 params.version,
4220 diagnostics,
4221 cx,
4222 )?;
4223 Ok(())
4224 }
4225
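    /// Stores diagnostics for a path in its worktree and in any open buffer,
    /// emitting `Event::DiagnosticsUpdated` when the worktree's entries change.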
4226 pub fn update_diagnostic_entries(
4227 &mut self,
4228 server_id: LanguageServerId,
4229 abs_path: PathBuf,
4230 version: Option<i32>,
4231 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4232 cx: &mut ModelContext<Project>,
4233 ) -> Result<(), anyhow::Error> {
4234 let (worktree, relative_path) = self
4235 .find_local_worktree(&abs_path, cx)
4236 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
4237
4238 let project_path = ProjectPath {
4239 worktree_id: worktree.read(cx).id(),
4240 path: relative_path.into(),
4241 };
4242
4243 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
4244 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
4245 }
4246
4247 let updated = worktree.update(cx, |worktree, cx| {
4248 worktree
4249 .as_local_mut()
4250 .ok_or_else(|| anyhow!("not a local worktree"))?
4251 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
4252 })?;
4253 if updated {
4254 cx.emit(Event::DiagnosticsUpdated {
4255 language_server_id: server_id,
4256 path: project_path,
4257 });
4258 }
4259 Ok(())
4260 }
4261
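    /// Clips incoming diagnostic ranges to the buffer contents, adjusting
    /// disk-based diagnostics for unsaved edits, and replaces the buffer's
    /// diagnostic set for the given server.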
4262 fn update_buffer_diagnostics(
4263 &mut self,
4264 buffer: &Model<Buffer>,
4265 server_id: LanguageServerId,
4266 version: Option<i32>,
4267 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4268 cx: &mut ModelContext<Self>,
4269 ) -> Result<()> {
4270 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4271 Ordering::Equal
4272 .then_with(|| b.is_primary.cmp(&a.is_primary))
4273 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4274 .then_with(|| a.severity.cmp(&b.severity))
4275 .then_with(|| a.message.cmp(&b.message))
4276 }
4277
4278 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4279
4280 diagnostics.sort_unstable_by(|a, b| {
4281 Ordering::Equal
4282 .then_with(|| a.range.start.cmp(&b.range.start))
4283 .then_with(|| b.range.end.cmp(&a.range.end))
4284 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4285 });
4286
4287 let mut sanitized_diagnostics = Vec::new();
4288 let edits_since_save = Patch::new(
4289 snapshot
4290 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4291 .collect(),
4292 );
4293 for entry in diagnostics {
4294 let start;
4295 let end;
4296 if entry.diagnostic.is_disk_based {
4297 // Some diagnostics are based on files on disk instead of buffers'
4298 // current contents. Adjust these diagnostics' ranges to reflect
4299 // any unsaved edits.
4300 start = edits_since_save.old_to_new(entry.range.start);
4301 end = edits_since_save.old_to_new(entry.range.end);
4302 } else {
4303 start = entry.range.start;
4304 end = entry.range.end;
4305 }
4306
4307 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4308 ..snapshot.clip_point_utf16(end, Bias::Right);
4309
4310 // Expand empty ranges by one codepoint
4311 if range.start == range.end {
                // This will go to the next boundary when clipped.
4313 range.end.column += 1;
4314 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4315 if range.start == range.end && range.end.column > 0 {
4316 range.start.column -= 1;
4317 range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left);
4318 }
4319 }
4320
4321 sanitized_diagnostics.push(DiagnosticEntry {
4322 range,
4323 diagnostic: entry.diagnostic,
4324 });
4325 }
4326 drop(edits_since_save);
4327
4328 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4329 buffer.update(cx, |buffer, cx| {
4330 buffer.update_diagnostics(server_id, set, cx)
4331 });
4332 Ok(())
4333 }
4334
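    /// Reloads the given dirty buffers from disk, delegating remote buffers to the
    /// host project and collecting all resulting transactions.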
4335 pub fn reload_buffers(
4336 &self,
4337 buffers: HashSet<Model<Buffer>>,
4338 push_to_history: bool,
4339 cx: &mut ModelContext<Self>,
4340 ) -> Task<Result<ProjectTransaction>> {
4341 let mut local_buffers = Vec::new();
4342 let mut remote_buffers = None;
4343 for buffer_handle in buffers {
4344 let buffer = buffer_handle.read(cx);
4345 if buffer.is_dirty() {
4346 if let Some(file) = File::from_dyn(buffer.file()) {
4347 if file.is_local() {
4348 local_buffers.push(buffer_handle);
4349 } else {
                        remote_buffers.get_or_insert_with(Vec::new).push(buffer_handle);
4351 }
4352 }
4353 }
4354 }
4355
4356 let remote_buffers = self.remote_id().zip(remote_buffers);
4357 let client = self.client.clone();
4358
4359 cx.spawn(move |this, mut cx| async move {
4360 let mut project_transaction = ProjectTransaction::default();
4361
4362 if let Some((project_id, remote_buffers)) = remote_buffers {
4363 let response = client
4364 .request(proto::ReloadBuffers {
4365 project_id,
4366 buffer_ids: remote_buffers
4367 .iter()
4368 .filter_map(|buffer| {
4369 buffer
4370 .update(&mut cx, |buffer, _| buffer.remote_id().into())
4371 .ok()
4372 })
4373 .collect(),
4374 })
4375 .await?
4376 .transaction
4377 .ok_or_else(|| anyhow!("missing transaction"))?;
4378 project_transaction = this
4379 .update(&mut cx, |this, cx| {
4380 this.deserialize_project_transaction(response, push_to_history, cx)
4381 })?
4382 .await?;
4383 }
4384
4385 for buffer in local_buffers {
4386 let transaction = buffer
4387 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4388 .await?;
4389 buffer.update(&mut cx, |buffer, cx| {
4390 if let Some(transaction) = transaction {
4391 if !push_to_history {
4392 buffer.forget_transaction(transaction.id);
4393 }
4394 project_transaction.0.insert(cx.handle(), transaction);
4395 }
4396 })?;
4397 }
4398
4399 Ok(project_transaction)
4400 })
4401 }
4402
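    /// Formats the given buffers, running the formatting pipeline locally or
    /// forwarding the request to the remote host.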
4403 pub fn format(
4404 &mut self,
4405 buffers: HashSet<Model<Buffer>>,
4406 push_to_history: bool,
4407 trigger: FormatTrigger,
4408 cx: &mut ModelContext<Project>,
4409 ) -> Task<anyhow::Result<ProjectTransaction>> {
4410 if self.is_local() {
4411 let buffers_with_paths = buffers
4412 .into_iter()
4413 .filter_map(|buffer_handle| {
4414 let buffer = buffer_handle.read(cx);
4415 let file = File::from_dyn(buffer.file())?;
4416 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4417 Some((buffer_handle, buffer_abs_path))
4418 })
4419 .collect::<Vec<_>>();
4420
4421 cx.spawn(move |project, mut cx| async move {
4422 let result = Self::format_locally(
4423 project.clone(),
4424 buffers_with_paths,
4425 push_to_history,
4426 trigger,
4427 cx.clone(),
4428 )
4429 .await;
4430
4431 project.update(&mut cx, |project, _| match &result {
4432 Ok(_) => project.last_formatting_failure = None,
4433 Err(error) => {
4434 project.last_formatting_failure.replace(error.to_string());
4435 }
4436 })?;
4437
4438 result
4439 })
4440 } else {
4441 let remote_id = self.remote_id();
4442 let client = self.client.clone();
4443 cx.spawn(move |this, mut cx| async move {
4444 let mut project_transaction = ProjectTransaction::default();
4445 if let Some(project_id) = remote_id {
4446 let response = client
4447 .request(proto::FormatBuffers {
4448 project_id,
4449 trigger: trigger as i32,
4450 buffer_ids: buffers
4451 .iter()
4452 .map(|buffer| {
4453 buffer.update(&mut cx, |buffer, _| buffer.remote_id().into())
4454 })
4455 .collect::<Result<_>>()?,
4456 })
4457 .await?
4458 .transaction
4459 .ok_or_else(|| anyhow!("missing transaction"))?;
4460 project_transaction = this
4461 .update(&mut cx, |this, cx| {
4462 this.deserialize_project_transaction(response, push_to_history, cx)
4463 })?
4464 .await?;
4465 }
4466 Ok(project_transaction)
4467 })
4468 }
4469 }
4470
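    /// Runs the local formatting pipeline for each buffer: whitespace fixups, any
    /// code actions configured to run on format, and then the language-specific
    /// formatter (language server, external command, or prettier), grouping the
    /// results into a single undoable transaction per buffer.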
4471 async fn format_locally(
4472 project: WeakModel<Project>,
4473 mut buffers_with_paths: Vec<(Model<Buffer>, Option<PathBuf>)>,
4474 push_to_history: bool,
4475 trigger: FormatTrigger,
4476 mut cx: AsyncAppContext,
4477 ) -> anyhow::Result<ProjectTransaction> {
4478 // Do not allow multiple concurrent formatting requests for the
4479 // same buffer.
4480 project.update(&mut cx, |this, cx| {
4481 buffers_with_paths.retain(|(buffer, _)| {
4482 this.buffers_being_formatted
4483 .insert(buffer.read(cx).remote_id())
4484 });
4485 })?;
4486
4487 let _cleanup = defer({
4488 let this = project.clone();
4489 let mut cx = cx.clone();
4490 let buffers = &buffers_with_paths;
4491 move || {
4492 this.update(&mut cx, |this, cx| {
4493 for (buffer, _) in buffers {
4494 this.buffers_being_formatted
4495 .remove(&buffer.read(cx).remote_id());
4496 }
4497 })
4498 .ok();
4499 }
4500 });
4501
4502 let mut project_transaction = ProjectTransaction::default();
4503 for (buffer, buffer_abs_path) in &buffers_with_paths {
4504 let adapters_and_servers: Vec<_> = project.update(&mut cx, |project, cx| {
4505 project
4506 .language_servers_for_buffer(&buffer.read(cx), cx)
4507 .map(|(adapter, lsp)| (adapter.clone(), lsp.clone()))
4508 .collect()
4509 })?;
4510
4511 let settings = buffer.update(&mut cx, |buffer, cx| {
4512 language_settings(buffer.language(), buffer.file(), cx).clone()
4513 })?;
4514
4515 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4516 let ensure_final_newline = settings.ensure_final_newline_on_save;
4517 let tab_size = settings.tab_size;
4518
            // First, format the buffer's whitespace according to the settings.
4520 let trailing_whitespace_diff = if remove_trailing_whitespace {
4521 Some(
4522 buffer
4523 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4524 .await,
4525 )
4526 } else {
4527 None
4528 };
4529 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4530 buffer.finalize_last_transaction();
4531 buffer.start_transaction();
4532 if let Some(diff) = trailing_whitespace_diff {
4533 buffer.apply_diff(diff, cx);
4534 }
4535 if ensure_final_newline {
4536 buffer.ensure_final_newline(cx);
4537 }
4538 buffer.end_transaction(cx)
4539 })?;
4540
4541 for (lsp_adapter, language_server) in adapters_and_servers.iter() {
                // Apply any code actions configured to run on format.
4543 let code_actions: Vec<lsp::CodeActionKind> = settings
4544 .code_actions_on_format
4545 .iter()
4546 .flat_map(|(kind, enabled)| {
4547 if *enabled {
4548 Some(kind.clone().into())
4549 } else {
4550 None
4551 }
4552 })
4553 .collect();
4554
4555 #[allow(clippy::nonminimal_bool)]
4556 if !code_actions.is_empty()
4557 && !(trigger == FormatTrigger::Save
4558 && settings.format_on_save == FormatOnSave::Off)
4559 {
4560 let actions = project
4561 .update(&mut cx, |this, cx| {
4562 this.request_lsp(
4563 buffer.clone(),
4564 LanguageServerToQuery::Other(language_server.server_id()),
4565 GetCodeActions {
4566 range: text::Anchor::MIN..text::Anchor::MAX,
4567 kinds: Some(code_actions),
4568 },
4569 cx,
4570 )
4571 })?
4572 .await?;
4573
4574 for mut action in actions {
4575 Self::try_resolve_code_action(&language_server, &mut action)
4576 .await
4577 .context("resolving a formatting code action")?;
4578 if let Some(edit) = action.lsp_action.edit {
4579 if edit.changes.is_none() && edit.document_changes.is_none() {
4580 continue;
4581 }
4582
4583 let new = Self::deserialize_workspace_edit(
4584 project
4585 .upgrade()
4586 .ok_or_else(|| anyhow!("project dropped"))?,
4587 edit,
4588 push_to_history,
4589 lsp_adapter.clone(),
4590 language_server.clone(),
4591 &mut cx,
4592 )
4593 .await?;
4594 project_transaction.0.extend(new.0);
4595 }
4596
4597 if let Some(command) = action.lsp_action.command {
4598 project.update(&mut cx, |this, _| {
4599 this.last_workspace_edits_by_language_server
4600 .remove(&language_server.server_id());
4601 })?;
4602
4603 language_server
4604 .request::<lsp::request::ExecuteCommand>(
4605 lsp::ExecuteCommandParams {
4606 command: command.command,
4607 arguments: command.arguments.unwrap_or_default(),
4608 ..Default::default()
4609 },
4610 )
4611 .await?;
4612
4613 project.update(&mut cx, |this, _| {
4614 project_transaction.0.extend(
4615 this.last_workspace_edits_by_language_server
4616 .remove(&language_server.server_id())
4617 .unwrap_or_default()
4618 .0,
4619 )
4620 })?;
4621 }
4622 }
4623 }
4624 }
4625
4626 // Apply language-specific formatting using either the primary language server
            // or an external command.
            let primary_language_server =
                adapters_and_servers.first().map(|(_, lsp)| lsp.clone());
4632 let server_and_buffer = primary_language_server
4633 .as_ref()
4634 .zip(buffer_abs_path.as_ref());
4635
4636 let mut format_operation = None;
4637 match (&settings.formatter, &settings.format_on_save) {
4638 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4639
4640 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4641 | (_, FormatOnSave::LanguageServer) => {
4642 if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4643 format_operation = Some(FormatOperation::Lsp(
4644 Self::format_via_lsp(
4645 &project,
4646 buffer,
4647 buffer_abs_path,
4648 language_server,
4649 tab_size,
4650 &mut cx,
4651 )
4652 .await
4653 .context("failed to format via language server")?,
4654 ));
4655 }
4656 }
4657
4658 (
4659 Formatter::External { command, arguments },
4660 FormatOnSave::On | FormatOnSave::Off,
4661 )
4662 | (_, FormatOnSave::External { command, arguments }) => {
4663 if let Some(buffer_abs_path) = buffer_abs_path {
4664 format_operation = Self::format_via_external_command(
4665 buffer,
4666 buffer_abs_path,
4667 command,
4668 arguments,
4669 &mut cx,
4670 )
4671 .await
4672 .context(format!(
4673 "failed to format via external command {:?}",
4674 command
4675 ))?
4676 .map(FormatOperation::External);
4677 }
4678 }
4679 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4680 if let Some(new_operation) =
4681 prettier_support::format_with_prettier(&project, buffer, &mut cx).await
4682 {
4683 format_operation = Some(new_operation);
4684 } else if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4685 format_operation = Some(FormatOperation::Lsp(
4686 Self::format_via_lsp(
4687 &project,
4688 buffer,
4689 buffer_abs_path,
4690 language_server,
4691 tab_size,
4692 &mut cx,
4693 )
4694 .await
4695 .context("failed to format via language server")?,
4696 ));
4697 }
4698 }
4699 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4700 if let Some(new_operation) =
4701 prettier_support::format_with_prettier(&project, buffer, &mut cx).await
4702 {
4703 format_operation = Some(new_operation);
4704 }
4705 }
4706 };
4707
4708 buffer.update(&mut cx, |b, cx| {
4709 // If the buffer had its whitespace formatted and was edited while the language-specific
4710 // formatting was being computed, avoid applying the language-specific formatting, because
4711 // it can't be grouped with the whitespace formatting in the undo history.
4712 if let Some(transaction_id) = whitespace_transaction_id {
4713 if b.peek_undo_stack()
4714 .map_or(true, |e| e.transaction_id() != transaction_id)
4715 {
4716 format_operation.take();
4717 }
4718 }
4719
4720 // Apply any language-specific formatting, and group the two formatting operations
4721 // in the buffer's undo history.
4722 if let Some(operation) = format_operation {
4723 match operation {
4724 FormatOperation::Lsp(edits) => {
4725 b.edit(edits, None, cx);
4726 }
4727 FormatOperation::External(diff) => {
4728 b.apply_diff(diff, cx);
4729 }
4730 FormatOperation::Prettier(diff) => {
4731 b.apply_diff(diff, cx);
4732 }
4733 }
4734
4735 if let Some(transaction_id) = whitespace_transaction_id {
4736 b.group_until_transaction(transaction_id);
4737 } else if let Some(transaction) = project_transaction.0.get(buffer) {
4738 b.group_until_transaction(transaction.id)
4739 }
4740 }
4741
4742 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4743 if !push_to_history {
4744 b.forget_transaction(transaction.id);
4745 }
4746 project_transaction.0.insert(buffer.clone(), transaction);
4747 }
4748 })?;
4749 }
4750
4751 Ok(project_transaction)
4752 }
4753
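    /// Requests formatting from a language server, preferring full-document
    /// formatting and falling back to range formatting over the whole buffer.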
4754 async fn format_via_lsp(
4755 this: &WeakModel<Self>,
4756 buffer: &Model<Buffer>,
4757 abs_path: &Path,
4758 language_server: &Arc<LanguageServer>,
4759 tab_size: NonZeroU32,
4760 cx: &mut AsyncAppContext,
4761 ) -> Result<Vec<(Range<Anchor>, String)>> {
4762 let uri = lsp::Url::from_file_path(abs_path)
4763 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4764 let text_document = lsp::TextDocumentIdentifier::new(uri);
4765 let capabilities = &language_server.capabilities();
4766
4767 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4768 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4769
4770 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4771 language_server
4772 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4773 text_document,
4774 options: lsp_command::lsp_formatting_options(tab_size.get()),
4775 work_done_progress_params: Default::default(),
4776 })
4777 .await?
4778 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4779 let buffer_start = lsp::Position::new(0, 0);
4780 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4781
4782 language_server
4783 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4784 text_document,
4785 range: lsp::Range::new(buffer_start, buffer_end),
4786 options: lsp_command::lsp_formatting_options(tab_size.get()),
4787 work_done_progress_params: Default::default(),
4788 })
4789 .await?
4790 } else {
4791 None
4792 };
4793
4794 if let Some(lsp_edits) = lsp_edits {
4795 this.update(cx, |this, cx| {
4796 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4797 })?
4798 .await
4799 } else {
4800 Ok(Vec::new())
4801 }
4802 }
4803
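    /// Formats a buffer by piping its contents to an external command's stdin and
    /// diffing the buffer against the command's stdout.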
4804 async fn format_via_external_command(
4805 buffer: &Model<Buffer>,
4806 buffer_abs_path: &Path,
4807 command: &str,
4808 arguments: &[String],
4809 cx: &mut AsyncAppContext,
4810 ) -> Result<Option<Diff>> {
4811 let working_dir_path = buffer.update(cx, |buffer, cx| {
4812 let file = File::from_dyn(buffer.file())?;
4813 let worktree = file.worktree.read(cx).as_local()?;
4814 let mut worktree_path = worktree.abs_path().to_path_buf();
4815 if worktree.root_entry()?.is_file() {
4816 worktree_path.pop();
4817 }
4818 Some(worktree_path)
4819 })?;
4820
4821 if let Some(working_dir_path) = working_dir_path {
4822 let mut child =
4823 smol::process::Command::new(command)
4824 .args(arguments.iter().map(|arg| {
4825 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4826 }))
4827 .current_dir(&working_dir_path)
4828 .stdin(smol::process::Stdio::piped())
4829 .stdout(smol::process::Stdio::piped())
4830 .stderr(smol::process::Stdio::piped())
4831 .spawn()?;
4832 let stdin = child
4833 .stdin
4834 .as_mut()
4835 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4836 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4837 for chunk in text.chunks() {
4838 stdin.write_all(chunk.as_bytes()).await?;
4839 }
4840 stdin.flush().await?;
4841
4842 let output = child.output().await?;
4843 if !output.status.success() {
4844 return Err(anyhow!(
4845 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4846 output.status.code(),
4847 String::from_utf8_lossy(&output.stdout),
4848 String::from_utf8_lossy(&output.stderr),
4849 ));
4850 }
4851
4852 let stdout = String::from_utf8(output.stdout)?;
4853 Ok(Some(
4854 buffer
4855 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4856 .await,
4857 ))
4858 } else {
4859 Ok(None)
4860 }
4861 }
4862
4863 #[inline(never)]
4864 fn definition_impl(
4865 &self,
4866 buffer: &Model<Buffer>,
4867 position: PointUtf16,
4868 cx: &mut ModelContext<Self>,
4869 ) -> Task<Result<Vec<LocationLink>>> {
4870 self.request_lsp(
4871 buffer.clone(),
4872 LanguageServerToQuery::Primary,
4873 GetDefinition { position },
4874 cx,
4875 )
4876 }
4877 pub fn definition<T: ToPointUtf16>(
4878 &self,
4879 buffer: &Model<Buffer>,
4880 position: T,
4881 cx: &mut ModelContext<Self>,
4882 ) -> Task<Result<Vec<LocationLink>>> {
4883 let position = position.to_point_utf16(buffer.read(cx));
4884 self.definition_impl(buffer, position, cx)
4885 }
4886
4887 fn type_definition_impl(
4888 &self,
4889 buffer: &Model<Buffer>,
4890 position: PointUtf16,
4891 cx: &mut ModelContext<Self>,
4892 ) -> Task<Result<Vec<LocationLink>>> {
4893 self.request_lsp(
4894 buffer.clone(),
4895 LanguageServerToQuery::Primary,
4896 GetTypeDefinition { position },
4897 cx,
4898 )
4899 }
4900
4901 pub fn type_definition<T: ToPointUtf16>(
4902 &self,
4903 buffer: &Model<Buffer>,
4904 position: T,
4905 cx: &mut ModelContext<Self>,
4906 ) -> Task<Result<Vec<LocationLink>>> {
4907 let position = position.to_point_utf16(buffer.read(cx));
4908 self.type_definition_impl(buffer, position, cx)
4909 }
4910
4911 fn implementation_impl(
4912 &self,
4913 buffer: &Model<Buffer>,
4914 position: PointUtf16,
4915 cx: &mut ModelContext<Self>,
4916 ) -> Task<Result<Vec<LocationLink>>> {
4917 self.request_lsp(
4918 buffer.clone(),
4919 LanguageServerToQuery::Primary,
4920 GetImplementation { position },
4921 cx,
4922 )
4923 }
4924
4925 pub fn implementation<T: ToPointUtf16>(
4926 &self,
4927 buffer: &Model<Buffer>,
4928 position: T,
4929 cx: &mut ModelContext<Self>,
4930 ) -> Task<Result<Vec<LocationLink>>> {
4931 let position = position.to_point_utf16(buffer.read(cx));
4932 self.implementation_impl(buffer, position, cx)
4933 }
4934
4935 fn references_impl(
4936 &self,
4937 buffer: &Model<Buffer>,
4938 position: PointUtf16,
4939 cx: &mut ModelContext<Self>,
4940 ) -> Task<Result<Vec<Location>>> {
4941 self.request_lsp(
4942 buffer.clone(),
4943 LanguageServerToQuery::Primary,
4944 GetReferences { position },
4945 cx,
4946 )
4947 }
4948 pub fn references<T: ToPointUtf16>(
4949 &self,
4950 buffer: &Model<Buffer>,
4951 position: T,
4952 cx: &mut ModelContext<Self>,
4953 ) -> Task<Result<Vec<Location>>> {
4954 let position = position.to_point_utf16(buffer.read(cx));
4955 self.references_impl(buffer, position, cx)
4956 }
4957
4958 fn document_highlights_impl(
4959 &self,
4960 buffer: &Model<Buffer>,
4961 position: PointUtf16,
4962 cx: &mut ModelContext<Self>,
4963 ) -> Task<Result<Vec<DocumentHighlight>>> {
4964 self.request_lsp(
4965 buffer.clone(),
4966 LanguageServerToQuery::Primary,
4967 GetDocumentHighlights { position },
4968 cx,
4969 )
4970 }
4971
4972 pub fn document_highlights<T: ToPointUtf16>(
4973 &self,
4974 buffer: &Model<Buffer>,
4975 position: T,
4976 cx: &mut ModelContext<Self>,
4977 ) -> Task<Result<Vec<DocumentHighlight>>> {
4978 let position = position.to_point_utf16(buffer.read(cx));
4979 self.document_highlights_impl(buffer, position, cx)
4980 }
4981
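    /// Searches for workspace symbols matching `query` across all running language
    /// servers (or via the remote host), resolving each result to a project path
    /// and display label.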
4982 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4983 if self.is_local() {
4984 let mut requests = Vec::new();
4985 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4986 let Some(worktree_handle) = self.worktree_for_id(*worktree_id, cx) else {
4987 continue;
4988 };
4989 let worktree = worktree_handle.read(cx);
4990 if !worktree.is_visible() {
4991 continue;
4992 }
4993 let Some(worktree) = worktree.as_local() else {
4994 continue;
4995 };
4996 let worktree_abs_path = worktree.abs_path().clone();
4997
4998 let (adapter, language, server) = match self.language_servers.get(server_id) {
4999 Some(LanguageServerState::Running {
5000 adapter,
5001 language,
5002 server,
5003 ..
5004 }) => (adapter.clone(), language.clone(), server),
5005
5006 _ => continue,
5007 };
5008
5009 requests.push(
5010 server
5011 .request::<lsp::request::WorkspaceSymbolRequest>(
5012 lsp::WorkspaceSymbolParams {
5013 query: query.to_string(),
5014 ..Default::default()
5015 },
5016 )
5017 .log_err()
5018 .map(move |response| {
5019 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
5020 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
5021 flat_responses.into_iter().map(|lsp_symbol| {
5022 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
5023 }).collect::<Vec<_>>()
5024 }
5025 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
5026 nested_responses.into_iter().filter_map(|lsp_symbol| {
5027 let location = match lsp_symbol.location {
5028 OneOf::Left(location) => location,
5029 OneOf::Right(_) => {
5030 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
5031 return None
5032 }
5033 };
5034 Some((lsp_symbol.name, lsp_symbol.kind, location))
5035 }).collect::<Vec<_>>()
5036 }
5037 }).unwrap_or_default();
5038
5039 (
5040 adapter,
5041 language,
5042 worktree_handle.downgrade(),
5043 worktree_abs_path,
5044 lsp_symbols,
5045 )
5046 }),
5047 );
5048 }
5049
5050 cx.spawn(move |this, mut cx| async move {
5051 let responses = futures::future::join_all(requests).await;
5052 let this = match this.upgrade() {
5053 Some(this) => this,
5054 None => return Ok(Vec::new()),
5055 };
5056
5057 let symbols = this.update(&mut cx, |this, cx| {
5058 let mut symbols = Vec::new();
5059 for (
5060 adapter,
5061 adapter_language,
5062 source_worktree,
5063 worktree_abs_path,
5064 lsp_symbols,
5065 ) in responses
5066 {
5067 symbols.extend(lsp_symbols.into_iter().filter_map(
5068 |(symbol_name, symbol_kind, symbol_location)| {
5069 let abs_path = symbol_location.uri.to_file_path().ok()?;
5070 let source_worktree = source_worktree.upgrade()?;
5071 let source_worktree_id = source_worktree.read(cx).id();
5072
5073 let path;
5074 let worktree;
5075 if let Some((tree, rel_path)) =
5076 this.find_local_worktree(&abs_path, cx)
5077 {
5078 worktree = tree;
5079 path = rel_path;
5080 } else {
5081 worktree = source_worktree.clone();
5082 path = relativize_path(&worktree_abs_path, &abs_path);
5083 }
5084
5085 let worktree_id = worktree.read(cx).id();
5086 let project_path = ProjectPath {
5087 worktree_id,
5088 path: path.into(),
5089 };
5090 let signature = this.symbol_signature(&project_path);
5091 let adapter_language = adapter_language.clone();
5092 let language = this
5093 .languages
5094 .language_for_file_path(&project_path.path)
5095 .unwrap_or_else(move |_| adapter_language);
5096 let adapter = adapter.clone();
5097 Some(async move {
5098 let language = language.await;
5099 let label = adapter
5100 .label_for_symbol(&symbol_name, symbol_kind, &language)
5101 .await;
5102
5103 Symbol {
5104 language_server_name: adapter.name.clone(),
5105 source_worktree_id,
5106 path: project_path,
5107 label: label.unwrap_or_else(|| {
5108 CodeLabel::plain(symbol_name.clone(), None)
5109 }),
5110 kind: symbol_kind,
5111 name: symbol_name,
5112 range: range_from_lsp(symbol_location.range),
5113 signature,
5114 }
5115 })
5116 },
5117 ));
5118 }
5119
5120 symbols
5121 })?;
5122
5123 Ok(futures::future::join_all(symbols).await)
5124 })
5125 } else if let Some(project_id) = self.remote_id() {
5126 let request = self.client.request(proto::GetProjectSymbols {
5127 project_id,
5128 query: query.to_string(),
5129 });
5130 cx.spawn(move |this, mut cx| async move {
5131 let response = request.await?;
5132 let mut symbols = Vec::new();
5133 if let Some(this) = this.upgrade() {
5134 let new_symbols = this.update(&mut cx, |this, _| {
5135 response
5136 .symbols
5137 .into_iter()
5138 .map(|symbol| this.deserialize_symbol(symbol))
5139 .collect::<Vec<_>>()
5140 })?;
5141 symbols = futures::future::join_all(new_symbols)
5142 .await
5143 .into_iter()
5144 .filter_map(|symbol| symbol.log_err())
5145 .collect::<Vec<_>>();
5146 }
5147 Ok(symbols)
5148 })
5149 } else {
5150 Task::ready(Ok(Default::default()))
5151 }
5152 }
5153
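    /// Opens the buffer containing the given symbol, using the language server that
    /// produced it to resolve paths outside the project's worktrees.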
5154 pub fn open_buffer_for_symbol(
5155 &mut self,
5156 symbol: &Symbol,
5157 cx: &mut ModelContext<Self>,
5158 ) -> Task<Result<Model<Buffer>>> {
5159 if self.is_local() {
5160 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
5161 symbol.source_worktree_id,
5162 symbol.language_server_name.clone(),
5163 )) {
5164 *id
5165 } else {
5166 return Task::ready(Err(anyhow!(
5167 "language server for worktree and language not found"
5168 )));
5169 };
5170
5171 let worktree_abs_path = if let Some(worktree_abs_path) = self
5172 .worktree_for_id(symbol.path.worktree_id, cx)
5173 .and_then(|worktree| worktree.read(cx).as_local())
5174 .map(|local_worktree| local_worktree.abs_path())
5175 {
5176 worktree_abs_path
5177 } else {
5178 return Task::ready(Err(anyhow!("worktree not found for symbol")));
5179 };
5180
5181 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
5182 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
5183 uri
5184 } else {
5185 return Task::ready(Err(anyhow!("invalid symbol path")));
5186 };
5187
5188 self.open_local_buffer_via_lsp(
5189 symbol_uri,
5190 language_server_id,
5191 symbol.language_server_name.clone(),
5192 cx,
5193 )
5194 } else if let Some(project_id) = self.remote_id() {
5195 let request = self.client.request(proto::OpenBufferForSymbol {
5196 project_id,
5197 symbol: Some(serialize_symbol(symbol)),
5198 });
5199 cx.spawn(move |this, mut cx| async move {
5200 let response = request.await?;
5201 let buffer_id = BufferId::new(response.buffer_id)?;
5202 this.update(&mut cx, |this, cx| {
5203 this.wait_for_remote_buffer(buffer_id, cx)
5204 })?
5205 .await
5206 })
5207 } else {
5208 Task::ready(Err(anyhow!("project does not have a remote id")))
5209 }
5210 }
5211
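    /// Collects hover information from every capable language server for the
    /// buffer, dropping responses whose contents are empty.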
5212 fn hover_impl(
5213 &self,
5214 buffer: &Model<Buffer>,
5215 position: PointUtf16,
5216 cx: &mut ModelContext<Self>,
5217 ) -> Task<Vec<Hover>> {
5218 fn remove_empty_hover_blocks(mut hover: Hover) -> Option<Hover> {
5219 hover
5220 .contents
5221 .retain(|hover_block| !hover_block.text.trim().is_empty());
5222 if hover.contents.is_empty() {
5223 None
5224 } else {
5225 Some(hover)
5226 }
5227 }
5228
5229 if self.is_local() {
5230 let snapshot = buffer.read(cx).snapshot();
5231 let offset = position.to_offset(&snapshot);
5232 let scope = snapshot.language_scope_at(offset);
5233
5234 let mut hover_responses = self
5235 .language_servers_for_buffer(buffer.read(cx), cx)
5236 .filter(|(_, server)| match server.capabilities().hover_provider {
5237 Some(lsp::HoverProviderCapability::Simple(enabled)) => enabled,
5238 Some(lsp::HoverProviderCapability::Options(_)) => true,
5239 None => false,
5240 })
5241 .filter(|(adapter, _)| {
5242 scope
5243 .as_ref()
5244 .map(|scope| scope.language_allowed(&adapter.name))
5245 .unwrap_or(true)
5246 })
5247 .map(|(_, server)| server.server_id())
5248 .map(|server_id| {
5249 self.request_lsp(
5250 buffer.clone(),
5251 LanguageServerToQuery::Other(server_id),
5252 GetHover { position },
5253 cx,
5254 )
5255 })
5256 .collect::<FuturesUnordered<_>>();
5257
5258 cx.spawn(|_, _| async move {
5259 let mut hovers = Vec::with_capacity(hover_responses.len());
5260 while let Some(hover_response) = hover_responses.next().await {
5261 if let Some(hover) = hover_response
5262 .log_err()
5263 .flatten()
5264 .and_then(remove_empty_hover_blocks)
5265 {
5266 hovers.push(hover);
5267 }
5268 }
5269 hovers
5270 })
5271 } else if self.is_remote() {
5272 let request_task = self.request_lsp(
5273 buffer.clone(),
5274 LanguageServerToQuery::Primary,
5275 GetHover { position },
5276 cx,
5277 );
5278 cx.spawn(|_, _| async move {
5279 request_task
5280 .await
5281 .log_err()
5282 .flatten()
5283 .and_then(remove_empty_hover_blocks)
5284 .map(|hover| vec![hover])
5285 .unwrap_or_default()
5286 })
5287 } else {
5288 log::error!("cannot show hovers: project does not have a remote id");
5289 Task::ready(Vec::new())
5290 }
5291 }
5292
5293 pub fn hover<T: ToPointUtf16>(
5294 &self,
5295 buffer: &Model<Buffer>,
5296 position: T,
5297 cx: &mut ModelContext<Self>,
5298 ) -> Task<Vec<Hover>> {
5299 let position = position.to_point_utf16(buffer.read(cx));
5300 self.hover_impl(buffer, position, cx)
5301 }
5302
5303 #[inline(never)]
5304 fn completions_impl(
5305 &self,
5306 buffer: &Model<Buffer>,
5307 position: PointUtf16,
5308 cx: &mut ModelContext<Self>,
5309 ) -> Task<Result<Vec<Completion>>> {
5310 if self.is_local() {
5311 let snapshot = buffer.read(cx).snapshot();
5312 let offset = position.to_offset(&snapshot);
5313 let scope = snapshot.language_scope_at(offset);
5314
5315 let server_ids: Vec<_> = self
5316 .language_servers_for_buffer(buffer.read(cx), cx)
5317 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
5318 .filter(|(adapter, _)| {
5319 scope
5320 .as_ref()
5321 .map(|scope| scope.language_allowed(&adapter.name))
5322 .unwrap_or(true)
5323 })
5324 .map(|(_, server)| server.server_id())
5325 .collect();
5326
5327 let buffer = buffer.clone();
5328 cx.spawn(move |this, mut cx| async move {
5329 let mut tasks = Vec::with_capacity(server_ids.len());
5330 this.update(&mut cx, |this, cx| {
5331 for server_id in server_ids {
5332 tasks.push(this.request_lsp(
5333 buffer.clone(),
5334 LanguageServerToQuery::Other(server_id),
5335 GetCompletions { position },
5336 cx,
5337 ));
5338 }
5339 })?;
5340
5341 let mut completions = Vec::new();
5342 for task in tasks {
5343 if let Ok(new_completions) = task.await {
5344 completions.extend_from_slice(&new_completions);
5345 }
5346 }
5347
5348 Ok(completions)
5349 })
5350 } else if let Some(project_id) = self.remote_id() {
5351 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
5352 } else {
5353 Task::ready(Ok(Default::default()))
5354 }
5355 }
5356 pub fn completions<T: ToOffset + ToPointUtf16>(
5357 &self,
5358 buffer: &Model<Buffer>,
5359 position: T,
5360 cx: &mut ModelContext<Self>,
5361 ) -> Task<Result<Vec<Completion>>> {
5362 let position = position.to_point_utf16(buffer.read(cx));
5363 self.completions_impl(buffer, position, cx)
5364 }
5365
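    /// Lazily resolves documentation for the given completions, querying the owning
    /// language server locally or the remote host, and returns whether any
    /// documentation was fetched.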
5366 pub fn resolve_completions(
5367 &self,
5368 completion_indices: Vec<usize>,
5369 completions: Arc<RwLock<Box<[Completion]>>>,
5370 cx: &mut ModelContext<Self>,
5371 ) -> Task<Result<bool>> {
5372 let client = self.client();
5373 let language_registry = self.languages().clone();
5374
5375 let is_remote = self.is_remote();
5376 let project_id = self.remote_id();
5377
5378 cx.spawn(move |this, mut cx| async move {
5379 let mut did_resolve = false;
5380 if is_remote {
5381 let project_id =
5382 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
5383
5384 for completion_index in completion_indices {
5385 let (server_id, completion) = {
5386 let completions_guard = completions.read();
5387 let completion = &completions_guard[completion_index];
5388 if completion.documentation.is_some() {
5389 continue;
5390 }
5391
5392 did_resolve = true;
5393 let server_id = completion.server_id;
5394 let completion = completion.lsp_completion.clone();
5395
5396 (server_id, completion)
5397 };
5398
5399 Self::resolve_completion_documentation_remote(
5400 project_id,
5401 server_id,
5402 completions.clone(),
5403 completion_index,
5404 completion,
5405 client.clone(),
5406 language_registry.clone(),
5407 )
5408 .await;
5409 }
5410 } else {
5411 for completion_index in completion_indices {
5412 let (server_id, completion) = {
5413 let completions_guard = completions.read();
5414 let completion = &completions_guard[completion_index];
5415 if completion.documentation.is_some() {
5416 continue;
5417 }
5418
5419 let server_id = completion.server_id;
5420 let completion = completion.lsp_completion.clone();
5421
5422 (server_id, completion)
5423 };
5424
5425 let server = this
5426 .read_with(&mut cx, |project, _| {
5427 project.language_server_for_id(server_id)
5428 })
5429 .ok()
5430 .flatten();
5431 let Some(server) = server else {
5432 continue;
5433 };
5434
5435 did_resolve = true;
5436 Self::resolve_completion_documentation_local(
5437 server,
5438 completions.clone(),
5439 completion_index,
5440 completion,
5441 language_registry.clone(),
5442 )
5443 .await;
5444 }
5445 }
5446
5447 Ok(did_resolve)
5448 })
5449 }
5450
5451 async fn resolve_completion_documentation_local(
5452 server: Arc<lsp::LanguageServer>,
5453 completions: Arc<RwLock<Box<[Completion]>>>,
5454 completion_index: usize,
5455 completion: lsp::CompletionItem,
5456 language_registry: Arc<LanguageRegistry>,
5457 ) {
5458 let can_resolve = server
5459 .capabilities()
5460 .completion_provider
5461 .as_ref()
5462 .and_then(|options| options.resolve_provider)
5463 .unwrap_or(false);
5464 if !can_resolve {
5465 return;
5466 }
5467
5468 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
5469 let Some(completion_item) = request.await.log_err() else {
5470 return;
5471 };
5472
5473 if let Some(lsp_documentation) = completion_item.documentation {
5474 let documentation = language::prepare_completion_documentation(
5475 &lsp_documentation,
5476 &language_registry,
5477 None, // TODO: Try to reasonably work out which language the completion is for
5478 )
5479 .await;
5480
5481 let mut completions = completions.write();
5482 let completion = &mut completions[completion_index];
5483 completion.documentation = Some(documentation);
5484 } else {
5485 let mut completions = completions.write();
5486 let completion = &mut completions[completion_index];
5487 completion.documentation = Some(Documentation::Undocumented);
5488 }
5489 }
5490
5491 async fn resolve_completion_documentation_remote(
5492 project_id: u64,
5493 server_id: LanguageServerId,
5494 completions: Arc<RwLock<Box<[Completion]>>>,
5495 completion_index: usize,
5496 completion: lsp::CompletionItem,
5497 client: Arc<Client>,
5498 language_registry: Arc<LanguageRegistry>,
5499 ) {
5500 let request = proto::ResolveCompletionDocumentation {
5501 project_id,
5502 language_server_id: server_id.0 as u64,
5503 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
5504 };
5505
5506 let Some(response) = client
5507 .request(request)
5508 .await
5509 .context("completion documentation resolve proto request")
5510 .log_err()
5511 else {
5512 return;
5513 };
5514
        if response.text.is_empty() {
            let mut completions = completions.write();
            let completion = &mut completions[completion_index];
            completion.documentation = Some(Documentation::Undocumented);
            return;
        }
5520
5521 let documentation = if response.is_markdown {
5522 Documentation::MultiLineMarkdown(
5523 markdown::parse_markdown(&response.text, &language_registry, None).await,
5524 )
5525 } else if response.text.lines().count() <= 1 {
5526 Documentation::SingleLine(response.text)
5527 } else {
5528 Documentation::MultiLinePlainText(response.text)
5529 };
5530
5531 let mut completions = completions.write();
5532 let completion = &mut completions[completion_index];
5533 completion.documentation = Some(documentation);
5534 }
5535
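    /// Applies a completion's additional text edits (resolving them first when the
    /// server supports it), skipping edits that overlap the primary completion edit.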
5536 pub fn apply_additional_edits_for_completion(
5537 &self,
5538 buffer_handle: Model<Buffer>,
5539 completion: Completion,
5540 push_to_history: bool,
5541 cx: &mut ModelContext<Self>,
5542 ) -> Task<Result<Option<Transaction>>> {
5543 let buffer = buffer_handle.read(cx);
5544 let buffer_id = buffer.remote_id();
5545
5546 if self.is_local() {
5547 let server_id = completion.server_id;
5548 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
5549 Some((_, server)) => server.clone(),
5550 _ => return Task::ready(Ok(Default::default())),
5551 };
5552
5553 cx.spawn(move |this, mut cx| async move {
5554 let can_resolve = lang_server
5555 .capabilities()
5556 .completion_provider
5557 .as_ref()
5558 .and_then(|options| options.resolve_provider)
5559 .unwrap_or(false);
5560 let additional_text_edits = if can_resolve {
5561 lang_server
5562 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
5563 .await?
5564 .additional_text_edits
5565 } else {
5566 completion.lsp_completion.additional_text_edits
5567 };
5568 if let Some(edits) = additional_text_edits {
5569 let edits = this
5570 .update(&mut cx, |this, cx| {
5571 this.edits_from_lsp(
5572 &buffer_handle,
5573 edits,
5574 lang_server.server_id(),
5575 None,
5576 cx,
5577 )
5578 })?
5579 .await?;
5580
5581 buffer_handle.update(&mut cx, |buffer, cx| {
5582 buffer.finalize_last_transaction();
5583 buffer.start_transaction();
5584
5585 for (range, text) in edits {
5586 let primary = &completion.old_range;
5587 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5588 && primary.end.cmp(&range.start, buffer).is_ge();
5589 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5590 && range.end.cmp(&primary.end, buffer).is_ge();
5591
                            // Skip additional edits which overlap with the primary completion edit.
                            // See https://github.com/zed-industries/zed/pull/1871
5594 if !start_within && !end_within {
5595 buffer.edit([(range, text)], None, cx);
5596 }
5597 }
5598
5599 let transaction = if buffer.end_transaction(cx).is_some() {
5600 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5601 if !push_to_history {
5602 buffer.forget_transaction(transaction.id);
5603 }
5604 Some(transaction)
5605 } else {
5606 None
5607 };
5608 Ok(transaction)
5609 })?
5610 } else {
5611 Ok(None)
5612 }
5613 })
5614 } else if let Some(project_id) = self.remote_id() {
5615 let client = self.client.clone();
5616 cx.spawn(move |_, mut cx| async move {
5617 let response = client
5618 .request(proto::ApplyCompletionAdditionalEdits {
5619 project_id,
5620 buffer_id: buffer_id.into(),
5621 completion: Some(language::proto::serialize_completion(&completion)),
5622 })
5623 .await?;
5624
5625 if let Some(transaction) = response.transaction {
5626 let transaction = language::proto::deserialize_transaction(transaction)?;
5627 buffer_handle
5628 .update(&mut cx, |buffer, _| {
5629 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5630 })?
5631 .await?;
5632 if push_to_history {
5633 buffer_handle.update(&mut cx, |buffer, _| {
5634 buffer.push_transaction(transaction.clone(), Instant::now());
5635 })?;
5636 }
5637 Ok(Some(transaction))
5638 } else {
5639 Ok(None)
5640 }
5641 })
5642 } else {
5643 Task::ready(Err(anyhow!("project does not have a remote id")))
5644 }
5645 }
5646
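    /// Fetches code actions for the given range from every capable language server
    /// for the buffer.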
5647 fn code_actions_impl(
5648 &self,
5649 buffer_handle: &Model<Buffer>,
5650 range: Range<Anchor>,
5651 cx: &mut ModelContext<Self>,
5652 ) -> Task<Vec<CodeAction>> {
5653 if self.is_local() {
5654 let snapshot = buffer_handle.read(cx).snapshot();
5655 let offset = range.start.to_offset(&snapshot);
5656 let scope = snapshot.language_scope_at(offset);
5657
            let mut action_responses = self
5659 .language_servers_for_buffer(buffer_handle.read(cx), cx)
5660 .filter(|(_, server)| GetCodeActions::supports_code_actions(server.capabilities()))
5661 .filter(|(adapter, _)| {
5662 scope
5663 .as_ref()
5664 .map(|scope| scope.language_allowed(&adapter.name))
5665 .unwrap_or(true)
5666 })
5667 .map(|(_, server)| server.server_id())
5668 .map(|server_id| {
5669 self.request_lsp(
5670 buffer_handle.clone(),
5671 LanguageServerToQuery::Other(server_id),
5672 GetCodeActions {
5673 range: range.clone(),
5674 kinds: None,
5675 },
5676 cx,
5677 )
5678 })
5679 .collect::<FuturesUnordered<_>>();
5680
5681 cx.spawn(|_, _| async move {
                let mut actions = Vec::with_capacity(action_responses.len());
                while let Some(response) = action_responses.next().await {
                    actions.extend(response.log_err().unwrap_or_default());
                }
                actions
5687 })
5688 } else if self.is_remote() {
5689 let request_task = self.request_lsp(
5690 buffer_handle.clone(),
5691 LanguageServerToQuery::Primary,
5692 GetCodeActions { range, kinds: None },
5693 cx,
5694 );
5695 cx.spawn(|_, _| async move { request_task.await.log_err().unwrap_or_default() })
5696 } else {
5697 log::error!("cannot fetch actions: project does not have a remote id");
5698 Task::ready(Vec::new())
5699 }
5700 }
5701
5702 pub fn code_actions<T: Clone + ToOffset>(
5703 &self,
5704 buffer_handle: &Model<Buffer>,
5705 range: Range<T>,
5706 cx: &mut ModelContext<Self>,
5707 ) -> Task<Vec<CodeAction>> {
5708 let buffer = buffer_handle.read(cx);
5709 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5710 self.code_actions_impl(buffer_handle, range, cx)
5711 }
5712
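    /// Applies a code action, resolving it if needed, then applying its workspace
    /// edit and/or executing its command via the originating language server.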
5713 pub fn apply_code_action(
5714 &self,
5715 buffer_handle: Model<Buffer>,
5716 mut action: CodeAction,
5717 push_to_history: bool,
5718 cx: &mut ModelContext<Self>,
5719 ) -> Task<Result<ProjectTransaction>> {
5720 if self.is_local() {
5721 let buffer = buffer_handle.read(cx);
5722 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5723 self.language_server_for_buffer(buffer, action.server_id, cx)
5724 {
5725 (adapter.clone(), server.clone())
5726 } else {
5727 return Task::ready(Ok(Default::default()));
5728 };
5729 cx.spawn(move |this, mut cx| async move {
5730 Self::try_resolve_code_action(&lang_server, &mut action)
5731 .await
5732 .context("resolving a code action")?;
5733 if let Some(edit) = action.lsp_action.edit {
5734 if edit.changes.is_some() || edit.document_changes.is_some() {
5735 return Self::deserialize_workspace_edit(
5736 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5737 edit,
5738 push_to_history,
5739 lsp_adapter.clone(),
5740 lang_server.clone(),
5741 &mut cx,
5742 )
5743 .await;
5744 }
5745 }
5746
5747 if let Some(command) = action.lsp_action.command {
5748 this.update(&mut cx, |this, _| {
5749 this.last_workspace_edits_by_language_server
5750 .remove(&lang_server.server_id());
5751 })?;
5752
5753 let result = lang_server
5754 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5755 command: command.command,
5756 arguments: command.arguments.unwrap_or_default(),
5757 ..Default::default()
5758 })
5759 .await;
5760
5761 if let Err(err) = result {
5762 // TODO: LSP ERROR
5763 return Err(err);
5764 }
5765
5766 return this.update(&mut cx, |this, _| {
5767 this.last_workspace_edits_by_language_server
5768 .remove(&lang_server.server_id())
5769 .unwrap_or_default()
5770 });
5771 }
5772
5773 Ok(ProjectTransaction::default())
5774 })
5775 } else if let Some(project_id) = self.remote_id() {
5776 let client = self.client.clone();
5777 let request = proto::ApplyCodeAction {
5778 project_id,
5779 buffer_id: buffer_handle.read(cx).remote_id().into(),
5780 action: Some(language::proto::serialize_code_action(&action)),
5781 };
5782 cx.spawn(move |this, mut cx| async move {
5783 let response = client
5784 .request(request)
5785 .await?
5786 .transaction
5787 .ok_or_else(|| anyhow!("missing transaction"))?;
5788 this.update(&mut cx, |this, cx| {
5789 this.deserialize_project_transaction(response, push_to_history, cx)
5790 })?
5791 .await
5792 })
5793 } else {
5794 Task::ready(Err(anyhow!("project does not have a remote id")))
5795 }
5796 }
5797
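    /// Runs on-type formatting triggered by a keystroke, making sure that only one
    /// formatting request is in flight per buffer at a time; remote projects forward the
    /// request to the host instead.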
5798 fn apply_on_type_formatting(
5799 &self,
5800 buffer: Model<Buffer>,
5801 position: Anchor,
5802 trigger: String,
5803 cx: &mut ModelContext<Self>,
5804 ) -> Task<Result<Option<Transaction>>> {
5805 if self.is_local() {
5806 cx.spawn(move |this, mut cx| async move {
5807 // Do not allow multiple concurrent formatting requests for the
5808 // same buffer.
5809 this.update(&mut cx, |this, cx| {
5810 this.buffers_being_formatted
5811 .insert(buffer.read(cx).remote_id())
5812 })?;
5813
5814 let _cleanup = defer({
5815 let this = this.clone();
5816 let mut cx = cx.clone();
5817 let closure_buffer = buffer.clone();
5818 move || {
5819 this.update(&mut cx, |this, cx| {
5820 this.buffers_being_formatted
5821 .remove(&closure_buffer.read(cx).remote_id());
5822 })
5823 .ok();
5824 }
5825 });
5826
5827 buffer
5828 .update(&mut cx, |buffer, _| {
5829 buffer.wait_for_edits(Some(position.timestamp))
5830 })?
5831 .await?;
5832 this.update(&mut cx, |this, cx| {
5833 let position = position.to_point_utf16(buffer.read(cx));
5834 this.on_type_format(buffer, position, trigger, false, cx)
5835 })?
5836 .await
5837 })
5838 } else if let Some(project_id) = self.remote_id() {
5839 let client = self.client.clone();
5840 let request = proto::OnTypeFormatting {
5841 project_id,
5842 buffer_id: buffer.read(cx).remote_id().into(),
5843 position: Some(serialize_anchor(&position)),
5844 trigger,
5845 version: serialize_version(&buffer.read(cx).version()),
5846 };
5847 cx.spawn(move |_, _| async move {
5848 client
5849 .request(request)
5850 .await?
5851 .transaction
5852 .map(language::proto::deserialize_transaction)
5853 .transpose()
5854 })
5855 } else {
5856 Task::ready(Err(anyhow!("project does not have a remote id")))
5857 }
5858 }
5859
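    /// Applies a list of LSP text edits to a buffer inside a single transaction,
    /// forgetting the transaction afterwards unless it should be pushed to the undo
    /// history.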
5860 async fn deserialize_edits(
5861 this: Model<Self>,
5862 buffer_to_edit: Model<Buffer>,
5863 edits: Vec<lsp::TextEdit>,
5864 push_to_history: bool,
5865 _: Arc<CachedLspAdapter>,
5866 language_server: Arc<LanguageServer>,
5867 cx: &mut AsyncAppContext,
5868 ) -> Result<Option<Transaction>> {
5869 let edits = this
5870 .update(cx, |this, cx| {
5871 this.edits_from_lsp(
5872 &buffer_to_edit,
5873 edits,
5874 language_server.server_id(),
5875 None,
5876 cx,
5877 )
5878 })?
5879 .await?;
5880
5881 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5882 buffer.finalize_last_transaction();
5883 buffer.start_transaction();
5884 for (range, text) in edits {
5885 buffer.edit([(range, text)], None, cx);
5886 }
5887
5888 if buffer.end_transaction(cx).is_some() {
5889 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5890 if !push_to_history {
5891 buffer.forget_transaction(transaction.id);
5892 }
5893 Some(transaction)
5894 } else {
5895 None
5896 }
5897 })?;
5898
5899 Ok(transaction)
5900 }
5901
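    /// Applies an LSP `WorkspaceEdit` to the project: resource operations (create,
    /// rename, delete) are performed on the file system, and document edits are applied
    /// to their buffers, yielding a `ProjectTransaction` of all changed buffers.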
5902 async fn deserialize_workspace_edit(
5903 this: Model<Self>,
5904 edit: lsp::WorkspaceEdit,
5905 push_to_history: bool,
5906 lsp_adapter: Arc<CachedLspAdapter>,
5907 language_server: Arc<LanguageServer>,
5908 cx: &mut AsyncAppContext,
5909 ) -> Result<ProjectTransaction> {
5910 let fs = this.update(cx, |this, _| this.fs.clone())?;
5911 let mut operations = Vec::new();
5912 if let Some(document_changes) = edit.document_changes {
5913 match document_changes {
5914 lsp::DocumentChanges::Edits(edits) => {
5915 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5916 }
5917 lsp::DocumentChanges::Operations(ops) => operations = ops,
5918 }
5919 } else if let Some(changes) = edit.changes {
5920 operations.extend(changes.into_iter().map(|(uri, edits)| {
5921 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5922 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5923 uri,
5924 version: None,
5925 },
5926 edits: edits.into_iter().map(OneOf::Left).collect(),
5927 })
5928 }));
5929 }
5930
5931 let mut project_transaction = ProjectTransaction::default();
5932 for operation in operations {
5933 match operation {
5934 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5935 let abs_path = op
5936 .uri
5937 .to_file_path()
5938 .map_err(|_| anyhow!("can't convert URI to path"))?;
5939
5940 if let Some(parent_path) = abs_path.parent() {
5941 fs.create_dir(parent_path).await?;
5942 }
5943 if abs_path.ends_with("/") {
5944 fs.create_dir(&abs_path).await?;
5945 } else {
5946 fs.create_file(
5947 &abs_path,
5948 op.options
5949 .map(|options| fs::CreateOptions {
5950 overwrite: options.overwrite.unwrap_or(false),
5951 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5952 })
5953 .unwrap_or_default(),
5954 )
5955 .await?;
5956 }
5957 }
5958
5959 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5960 let source_abs_path = op
5961 .old_uri
5962 .to_file_path()
5963 .map_err(|_| anyhow!("can't convert URI to path"))?;
5964 let target_abs_path = op
5965 .new_uri
5966 .to_file_path()
5967 .map_err(|_| anyhow!("can't convert URI to path"))?;
5968 fs.rename(
5969 &source_abs_path,
5970 &target_abs_path,
5971 op.options
5972 .map(|options| fs::RenameOptions {
5973 overwrite: options.overwrite.unwrap_or(false),
5974 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5975 })
5976 .unwrap_or_default(),
5977 )
5978 .await?;
5979 }
5980
5981 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5982 let abs_path = op
5983 .uri
5984 .to_file_path()
5985 .map_err(|_| anyhow!("can't convert URI to path"))?;
5986 let options = op
5987 .options
5988 .map(|options| fs::RemoveOptions {
5989 recursive: options.recursive.unwrap_or(false),
5990 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5991 })
5992 .unwrap_or_default();
5993 if abs_path.ends_with("/") {
5994 fs.remove_dir(&abs_path, options).await?;
5995 } else {
5996 fs.remove_file(&abs_path, options).await?;
5997 }
5998 }
5999
6000 lsp::DocumentChangeOperation::Edit(op) => {
6001 let buffer_to_edit = this
6002 .update(cx, |this, cx| {
6003 this.open_local_buffer_via_lsp(
6004 op.text_document.uri,
6005 language_server.server_id(),
6006 lsp_adapter.name.clone(),
6007 cx,
6008 )
6009 })?
6010 .await?;
6011
6012 let edits = this
6013 .update(cx, |this, cx| {
6014 let edits = op.edits.into_iter().map(|edit| match edit {
6015 OneOf::Left(edit) => edit,
6016 OneOf::Right(edit) => edit.text_edit,
6017 });
6018 this.edits_from_lsp(
6019 &buffer_to_edit,
6020 edits,
6021 language_server.server_id(),
6022 op.text_document.version,
6023 cx,
6024 )
6025 })?
6026 .await?;
6027
6028 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
6029 buffer.finalize_last_transaction();
6030 buffer.start_transaction();
6031 for (range, text) in edits {
6032 buffer.edit([(range, text)], None, cx);
6033 }
6034 let transaction = if buffer.end_transaction(cx).is_some() {
6035 let transaction = buffer.finalize_last_transaction().unwrap().clone();
6036 if !push_to_history {
6037 buffer.forget_transaction(transaction.id);
6038 }
6039 Some(transaction)
6040 } else {
6041 None
6042 };
6043
6044 transaction
6045 })?;
6046 if let Some(transaction) = transaction {
6047 project_transaction.0.insert(buffer_to_edit, transaction);
6048 }
6049 }
6050 }
6051 }
6052
6053 Ok(project_transaction)
6054 }
6055
6056 fn prepare_rename_impl(
6057 &self,
6058 buffer: Model<Buffer>,
6059 position: PointUtf16,
6060 cx: &mut ModelContext<Self>,
6061 ) -> Task<Result<Option<Range<Anchor>>>> {
6062 self.request_lsp(
6063 buffer,
6064 LanguageServerToQuery::Primary,
6065 PrepareRename { position },
6066 cx,
6067 )
6068 }
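    /// Asks the primary language server whether the symbol at `position` can be renamed,
    /// returning the range that the rename would affect, if any.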
6069 pub fn prepare_rename<T: ToPointUtf16>(
6070 &self,
6071 buffer: Model<Buffer>,
6072 position: T,
6073 cx: &mut ModelContext<Self>,
6074 ) -> Task<Result<Option<Range<Anchor>>>> {
6075 let position = position.to_point_utf16(buffer.read(cx));
6076 self.prepare_rename_impl(buffer, position, cx)
6077 }
6078
6079 fn perform_rename_impl(
6080 &self,
6081 buffer: Model<Buffer>,
6082 position: PointUtf16,
6083 new_name: String,
6084 push_to_history: bool,
6085 cx: &mut ModelContext<Self>,
6086 ) -> Task<Result<ProjectTransaction>> {
6088 self.request_lsp(
6089 buffer,
6090 LanguageServerToQuery::Primary,
6091 PerformRename {
6092 position,
6093 new_name,
6094 push_to_history,
6095 },
6096 cx,
6097 )
6098 }
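    /// Renames the symbol at `position` to `new_name` via the primary language server,
    /// returning the resulting transactions for all affected buffers.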
6099 pub fn perform_rename<T: ToPointUtf16>(
6100 &self,
6101 buffer: Model<Buffer>,
6102 position: T,
6103 new_name: String,
6104 push_to_history: bool,
6105 cx: &mut ModelContext<Self>,
6106 ) -> Task<Result<ProjectTransaction>> {
6107 let position = position.to_point_utf16(buffer.read(cx));
6108 self.perform_rename_impl(buffer, position, new_name, push_to_history, cx)
6109 }
6110
6111 pub fn on_type_format_impl(
6112 &self,
6113 buffer: Model<Buffer>,
6114 position: PointUtf16,
6115 trigger: String,
6116 push_to_history: bool,
6117 cx: &mut ModelContext<Self>,
6118 ) -> Task<Result<Option<Transaction>>> {
6119 let tab_size = buffer.update(cx, |buffer, cx| {
6120 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx).tab_size
6121 });
6122 self.request_lsp(
6123 buffer.clone(),
6124 LanguageServerToQuery::Primary,
6125 OnTypeFormatting {
6126 position,
6127 trigger,
6128 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
6129 push_to_history,
6130 },
6131 cx,
6132 )
6133 }
6134
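    /// Converts `position` to UTF-16 coordinates and requests on-type formatting from
    /// the primary language server for the given trigger character.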
6135 pub fn on_type_format<T: ToPointUtf16>(
6136 &self,
6137 buffer: Model<Buffer>,
6138 position: T,
6139 trigger: String,
6140 push_to_history: bool,
6141 cx: &mut ModelContext<Self>,
6142 ) -> Task<Result<Option<Transaction>>> {
6143 let position = position.to_point_utf16(buffer.read(cx));
6144 self.on_type_format_impl(buffer, position, trigger, push_to_history, cx)
6145 }
6146
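    /// Requests inlay hints for the given range. Locally this queries the primary
    /// language server after the range's edits have been observed; remote projects proxy
    /// the request to the host.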
6147 pub fn inlay_hints<T: ToOffset>(
6148 &self,
6149 buffer_handle: Model<Buffer>,
6150 range: Range<T>,
6151 cx: &mut ModelContext<Self>,
6152 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
6153 let buffer = buffer_handle.read(cx);
6154 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
6155 self.inlay_hints_impl(buffer_handle, range, cx)
6156 }
6157 fn inlay_hints_impl(
6158 &self,
6159 buffer_handle: Model<Buffer>,
6160 range: Range<Anchor>,
6161 cx: &mut ModelContext<Self>,
6162 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
6163 let buffer = buffer_handle.read(cx);
6164 let range_start = range.start;
6165 let range_end = range.end;
6166 let buffer_id = buffer.remote_id().into();
6167 let lsp_request = InlayHints { range };
6168
6169 if self.is_local() {
6170 let lsp_request_task = self.request_lsp(
6171 buffer_handle.clone(),
6172 LanguageServerToQuery::Primary,
6173 lsp_request,
6174 cx,
6175 );
6176 cx.spawn(move |_, mut cx| async move {
6177 buffer_handle
6178 .update(&mut cx, |buffer, _| {
6179 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
6180 })?
6181 .await
6182 .context("waiting for inlay hint request range edits")?;
6183 lsp_request_task.await.context("inlay hints LSP request")
6184 })
6185 } else if let Some(project_id) = self.remote_id() {
6186 let client = self.client.clone();
6187 let request = proto::InlayHints {
6188 project_id,
6189 buffer_id,
6190 start: Some(serialize_anchor(&range_start)),
6191 end: Some(serialize_anchor(&range_end)),
6192 version: serialize_version(&buffer_handle.read(cx).version()),
6193 };
6194 cx.spawn(move |project, cx| async move {
6195 let response = client
6196 .request(request)
6197 .await
6198 .context("inlay hints proto request")?;
6199 LspCommand::response_from_proto(
6200 lsp_request,
6201 response,
6202 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
6203 buffer_handle.clone(),
6204 cx.clone(),
6205 )
6206 .await
6207 .context("inlay hints proto response conversion")
6208 })
6209 } else {
6210 Task::ready(Err(anyhow!("project does not have a remote id")))
6211 }
6212 }
6213
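    /// Resolves additional data for an inlay hint via the owning language server, if it
    /// supports hint resolution; otherwise the hint is returned unchanged.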
6214 pub fn resolve_inlay_hint(
6215 &self,
6216 hint: InlayHint,
6217 buffer_handle: Model<Buffer>,
6218 server_id: LanguageServerId,
6219 cx: &mut ModelContext<Self>,
6220 ) -> Task<anyhow::Result<InlayHint>> {
6221 if self.is_local() {
6222 let buffer = buffer_handle.read(cx);
6223 let (_, lang_server) = if let Some((adapter, server)) =
6224 self.language_server_for_buffer(buffer, server_id, cx)
6225 {
6226 (adapter.clone(), server.clone())
6227 } else {
6228 return Task::ready(Ok(hint));
6229 };
6230 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
6231 return Task::ready(Ok(hint));
6232 }
6233
6234 let buffer_snapshot = buffer.snapshot();
6235 cx.spawn(move |_, mut cx| async move {
6236 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
6237 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
6238 );
6239 let resolved_hint = resolve_task
6240 .await
6241 .context("inlay hint resolve LSP request")?;
6242 let resolved_hint = InlayHints::lsp_to_project_hint(
6243 resolved_hint,
6244 &buffer_handle,
6245 server_id,
6246 ResolveState::Resolved,
6247 false,
6248 &mut cx,
6249 )
6250 .await?;
6251 Ok(resolved_hint)
6252 })
6253 } else if let Some(project_id) = self.remote_id() {
6254 let client = self.client.clone();
6255 let request = proto::ResolveInlayHint {
6256 project_id,
6257 buffer_id: buffer_handle.read(cx).remote_id().into(),
6258 language_server_id: server_id.0 as u64,
6259 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
6260 };
6261 cx.spawn(move |_, _| async move {
6262 let response = client
6263 .request(request)
6264 .await
6265 .context("inlay hints proto request")?;
6266 match response.hint {
6267 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
6268 .context("inlay hints proto resolve response conversion"),
6269 None => Ok(hint),
6270 }
6271 })
6272 } else {
6273 Task::ready(Err(anyhow!("project does not have a remote id")))
6274 }
6275 }
6276
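    /// Searches the project for a query. Local projects run the multi-phase scan in
    /// `search_local`; remote projects forward the query to the host and stream the
    /// matching buffers and ranges back over the returned channel.
    ///
    /// A minimal consumption sketch (assumes a `query: SearchQuery` built elsewhere and
    /// an async context; not compiled as a doctest):
    ///
    /// ```ignore
    /// let results = project.update(cx, |project, cx| project.search(query, cx));
    /// while let Ok(result) = results.recv().await {
    ///     match result {
    ///         SearchResult::Buffer { buffer, ranges } => { /* display matches */ }
    ///         SearchResult::LimitReached => { /* stop paging */ }
    ///     }
    /// }
    /// ```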
6277 #[allow(clippy::type_complexity)]
6278 pub fn search(
6279 &self,
6280 query: SearchQuery,
6281 cx: &mut ModelContext<Self>,
6282 ) -> Receiver<SearchResult> {
6283 if self.is_local() {
6284 self.search_local(query, cx)
6285 } else if let Some(project_id) = self.remote_id() {
6286 let (tx, rx) = smol::channel::unbounded();
6287 let request = self.client.request(query.to_proto(project_id));
6288 cx.spawn(move |this, mut cx| async move {
6289 let response = request.await?;
6290 let mut result = HashMap::default();
6291 for location in response.locations {
6292 let buffer_id = BufferId::new(location.buffer_id)?;
6293 let target_buffer = this
6294 .update(&mut cx, |this, cx| {
6295 this.wait_for_remote_buffer(buffer_id, cx)
6296 })?
6297 .await?;
6298 let start = location
6299 .start
6300 .and_then(deserialize_anchor)
6301 .ok_or_else(|| anyhow!("missing target start"))?;
6302 let end = location
6303 .end
6304 .and_then(deserialize_anchor)
6305 .ok_or_else(|| anyhow!("missing target end"))?;
6306 result
6307 .entry(target_buffer)
6308 .or_insert(Vec::new())
6309 .push(start..end)
6310 }
6311 for (buffer, ranges) in result {
6312 let _ = tx.send(SearchResult::Buffer { buffer, ranges }).await;
6313 }
6314
6315 if response.limit_reached {
6316 let _ = tx.send(SearchResult::LimitReached).await;
6317 }
6318
6319 Result::<(), anyhow::Error>::Ok(())
6320 })
6321 .detach_and_log_err(cx);
6322 rx
6323 } else {
6324 unimplemented!();
6325 }
6326 }
6327
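    /// Runs a project-wide search across local worktrees and open buffers; the
    /// phase-by-phase algorithm is described in the comment at the top of the function
    /// body.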
6328 pub fn search_local(
6329 &self,
6330 query: SearchQuery,
6331 cx: &mut ModelContext<Self>,
6332 ) -> Receiver<SearchResult> {
6333 // Local search is split into several phases.
        // The TL;DR is that we do two passes: an initial pass that picks the files containing at least one match,
        // and a second pass that finds the positions of all matches within those candidate files.
6336 // The Receiver obtained from this function returns matches sorted by buffer path. Files without a buffer path are reported first.
6337 //
6338 // It gets a bit hairy though, because we must account for files that do not have a persistent representation
6339 // on FS. Namely, if you have an untitled buffer or unsaved changes in a buffer, we want to scan that too.
6340 //
6341 // 1. We initialize a queue of match candidates and feed all opened buffers into it (== unsaved files / untitled buffers).
6342 // Then, we go through a worktree and check for files that do match a predicate. If the file had an opened version, we skip the scan
6343 // of FS version for that file altogether - after all, what we have in memory is more up-to-date than what's in FS.
6344 // 2. At this point, we have a list of all potentially matching buffers/files.
6345 // We sort that list by buffer path - this list is retained for later use.
6346 // We ensure that all buffers are now opened and available in project.
6347 // 3. We run a scan over all the candidate buffers on multiple background threads.
6348 // We cannot assume that there will even be a match - while at least one match
6349 // is guaranteed for files obtained from FS, the buffers we got from memory (unsaved files/unnamed buffers) might not have a match at all.
6350 // There is also an auxiliary background thread responsible for result gathering.
        //    This is where the sorted list of buffers comes into play: whenever this background thread receives a notification (buffer has/doesn't have matches),
        //    it records that result. Results may arrive out of order, but they are reported in sorted order:
        //    as soon as the match info for the next path in sorted order becomes available, the thread reports it (if it's a match) or moves on to the next
        //    entry - which might already be available thanks to out-of-order processing.
6355 //
        // We could also report matches fully out of order, without maintaining a sorted list of matching paths.
        // However, that would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
        // That isn't as straightforward as running an insertion sort, sadly, and it would also have to maintain match indices
        // in the face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since matches are already reported in sorted order.
6361 let snapshots = self
6362 .visible_worktrees(cx)
6363 .filter_map(|tree| {
6364 let tree = tree.read(cx).as_local()?;
6365 Some(tree.snapshot())
6366 })
6367 .collect::<Vec<_>>();
6368 let include_root = snapshots.len() > 1;
6369
6370 let background = cx.background_executor().clone();
6371 let path_count: usize = snapshots
6372 .iter()
6373 .map(|s| {
6374 if query.include_ignored() {
6375 s.file_count()
6376 } else {
6377 s.visible_file_count()
6378 }
6379 })
6380 .sum();
6381 if path_count == 0 {
6382 let (_, rx) = smol::channel::bounded(1024);
6383 return rx;
6384 }
6385 let workers = background.num_cpus().min(path_count);
6386 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
6387 let mut unnamed_files = vec![];
6388 let opened_buffers = self
6389 .opened_buffers
6390 .iter()
6391 .filter_map(|(_, b)| {
6392 let buffer = b.upgrade()?;
6393 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
6394 let is_ignored = buffer
6395 .project_path(cx)
6396 .and_then(|path| self.entry_for_path(&path, cx))
6397 .map_or(false, |entry| entry.is_ignored);
6398 (is_ignored, buffer.snapshot())
6399 });
6400 if is_ignored && !query.include_ignored() {
6401 return None;
6402 } else if let Some(file) = snapshot.file() {
6403 let matched_path = if include_root {
6404 query.file_matches(Some(&file.full_path(cx)))
6405 } else {
6406 query.file_matches(Some(file.path()))
6407 };
6408
6409 if matched_path {
6410 Some((file.path().clone(), (buffer, snapshot)))
6411 } else {
6412 None
6413 }
6414 } else {
6415 unnamed_files.push(buffer);
6416 None
6417 }
6418 })
6419 .collect();
6420 cx.background_executor()
6421 .spawn(Self::background_search(
6422 unnamed_files,
6423 opened_buffers,
6424 cx.background_executor().clone(),
6425 self.fs.clone(),
6426 workers,
6427 query.clone(),
6428 include_root,
6429 path_count,
6430 snapshots,
6431 matching_paths_tx,
6432 ))
6433 .detach();
6434
6435 let (result_tx, result_rx) = smol::channel::bounded(1024);
6436
6437 cx.spawn(|this, mut cx| async move {
6438 const MAX_SEARCH_RESULT_FILES: usize = 5_000;
6439 const MAX_SEARCH_RESULT_RANGES: usize = 10_000;
6440
6441 let mut matching_paths = matching_paths_rx
6442 .take(MAX_SEARCH_RESULT_FILES + 1)
6443 .collect::<Vec<_>>()
6444 .await;
6445 let mut limit_reached = if matching_paths.len() > MAX_SEARCH_RESULT_FILES {
6446 matching_paths.pop();
6447 true
6448 } else {
6449 false
6450 };
6451 matching_paths.sort_by_key(|candidate| (candidate.is_ignored(), candidate.path()));
6452
6453 let mut range_count = 0;
6454 let query = Arc::new(query);
6455
6456 // Now that we know what paths match the query, we will load at most
6457 // 64 buffers at a time to avoid overwhelming the main thread. For each
6458 // opened buffer, we will spawn a background task that retrieves all the
6459 // ranges in the buffer matched by the query.
6460 'outer: for matching_paths_chunk in matching_paths.chunks(64) {
6461 let mut chunk_results = Vec::new();
6462 for matching_path in matching_paths_chunk {
6463 let query = query.clone();
6464 let buffer = match matching_path {
6465 SearchMatchCandidate::OpenBuffer { buffer, .. } => {
6466 Task::ready(Ok(buffer.clone()))
6467 }
6468 SearchMatchCandidate::Path {
6469 worktree_id, path, ..
6470 } => this.update(&mut cx, |this, cx| {
6471 this.open_buffer((*worktree_id, path.clone()), cx)
6472 })?,
6473 };
6474
6475 chunk_results.push(cx.spawn(|cx| async move {
6476 let buffer = buffer.await?;
6477 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
6478 let ranges = cx
6479 .background_executor()
6480 .spawn(async move {
6481 query
6482 .search(&snapshot, None)
6483 .await
6484 .iter()
6485 .map(|range| {
6486 snapshot.anchor_before(range.start)
6487 ..snapshot.anchor_after(range.end)
6488 })
6489 .collect::<Vec<_>>()
6490 })
6491 .await;
6492 anyhow::Ok((buffer, ranges))
6493 }));
6494 }
6495
6496 let chunk_results = futures::future::join_all(chunk_results).await;
6497 for result in chunk_results {
6498 if let Some((buffer, ranges)) = result.log_err() {
6499 range_count += ranges.len();
6500 result_tx
6501 .send(SearchResult::Buffer { buffer, ranges })
6502 .await?;
6503 if range_count > MAX_SEARCH_RESULT_RANGES {
6504 limit_reached = true;
6505 break 'outer;
6506 }
6507 }
6508 }
6509 }
6510
6511 if limit_reached {
6512 result_tx.send(SearchResult::LimitReached).await?;
6513 }
6514
6515 anyhow::Ok(())
6516 })
6517 .detach();
6518
6519 result_rx
6520 }
6521
    /// Picks the paths that might contain a match for the given search query.
6523 #[allow(clippy::too_many_arguments)]
6524 async fn background_search(
6525 unnamed_buffers: Vec<Model<Buffer>>,
6526 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
6527 executor: BackgroundExecutor,
6528 fs: Arc<dyn Fs>,
6529 workers: usize,
6530 query: SearchQuery,
6531 include_root: bool,
6532 path_count: usize,
6533 snapshots: Vec<LocalSnapshot>,
6534 matching_paths_tx: Sender<SearchMatchCandidate>,
6535 ) {
6536 let fs = &fs;
6537 let query = &query;
6538 let matching_paths_tx = &matching_paths_tx;
6539 let snapshots = &snapshots;
6540 for buffer in unnamed_buffers {
6541 matching_paths_tx
6542 .send(SearchMatchCandidate::OpenBuffer {
6543 buffer: buffer.clone(),
6544 path: None,
6545 })
6546 .await
6547 .log_err();
6548 }
6549 for (path, (buffer, _)) in opened_buffers.iter() {
6550 matching_paths_tx
6551 .send(SearchMatchCandidate::OpenBuffer {
6552 buffer: buffer.clone(),
6553 path: Some(path.clone()),
6554 })
6555 .await
6556 .log_err();
6557 }
6558
6559 let paths_per_worker = (path_count + workers - 1) / workers;
6560
6561 executor
6562 .scoped(|scope| {
6563 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
6564
6565 for worker_ix in 0..workers {
6566 let worker_start_ix = worker_ix * paths_per_worker;
6567 let worker_end_ix = worker_start_ix + paths_per_worker;
6568 let opened_buffers = opened_buffers.clone();
6569 let limiter = Arc::clone(&max_concurrent_workers);
6570 scope.spawn({
6571 async move {
6572 let _guard = limiter.acquire().await;
6573 search_snapshots(
6574 snapshots,
6575 worker_start_ix,
6576 worker_end_ix,
6577 query,
6578 matching_paths_tx,
6579 &opened_buffers,
6580 include_root,
6581 fs,
6582 )
6583 .await;
6584 }
6585 });
6586 }
6587
6588 if query.include_ignored() {
6589 for snapshot in snapshots {
6590 for ignored_entry in snapshot.entries(true).filter(|e| e.is_ignored) {
6591 let limiter = Arc::clone(&max_concurrent_workers);
6592 scope.spawn(async move {
6593 let _guard = limiter.acquire().await;
6594 search_ignored_entry(
6595 snapshot,
6596 ignored_entry,
6597 fs,
6598 query,
6599 matching_paths_tx,
6600 )
6601 .await;
6602 });
6603 }
6604 }
6605 }
6606 })
6607 .await;
6608 }
6609
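    /// Sends an `LspCommand` either to a language server running locally for this buffer
    /// or, on remote projects, to the host as a proto request. Returns a default response
    /// when no suitable server is available or the capability check fails.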
6610 pub fn request_lsp<R: LspCommand>(
6611 &self,
6612 buffer_handle: Model<Buffer>,
6613 server: LanguageServerToQuery,
6614 request: R,
6615 cx: &mut ModelContext<Self>,
6616 ) -> Task<Result<R::Response>>
6617 where
6618 <R::LspRequest as lsp::request::Request>::Result: Send,
6619 <R::LspRequest as lsp::request::Request>::Params: Send,
6620 {
6621 let buffer = buffer_handle.read(cx);
6622 if self.is_local() {
6623 let language_server = match server {
6624 LanguageServerToQuery::Primary => {
6625 match self.primary_language_server_for_buffer(buffer, cx) {
6626 Some((_, server)) => Some(Arc::clone(server)),
6627 None => return Task::ready(Ok(Default::default())),
6628 }
6629 }
6630 LanguageServerToQuery::Other(id) => self
6631 .language_server_for_buffer(buffer, id, cx)
6632 .map(|(_, server)| Arc::clone(server)),
6633 };
6634 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6635 if let (Some(file), Some(language_server)) = (file, language_server) {
6636 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6637 return cx.spawn(move |this, cx| async move {
6638 if !request.check_capabilities(language_server.capabilities()) {
6639 return Ok(Default::default());
6640 }
6641
6642 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6643 let response = match result {
6644 Ok(response) => response,
6645
6646 Err(err) => {
6647 log::warn!(
6648 "Generic lsp request to {} failed: {}",
6649 language_server.name(),
6650 err
6651 );
6652 return Err(err);
6653 }
6654 };
6655
6656 request
6657 .response_from_lsp(
6658 response,
6659 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6660 buffer_handle,
6661 language_server.server_id(),
6662 cx,
6663 )
6664 .await
6665 });
6666 }
6667 } else if let Some(project_id) = self.remote_id() {
6668 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6669 }
6670
6671 Task::ready(Ok(Default::default()))
6672 }
6673
6674 fn send_lsp_proto_request<R: LspCommand>(
6675 &self,
6676 buffer: Model<Buffer>,
6677 project_id: u64,
6678 request: R,
6679 cx: &mut ModelContext<'_, Project>,
6680 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6681 let rpc = self.client.clone();
6682 let message = request.to_proto(project_id, buffer.read(cx));
6683 cx.spawn(move |this, mut cx| async move {
6684 // Ensure the project is still alive by the time the task
6685 // is scheduled.
6686 this.upgrade().context("project dropped")?;
6687 let response = rpc.request(message).await?;
6688 let this = this.upgrade().context("project dropped")?;
6689 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6690 Err(anyhow!("disconnected before completing request"))
6691 } else {
6692 request
6693 .response_from_proto(response, this, buffer, cx)
6694 .await
6695 }
6696 })
6697 }
6698
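    /// Returns the worktree containing `abs_path` along with the path relative to the
    /// worktree root, creating a new local worktree if none contains the path yet.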
6699 pub fn find_or_create_local_worktree(
6700 &mut self,
6701 abs_path: impl AsRef<Path>,
6702 visible: bool,
6703 cx: &mut ModelContext<Self>,
6704 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6705 let abs_path = abs_path.as_ref();
6706 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6707 Task::ready(Ok((tree, relative_path)))
6708 } else {
6709 let worktree = self.create_local_worktree(abs_path, visible, cx);
6710 cx.background_executor()
6711 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6712 }
6713 }
6714
6715 pub fn find_local_worktree(
6716 &self,
6717 abs_path: &Path,
6718 cx: &AppContext,
6719 ) -> Option<(Model<Worktree>, PathBuf)> {
6720 for tree in &self.worktrees {
6721 if let Some(tree) = tree.upgrade() {
6722 if let Some(relative_path) = tree
6723 .read(cx)
6724 .as_local()
6725 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6726 {
6727 return Some((tree.clone(), relative_path.into()));
6728 }
6729 }
6730 }
6731 None
6732 }
6733
6734 pub fn is_shared(&self) -> bool {
6735 match &self.client_state {
6736 ProjectClientState::Shared { .. } => true,
6737 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6738 }
6739 }
6740
6741 fn create_local_worktree(
6742 &mut self,
6743 abs_path: impl AsRef<Path>,
6744 visible: bool,
6745 cx: &mut ModelContext<Self>,
6746 ) -> Task<Result<Model<Worktree>>> {
6747 let fs = self.fs.clone();
6748 let client = self.client.clone();
6749 let next_entry_id = self.next_entry_id.clone();
6750 let path: Arc<Path> = abs_path.as_ref().into();
6751 let task = self
6752 .loading_local_worktrees
6753 .entry(path.clone())
6754 .or_insert_with(|| {
6755 cx.spawn(move |project, mut cx| {
6756 async move {
6757 let worktree = Worktree::local(
6758 client.clone(),
6759 path.clone(),
6760 visible,
6761 fs,
6762 next_entry_id,
6763 &mut cx,
6764 )
6765 .await;
6766
6767 project.update(&mut cx, |project, _| {
6768 project.loading_local_worktrees.remove(&path);
6769 })?;
6770
6771 let worktree = worktree?;
6772 project
6773 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6774
6775 cx.update(|cx| {
6776 cx.add_recent_document(&path);
6777 })
6778 .log_err();
6779
6780 Ok(worktree)
6781 }
6782 .map_err(Arc::new)
6783 })
6784 .shared()
6785 })
6786 .clone();
6787 cx.background_executor().spawn(async move {
6788 match task.await {
6789 Ok(worktree) => Ok(worktree),
6790 Err(err) => Err(anyhow!("{}", err)),
6791 }
6792 })
6793 }
6794
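    /// Removes a worktree from the project, shutting down language servers and prettier
    /// instances that served only that worktree and emitting the corresponding removal
    /// events.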
6795 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6796 let mut servers_to_remove = HashMap::default();
6797 let mut servers_to_preserve = HashSet::default();
6798 for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
6799 if worktree_id == &id_to_remove {
6800 servers_to_remove.insert(server_id, server_name.clone());
6801 } else {
6802 servers_to_preserve.insert(server_id);
6803 }
6804 }
6805 servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id));
6806 for (server_id_to_remove, server_name) in servers_to_remove {
6807 self.language_server_ids
6808 .remove(&(id_to_remove, server_name));
6809 self.language_server_statuses.remove(&server_id_to_remove);
6810 self.language_server_watched_paths
6811 .remove(&server_id_to_remove);
6812 self.last_workspace_edits_by_language_server
6813 .remove(&server_id_to_remove);
6814 self.language_servers.remove(&server_id_to_remove);
6815 cx.emit(Event::LanguageServerRemoved(server_id_to_remove));
6816 }
6817
6818 let mut prettier_instances_to_clean = FuturesUnordered::new();
6819 if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) {
6820 for path in prettier_paths.iter().flatten() {
6821 if let Some(prettier_instance) = self.prettier_instances.remove(path) {
6822 prettier_instances_to_clean.push(async move {
6823 prettier_instance
6824 .server()
6825 .await
6826 .map(|server| server.server_id())
6827 });
6828 }
6829 }
6830 }
6831 cx.spawn(|project, mut cx| async move {
6832 while let Some(prettier_server_id) = prettier_instances_to_clean.next().await {
6833 if let Some(prettier_server_id) = prettier_server_id {
6834 project
6835 .update(&mut cx, |project, cx| {
6836 project
6837 .supplementary_language_servers
6838 .remove(&prettier_server_id);
6839 cx.emit(Event::LanguageServerRemoved(prettier_server_id));
6840 })
6841 .ok();
6842 }
6843 }
6844 })
6845 .detach();
6846
6847 self.task_inventory().update(cx, |inventory, _| {
6848 inventory.remove_worktree_sources(id_to_remove);
6849 });
6850
6851 self.worktrees.retain(|worktree| {
6852 if let Some(worktree) = worktree.upgrade() {
6853 let id = worktree.read(cx).id();
6854 if id == id_to_remove {
6855 cx.emit(Event::WorktreeRemoved(id));
6856 false
6857 } else {
6858 true
6859 }
6860 } else {
6861 false
6862 }
6863 });
6864 self.metadata_changed(cx);
6865 }
6866
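    /// Registers a worktree with the project: subscribes to its entry and git events,
    /// keeps a strong or weak handle depending on whether it must stay alive, and
    /// announces the addition.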
6867 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6868 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6869 cx.subscribe(worktree, |this, worktree, event, cx| {
6870 let is_local = worktree.read(cx).is_local();
6871 match event {
6872 worktree::Event::UpdatedEntries(changes) => {
6873 if is_local {
6874 this.update_local_worktree_buffers(&worktree, changes, cx);
6875 this.update_local_worktree_language_servers(&worktree, changes, cx);
6876 this.update_local_worktree_settings(&worktree, changes, cx);
6877 this.update_prettier_settings(&worktree, changes, cx);
6878 }
6879
6880 cx.emit(Event::WorktreeUpdatedEntries(
6881 worktree.read(cx).id(),
6882 changes.clone(),
6883 ));
6884 }
6885 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6886 if is_local {
6887 this.update_local_worktree_buffers_git_repos(
6888 worktree.clone(),
6889 updated_repos,
6890 cx,
6891 )
6892 }
6893 cx.emit(Event::WorktreeUpdatedGitRepositories);
6894 }
6895 }
6896 })
6897 .detach();
6898
6899 let push_strong_handle = {
6900 let worktree = worktree.read(cx);
6901 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6902 };
6903 if push_strong_handle {
6904 self.worktrees
6905 .push(WorktreeHandle::Strong(worktree.clone()));
6906 } else {
6907 self.worktrees
6908 .push(WorktreeHandle::Weak(worktree.downgrade()));
6909 }
6910
6911 let handle_id = worktree.entity_id();
6912 cx.observe_release(worktree, move |this, worktree, cx| {
6913 let _ = this.remove_worktree(worktree.id(), cx);
6914 cx.update_global::<SettingsStore, _>(|store, cx| {
6915 store
6916 .clear_local_settings(handle_id.as_u64() as usize, cx)
6917 .log_err()
6918 });
6919 })
6920 .detach();
6921
6922 cx.emit(Event::WorktreeAdded);
6923 self.metadata_changed(cx);
6924 }
6925
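    /// Refreshes the `File` associated with each open buffer after entries in a local
    /// worktree change, notifying collaborators and re-registering renamed buffers with
    /// their language servers.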
6926 fn update_local_worktree_buffers(
6927 &mut self,
6928 worktree_handle: &Model<Worktree>,
6929 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6930 cx: &mut ModelContext<Self>,
6931 ) {
6932 let snapshot = worktree_handle.read(cx).snapshot();
6933
6934 let mut renamed_buffers = Vec::new();
6935 for (path, entry_id, _) in changes {
6936 let worktree_id = worktree_handle.read(cx).id();
6937 let project_path = ProjectPath {
6938 worktree_id,
6939 path: path.clone(),
6940 };
6941
6942 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6943 Some(&buffer_id) => buffer_id,
6944 None => match self.local_buffer_ids_by_path.get(&project_path) {
6945 Some(&buffer_id) => buffer_id,
6946 None => {
6947 continue;
6948 }
6949 },
6950 };
6951
6952 let open_buffer = self.opened_buffers.get(&buffer_id);
6953 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6954 buffer
6955 } else {
6956 self.opened_buffers.remove(&buffer_id);
6957 self.local_buffer_ids_by_path.remove(&project_path);
6958 self.local_buffer_ids_by_entry_id.remove(entry_id);
6959 continue;
6960 };
6961
6962 buffer.update(cx, |buffer, cx| {
6963 if let Some(old_file) = File::from_dyn(buffer.file()) {
6964 if old_file.worktree != *worktree_handle {
6965 return;
6966 }
6967
6968 let new_file = if let Some(entry) = old_file
6969 .entry_id
6970 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6971 {
6972 File {
6973 is_local: true,
6974 entry_id: Some(entry.id),
6975 mtime: entry.mtime,
6976 path: entry.path.clone(),
6977 worktree: worktree_handle.clone(),
6978 is_deleted: false,
6979 is_private: entry.is_private,
6980 }
6981 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6982 File {
6983 is_local: true,
6984 entry_id: Some(entry.id),
6985 mtime: entry.mtime,
6986 path: entry.path.clone(),
6987 worktree: worktree_handle.clone(),
6988 is_deleted: false,
6989 is_private: entry.is_private,
6990 }
6991 } else {
6992 File {
6993 is_local: true,
6994 entry_id: old_file.entry_id,
6995 path: old_file.path().clone(),
6996 mtime: old_file.mtime(),
6997 worktree: worktree_handle.clone(),
6998 is_deleted: true,
6999 is_private: old_file.is_private,
7000 }
7001 };
7002
7003 let old_path = old_file.abs_path(cx);
7004 if new_file.abs_path(cx) != old_path {
7005 renamed_buffers.push((cx.handle(), old_file.clone()));
7006 self.local_buffer_ids_by_path.remove(&project_path);
7007 self.local_buffer_ids_by_path.insert(
7008 ProjectPath {
7009 worktree_id,
7010 path: path.clone(),
7011 },
7012 buffer_id,
7013 );
7014 }
7015
7016 if new_file.entry_id != Some(*entry_id) {
7017 self.local_buffer_ids_by_entry_id.remove(entry_id);
7018 if let Some(entry_id) = new_file.entry_id {
7019 self.local_buffer_ids_by_entry_id
7020 .insert(entry_id, buffer_id);
7021 }
7022 }
7023
7024 if new_file != *old_file {
7025 if let Some(project_id) = self.remote_id() {
7026 self.client
7027 .send(proto::UpdateBufferFile {
7028 project_id,
7029 buffer_id: buffer_id.into(),
7030 file: Some(new_file.to_proto()),
7031 })
7032 .log_err();
7033 }
7034
7035 buffer.file_updated(Arc::new(new_file), cx);
7036 }
7037 }
7038 });
7039 }
7040
7041 for (buffer, old_file) in renamed_buffers {
7042 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
7043 self.detect_language_for_buffer(&buffer, cx);
7044 self.register_buffer_with_language_servers(&buffer, cx);
7045 }
7046 }
7047
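    /// Translates worktree entry changes into `workspace/didChangeWatchedFiles`
    /// notifications for language servers whose registered watchers match the changed
    /// paths.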
7048 fn update_local_worktree_language_servers(
7049 &mut self,
7050 worktree_handle: &Model<Worktree>,
7051 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
7052 cx: &mut ModelContext<Self>,
7053 ) {
7054 if changes.is_empty() {
7055 return;
7056 }
7057
7058 let worktree_id = worktree_handle.read(cx).id();
7059 let mut language_server_ids = self
7060 .language_server_ids
7061 .iter()
7062 .filter_map(|((server_worktree_id, _), server_id)| {
7063 (*server_worktree_id == worktree_id).then_some(*server_id)
7064 })
7065 .collect::<Vec<_>>();
7066 language_server_ids.sort();
7067 language_server_ids.dedup();
7068
7069 let abs_path = worktree_handle.read(cx).abs_path();
7070 for server_id in &language_server_ids {
7071 if let Some(LanguageServerState::Running { server, .. }) =
7072 self.language_servers.get(server_id)
7073 {
7074 if let Some(watched_paths) = self
7075 .language_server_watched_paths
7076 .get(&server_id)
7077 .and_then(|paths| paths.get(&worktree_id))
7078 {
7079 let params = lsp::DidChangeWatchedFilesParams {
7080 changes: changes
7081 .iter()
7082 .filter_map(|(path, _, change)| {
7083 if !watched_paths.is_match(&path) {
7084 return None;
7085 }
7086 let typ = match change {
7087 PathChange::Loaded => return None,
7088 PathChange::Added => lsp::FileChangeType::CREATED,
7089 PathChange::Removed => lsp::FileChangeType::DELETED,
7090 PathChange::Updated => lsp::FileChangeType::CHANGED,
7091 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
7092 };
7093 Some(lsp::FileEvent {
7094 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
7095 typ,
7096 })
7097 })
7098 .collect(),
7099 };
7100 if !params.changes.is_empty() {
7101 server
7102 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
7103 .log_err();
7104 }
7105 }
7106 }
7107 }
7108 }
7109
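    /// Reloads the git diff base for every open or currently loading buffer whose
    /// containing repository changed, and forwards the new diff bases to remote
    /// collaborators.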
7110 fn update_local_worktree_buffers_git_repos(
7111 &mut self,
7112 worktree_handle: Model<Worktree>,
7113 changed_repos: &UpdatedGitRepositoriesSet,
7114 cx: &mut ModelContext<Self>,
7115 ) {
7116 debug_assert!(worktree_handle.read(cx).is_local());
7117
        // Identify the loading buffers whose containing repository has changed.
7119 let future_buffers = self
7120 .loading_buffers_by_path
7121 .iter()
7122 .filter_map(|(project_path, receiver)| {
7123 if project_path.worktree_id != worktree_handle.read(cx).id() {
7124 return None;
7125 }
7126 let path = &project_path.path;
7127 changed_repos
7128 .iter()
7129 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7130 let receiver = receiver.clone();
7131 let path = path.clone();
7132 let abs_path = worktree_handle.read(cx).absolutize(&path).ok()?;
7133 Some(async move {
7134 wait_for_loading_buffer(receiver)
7135 .await
7136 .ok()
7137 .map(|buffer| (buffer, path, abs_path))
7138 })
7139 })
7140 .collect::<FuturesUnordered<_>>();
7141
7142 // Identify the current buffers whose containing repository has changed.
7143 let current_buffers = self
7144 .opened_buffers
7145 .values()
7146 .filter_map(|buffer| {
7147 let buffer = buffer.upgrade()?;
7148 let file = File::from_dyn(buffer.read(cx).file())?;
7149 if file.worktree != worktree_handle {
7150 return None;
7151 }
7152 let path = file.path();
7153 changed_repos
7154 .iter()
7155 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7156 Some((buffer, path.clone(), file.abs_path(cx)))
7157 })
7158 .collect::<Vec<_>>();
7159
7160 if future_buffers.len() + current_buffers.len() == 0 {
7161 return;
7162 }
7163
7164 let remote_id = self.remote_id();
7165 let client = self.client.clone();
7166 let fs = self.fs.clone();
7167 cx.spawn(move |_, mut cx| async move {
7168 // Wait for all of the buffers to load.
7169 let future_buffers = future_buffers.collect::<Vec<_>>().await;
7170
7171 // Reload the diff base for every buffer whose containing git repository has changed.
7172 let snapshot =
7173 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
7174 let diff_bases_by_buffer = cx
7175 .background_executor()
7176 .spawn(async move {
7177 let mut diff_base_tasks = future_buffers
7178 .into_iter()
7179 .flatten()
7180 .chain(current_buffers)
7181 .filter_map(|(buffer, path, abs_path)| {
7182 let (work_directory, repo) =
7183 snapshot.repository_and_work_directory_for_path(&path)?;
7184 let repo_entry = snapshot.get_local_repo(&repo)?;
7185 Some((buffer, path, abs_path, work_directory, repo_entry))
7186 })
7187 .map(|(buffer, path, abs_path, work_directory, repo_entry)| {
7188 let fs = fs.clone();
7189 async move {
7190 let abs_path_metadata = fs
7191 .metadata(&abs_path)
7192 .await
7193 .with_context(|| {
7194 format!("loading file and FS metadata for {path:?}")
7195 })
7196 .log_err()
7197 .flatten()?;
7198 let base_text = if abs_path_metadata.is_dir
7199 || abs_path_metadata.is_symlink
7200 {
7201 None
7202 } else {
7203 let relative_path = path.strip_prefix(&work_directory).ok()?;
7204 repo_entry.repo().lock().load_index_text(relative_path)
7205 };
7206 Some((buffer, base_text))
7207 }
7208 })
7209 .collect::<FuturesUnordered<_>>();
7210
7211 let mut diff_bases = Vec::with_capacity(diff_base_tasks.len());
7212 while let Some(diff_base) = diff_base_tasks.next().await {
7213 if let Some(diff_base) = diff_base {
7214 diff_bases.push(diff_base);
7215 }
7216 }
7217 diff_bases
7218 })
7219 .await;
7220
7221 // Assign the new diff bases on all of the buffers.
7222 for (buffer, diff_base) in diff_bases_by_buffer {
7223 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
7224 buffer.set_diff_base(diff_base.clone(), cx);
7225 buffer.remote_id().into()
7226 })?;
7227 if let Some(project_id) = remote_id {
7228 client
7229 .send(proto::UpdateDiffBase {
7230 project_id,
7231 buffer_id,
7232 diff_base,
7233 })
7234 .log_err();
7235 }
7236 }
7237
7238 anyhow::Ok(())
7239 })
7240 .detach();
7241 }
7242
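    /// Reacts to changes to local settings and task configuration files in a worktree,
    /// updating the settings store and the task inventory accordingly.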
7243 fn update_local_worktree_settings(
7244 &mut self,
7245 worktree: &Model<Worktree>,
7246 changes: &UpdatedEntriesSet,
7247 cx: &mut ModelContext<Self>,
7248 ) {
7249 if worktree.read(cx).as_local().is_none() {
7250 return;
7251 }
7252 let project_id = self.remote_id();
7253 let worktree_id = worktree.entity_id();
7254 let remote_worktree_id = worktree.read(cx).id();
7255
7256 let mut settings_contents = Vec::new();
7257 for (path, _, change) in changes.iter() {
7258 let removed = change == &PathChange::Removed;
7259 let abs_path = match worktree.read(cx).absolutize(path) {
7260 Ok(abs_path) => abs_path,
7261 Err(e) => {
7262 log::warn!("Cannot absolutize {path:?} received as {change:?} FS change: {e}");
7263 continue;
7264 }
7265 };
7266
7267 if abs_path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
7268 let settings_dir = Arc::from(
7269 path.ancestors()
7270 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
7271 .unwrap(),
7272 );
7273 let fs = self.fs.clone();
7274 settings_contents.push(async move {
7275 (
7276 settings_dir,
7277 if removed {
7278 None
7279 } else {
7280 Some(async move { fs.load(&abs_path).await }.await)
7281 },
7282 )
7283 });
7284 } else if abs_path.ends_with(&*LOCAL_TASKS_RELATIVE_PATH) {
7285 self.task_inventory().update(cx, |task_inventory, cx| {
7286 if removed {
7287 task_inventory.remove_local_static_source(&abs_path);
7288 } else {
7289 let fs = self.fs.clone();
7290 let task_abs_path = abs_path.clone();
7291 task_inventory.add_source(
7292 TaskSourceKind::Worktree {
7293 id: remote_worktree_id,
7294 abs_path,
7295 },
7296 |cx| {
7297 let tasks_file_rx =
7298 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7299 StaticSource::new(
7300 format!("local_tasks_for_workspace_{remote_worktree_id}"),
7301 TrackedFile::new(tasks_file_rx, cx),
7302 cx,
7303 )
7304 },
7305 cx,
7306 );
7307 }
7308 })
7309 } else if abs_path.ends_with(&*LOCAL_VSCODE_TASKS_RELATIVE_PATH) {
7310 self.task_inventory().update(cx, |task_inventory, cx| {
7311 if removed {
7312 task_inventory.remove_local_static_source(&abs_path);
7313 } else {
7314 let fs = self.fs.clone();
7315 let task_abs_path = abs_path.clone();
7316 task_inventory.add_source(
7317 TaskSourceKind::Worktree {
7318 id: remote_worktree_id,
7319 abs_path,
7320 },
7321 |cx| {
7322 let tasks_file_rx =
7323 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7324 StaticSource::new(
7325 format!(
7326 "local_vscode_tasks_for_workspace_{remote_worktree_id}"
7327 ),
7328 TrackedFile::new_convertible::<task::VsCodeTaskFile>(
7329 tasks_file_rx,
7330 cx,
7331 ),
7332 cx,
7333 )
7334 },
7335 cx,
7336 );
7337 }
7338 })
7339 }
7340 }
7341
7342 if settings_contents.is_empty() {
7343 return;
7344 }
7345
7346 let client = self.client.clone();
7347 cx.spawn(move |_, cx| async move {
7348 let settings_contents: Vec<(Arc<Path>, _)> =
7349 futures::future::join_all(settings_contents).await;
7350 cx.update(|cx| {
7351 cx.update_global::<SettingsStore, _>(|store, cx| {
7352 for (directory, file_content) in settings_contents {
7353 let file_content = file_content.and_then(|content| content.log_err());
7354 store
7355 .set_local_settings(
7356 worktree_id.as_u64() as usize,
7357 directory.clone(),
7358 file_content.as_deref(),
7359 cx,
7360 )
7361 .log_err();
7362 if let Some(remote_id) = project_id {
7363 client
7364 .send(proto::UpdateWorktreeSettings {
7365 project_id: remote_id,
7366 worktree_id: remote_worktree_id.to_proto(),
7367 path: directory.to_string_lossy().into_owned(),
7368 content: file_content,
7369 })
7370 .log_err();
7371 }
7372 }
7373 });
7374 })
7375 .ok();
7376 })
7377 .detach();
7378 }
7379
7380 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
7381 let new_active_entry = entry.and_then(|project_path| {
7382 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
7383 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
7384 Some(entry.id)
7385 });
7386 if new_active_entry != self.active_entry {
7387 self.active_entry = new_active_entry;
7388 cx.emit(Event::ActiveEntryChanged(new_active_entry));
7389 }
7390 }
7391
7392 pub fn language_servers_running_disk_based_diagnostics(
7393 &self,
7394 ) -> impl Iterator<Item = LanguageServerId> + '_ {
7395 self.language_server_statuses
7396 .iter()
7397 .filter_map(|(id, status)| {
7398 if status.has_pending_diagnostic_updates {
7399 Some(*id)
7400 } else {
7401 None
7402 }
7403 })
7404 }
7405
7406 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
7407 let mut summary = DiagnosticSummary::default();
7408 for (_, _, path_summary) in
7409 self.diagnostic_summaries(include_ignored, cx)
7410 .filter(|(path, _, _)| {
                    let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                    include_ignored || is_ignored == Some(false)
7413 })
7414 {
7415 summary.error_count += path_summary.error_count;
7416 summary.warning_count += path_summary.warning_count;
7417 }
7418 summary
7419 }
7420
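    /// Iterates over per-path diagnostic summaries across all visible worktrees,
    /// optionally filtering out paths whose entries are ignored.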
7421 pub fn diagnostic_summaries<'a>(
7422 &'a self,
7423 include_ignored: bool,
7424 cx: &'a AppContext,
7425 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
7426 self.visible_worktrees(cx)
7427 .flat_map(move |worktree| {
7428 let worktree = worktree.read(cx);
7429 let worktree_id = worktree.id();
7430 worktree
7431 .diagnostic_summaries()
7432 .map(move |(path, server_id, summary)| {
7433 (ProjectPath { worktree_id, path }, server_id, summary)
7434 })
7435 })
7436 .filter(move |(path, _, _)| {
                let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                include_ignored || is_ignored == Some(false)
7439 })
7440 }
7441
7442 pub fn disk_based_diagnostics_started(
7443 &mut self,
7444 language_server_id: LanguageServerId,
7445 cx: &mut ModelContext<Self>,
7446 ) {
7447 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
7448 }
7449
7450 pub fn disk_based_diagnostics_finished(
7451 &mut self,
7452 language_server_id: LanguageServerId,
7453 cx: &mut ModelContext<Self>,
7454 ) {
7455 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
7456 }
7457
7458 pub fn active_entry(&self) -> Option<ProjectEntryId> {
7459 self.active_entry
7460 }
7461
7462 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
7463 self.worktree_for_id(path.worktree_id, cx)?
7464 .read(cx)
7465 .entry_for_path(&path.path)
7466 .cloned()
7467 }
7468
7469 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
7470 let worktree = self.worktree_for_entry(entry_id, cx)?;
7471 let worktree = worktree.read(cx);
7472 let worktree_id = worktree.id();
7473 let path = worktree.entry_for_id(entry_id)?.path.clone();
7474 Some(ProjectPath { worktree_id, path })
7475 }
7476
7477 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
7478 let workspace_root = self
7479 .worktree_for_id(project_path.worktree_id, cx)?
7480 .read(cx)
7481 .abs_path();
7482 let project_path = project_path.path.as_ref();
7483
7484 Some(if project_path == Path::new("") {
7485 workspace_root.to_path_buf()
7486 } else {
7487 workspace_root.join(project_path)
7488 })
7489 }
7490
7491 pub fn get_workspace_root(
7492 &self,
7493 project_path: &ProjectPath,
7494 cx: &AppContext,
7495 ) -> Option<PathBuf> {
7496 Some(
7497 self.worktree_for_id(project_path.worktree_id, cx)?
7498 .read(cx)
7499 .abs_path()
7500 .to_path_buf(),
7501 )
7502 }
7503
7504 pub fn get_repo(
7505 &self,
7506 project_path: &ProjectPath,
7507 cx: &AppContext,
7508 ) -> Option<Arc<Mutex<dyn GitRepository>>> {
7509 self.worktree_for_id(project_path.worktree_id, cx)?
7510 .read(cx)
7511 .as_local()?
7512 .snapshot()
7513 .local_git_repo(&project_path.path)
7514 }
7515
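    /// Computes a git blame for the buffer's contents, optionally at a specific version.
    /// Local projects read the repository on a background thread; remote projects
    /// request the blame from the host.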
7516 pub fn blame_buffer(
7517 &self,
7518 buffer: &Model<Buffer>,
7519 version: Option<clock::Global>,
7520 cx: &AppContext,
7521 ) -> Task<Result<Blame>> {
7522 if self.is_local() {
7523 let blame_params = maybe!({
7524 let buffer = buffer.read(cx);
7525 let buffer_project_path = buffer
7526 .project_path(cx)
7527 .context("failed to get buffer project path")?;
7528
7529 let worktree = self
7530 .worktree_for_id(buffer_project_path.worktree_id, cx)
7531 .context("failed to get worktree")?
7532 .read(cx)
7533 .as_local()
7534 .context("worktree was not local")?
7535 .snapshot();
7536 let (work_directory, repo) = worktree
7537 .repository_and_work_directory_for_path(&buffer_project_path.path)
7538 .context("failed to get repo for blamed buffer")?;
7539
7540 let repo_entry = worktree
7541 .get_local_repo(&repo)
7542 .context("failed to get repo for blamed buffer")?;
7543
7544 let relative_path = buffer_project_path
7545 .path
7546 .strip_prefix(&work_directory)?
7547 .to_path_buf();
7548
7549 let content = match version {
7550 Some(version) => buffer.rope_for_version(&version).clone(),
7551 None => buffer.as_rope().clone(),
7552 };
7553 let repo = repo_entry.repo().clone();
7554
7555 anyhow::Ok((repo, relative_path, content))
7556 });
7557
7558 cx.background_executor().spawn(async move {
7559 let (repo, relative_path, content) = blame_params?;
7560 let lock = repo.lock();
7561 lock.blame(&relative_path, content)
7562 })
7563 } else {
7564 let project_id = self.remote_id();
7565 let buffer_id = buffer.read(cx).remote_id();
7566 let client = self.client.clone();
7567 let version = buffer.read(cx).version();
7568
7569 cx.spawn(|_| async move {
7570 let project_id = project_id.context("unable to get project id for buffer")?;
7571 let response = client
7572 .request(proto::BlameBuffer {
7573 project_id,
7574 buffer_id: buffer_id.into(),
7575 version: serialize_version(&version),
7576 })
7577 .await?;
7578
7579 Ok(deserialize_blame_buffer_response(response))
7580 })
7581 }
7582 }
7583
7584 // RPC message handlers
7585
7586 async fn handle_blame_buffer(
7587 this: Model<Self>,
7588 envelope: TypedEnvelope<proto::BlameBuffer>,
7589 _: Arc<Client>,
7590 mut cx: AsyncAppContext,
7591 ) -> Result<proto::BlameBufferResponse> {
7592 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
7593 let version = deserialize_version(&envelope.payload.version);
7594
7595 let buffer = this.update(&mut cx, |this, _cx| {
7596 this.opened_buffers
7597 .get(&buffer_id)
7598 .and_then(|buffer| buffer.upgrade())
7599 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7600 })??;
7601
7602 buffer
7603 .update(&mut cx, |buffer, _| {
7604 buffer.wait_for_version(version.clone())
7605 })?
7606 .await?;
7607
7608 let blame = this
7609 .update(&mut cx, |this, cx| {
7610 this.blame_buffer(&buffer, Some(version), cx)
7611 })?
7612 .await?;
7613
7614 Ok(serialize_blame_buffer_response(blame))
7615 }
7616
7617 async fn handle_unshare_project(
7618 this: Model<Self>,
7619 _: TypedEnvelope<proto::UnshareProject>,
7620 _: Arc<Client>,
7621 mut cx: AsyncAppContext,
7622 ) -> Result<()> {
7623 this.update(&mut cx, |this, cx| {
7624 if this.is_local() {
7625 this.unshare(cx)?;
7626 } else {
7627 this.disconnected_from_host(cx);
7628 }
7629 Ok(())
7630 })?
7631 }
7632
7633 async fn handle_add_collaborator(
7634 this: Model<Self>,
7635 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
7636 _: Arc<Client>,
7637 mut cx: AsyncAppContext,
7638 ) -> Result<()> {
7639 let collaborator = envelope
7640 .payload
7641 .collaborator
7642 .take()
7643 .ok_or_else(|| anyhow!("empty collaborator"))?;
7644
7645 let collaborator = Collaborator::from_proto(collaborator)?;
7646 this.update(&mut cx, |this, cx| {
7647 this.shared_buffers.remove(&collaborator.peer_id);
7648 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
7649 this.collaborators
7650 .insert(collaborator.peer_id, collaborator);
7651 cx.notify();
7652 })?;
7653
7654 Ok(())
7655 }
7656
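/// Handles a collaborator changing peer ids (for example after reconnecting): moves
/// their shared buffers to the new peer id and, if the collaborator is the host,
/// discards buffers for which only queued operations are held and enqueues a resync.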
7657 async fn handle_update_project_collaborator(
7658 this: Model<Self>,
7659 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
7660 _: Arc<Client>,
7661 mut cx: AsyncAppContext,
7662 ) -> Result<()> {
7663 let old_peer_id = envelope
7664 .payload
7665 .old_peer_id
7666 .ok_or_else(|| anyhow!("missing old peer id"))?;
7667 let new_peer_id = envelope
7668 .payload
7669 .new_peer_id
7670 .ok_or_else(|| anyhow!("missing new peer id"))?;
7671 this.update(&mut cx, |this, cx| {
7672 let collaborator = this
7673 .collaborators
7674 .remove(&old_peer_id)
7675 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
7676 let is_host = collaborator.replica_id == 0;
7677 this.collaborators.insert(new_peer_id, collaborator);
7678
7679 let buffers = this.shared_buffers.remove(&old_peer_id);
7680 log::info!(
7681 "peer {} became {}. moving buffers {:?}",
7682 old_peer_id,
7683 new_peer_id,
7684 &buffers
7685 );
7686 if let Some(buffers) = buffers {
7687 this.shared_buffers.insert(new_peer_id, buffers);
7688 }
7689
7690 if is_host {
7691 this.opened_buffers
7692 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
7693 this.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
7694 .unwrap();
7695 }
7696
7697 cx.emit(Event::CollaboratorUpdated {
7698 old_peer_id,
7699 new_peer_id,
7700 });
7701 cx.notify();
7702 Ok(())
7703 })?
7704 }
7705
7706 async fn handle_remove_collaborator(
7707 this: Model<Self>,
7708 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7709 _: Arc<Client>,
7710 mut cx: AsyncAppContext,
7711 ) -> Result<()> {
7712 this.update(&mut cx, |this, cx| {
7713 let peer_id = envelope
7714 .payload
7715 .peer_id
7716 .ok_or_else(|| anyhow!("invalid peer id"))?;
7717 let replica_id = this
7718 .collaborators
7719 .remove(&peer_id)
7720 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7721 .replica_id;
7722 for buffer in this.opened_buffers.values() {
7723 if let Some(buffer) = buffer.upgrade() {
7724 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7725 }
7726 }
7727 this.shared_buffers.remove(&peer_id);
7728
7729 cx.emit(Event::CollaboratorLeft(peer_id));
7730 cx.notify();
7731 Ok(())
7732 })?
7733 }
7734
7735 async fn handle_update_project(
7736 this: Model<Self>,
7737 envelope: TypedEnvelope<proto::UpdateProject>,
7738 _: Arc<Client>,
7739 mut cx: AsyncAppContext,
7740 ) -> Result<()> {
7741 this.update(&mut cx, |this, cx| {
7742 // Don't handle messages that were sent before the response to our request to join the project
7743 if envelope.message_id > this.join_project_response_message_id {
7744 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7745 }
7746 Ok(())
7747 })?
7748 }
7749
7750 async fn handle_update_worktree(
7751 this: Model<Self>,
7752 envelope: TypedEnvelope<proto::UpdateWorktree>,
7753 _: Arc<Client>,
7754 mut cx: AsyncAppContext,
7755 ) -> Result<()> {
7756 this.update(&mut cx, |this, cx| {
7757 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7758 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7759 worktree.update(cx, |worktree, _| {
7760 let worktree = worktree.as_remote_mut().unwrap();
7761 worktree.update_from_remote(envelope.payload);
7762 });
7763 }
7764 Ok(())
7765 })?
7766 }
7767
7768 async fn handle_update_worktree_settings(
7769 this: Model<Self>,
7770 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7771 _: Arc<Client>,
7772 mut cx: AsyncAppContext,
7773 ) -> Result<()> {
7774 this.update(&mut cx, |this, cx| {
7775 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7776 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7777 cx.update_global::<SettingsStore, _>(|store, cx| {
7778 store
7779 .set_local_settings(
7780 worktree.entity_id().as_u64() as usize,
7781 PathBuf::from(&envelope.payload.path).into(),
7782 envelope.payload.content.as_deref(),
7783 cx,
7784 )
7785 .log_err();
7786 });
7787 }
7788 Ok(())
7789 })?
7790 }
7791
7792 async fn handle_create_project_entry(
7793 this: Model<Self>,
7794 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7795 _: Arc<Client>,
7796 mut cx: AsyncAppContext,
7797 ) -> Result<proto::ProjectEntryResponse> {
7798 let worktree = this.update(&mut cx, |this, cx| {
7799 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7800 this.worktree_for_id(worktree_id, cx)
7801 .ok_or_else(|| anyhow!("worktree not found"))
7802 })??;
7803 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7804 let entry = worktree
7805 .update(&mut cx, |worktree, cx| {
7806 let worktree = worktree.as_local_mut().unwrap();
7807 let path = PathBuf::from(envelope.payload.path);
7808 worktree.create_entry(path, envelope.payload.is_directory, cx)
7809 })?
7810 .await?;
7811 Ok(proto::ProjectEntryResponse {
7812 entry: entry.as_ref().map(|e| e.into()),
7813 worktree_scan_id: worktree_scan_id as u64,
7814 })
7815 }
7816
7817 async fn handle_rename_project_entry(
7818 this: Model<Self>,
7819 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7820 _: Arc<Client>,
7821 mut cx: AsyncAppContext,
7822 ) -> Result<proto::ProjectEntryResponse> {
7823 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7824 let worktree = this.update(&mut cx, |this, cx| {
7825 this.worktree_for_entry(entry_id, cx)
7826 .ok_or_else(|| anyhow!("worktree not found"))
7827 })??;
7828 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7829 let entry = worktree
7830 .update(&mut cx, |worktree, cx| {
7831 let new_path = PathBuf::from(envelope.payload.new_path);
7832 worktree
7833 .as_local_mut()
7834 .unwrap()
7835 .rename_entry(entry_id, new_path, cx)
7836 })?
7837 .await?;
7838 Ok(proto::ProjectEntryResponse {
7839 entry: entry.as_ref().map(|e| e.into()),
7840 worktree_scan_id: worktree_scan_id as u64,
7841 })
7842 }
7843
7844 async fn handle_copy_project_entry(
7845 this: Model<Self>,
7846 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7847 _: Arc<Client>,
7848 mut cx: AsyncAppContext,
7849 ) -> Result<proto::ProjectEntryResponse> {
7850 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7851 let worktree = this.update(&mut cx, |this, cx| {
7852 this.worktree_for_entry(entry_id, cx)
7853 .ok_or_else(|| anyhow!("worktree not found"))
7854 })??;
7855 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7856 let entry = worktree
7857 .update(&mut cx, |worktree, cx| {
7858 let new_path = PathBuf::from(envelope.payload.new_path);
7859 worktree
7860 .as_local_mut()
7861 .unwrap()
7862 .copy_entry(entry_id, new_path, cx)
7863 })?
7864 .await?;
7865 Ok(proto::ProjectEntryResponse {
7866 entry: entry.as_ref().map(|e| e.into()),
7867 worktree_scan_id: worktree_scan_id as u64,
7868 })
7869 }
7870
7871 async fn handle_delete_project_entry(
7872 this: Model<Self>,
7873 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7874 _: Arc<Client>,
7875 mut cx: AsyncAppContext,
7876 ) -> Result<proto::ProjectEntryResponse> {
7877 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7878
7879 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7880
7881 let worktree = this.update(&mut cx, |this, cx| {
7882 this.worktree_for_entry(entry_id, cx)
7883 .ok_or_else(|| anyhow!("worktree not found"))
7884 })??;
7885 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7886 worktree
7887 .update(&mut cx, |worktree, cx| {
7888 worktree
7889 .as_local_mut()
7890 .unwrap()
7891 .delete_entry(entry_id, cx)
7892 .ok_or_else(|| anyhow!("invalid entry"))
7893 })??
7894 .await?;
7895 Ok(proto::ProjectEntryResponse {
7896 entry: None,
7897 worktree_scan_id: worktree_scan_id as u64,
7898 })
7899 }
7900
7901 async fn handle_expand_project_entry(
7902 this: Model<Self>,
7903 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7904 _: Arc<Client>,
7905 mut cx: AsyncAppContext,
7906 ) -> Result<proto::ExpandProjectEntryResponse> {
7907 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7908 let worktree = this
7909 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7910 .ok_or_else(|| anyhow!("invalid request"))?;
7911 worktree
7912 .update(&mut cx, |worktree, cx| {
7913 worktree
7914 .as_local_mut()
7915 .unwrap()
7916 .expand_entry(entry_id, cx)
7917 .ok_or_else(|| anyhow!("invalid entry"))
7918 })??
7919 .await?;
7920 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7921 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7922 }
7923
7924 async fn handle_update_diagnostic_summary(
7925 this: Model<Self>,
7926 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7927 _: Arc<Client>,
7928 mut cx: AsyncAppContext,
7929 ) -> Result<()> {
7930 this.update(&mut cx, |this, cx| {
7931 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7932 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7933 if let Some(summary) = envelope.payload.summary {
7934 let project_path = ProjectPath {
7935 worktree_id,
7936 path: Path::new(&summary.path).into(),
7937 };
7938 worktree.update(cx, |worktree, _| {
7939 worktree
7940 .as_remote_mut()
7941 .unwrap()
7942 .update_diagnostic_summary(project_path.path.clone(), &summary);
7943 });
7944 cx.emit(Event::DiagnosticsUpdated {
7945 language_server_id: LanguageServerId(summary.language_server_id as usize),
7946 path: project_path,
7947 });
7948 }
7949 }
7950 Ok(())
7951 })?
7952 }
7953
7954 async fn handle_start_language_server(
7955 this: Model<Self>,
7956 envelope: TypedEnvelope<proto::StartLanguageServer>,
7957 _: Arc<Client>,
7958 mut cx: AsyncAppContext,
7959 ) -> Result<()> {
7960 let server = envelope
7961 .payload
7962 .server
7963 .ok_or_else(|| anyhow!("invalid server"))?;
7964 this.update(&mut cx, |this, cx| {
7965 this.language_server_statuses.insert(
7966 LanguageServerId(server.id as usize),
7967 LanguageServerStatus {
7968 name: server.name,
7969 pending_work: Default::default(),
7970 has_pending_diagnostic_updates: false,
7971 progress_tokens: Default::default(),
7972 },
7973 );
7974 cx.notify();
7975 })?;
7976 Ok(())
7977 }
7978
7979 async fn handle_update_language_server(
7980 this: Model<Self>,
7981 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7982 _: Arc<Client>,
7983 mut cx: AsyncAppContext,
7984 ) -> Result<()> {
7985 this.update(&mut cx, |this, cx| {
7986 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7987
7988 match envelope
7989 .payload
7990 .variant
7991 .ok_or_else(|| anyhow!("invalid variant"))?
7992 {
7993 proto::update_language_server::Variant::WorkStart(payload) => {
7994 this.on_lsp_work_start(
7995 language_server_id,
7996 payload.token,
7997 LanguageServerProgress {
7998 message: payload.message,
7999 percentage: payload.percentage.map(|p| p as usize),
8000 last_update_at: Instant::now(),
8001 },
8002 cx,
8003 );
8004 }
8005
8006 proto::update_language_server::Variant::WorkProgress(payload) => {
8007 this.on_lsp_work_progress(
8008 language_server_id,
8009 payload.token,
8010 LanguageServerProgress {
8011 message: payload.message,
8012 percentage: payload.percentage.map(|p| p as usize),
8013 last_update_at: Instant::now(),
8014 },
8015 cx,
8016 );
8017 }
8018
8019 proto::update_language_server::Variant::WorkEnd(payload) => {
8020 this.on_lsp_work_end(language_server_id, payload.token, cx);
8021 }
8022
8023 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
8024 this.disk_based_diagnostics_started(language_server_id, cx);
8025 }
8026
8027 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
8028 this.disk_based_diagnostics_finished(language_server_id, cx)
8029 }
8030 }
8031
8032 Ok(())
8033 })?
8034 }
8035
8036 async fn handle_update_buffer(
8037 this: Model<Self>,
8038 envelope: TypedEnvelope<proto::UpdateBuffer>,
8039 _: Arc<Client>,
8040 mut cx: AsyncAppContext,
8041 ) -> Result<proto::Ack> {
8042 this.update(&mut cx, |this, cx| {
8043 let payload = envelope.payload.clone();
8044 let buffer_id = BufferId::new(payload.buffer_id)?;
8045 let ops = payload
8046 .operations
8047 .into_iter()
8048 .map(language::proto::deserialize_operation)
8049 .collect::<Result<Vec<_>, _>>()?;
8050 let is_remote = this.is_remote();
8051 match this.opened_buffers.entry(buffer_id) {
8052 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
8053 OpenBuffer::Strong(buffer) => {
8054 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
8055 }
8056 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
8057 OpenBuffer::Weak(_) => {}
8058 },
8059 hash_map::Entry::Vacant(e) => {
8060 assert!(
8061 is_remote,
8062 "received buffer update from {:?}",
8063 envelope.original_sender_id
8064 );
8065 e.insert(OpenBuffer::Operations(ops));
8066 }
8067 }
8068 Ok(proto::Ack {})
8069 })?
8070 }
8071
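/// Handles the host streaming a buffer to this peer. A `State` variant creates an
/// incomplete remote buffer; subsequent `Chunk` variants apply serialized operations,
/// and the final chunk registers the buffer. Failures are forwarded to any tasks that
/// are waiting for the buffer to load.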
8072 async fn handle_create_buffer_for_peer(
8073 this: Model<Self>,
8074 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
8075 _: Arc<Client>,
8076 mut cx: AsyncAppContext,
8077 ) -> Result<()> {
8078 this.update(&mut cx, |this, cx| {
8079 match envelope
8080 .payload
8081 .variant
8082 .ok_or_else(|| anyhow!("missing variant"))?
8083 {
8084 proto::create_buffer_for_peer::Variant::State(mut state) => {
8085 let buffer_id = BufferId::new(state.id)?;
8086
8087 let buffer_result = maybe!({
8088 let mut buffer_file = None;
8089 if let Some(file) = state.file.take() {
8090 let worktree_id = WorktreeId::from_proto(file.worktree_id);
8091 let worktree =
8092 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
8093 anyhow!("no worktree found for id {}", file.worktree_id)
8094 })?;
8095 buffer_file =
8096 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
8097 as Arc<dyn language::File>);
8098 }
8099 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
8100 });
8101
8102 match buffer_result {
8103 Ok(buffer) => {
8104 let buffer = cx.new_model(|_| buffer);
8105 this.incomplete_remote_buffers.insert(buffer_id, buffer);
8106 }
8107 Err(error) => {
8108 if let Some(listeners) = this.loading_buffers.remove(&buffer_id) {
8109 for listener in listeners {
8110 listener.send(Err(anyhow!(error.cloned()))).ok();
8111 }
8112 }
8113 }
8114 };
8115 }
8116 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
8117 let buffer_id = BufferId::new(chunk.buffer_id)?;
8118 let buffer = this
8119 .incomplete_remote_buffers
8120 .get(&buffer_id)
8121 .cloned()
8122 .ok_or_else(|| {
8123 anyhow!(
8124 "received chunk for buffer {} without initial state",
8125 chunk.buffer_id
8126 )
8127 })?;
8128
8129 let result = maybe!({
8130 let operations = chunk
8131 .operations
8132 .into_iter()
8133 .map(language::proto::deserialize_operation)
8134 .collect::<Result<Vec<_>>>()?;
8135 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))
8136 });
8137
8138 if let Err(error) = result {
8139 this.incomplete_remote_buffers.remove(&buffer_id);
8140 if let Some(listeners) = this.loading_buffers.remove(&buffer_id) {
8141 for listener in listeners {
8142 listener.send(Err(error.cloned())).ok();
8143 }
8144 }
8145 } else if chunk.is_last {
8146 this.incomplete_remote_buffers.remove(&buffer_id);
8147 this.register_buffer(&buffer, cx)?;
8148 }
8151 }
8152 }
8153
8154 Ok(())
8155 })?
8156 }
8157
8158 async fn handle_update_diff_base(
8159 this: Model<Self>,
8160 envelope: TypedEnvelope<proto::UpdateDiffBase>,
8161 _: Arc<Client>,
8162 mut cx: AsyncAppContext,
8163 ) -> Result<()> {
8164 this.update(&mut cx, |this, cx| {
8165 let buffer_id = envelope.payload.buffer_id;
8166 let buffer_id = BufferId::new(buffer_id)?;
8167 let diff_base = envelope.payload.diff_base;
8168 if let Some(buffer) = this
8169 .opened_buffers
8170 .get_mut(&buffer_id)
8171 .and_then(|b| b.upgrade())
8172 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned())
8173 {
8174 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
8175 }
8176 Ok(())
8177 })?
8178 }
8179
8180 async fn handle_update_buffer_file(
8181 this: Model<Self>,
8182 envelope: TypedEnvelope<proto::UpdateBufferFile>,
8183 _: Arc<Client>,
8184 mut cx: AsyncAppContext,
8185 ) -> Result<()> {
8186 let buffer_id = envelope.payload.buffer_id;
8187 let buffer_id = BufferId::new(buffer_id)?;
8188
8189 this.update(&mut cx, |this, cx| {
8190 let payload = envelope.payload.clone();
8191 if let Some(buffer) = this
8192 .opened_buffers
8193 .get(&buffer_id)
8194 .and_then(|b| b.upgrade())
8195 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned())
8196 {
8197 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
8198 let worktree = this
8199 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
8200 .ok_or_else(|| anyhow!("no such worktree"))?;
8201 let file = File::from_proto(file, worktree, cx)?;
8202 buffer.update(cx, |buffer, cx| {
8203 buffer.file_updated(Arc::new(file), cx);
8204 });
8205 this.detect_language_for_buffer(&buffer, cx);
8206 }
8207 Ok(())
8208 })?
8209 }
8210
8211 async fn handle_save_buffer(
8212 this: Model<Self>,
8213 envelope: TypedEnvelope<proto::SaveBuffer>,
8214 _: Arc<Client>,
8215 mut cx: AsyncAppContext,
8216 ) -> Result<proto::BufferSaved> {
8217 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8218 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
8219 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
8220 let buffer = this
8221 .opened_buffers
8222 .get(&buffer_id)
8223 .and_then(|buffer| buffer.upgrade())
8224 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8225 anyhow::Ok((project_id, buffer))
8226 })??;
8227 buffer
8228 .update(&mut cx, |buffer, _| {
8229 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8230 })?
8231 .await?;
8232 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
8233
8234 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
8235 .await?;
8236 buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
8237 project_id,
8238 buffer_id: buffer_id.into(),
8239 version: serialize_version(buffer.saved_version()),
8240 mtime: buffer.saved_mtime().map(|time| time.into()),
8241 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
8242 })
8243 }
8244
8245 async fn handle_reload_buffers(
8246 this: Model<Self>,
8247 envelope: TypedEnvelope<proto::ReloadBuffers>,
8248 _: Arc<Client>,
8249 mut cx: AsyncAppContext,
8250 ) -> Result<proto::ReloadBuffersResponse> {
8251 let sender_id = envelope.original_sender_id()?;
8252 let reload = this.update(&mut cx, |this, cx| {
8253 let mut buffers = HashSet::default();
8254 for buffer_id in &envelope.payload.buffer_ids {
8255 let buffer_id = BufferId::new(*buffer_id)?;
8256 buffers.insert(
8257 this.opened_buffers
8258 .get(&buffer_id)
8259 .and_then(|buffer| buffer.upgrade())
8260 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8261 );
8262 }
8263 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
8264 })??;
8265
8266 let project_transaction = reload.await?;
8267 let project_transaction = this.update(&mut cx, |this, cx| {
8268 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8269 })?;
8270 Ok(proto::ReloadBuffersResponse {
8271 transaction: Some(project_transaction),
8272 })
8273 }
8274
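/// Handles a guest asking to synchronize buffers after a reconnect: records which
/// buffers the guest still has open, replies with the host's current versions, and
/// pushes any missing operations, file metadata, diff bases, and reload state.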
8275 async fn handle_synchronize_buffers(
8276 this: Model<Self>,
8277 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
8278 _: Arc<Client>,
8279 mut cx: AsyncAppContext,
8280 ) -> Result<proto::SynchronizeBuffersResponse> {
8281 let project_id = envelope.payload.project_id;
8282 let mut response = proto::SynchronizeBuffersResponse {
8283 buffers: Default::default(),
8284 };
8285
8286 this.update(&mut cx, |this, cx| {
8287 let Some(guest_id) = envelope.original_sender_id else {
8288 error!("missing original_sender_id on SynchronizeBuffers request");
8289 bail!("missing original_sender_id on SynchronizeBuffers request");
8290 };
8291
8292 this.shared_buffers.entry(guest_id).or_default().clear();
8293 for buffer in envelope.payload.buffers {
8294 let buffer_id = BufferId::new(buffer.id)?;
8295 let remote_version = language::proto::deserialize_version(&buffer.version);
8296 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8297 this.shared_buffers
8298 .entry(guest_id)
8299 .or_default()
8300 .insert(buffer_id);
8301
8302 let buffer = buffer.read(cx);
8303 response.buffers.push(proto::BufferVersion {
8304 id: buffer_id.into(),
8305 version: language::proto::serialize_version(&buffer.version),
8306 });
8307
8308 let operations = buffer.serialize_ops(Some(remote_version), cx);
8309 let client = this.client.clone();
8310 if let Some(file) = buffer.file() {
8311 client
8312 .send(proto::UpdateBufferFile {
8313 project_id,
8314 buffer_id: buffer_id.into(),
8315 file: Some(file.to_proto()),
8316 })
8317 .log_err();
8318 }
8319
8320 client
8321 .send(proto::UpdateDiffBase {
8322 project_id,
8323 buffer_id: buffer_id.into(),
8324 diff_base: buffer.diff_base().map(Into::into),
8325 })
8326 .log_err();
8327
8328 client
8329 .send(proto::BufferReloaded {
8330 project_id,
8331 buffer_id: buffer_id.into(),
8332 version: language::proto::serialize_version(buffer.saved_version()),
8333 mtime: buffer.saved_mtime().map(|time| time.into()),
8334 fingerprint: language::proto::serialize_fingerprint(
8335 buffer.saved_version_fingerprint(),
8336 ),
8337 line_ending: language::proto::serialize_line_ending(
8338 buffer.line_ending(),
8339 ) as i32,
8340 })
8341 .log_err();
8342
8343 cx.background_executor()
8344 .spawn(
8345 async move {
8346 let operations = operations.await;
8347 for chunk in split_operations(operations) {
8348 client
8349 .request(proto::UpdateBuffer {
8350 project_id,
8351 buffer_id: buffer_id.into(),
8352 operations: chunk,
8353 })
8354 .await?;
8355 }
8356 anyhow::Ok(())
8357 }
8358 .log_err(),
8359 )
8360 .detach();
8361 }
8362 }
8363 Ok(())
8364 })??;
8365
8366 Ok(response)
8367 }
8368
8369 async fn handle_format_buffers(
8370 this: Model<Self>,
8371 envelope: TypedEnvelope<proto::FormatBuffers>,
8372 _: Arc<Client>,
8373 mut cx: AsyncAppContext,
8374 ) -> Result<proto::FormatBuffersResponse> {
8375 let sender_id = envelope.original_sender_id()?;
8376 let format = this.update(&mut cx, |this, cx| {
8377 let mut buffers = HashSet::default();
8378 for buffer_id in &envelope.payload.buffer_ids {
8379 let buffer_id = BufferId::new(*buffer_id)?;
8380 buffers.insert(
8381 this.opened_buffers
8382 .get(&buffer_id)
8383 .and_then(|buffer| buffer.upgrade())
8384 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8385 );
8386 }
8387 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
8388 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
8389 })??;
8390
8391 let project_transaction = format.await?;
8392 let project_transaction = this.update(&mut cx, |this, cx| {
8393 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8394 })?;
8395 Ok(proto::FormatBuffersResponse {
8396 transaction: Some(project_transaction),
8397 })
8398 }
8399
8400 async fn handle_apply_additional_edits_for_completion(
8401 this: Model<Self>,
8402 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
8403 _: Arc<Client>,
8404 mut cx: AsyncAppContext,
8405 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
8406 let languages = this.update(&mut cx, |this, _| this.languages.clone())?;
8407 let (buffer, completion) = this.update(&mut cx, |this, cx| {
8408 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8409 let buffer = this
8410 .opened_buffers
8411 .get(&buffer_id)
8412 .and_then(|buffer| buffer.upgrade())
8413 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8414 let language = buffer.read(cx).language();
8415 let completion = language::proto::deserialize_completion(
8416 envelope
8417 .payload
8418 .completion
8419 .ok_or_else(|| anyhow!("invalid completion"))?,
8420 language.cloned(),
8421 &languages,
8422 );
8423 Ok::<_, anyhow::Error>((buffer, completion))
8424 })??;
8425
8426 let completion = completion.await?;
8427
8428 let apply_additional_edits = this.update(&mut cx, |this, cx| {
8429 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
8430 })?;
8431
8432 Ok(proto::ApplyCompletionAdditionalEditsResponse {
8433 transaction: apply_additional_edits
8434 .await?
8435 .as_ref()
8436 .map(language::proto::serialize_transaction),
8437 })
8438 }
8439
8440 async fn handle_resolve_completion_documentation(
8441 this: Model<Self>,
8442 envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
8443 _: Arc<Client>,
8444 mut cx: AsyncAppContext,
8445 ) -> Result<proto::ResolveCompletionDocumentationResponse> {
8446 let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
8447
8448 let completion = this
8449 .read_with(&mut cx, |this, _| {
8450 let id = LanguageServerId(envelope.payload.language_server_id as usize);
8451 let Some(server) = this.language_server_for_id(id) else {
8452 return Err(anyhow!("No language server {id}"));
8453 };
8454
8455 Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
8456 })??
8457 .await?;
8458
8459 let mut is_markdown = false;
8460 let text = match completion.documentation {
8461 Some(lsp::Documentation::String(text)) => text,
8462
8463 Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
8464 is_markdown = kind == lsp::MarkupKind::Markdown;
8465 value
8466 }
8467
8468 _ => String::new(),
8469 };
8470
8471 Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
8472 }
8473
8474 async fn handle_apply_code_action(
8475 this: Model<Self>,
8476 envelope: TypedEnvelope<proto::ApplyCodeAction>,
8477 _: Arc<Client>,
8478 mut cx: AsyncAppContext,
8479 ) -> Result<proto::ApplyCodeActionResponse> {
8480 let sender_id = envelope.original_sender_id()?;
8481 let action = language::proto::deserialize_code_action(
8482 envelope
8483 .payload
8484 .action
8485 .ok_or_else(|| anyhow!("invalid action"))?,
8486 )?;
8487 let apply_code_action = this.update(&mut cx, |this, cx| {
8488 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8489 let buffer = this
8490 .opened_buffers
8491 .get(&buffer_id)
8492 .and_then(|buffer| buffer.upgrade())
8493 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
8494 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
8495 })??;
8496
8497 let project_transaction = apply_code_action.await?;
8498 let project_transaction = this.update(&mut cx, |this, cx| {
8499 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8500 })?;
8501 Ok(proto::ApplyCodeActionResponse {
8502 transaction: Some(project_transaction),
8503 })
8504 }
8505
8506 async fn handle_on_type_formatting(
8507 this: Model<Self>,
8508 envelope: TypedEnvelope<proto::OnTypeFormatting>,
8509 _: Arc<Client>,
8510 mut cx: AsyncAppContext,
8511 ) -> Result<proto::OnTypeFormattingResponse> {
8512 let on_type_formatting = this.update(&mut cx, |this, cx| {
8513 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8514 let buffer = this
8515 .opened_buffers
8516 .get(&buffer_id)
8517 .and_then(|buffer| buffer.upgrade())
8518 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8519 let position = envelope
8520 .payload
8521 .position
8522 .and_then(deserialize_anchor)
8523 .ok_or_else(|| anyhow!("invalid position"))?;
8524 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
8525 buffer,
8526 position,
8527 envelope.payload.trigger.clone(),
8528 cx,
8529 ))
8530 })??;
8531
8532 let transaction = on_type_formatting
8533 .await?
8534 .as_ref()
8535 .map(language::proto::serialize_transaction);
8536 Ok(proto::OnTypeFormattingResponse { transaction })
8537 }
8538
8539 async fn handle_inlay_hints(
8540 this: Model<Self>,
8541 envelope: TypedEnvelope<proto::InlayHints>,
8542 _: Arc<Client>,
8543 mut cx: AsyncAppContext,
8544 ) -> Result<proto::InlayHintsResponse> {
8545 let sender_id = envelope.original_sender_id()?;
8546 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8547 let buffer = this.update(&mut cx, |this, _| {
8548 this.opened_buffers
8549 .get(&buffer_id)
8550 .and_then(|buffer| buffer.upgrade())
8551 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
8552 })??;
8553 buffer
8554 .update(&mut cx, |buffer, _| {
8555 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8556 })?
8557 .await
8558 .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?;
8559
8560 let start = envelope
8561 .payload
8562 .start
8563 .and_then(deserialize_anchor)
8564 .context("missing range start")?;
8565 let end = envelope
8566 .payload
8567 .end
8568 .and_then(deserialize_anchor)
8569 .context("missing range end")?;
8570 let buffer_hints = this
8571 .update(&mut cx, |project, cx| {
8572 project.inlay_hints(buffer.clone(), start..end, cx)
8573 })?
8574 .await
8575 .context("inlay hints fetch")?;
8576
8577 this.update(&mut cx, |project, cx| {
8578 InlayHints::response_to_proto(
8579 buffer_hints,
8580 project,
8581 sender_id,
8582 &buffer.read(cx).version(),
8583 cx,
8584 )
8585 })
8586 }
8587
8588 async fn handle_resolve_inlay_hint(
8589 this: Model<Self>,
8590 envelope: TypedEnvelope<proto::ResolveInlayHint>,
8591 _: Arc<Client>,
8592 mut cx: AsyncAppContext,
8593 ) -> Result<proto::ResolveInlayHintResponse> {
8594 let proto_hint = envelope
8595 .payload
8596 .hint
8597 .context("incorrect protobuf resolve inlay hint message: missing the inlay hint")?;
8598 let hint = InlayHints::proto_to_project_hint(proto_hint)
8599 .context("resolved proto inlay hint conversion")?;
8600 let buffer = this.update(&mut cx, |this, _cx| {
8601 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8602 this.opened_buffers
8603 .get(&buffer_id)
8604 .and_then(|buffer| buffer.upgrade())
8605 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8606 })??;
8607 let response_hint = this
8608 .update(&mut cx, |project, cx| {
8609 project.resolve_inlay_hint(
8610 hint,
8611 buffer,
8612 LanguageServerId(envelope.payload.language_server_id as usize),
8613 cx,
8614 )
8615 })?
8616 .await
8617 .context("inlay hint resolution")?;
8618 Ok(proto::ResolveInlayHintResponse {
8619 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
8620 })
8621 }
8622
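/// Lazily resolves a code action via `codeAction/resolve` when the language server
/// supports it and the action carries resolve data but is missing its command or edit.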
8623 async fn try_resolve_code_action(
8624 lang_server: &LanguageServer,
8625 action: &mut CodeAction,
8626 ) -> anyhow::Result<()> {
8627 if GetCodeActions::can_resolve_actions(&lang_server.capabilities()) {
8628 if action.lsp_action.data.is_some()
8629 && (action.lsp_action.command.is_none() || action.lsp_action.edit.is_none())
8630 {
8631 action.lsp_action = lang_server
8632 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action.clone())
8633 .await?;
8634 }
8635 }
8636
8637 anyhow::Ok(())
8638 }
8639
8640 async fn handle_refresh_inlay_hints(
8641 this: Model<Self>,
8642 _: TypedEnvelope<proto::RefreshInlayHints>,
8643 _: Arc<Client>,
8644 mut cx: AsyncAppContext,
8645 ) -> Result<proto::Ack> {
8646 this.update(&mut cx, |_, cx| {
8647 cx.emit(Event::RefreshInlayHints);
8648 })?;
8649 Ok(proto::Ack {})
8650 }
8651
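/// Generic handler for LSP-backed requests: deserializes the proto request, runs it
/// against the primary language server for the target buffer, and serializes the
/// response back to the requesting peer.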
8652 async fn handle_lsp_command<T: LspCommand>(
8653 this: Model<Self>,
8654 envelope: TypedEnvelope<T::ProtoRequest>,
8655 _: Arc<Client>,
8656 mut cx: AsyncAppContext,
8657 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
8658 where
8659 <T::LspRequest as lsp::request::Request>::Params: Send,
8660 <T::LspRequest as lsp::request::Request>::Result: Send,
8661 {
8662 let sender_id = envelope.original_sender_id()?;
8663 let buffer_id = T::buffer_id_from_proto(&envelope.payload)?;
8664 let buffer_handle = this.update(&mut cx, |this, _cx| {
8665 this.opened_buffers
8666 .get(&buffer_id)
8667 .and_then(|buffer| buffer.upgrade())
8668 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8669 })??;
8670 let request = T::from_proto(
8671 envelope.payload,
8672 this.clone(),
8673 buffer_handle.clone(),
8674 cx.clone(),
8675 )
8676 .await?;
8677 let response = this
8678 .update(&mut cx, |this, cx| {
8679 this.request_lsp(
8680 buffer_handle.clone(),
8681 LanguageServerToQuery::Primary,
8682 request,
8683 cx,
8684 )
8685 })?
8686 .await?;
8687 this.update(&mut cx, |this, cx| {
8688 Ok(T::response_to_proto(
8689 response,
8690 this,
8691 sender_id,
8692 &buffer_handle.read(cx).version(),
8693 cx,
8694 ))
8695 })?
8696 }
8697
8698 async fn handle_get_project_symbols(
8699 this: Model<Self>,
8700 envelope: TypedEnvelope<proto::GetProjectSymbols>,
8701 _: Arc<Client>,
8702 mut cx: AsyncAppContext,
8703 ) -> Result<proto::GetProjectSymbolsResponse> {
8704 let symbols = this
8705 .update(&mut cx, |this, cx| {
8706 this.symbols(&envelope.payload.query, cx)
8707 })?
8708 .await?;
8709
8710 Ok(proto::GetProjectSymbolsResponse {
8711 symbols: symbols.iter().map(serialize_symbol).collect(),
8712 })
8713 }
8714
8715 async fn handle_search_project(
8716 this: Model<Self>,
8717 envelope: TypedEnvelope<proto::SearchProject>,
8718 _: Arc<Client>,
8719 mut cx: AsyncAppContext,
8720 ) -> Result<proto::SearchProjectResponse> {
8721 let peer_id = envelope.original_sender_id()?;
8722 let query = SearchQuery::from_proto(envelope.payload)?;
8723 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
8724
8725 cx.spawn(move |mut cx| async move {
8726 let mut locations = Vec::new();
8727 let mut limit_reached = false;
8728 while let Some(result) = result.next().await {
8729 match result {
8730 SearchResult::Buffer { buffer, ranges } => {
8731 for range in ranges {
8732 let start = serialize_anchor(&range.start);
8733 let end = serialize_anchor(&range.end);
8734 let buffer_id = this.update(&mut cx, |this, cx| {
8735 this.create_buffer_for_peer(&buffer, peer_id, cx).into()
8736 })?;
8737 locations.push(proto::Location {
8738 buffer_id,
8739 start: Some(start),
8740 end: Some(end),
8741 });
8742 }
8743 }
8744 SearchResult::LimitReached => limit_reached = true,
8745 }
8746 }
8747 Ok(proto::SearchProjectResponse {
8748 locations,
8749 limit_reached,
8750 })
8751 })
8752 .await
8753 }
8754
8755 async fn handle_open_buffer_for_symbol(
8756 this: Model<Self>,
8757 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
8758 _: Arc<Client>,
8759 mut cx: AsyncAppContext,
8760 ) -> Result<proto::OpenBufferForSymbolResponse> {
8761 let peer_id = envelope.original_sender_id()?;
8762 let symbol = envelope
8763 .payload
8764 .symbol
8765 .ok_or_else(|| anyhow!("invalid symbol"))?;
8766 let symbol = this
8767 .update(&mut cx, |this, _cx| this.deserialize_symbol(symbol))?
8768 .await?;
8769 let symbol = this.update(&mut cx, |this, _| {
8770 let signature = this.symbol_signature(&symbol.path);
8771 if signature == symbol.signature {
8772 Ok(symbol)
8773 } else {
8774 Err(anyhow!("invalid symbol signature"))
8775 }
8776 })??;
8777 let buffer = this
8778 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
8779 .await?;
8780
8781 this.update(&mut cx, |this, cx| {
8782 let is_private = buffer
8783 .read(cx)
8784 .file()
8785 .map(|f| f.is_private())
8786 .unwrap_or_default();
8787 if is_private {
8788 Err(anyhow!(ErrorCode::UnsharedItem))
8789 } else {
8790 Ok(proto::OpenBufferForSymbolResponse {
8791 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
8792 })
8793 }
8794 })?
8795 }
8796
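/// Produces a SHA-256 digest over the symbol's worktree id, its path, and this
/// project's nonce, used to verify that a symbol received from a peer was originally
/// produced by this project.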
8797 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
8798 let mut hasher = Sha256::new();
8799 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
8800 hasher.update(project_path.path.to_string_lossy().as_bytes());
8801 hasher.update(self.nonce.to_be_bytes());
8802 hasher.finalize().as_slice().try_into().unwrap()
8803 }
8804
8805 async fn handle_open_buffer_by_id(
8806 this: Model<Self>,
8807 envelope: TypedEnvelope<proto::OpenBufferById>,
8808 _: Arc<Client>,
8809 mut cx: AsyncAppContext,
8810 ) -> Result<proto::OpenBufferResponse> {
8811 let peer_id = envelope.original_sender_id()?;
8812 let buffer_id = BufferId::new(envelope.payload.id)?;
8813 let buffer = this
8814 .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))?
8815 .await?;
8816 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
8817 }
8818
8819 async fn handle_open_buffer_by_path(
8820 this: Model<Self>,
8821 envelope: TypedEnvelope<proto::OpenBufferByPath>,
8822 _: Arc<Client>,
8823 mut cx: AsyncAppContext,
8824 ) -> Result<proto::OpenBufferResponse> {
8825 let peer_id = envelope.original_sender_id()?;
8826 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8827 let open_buffer = this.update(&mut cx, |this, cx| {
8828 this.open_buffer(
8829 ProjectPath {
8830 worktree_id,
8831 path: PathBuf::from(envelope.payload.path).into(),
8832 },
8833 cx,
8834 )
8835 })?;
8836
8837 let buffer = open_buffer.await?;
8838 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
8839 }
8840
8841 fn respond_to_open_buffer_request(
8842 this: Model<Self>,
8843 buffer: Model<Buffer>,
8844 peer_id: proto::PeerId,
8845 cx: &mut AsyncAppContext,
8846 ) -> Result<proto::OpenBufferResponse> {
8847 this.update(cx, |this, cx| {
8848 let is_private = buffer
8849 .read(cx)
8850 .file()
8851 .map(|f| f.is_private())
8852 .unwrap_or_default();
8853 if is_private {
8854 Err(anyhow!(ErrorCode::UnsharedItem))
8855 } else {
8856 Ok(proto::OpenBufferResponse {
8857 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
8858 })
8859 }
8860 })?
8861 }
8862
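/// Converts a `ProjectTransaction` into its proto form, ensuring each affected buffer
/// has been shared with `peer_id` so that the buffer ids are meaningful to that peer.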
8863 fn serialize_project_transaction_for_peer(
8864 &mut self,
8865 project_transaction: ProjectTransaction,
8866 peer_id: proto::PeerId,
8867 cx: &mut AppContext,
8868 ) -> proto::ProjectTransaction {
8869 let mut serialized_transaction = proto::ProjectTransaction {
8870 buffer_ids: Default::default(),
8871 transactions: Default::default(),
8872 };
8873 for (buffer, transaction) in project_transaction.0 {
8874 serialized_transaction
8875 .buffer_ids
8876 .push(self.create_buffer_for_peer(&buffer, peer_id, cx).into());
8877 serialized_transaction
8878 .transactions
8879 .push(language::proto::serialize_transaction(&transaction));
8880 }
8881 serialized_transaction
8882 }
8883
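/// Rebuilds a `ProjectTransaction` received from a peer: waits for each referenced
/// remote buffer and its edits to arrive, optionally pushing the transactions onto the
/// buffers' undo histories.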
8884 fn deserialize_project_transaction(
8885 &mut self,
8886 message: proto::ProjectTransaction,
8887 push_to_history: bool,
8888 cx: &mut ModelContext<Self>,
8889 ) -> Task<Result<ProjectTransaction>> {
8890 cx.spawn(move |this, mut cx| async move {
8891 let mut project_transaction = ProjectTransaction::default();
8892 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
8893 {
8894 let buffer_id = BufferId::new(buffer_id)?;
8895 let buffer = this
8896 .update(&mut cx, |this, cx| {
8897 this.wait_for_remote_buffer(buffer_id, cx)
8898 })?
8899 .await?;
8900 let transaction = language::proto::deserialize_transaction(transaction)?;
8901 project_transaction.0.insert(buffer, transaction);
8902 }
8903
8904 for (buffer, transaction) in &project_transaction.0 {
8905 buffer
8906 .update(&mut cx, |buffer, _| {
8907 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
8908 })?
8909 .await?;
8910
8911 if push_to_history {
8912 buffer.update(&mut cx, |buffer, _| {
8913 buffer.push_transaction(transaction.clone(), Instant::now());
8914 })?;
8915 }
8916 }
8917
8918 Ok(project_transaction)
8919 })
8920 }
8921
8922 fn create_buffer_for_peer(
8923 &mut self,
8924 buffer: &Model<Buffer>,
8925 peer_id: proto::PeerId,
8926 cx: &mut AppContext,
8927 ) -> BufferId {
8928 let buffer_id = buffer.read(cx).remote_id();
8929 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
8930 updates_tx
8931 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
8932 .ok();
8933 }
8934 buffer_id
8935 }
8936
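/// Returns the buffer with the given remote id if it is already open; otherwise
/// registers a listener that resolves once the buffer arrives (or fails to arrive)
/// from the host.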
8937 fn wait_for_remote_buffer(
8938 &mut self,
8939 id: BufferId,
8940 cx: &mut ModelContext<Self>,
8941 ) -> Task<Result<Model<Buffer>>> {
8942 let buffer = self
8943 .opened_buffers
8944 .get(&id)
8945 .and_then(|buffer| buffer.upgrade());
8946
8947 if let Some(buffer) = buffer {
8948 return Task::ready(Ok(buffer));
8949 }
8950
8951 let (tx, rx) = oneshot::channel();
8952 self.loading_buffers.entry(id).or_default().push(tx);
8953
8954 cx.background_executor().spawn(async move { rx.await? })
8955 }
8956
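/// Guest-side counterpart to `handle_synchronize_buffers`: after a reconnect, sends the
/// host this client's buffer versions, pushes back any operations the host is missing,
/// and re-requests buffers that never finished loading.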
8957 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8958 let project_id = match self.client_state {
8959 ProjectClientState::Remote {
8960 sharing_has_stopped,
8961 remote_id,
8962 ..
8963 } => {
8964 if sharing_has_stopped {
8965 return Task::ready(Err(anyhow!(
8966 "can't synchronize remote buffers on a readonly project"
8967 )));
8968 } else {
8969 remote_id
8970 }
8971 }
8972 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8973 return Task::ready(Err(anyhow!(
8974 "can't synchronize remote buffers on a local project"
8975 )))
8976 }
8977 };
8978
8979 let client = self.client.clone();
8980 cx.spawn(move |this, mut cx| async move {
8981 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8982 let buffers = this
8983 .opened_buffers
8984 .iter()
8985 .filter_map(|(id, buffer)| {
8986 let buffer = buffer.upgrade()?;
8987 Some(proto::BufferVersion {
8988 id: (*id).into(),
8989 version: language::proto::serialize_version(&buffer.read(cx).version),
8990 })
8991 })
8992 .collect();
8993 let incomplete_buffer_ids = this
8994 .incomplete_remote_buffers
8995 .keys()
8996 .copied()
8997 .collect::<Vec<_>>();
8998
8999 (buffers, incomplete_buffer_ids)
9000 })?;
9001 let response = client
9002 .request(proto::SynchronizeBuffers {
9003 project_id,
9004 buffers,
9005 })
9006 .await?;
9007
9008 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
9009 response
9010 .buffers
9011 .into_iter()
9012 .map(|buffer| {
9013 let client = client.clone();
9014 let buffer_id = match BufferId::new(buffer.id) {
9015 Ok(id) => id,
9016 Err(e) => {
9017 return Task::ready(Err(e));
9018 }
9019 };
9020 let remote_version = language::proto::deserialize_version(&buffer.version);
9021 if let Some(buffer) = this.buffer_for_id(buffer_id) {
9022 let operations =
9023 buffer.read(cx).serialize_ops(Some(remote_version), cx);
9024 cx.background_executor().spawn(async move {
9025 let operations = operations.await;
9026 for chunk in split_operations(operations) {
9027 client
9028 .request(proto::UpdateBuffer {
9029 project_id,
9030 buffer_id: buffer_id.into(),
9031 operations: chunk,
9032 })
9033 .await?;
9034 }
9035 anyhow::Ok(())
9036 })
9037 } else {
9038 Task::ready(Ok(()))
9039 }
9040 })
9041 .collect::<Vec<_>>()
9042 })?;
9043
9044 // Any incomplete buffers have open requests waiting. Ask the host to create these
9045 // buffers for us again so that any waiting futures are unblocked.
9046 for id in incomplete_buffer_ids {
9047 cx.background_executor()
9048 .spawn(client.request(proto::OpenBufferById {
9049 project_id,
9050 id: id.into(),
9051 }))
9052 .detach();
9053 }
9054
9055 futures::future::join_all(send_updates_for_buffers)
9056 .await
9057 .into_iter()
9058 .collect()
9059 })
9060 }
9061
9062 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
9063 self.worktrees()
9064 .map(|worktree| {
9065 let worktree = worktree.read(cx);
9066 proto::WorktreeMetadata {
9067 id: worktree.id().to_proto(),
9068 root_name: worktree.root_name().into(),
9069 visible: worktree.is_visible(),
9070 abs_path: worktree.abs_path().to_string_lossy().into(),
9071 }
9072 })
9073 .collect()
9074 }
9075
9076 fn set_worktrees_from_proto(
9077 &mut self,
9078 worktrees: Vec<proto::WorktreeMetadata>,
9079 cx: &mut ModelContext<Project>,
9080 ) -> Result<()> {
9081 let replica_id = self.replica_id();
9082 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
9083
9084 let mut old_worktrees_by_id = self
9085 .worktrees
9086 .drain(..)
9087 .filter_map(|worktree| {
9088 let worktree = worktree.upgrade()?;
9089 Some((worktree.read(cx).id(), worktree))
9090 })
9091 .collect::<HashMap<_, _>>();
9092
9093 for worktree in worktrees {
9094 if let Some(old_worktree) =
9095 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
9096 {
9097 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
9098 } else {
9099 let worktree =
9100 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
9101 let _ = self.add_worktree(&worktree, cx);
9102 }
9103 }
9104
9105 self.metadata_changed(cx);
9106 for id in old_worktrees_by_id.keys() {
9107 cx.emit(Event::WorktreeRemoved(*id));
9108 }
9109
9110 Ok(())
9111 }
9112
9113 fn set_collaborators_from_proto(
9114 &mut self,
9115 messages: Vec<proto::Collaborator>,
9116 cx: &mut ModelContext<Self>,
9117 ) -> Result<()> {
9118 let mut collaborators = HashMap::default();
9119 for message in messages {
9120 let collaborator = Collaborator::from_proto(message)?;
9121 collaborators.insert(collaborator.peer_id, collaborator);
9122 }
9123 for old_peer_id in self.collaborators.keys() {
9124 if !collaborators.contains_key(old_peer_id) {
9125 cx.emit(Event::CollaboratorLeft(*old_peer_id));
9126 }
9127 }
9128 self.collaborators = collaborators;
9129 Ok(())
9130 }
9131
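/// Reconstructs a `Symbol` from its proto representation, resolving the language and
/// LSP adapter for the symbol's path so that a properly formatted label can be built.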
9132 fn deserialize_symbol(
9133 &self,
9134 serialized_symbol: proto::Symbol,
9135 ) -> impl Future<Output = Result<Symbol>> {
9136 let languages = self.languages.clone();
9137 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
9138 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
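// This transmute assumes the proto-encoded kind uses the same representation as the
// in-memory symbol kind type; an unexpected wire value would produce a bogus kind.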
9139 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
9140 let path = ProjectPath {
9141 worktree_id,
9142 path: PathBuf::from(serialized_symbol.path).into(),
9143 };
9144 let language = languages.language_for_file_path(&path.path);
9145
9146 async move {
9147 let language = language.await.log_err();
9148 let adapter = language
9149 .as_ref()
9150 .and_then(|language| languages.lsp_adapters(language).first().cloned());
9151 let start = serialized_symbol
9152 .start
9153 .ok_or_else(|| anyhow!("invalid start"))?;
9154 let end = serialized_symbol
9155 .end
9156 .ok_or_else(|| anyhow!("invalid end"))?;
9157 Ok(Symbol {
9158 language_server_name: LanguageServerName(
9159 serialized_symbol.language_server_name.into(),
9160 ),
9161 source_worktree_id,
9162 path,
9163 label: {
9164 match language.as_ref().zip(adapter.as_ref()) {
9165 Some((language, adapter)) => {
9166 adapter
9167 .label_for_symbol(&serialized_symbol.name, kind, language)
9168 .await
9169 }
9170 None => None,
9171 }
9172 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
9173 },
9174
9175 name: serialized_symbol.name,
9176 range: Unclipped(PointUtf16::new(start.row, start.column))
9177 ..Unclipped(PointUtf16::new(end.row, end.column)),
9178 kind,
9179 signature: serialized_symbol
9180 .signature
9181 .try_into()
9182 .map_err(|_| anyhow!("invalid signature"))?,
9183 })
9184 }
9185 }
9186
9187 async fn handle_buffer_saved(
9188 this: Model<Self>,
9189 envelope: TypedEnvelope<proto::BufferSaved>,
9190 _: Arc<Client>,
9191 mut cx: AsyncAppContext,
9192 ) -> Result<()> {
9193 let fingerprint = Default::default();
9194 let version = deserialize_version(&envelope.payload.version);
9195 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
9196 let mtime = envelope.payload.mtime.map(|time| time.into());
9197
9198 this.update(&mut cx, |this, cx| {
9199 let buffer = this
9200 .opened_buffers
9201 .get(&buffer_id)
9202 .and_then(|buffer| buffer.upgrade())
9203 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned());
9204 if let Some(buffer) = buffer {
9205 buffer.update(cx, |buffer, cx| {
9206 buffer.did_save(version, fingerprint, mtime, cx);
9207 });
9208 }
9209 Ok(())
9210 })?
9211 }
9212
9213 async fn handle_buffer_reloaded(
9214 this: Model<Self>,
9215 envelope: TypedEnvelope<proto::BufferReloaded>,
9216 _: Arc<Client>,
9217 mut cx: AsyncAppContext,
9218 ) -> Result<()> {
9219 let payload = envelope.payload;
9220 let version = deserialize_version(&payload.version);
9221 let fingerprint = RopeFingerprint::default();
9222 let line_ending = deserialize_line_ending(
9223 proto::LineEnding::from_i32(payload.line_ending)
9224 .ok_or_else(|| anyhow!("missing line ending"))?,
9225 );
9226 let mtime = payload.mtime.map(|time| time.into());
9227 let buffer_id = BufferId::new(payload.buffer_id)?;
9228 this.update(&mut cx, |this, cx| {
9229 let buffer = this
9230 .opened_buffers
9231 .get(&buffer_id)
9232 .and_then(|buffer| buffer.upgrade())
9233 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned());
9234 if let Some(buffer) = buffer {
9235 buffer.update(cx, |buffer, cx| {
9236 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
9237 });
9238 }
9239 Ok(())
9240 })?
9241 }
9242
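/// Converts a batch of LSP `TextEdit`s into anchor-range edits against `buffer`,
/// clipping invalid ranges, merging adjacent edits, and diffing multi-line replacements
/// so that anchors in unchanged regions keep their positions.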
9243 #[allow(clippy::type_complexity)]
9244 fn edits_from_lsp(
9245 &mut self,
9246 buffer: &Model<Buffer>,
9247 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
9248 server_id: LanguageServerId,
9249 version: Option<i32>,
9250 cx: &mut ModelContext<Self>,
9251 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
9252 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
9253 cx.background_executor().spawn(async move {
9254 let snapshot = snapshot?;
9255 let mut lsp_edits = lsp_edits
9256 .into_iter()
9257 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
9258 .collect::<Vec<_>>();
9259 lsp_edits.sort_by_key(|(range, _)| range.start);
9260
9261 let mut lsp_edits = lsp_edits.into_iter().peekable();
9262 let mut edits = Vec::new();
9263 while let Some((range, mut new_text)) = lsp_edits.next() {
9264 // Clip invalid ranges provided by the language server.
9265 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
9266 ..snapshot.clip_point_utf16(range.end, Bias::Left);
9267
9268 // Combine any LSP edits that are adjacent.
9269 //
9270 // Also, combine LSP edits that are separated from each other by only
9271 // a newline. This is important because for some code actions,
9272 // Rust-analyzer rewrites the entire buffer via a series of edits that
9273 // are separated by unchanged newline characters.
9274 //
9275 // In order for the diffing logic below to work properly, any edits that
9276 // cancel each other out must be combined into one.
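// For example, an edit ending at the end of line 3 and another starting at the
// beginning of line 4, with only the newline between them unchanged, are merged
// below into a single edit spanning both lines.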
9277 while let Some((next_range, next_text)) = lsp_edits.peek() {
9278 if next_range.start.0 > range.end {
9279 if next_range.start.0.row > range.end.row + 1
9280 || next_range.start.0.column > 0
9281 || snapshot.clip_point_utf16(
9282 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
9283 Bias::Left,
9284 ) > range.end
9285 {
9286 break;
9287 }
9288 new_text.push('\n');
9289 }
9290 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
9291 new_text.push_str(next_text);
9292 lsp_edits.next();
9293 }
9294
9295 // For multiline edits, perform a diff of the old and new text so that
9296 // we can identify the changes more precisely, preserving the locations
9297 // of any anchors positioned in the unchanged regions.
9298 if range.end.row > range.start.row {
9299 let mut offset = range.start.to_offset(&snapshot);
9300 let old_text = snapshot.text_for_range(range).collect::<String>();
9301
9302 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
9303 let mut moved_since_edit = true;
9304 for change in diff.iter_all_changes() {
9305 let tag = change.tag();
9306 let value = change.value();
9307 match tag {
9308 ChangeTag::Equal => {
9309 offset += value.len();
9310 moved_since_edit = true;
9311 }
9312 ChangeTag::Delete => {
9313 let start = snapshot.anchor_after(offset);
9314 let end = snapshot.anchor_before(offset + value.len());
9315 if moved_since_edit {
9316 edits.push((start..end, String::new()));
9317 } else {
9318 edits.last_mut().unwrap().0.end = end;
9319 }
9320 offset += value.len();
9321 moved_since_edit = false;
9322 }
9323 ChangeTag::Insert => {
9324 if moved_since_edit {
9325 let anchor = snapshot.anchor_after(offset);
9326 edits.push((anchor..anchor, value.to_string()));
9327 } else {
9328 edits.last_mut().unwrap().1.push_str(value);
9329 }
9330 moved_since_edit = false;
9331 }
9332 }
9333 }
9334 } else if range.end == range.start {
9335 let anchor = snapshot.anchor_after(range.start);
9336 edits.push((anchor..anchor, new_text));
9337 } else {
9338 let edit_start = snapshot.anchor_after(range.start);
9339 let edit_end = snapshot.anchor_before(range.end);
9340 edits.push((edit_start..edit_end, new_text));
9341 }
9342 }
9343
9344 Ok(edits)
9345 })
9346 }
9347
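/// Returns the text snapshot that was reported to `server_id` at the given LSP document
/// `version`, pruning snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than
/// the requested one. Falls back to the current snapshot when no version is supplied.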
9348 fn buffer_snapshot_for_lsp_version(
9349 &mut self,
9350 buffer: &Model<Buffer>,
9351 server_id: LanguageServerId,
9352 version: Option<i32>,
9353 cx: &AppContext,
9354 ) -> Result<TextBufferSnapshot> {
9355 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
9356
9357 if let Some(version) = version {
9358 let buffer_id = buffer.read(cx).remote_id();
9359 let snapshots = self
9360 .buffer_snapshots
9361 .get_mut(&buffer_id)
9362 .and_then(|m| m.get_mut(&server_id))
9363 .ok_or_else(|| {
9364 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
9365 })?;
9366
9367 let found_snapshot = snapshots
9368 .binary_search_by_key(&version, |e| e.version)
9369 .map(|ix| snapshots[ix].snapshot.clone())
9370 .map_err(|_| {
9371 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
9372 })?;
9373
9374 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
9375 Ok(found_snapshot)
9376 } else {
9377 Ok((buffer.read(cx)).text_snapshot())
9378 }
9379 }
9380
9381 pub fn language_servers(
9382 &self,
9383 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
9384 self.language_server_ids
9385 .iter()
9386 .map(|((worktree_id, server_name), server_id)| {
9387 (*server_id, server_name.clone(), *worktree_id)
9388 })
9389 }
9390
9391 pub fn supplementary_language_servers(
9392 &self,
9393 ) -> impl '_
9394 + Iterator<
9395 Item = (
9396 &LanguageServerId,
9397 &(LanguageServerName, Arc<LanguageServer>),
9398 ),
9399 > {
9400 self.supplementary_language_servers.iter()
9401 }
9402
9403 pub fn language_server_adapter_for_id(
9404 &self,
9405 id: LanguageServerId,
9406 ) -> Option<Arc<CachedLspAdapter>> {
9407 if let Some(LanguageServerState::Running { adapter, .. }) = self.language_servers.get(&id) {
9408 Some(adapter.clone())
9409 } else {
9410 None
9411 }
9412 }
9413
9414 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
9415 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
9416 Some(server.clone())
9417 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
9418 Some(Arc::clone(server))
9419 } else {
9420 None
9421 }
9422 }
9423
9424 pub fn language_servers_for_buffer(
9425 &self,
9426 buffer: &Buffer,
9427 cx: &AppContext,
9428 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9429 self.language_server_ids_for_buffer(buffer, cx)
9430 .into_iter()
9431 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
9432 LanguageServerState::Running {
9433 adapter, server, ..
9434 } => Some((adapter, server)),
9435 _ => None,
9436 })
9437 }
9438
9439 fn primary_language_server_for_buffer(
9440 &self,
9441 buffer: &Buffer,
9442 cx: &AppContext,
9443 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9444 self.language_servers_for_buffer(buffer, cx)
9445 .find(|s| s.0.is_primary)
9446 }
9447
9448 pub fn language_server_for_buffer(
9449 &self,
9450 buffer: &Buffer,
9451 server_id: LanguageServerId,
9452 cx: &AppContext,
9453 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9454 self.language_servers_for_buffer(buffer, cx)
9455 .find(|(_, s)| s.server_id() == server_id)
9456 }
9457
9458 fn language_server_ids_for_buffer(
9459 &self,
9460 buffer: &Buffer,
9461 cx: &AppContext,
9462 ) -> Vec<LanguageServerId> {
9463 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
9464 let worktree_id = file.worktree_id(cx);
9465 self.languages
9466 .lsp_adapters(&language)
9467 .iter()
9468 .flat_map(|adapter| {
9469 let key = (worktree_id, adapter.name.clone());
9470 self.language_server_ids.get(&key).copied()
9471 })
9472 .collect()
9473 } else {
9474 Vec::new()
9475 }
9476 }
9477}
9478
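// Scans the files whose flattened index across `snapshots` falls in
// `worker_start_ix..worker_end_ix`, sending paths that match `query` on `results_tx`.
// Files that are already open in `opened_buffers` are skipped here.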
9479#[allow(clippy::too_many_arguments)]
9480async fn search_snapshots(
    snapshots: &[LocalSnapshot],
9482 worker_start_ix: usize,
9483 worker_end_ix: usize,
9484 query: &SearchQuery,
9485 results_tx: &Sender<SearchMatchCandidate>,
9486 opened_buffers: &HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
9487 include_root: bool,
9488 fs: &Arc<dyn Fs>,
9489) {
9490 let mut snapshot_start_ix = 0;
9491 let mut abs_path = PathBuf::new();
9492
9493 for snapshot in snapshots {
9494 let snapshot_end_ix = snapshot_start_ix
9495 + if query.include_ignored() {
9496 snapshot.file_count()
9497 } else {
9498 snapshot.visible_file_count()
9499 };
9500 if worker_end_ix <= snapshot_start_ix {
9501 break;
9502 } else if worker_start_ix > snapshot_end_ix {
9503 snapshot_start_ix = snapshot_end_ix;
9504 continue;
9505 } else {
9506 let start_in_snapshot = worker_start_ix.saturating_sub(snapshot_start_ix);
9507 let end_in_snapshot = cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
9508
9509 for entry in snapshot
9510 .files(false, start_in_snapshot)
9511 .take(end_in_snapshot - start_in_snapshot)
9512 {
9513 if results_tx.is_closed() {
9514 break;
9515 }
9516 if opened_buffers.contains_key(&entry.path) {
9517 continue;
9518 }
9519
9520 let matched_path = if include_root {
9521 let mut full_path = PathBuf::from(snapshot.root_name());
9522 full_path.push(&entry.path);
9523 query.file_matches(Some(&full_path))
9524 } else {
9525 query.file_matches(Some(&entry.path))
9526 };
9527
9528 let matches = if matched_path {
9529 abs_path.clear();
9530 abs_path.push(&snapshot.abs_path());
9531 abs_path.push(&entry.path);
9532 if let Some(file) = fs.open_sync(&abs_path).await.log_err() {
9533 query.detect(file).unwrap_or(false)
9534 } else {
9535 false
9536 }
9537 } else {
9538 false
9539 };
9540
9541 if matches {
9542 let project_path = SearchMatchCandidate::Path {
9543 worktree_id: snapshot.id(),
9544 path: entry.path.clone(),
9545 is_ignored: entry.is_ignored,
9546 };
9547 if results_tx.send(project_path).await.is_err() {
9548 return;
9549 }
9550 }
9551 }
9552
9553 snapshot_start_ix = snapshot_end_ix;
9554 }
9555 }
9556}
9557
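// Walks an ignored entry breadth-first: directories are expanded into their children, and
// non-symlink files that match `query` are reported on `counter_tx` as ignored candidates.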
9558async fn search_ignored_entry(
9559 snapshot: &LocalSnapshot,
9560 ignored_entry: &Entry,
9561 fs: &Arc<dyn Fs>,
9562 query: &SearchQuery,
9563 counter_tx: &Sender<SearchMatchCandidate>,
9564) {
9565 let mut ignored_paths_to_process =
9566 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
9567
9568 while let Some(ignored_abs_path) = ignored_paths_to_process.pop_front() {
9569 let metadata = fs
9570 .metadata(&ignored_abs_path)
9571 .await
9572 .with_context(|| format!("fetching fs metadata for {ignored_abs_path:?}"))
9573 .log_err()
9574 .flatten();
9575
9576 if let Some(fs_metadata) = metadata {
9577 if fs_metadata.is_dir {
9578 let files = fs
9579 .read_dir(&ignored_abs_path)
9580 .await
9581 .with_context(|| format!("listing ignored path {ignored_abs_path:?}"))
9582 .log_err();
9583
9584 if let Some(mut subfiles) = files {
9585 while let Some(subfile) = subfiles.next().await {
9586 if let Some(subfile) = subfile.log_err() {
9587 ignored_paths_to_process.push_back(subfile);
9588 }
9589 }
9590 }
9591 } else if !fs_metadata.is_symlink {
9592 if !query.file_matches(Some(&ignored_abs_path))
9593 || snapshot.is_path_excluded(ignored_entry.path.to_path_buf())
9594 {
9595 continue;
9596 }
9597 let matches = if let Some(file) = fs
9598 .open_sync(&ignored_abs_path)
9599 .await
                    .with_context(|| format!("opening ignored path {ignored_abs_path:?}"))
9601 .log_err()
9602 {
9603 query.detect(file).unwrap_or(false)
9604 } else {
9605 false
9606 };
9607
9608 if matches {
9609 let project_path = SearchMatchCandidate::Path {
9610 worktree_id: snapshot.id(),
9611 path: Arc::from(
9612 ignored_abs_path
9613 .strip_prefix(snapshot.abs_path())
9614 .expect("scanning worktree-related files"),
9615 ),
9616 is_ignored: true,
9617 };
9618 if counter_tx.send(project_path).await.is_err() {
9619 return;
9620 }
9621 }
9622 }
9623 }
9624 }
9625}
9626
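// Subscribes to Copilot events so that, once its language server starts, it is registered as a
// supplementary language server on the project and its log messages are forwarded as
// `LanguageServerLog` events.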
9627fn subscribe_for_copilot_events(
9628 copilot: &Model<Copilot>,
9629 cx: &mut ModelContext<'_, Project>,
9630) -> gpui::Subscription {
9631 cx.subscribe(
9632 copilot,
9633 |project, copilot, copilot_event, cx| match copilot_event {
9634 copilot::Event::CopilotLanguageServerStarted => {
9635 match copilot.read(cx).language_server() {
9636 Some((name, copilot_server)) => {
                        // Another event may try to re-add a server that was already added and subscribed to; avoid doing it again.
9638 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
9639 let new_server_id = copilot_server.server_id();
9640 let weak_project = cx.weak_model();
9641 let copilot_log_subscription = copilot_server
9642 .on_notification::<copilot::request::LogMessage, _>(
9643 move |params, mut cx| {
9644 weak_project.update(&mut cx, |_, cx| {
9645 cx.emit(Event::LanguageServerLog(
9646 new_server_id,
9647 params.message,
9648 ));
9649 }).ok();
9650 },
9651 );
9652 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
9653 project.copilot_log_subscription = Some(copilot_log_subscription);
9654 cx.emit(Event::LanguageServerAdded(new_server_id));
9655 }
9656 }
9657 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
9658 }
9659 }
9660 },
9661 )
9662}
9663
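// Returns the leading literal portion of `glob`: the longest prefix of whole path components
// containing no glob metacharacters (`*`, `?`, `{`, `}`). For example, on Unix
// "node_modules/**/*.js" yields "node_modules".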
9664fn glob_literal_prefix(glob: &str) -> &str {
9665 let mut literal_end = 0;
9666 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
9667 if part.contains(&['*', '?', '{', '}']) {
9668 break;
9669 } else {
9670 if i > 0 {
9671 // Account for separator prior to this part
9672 literal_end += path::MAIN_SEPARATOR.len_utf8();
9673 }
9674 literal_end += part.len();
9675 }
9676 }
9677 &glob[..literal_end]
9678}
9679
9680impl WorktreeHandle {
9681 pub fn upgrade(&self) -> Option<Model<Worktree>> {
9682 match self {
9683 WorktreeHandle::Strong(handle) => Some(handle.clone()),
9684 WorktreeHandle::Weak(handle) => handle.upgrade(),
9685 }
9686 }
9687
9688 pub fn handle_id(&self) -> usize {
9689 match self {
9690 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
9691 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
9692 }
9693 }
9694}
9695
9696impl OpenBuffer {
9697 pub fn upgrade(&self) -> Option<Model<Buffer>> {
9698 match self {
9699 OpenBuffer::Strong(handle) => Some(handle.clone()),
9700 OpenBuffer::Weak(handle) => handle.upgrade(),
9701 OpenBuffer::Operations(_) => None,
9702 }
9703 }
9704}
9705
9706pub struct PathMatchCandidateSet {
9707 pub snapshot: Snapshot,
9708 pub include_ignored: bool,
9709 pub include_root_name: bool,
9710}
9711
9712impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
9713 type Candidates = PathMatchCandidateSetIter<'a>;
9714
9715 fn id(&self) -> usize {
9716 self.snapshot.id().to_usize()
9717 }
9718
9719 fn len(&self) -> usize {
9720 if self.include_ignored {
9721 self.snapshot.file_count()
9722 } else {
9723 self.snapshot.visible_file_count()
9724 }
9725 }
9726
9727 fn prefix(&self) -> Arc<str> {
9728 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
9729 self.snapshot.root_name().into()
9730 } else if self.include_root_name {
9731 format!("{}/", self.snapshot.root_name()).into()
9732 } else {
9733 "".into()
9734 }
9735 }
9736
9737 fn candidates(&'a self, start: usize) -> Self::Candidates {
9738 PathMatchCandidateSetIter {
9739 traversal: self.snapshot.files(self.include_ignored, start),
9740 }
9741 }
9742}
9743
9744pub struct PathMatchCandidateSetIter<'a> {
9745 traversal: Traversal<'a>,
9746}
9747
9748impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
9749 type Item = fuzzy::PathMatchCandidate<'a>;
9750
9751 fn next(&mut self) -> Option<Self::Item> {
9752 self.traversal.next().map(|entry| {
9753 if let EntryKind::File(char_bag) = entry.kind {
9754 fuzzy::PathMatchCandidate {
9755 path: &entry.path,
9756 char_bag,
9757 }
9758 } else {
9759 unreachable!()
9760 }
9761 })
9762 }
9763}
9764
9765impl EventEmitter<Event> for Project {}
9766
impl<'a> From<&'a ProjectPath> for SettingsLocation<'a> {
    fn from(project_path: &'a ProjectPath) -> Self {
        SettingsLocation {
            worktree_id: project_path.worktree_id.to_usize(),
            path: project_path.path.as_ref(),
        }
    }
}
9775
9776impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
9777 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
9778 Self {
9779 worktree_id,
9780 path: path.as_ref().into(),
9781 }
9782 }
9783}
9784
9785struct ProjectLspAdapterDelegate {
9786 project: WeakModel<Project>,
9787 worktree: worktree::Snapshot,
9788 fs: Arc<dyn Fs>,
9789 http_client: Arc<dyn HttpClient>,
9790 language_registry: Arc<LanguageRegistry>,
9791 shell_env: Mutex<Option<HashMap<String, String>>>,
9792}
9793
9794impl ProjectLspAdapterDelegate {
9795 fn new(project: &Project, worktree: &Model<Worktree>, cx: &ModelContext<Project>) -> Arc<Self> {
9796 Arc::new(Self {
9797 project: cx.weak_model(),
9798 worktree: worktree.read(cx).snapshot(),
9799 fs: project.fs.clone(),
9800 http_client: project.client.http_client(),
9801 language_registry: project.languages.clone(),
9802 shell_env: Default::default(),
9803 })
9804 }
9805
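    // Loads the login shell environment for the worktree root and caches it for subsequent
    // `shell_env`/`which` lookups.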
9806 async fn load_shell_env(&self) {
9807 let worktree_abs_path = self.worktree.abs_path();
9808 let shell_env = load_shell_environment(&worktree_abs_path)
9809 .await
9810 .with_context(|| {
                format!("failed to load login shell environment in {worktree_abs_path:?}")
9812 })
9813 .log_err()
9814 .unwrap_or_default();
9815 *self.shell_env.lock() = Some(shell_env);
9816 }
9817}
9818
9819#[async_trait]
9820impl LspAdapterDelegate for ProjectLspAdapterDelegate {
9821 fn show_notification(&self, message: &str, cx: &mut AppContext) {
9822 self.project
9823 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())))
9824 .ok();
9825 }
9826
9827 fn http_client(&self) -> Arc<dyn HttpClient> {
9828 self.http_client.clone()
9829 }
9830
9831 async fn shell_env(&self) -> HashMap<String, String> {
9832 self.load_shell_env().await;
9833 self.shell_env.lock().as_ref().cloned().unwrap_or_default()
9834 }
9835
9836 #[cfg(not(target_os = "windows"))]
9837 async fn which(&self, command: &OsStr) -> Option<PathBuf> {
9838 let worktree_abs_path = self.worktree.abs_path();
9839 self.load_shell_env().await;
9840 let shell_path = self
9841 .shell_env
9842 .lock()
9843 .as_ref()
9844 .and_then(|shell_env| shell_env.get("PATH").cloned());
9845 which::which_in(command, shell_path.as_ref(), &worktree_abs_path).ok()
9846 }
9847
9848 #[cfg(target_os = "windows")]
9849 async fn which(&self, command: &OsStr) -> Option<PathBuf> {
        // todo(windows): Getting the shell env variables for the current directory is more complicated
        // on Windows than on other platforms, because there isn't necessarily a 'default shell'. The
        // closest equivalent is the default profile in Windows Terminal.
        // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup
9853 which::which(command).ok()
9854 }
9855
9856 fn update_status(
9857 &self,
9858 server_name: LanguageServerName,
9859 status: language::LanguageServerBinaryStatus,
9860 ) {
9861 self.language_registry
9862 .update_lsp_status(server_name, status);
9863 }
9864
9865 async fn read_text_file(&self, path: PathBuf) -> Result<String> {
9866 if self.worktree.entry_for_path(&path).is_none() {
9867 return Err(anyhow!("no such path {path:?}"));
9868 }
9869 let path = self.worktree.absolutize(path.as_ref())?;
9870 let content = self.fs.load(&path).await?;
9871 Ok(content)
9872 }
9873}
9874
9875fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
9876 proto::Symbol {
9877 language_server_name: symbol.language_server_name.0.to_string(),
9878 source_worktree_id: symbol.source_worktree_id.to_proto(),
9879 worktree_id: symbol.path.worktree_id.to_proto(),
9880 path: symbol.path.path.to_string_lossy().to_string(),
9881 name: symbol.name.clone(),
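        // SAFETY: reinterprets the LSP symbol kind as the integer representation used by the
        // proto field; this assumes the two types share the same layout.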
9882 kind: unsafe { mem::transmute(symbol.kind) },
9883 start: Some(proto::PointUtf16 {
9884 row: symbol.range.start.0.row,
9885 column: symbol.range.start.0.column,
9886 }),
9887 end: Some(proto::PointUtf16 {
9888 row: symbol.range.end.0.row,
9889 column: symbol.range.end.0.column,
9890 }),
9891 signature: symbol.signature.to_vec(),
9892 }
9893}
9894
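// Computes `path` relative to `base` lexically (no filesystem access), e.g. "/a/b/c/d.rs"
// relative to "/a/b" is "c/d.rs", and "/a/b/d" relative to "/a/b/c" is "../d".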
9895fn relativize_path(base: &Path, path: &Path) -> PathBuf {
9896 let mut path_components = path.components();
9897 let mut base_components = base.components();
9898 let mut components: Vec<Component> = Vec::new();
9899 loop {
9900 match (path_components.next(), base_components.next()) {
9901 (None, None) => break,
9902 (Some(a), None) => {
9903 components.push(a);
9904 components.extend(path_components.by_ref());
9905 break;
9906 }
9907 (None, _) => components.push(Component::ParentDir),
9908 (Some(a), Some(b)) if components.is_empty() && a == b => (),
9909 (Some(a), Some(Component::CurDir)) => components.push(a),
9910 (Some(a), Some(_)) => {
9911 components.push(Component::ParentDir);
9912 for _ in base_components {
9913 components.push(Component::ParentDir);
9914 }
9915 components.push(a);
9916 components.extend(path_components.by_ref());
9917 break;
9918 }
9919 }
9920 }
9921 components.iter().map(|c| c.as_os_str()).collect()
9922}
9923
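// Joins `path` onto `base`, resolving `..` and `.` components lexically, e.g. resolving "../c"
// against "/a/b" yields "/a/c".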
9924fn resolve_path(base: &Path, path: &Path) -> PathBuf {
9925 let mut result = base.to_path_buf();
9926 for component in path.components() {
9927 match component {
9928 Component::ParentDir => {
9929 result.pop();
9930 }
9931 Component::CurDir => (),
9932 _ => result.push(component),
9933 }
9934 }
9935 result
9936}
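
// A minimal illustrative sketch (separate from the main `project_tests` suite) exercising the
// lexical path helpers above; the module name is arbitrary and the cases assume only standard
// library `Path` semantics.
#[cfg(test)]
mod path_helper_examples {
    use super::*;

    #[test]
    fn relativize_path_examples() {
        // "/a/b/c/d.rs" relative to "/a/b" is "c/d.rs".
        assert_eq!(
            relativize_path(Path::new("/a/b"), Path::new("/a/b/c/d.rs")),
            PathBuf::from("c/d.rs")
        );
        // "/a/b/d" relative to "/a/b/c" walks up one level.
        assert_eq!(
            relativize_path(Path::new("/a/b/c"), Path::new("/a/b/d")),
            PathBuf::from("../d")
        );
    }

    #[test]
    fn resolve_path_examples() {
        // ".." and "." are resolved lexically against the base path.
        assert_eq!(
            resolve_path(Path::new("/a/b"), Path::new("../c")),
            PathBuf::from("/a/c")
        );
        assert_eq!(
            resolve_path(Path::new("/a/b"), Path::new("./c.rs")),
            PathBuf::from("/a/b/c.rs")
        );
    }
}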
9937
9938impl Item for Buffer {
9939 fn try_open(
9940 project: &Model<Project>,
9941 path: &ProjectPath,
9942 cx: &mut AppContext,
9943 ) -> Option<Task<Result<Model<Self>>>> {
9944 Some(project.update(cx, |project, cx| project.open_buffer(path.clone(), cx)))
9945 }
9946
9947 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
9948 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
9949 }
9950
9951 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
9952 File::from_dyn(self.file()).map(|file| ProjectPath {
9953 worktree_id: file.worktree_id(cx),
9954 path: file.path().clone(),
9955 })
9956 }
9957}
9958
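// Waits on the watch channel until the in-flight buffer load resolves to either a buffer or an error.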
9959async fn wait_for_loading_buffer(
9960 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
9961) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
9962 loop {
9963 if let Some(result) = receiver.borrow().as_ref() {
9964 match result {
9965 Ok(buffer) => return Ok(buffer.to_owned()),
9966 Err(e) => return Err(e.to_owned()),
9967 }
9968 }
9969 receiver.next().await;
9970 }
9971}
9972
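// Whether the server's save options ask for the full document text to be included in
// `textDocument/didSave` notifications; defaults to false when no explicit `SaveOptions` is given.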
9973fn include_text(server: &lsp::LanguageServer) -> bool {
9974 server
9975 .capabilities()
9976 .text_document_sync
9977 .as_ref()
9978 .and_then(|sync| match sync {
9979 lsp::TextDocumentSyncCapability::Kind(_) => None,
9980 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
9981 })
9982 .and_then(|save_options| match save_options {
9983 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
9984 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
9985 })
9986 .unwrap_or(false)
9987}
9988
9989async fn load_shell_environment(dir: &Path) -> Result<HashMap<String, String>> {
9990 let marker = "ZED_SHELL_START";
9991 let shell = env::var("SHELL").context(
        "SHELL environment variable is not set, so we can't source login environment variables",
9993 )?;
9994
    // We spawn a shell and then `cd` into the project directory to get the
    // environment there, as if the user had `cd`'d into it. We do that because
    // tools like direnv, asdf, ... hook into `cd` and only set up the
    // environment after that.
    //
    // In certain shells we need to execute `additional_command` in order to
    // trigger the behavior of direnv, etc.
    //
    // The `exit 0` is the result of hours of debugging, trying to find out
    // why running this command here, without `exit 0`, would mess up signal
    // handling for our process so that `ctrl-c` no longer worked.
    //
    // We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would
    // do that, but it does, and `exit 0` helps.
10011 let additional_command = PathBuf::from(&shell)
10012 .file_name()
10013 .and_then(|f| f.to_str())
10014 .and_then(|shell| match shell {
10015 "fish" => Some("emit fish_prompt;"),
10016 _ => None,
10017 });
10018
10019 let command = format!(
10020 "cd '{}';{} echo {marker}; /usr/bin/env -0; exit 0;",
10021 dir.display(),
10022 additional_command.unwrap_or("")
10023 );
10024
10025 let output = smol::process::Command::new(&shell)
10026 .args(["-i", "-c", &command])
10027 .output()
10028 .await
10029 .context("failed to spawn login shell to source login environment variables")?;
10030
10031 anyhow::ensure!(
10032 output.status.success(),
10033 "login shell exited with error {:?}",
10034 output.status
10035 );
10036
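    // Everything after `marker` in stdout is the NUL-separated `KEY=VALUE` output of `/usr/bin/env -0`.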
10037 let stdout = String::from_utf8_lossy(&output.stdout);
10038 let env_output_start = stdout.find(marker).ok_or_else(|| {
10039 anyhow!(
10040 "failed to parse output of `env` command in login shell: {}",
10041 stdout
10042 )
10043 })?;
10044
10045 let mut parsed_env = HashMap::default();
10046 let env_output = &stdout[env_output_start + marker.len()..];
10047 for line in env_output.split_terminator('\0') {
10048 if let Some(separator_index) = line.find('=') {
10049 let key = line[..separator_index].to_string();
10050 let value = line[separator_index + 1..].to_string();
10051 parsed_env.insert(key, value);
10052 }
10053 }
10054 Ok(parsed_env)
10055}
10056
10057fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBufferResponse {
10058 let entries = blame
10059 .entries
10060 .into_iter()
10061 .map(|entry| proto::BlameEntry {
10062 sha: entry.sha.as_bytes().into(),
10063 start_line: entry.range.start,
10064 end_line: entry.range.end,
10065 original_line_number: entry.original_line_number,
10066 author: entry.author.clone(),
10067 author_mail: entry.author_mail.clone(),
10068 author_time: entry.author_time,
10069 author_tz: entry.author_tz.clone(),
10070 committer: entry.committer.clone(),
10071 committer_mail: entry.committer_mail.clone(),
10072 committer_time: entry.committer_time,
10073 committer_tz: entry.committer_tz.clone(),
10074 summary: entry.summary.clone(),
10075 previous: entry.previous.clone(),
10076 filename: entry.filename.clone(),
10077 })
10078 .collect::<Vec<_>>();
10079
10080 let messages = blame
10081 .messages
10082 .into_iter()
10083 .map(|(oid, message)| proto::CommitMessage {
10084 oid: oid.as_bytes().into(),
10085 message,
10086 })
10087 .collect::<Vec<_>>();
10088
10089 let permalinks = blame
10090 .permalinks
10091 .into_iter()
10092 .map(|(oid, url)| proto::CommitPermalink {
10093 oid: oid.as_bytes().into(),
10094 permalink: url.to_string(),
10095 })
10096 .collect::<Vec<_>>();
10097
10098 proto::BlameBufferResponse {
10099 entries,
10100 messages,
10101 permalinks,
10102 }
10103}
10104
10105fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> git::blame::Blame {
10106 let entries = response
10107 .entries
10108 .into_iter()
10109 .filter_map(|entry| {
10110 Some(git::blame::BlameEntry {
10111 sha: git::Oid::from_bytes(&entry.sha).ok()?,
10112 range: entry.start_line..entry.end_line,
10113 original_line_number: entry.original_line_number,
10114 committer: entry.committer,
10115 committer_time: entry.committer_time,
10116 committer_tz: entry.committer_tz,
10117 committer_mail: entry.committer_mail,
10118 author: entry.author,
10119 author_mail: entry.author_mail,
10120 author_time: entry.author_time,
10121 author_tz: entry.author_tz,
10122 summary: entry.summary,
10123 previous: entry.previous,
10124 filename: entry.filename,
10125 })
10126 })
10127 .collect::<Vec<_>>();
10128
10129 let messages = response
10130 .messages
10131 .into_iter()
10132 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
10133 .collect::<HashMap<_, _>>();
10134
10135 let permalinks = response
10136 .permalinks
10137 .into_iter()
10138 .filter_map(|permalink| {
10139 Some((
10140 git::Oid::from_bytes(&permalink.oid).ok()?,
10141 Url::from_str(&permalink.permalink).ok()?,
10142 ))
10143 })
10144 .collect::<HashMap<_, _>>();
10145
10146 Blame {
10147 entries,
10148 permalinks,
10149 messages,
10150 }
10151}