pub mod debounced_delay;
pub mod lsp_command;
pub mod lsp_ext_command;
mod prettier_support;
pub mod project_settings;
pub mod search;
mod task_inventory;
pub mod terminals;

#[cfg(test)]
mod project_tests;
pub mod search_history;
13
14use anyhow::{anyhow, bail, Context as _, Result};
15use async_trait::async_trait;
16use client::{
17 proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore,
18};
19use clock::ReplicaId;
20use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
21use copilot::Copilot;
22use debounced_delay::DebouncedDelay;
23use fs::repository::GitRepository;
24use futures::{
25 channel::{
26 mpsc::{self, UnboundedReceiver},
27 oneshot,
28 },
29 future::{join_all, try_join_all, Shared},
30 select,
31 stream::FuturesUnordered,
32 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
33};
34use git::blame::Blame;
35use globset::{Glob, GlobSet, GlobSetBuilder};
36use gpui::{
37 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, BorrowAppContext, Context, Entity,
38 EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel,
39};
40use itertools::Itertools;
41use language::{
42 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
43 markdown, point_to_lsp,
44 proto::{
45 deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
46 serialize_version, split_operations,
47 },
48 range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeAction,
49 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
50 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
51 LspAdapterDelegate, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
52 ToOffset, ToPointUtf16, Transaction, Unclipped,
53};
54use log::error;
55use lsp::{
56 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
57 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId,
58 MessageActionItem, OneOf, ServerCapabilities, ServerHealthStatus, ServerStatus,
59};
60use lsp_command::*;
61use node_runtime::NodeRuntime;
62use parking_lot::{Mutex, RwLock};
63use postage::watch;
64use prettier_support::{DefaultPrettier, PrettierInstance};
65use project_settings::{LspSettings, ProjectSettings};
66use rand::prelude::*;
67use search_history::SearchHistory;
68use worktree::LocalSnapshot;
69
70use rpc::{ErrorCode, ErrorExt as _};
71use search::SearchQuery;
72use serde::Serialize;
73use settings::{watch_config_file, Settings, SettingsLocation, SettingsStore};
74use sha2::{Digest, Sha256};
75use similar::{ChangeTag, TextDiff};
76use smol::channel::{Receiver, Sender};
77use smol::lock::Semaphore;
78use std::{
79 cmp::{self, Ordering},
80 convert::TryInto,
81 env,
82 ffi::OsStr,
83 hash::Hash,
84 io, mem,
85 num::NonZeroU32,
86 ops::Range,
87 path::{self, Component, Path, PathBuf},
88 process::Stdio,
89 str::{self, FromStr},
90 sync::{
91 atomic::{AtomicUsize, Ordering::SeqCst},
92 Arc,
93 },
94 time::{Duration, Instant},
95};
96use task::static_source::{StaticSource, TrackedFile};
97use terminals::Terminals;
98use text::{Anchor, BufferId, RopeFingerprint};
99use util::{
100 debug_panic, defer,
101 http::{HttpClient, Url},
102 maybe, merge_json_value_into,
103 paths::{
104 LOCAL_SETTINGS_RELATIVE_PATH, LOCAL_TASKS_RELATIVE_PATH, LOCAL_VSCODE_TASKS_RELATIVE_PATH,
105 },
106 post_inc, ResultExt, TryFutureExt as _,
107};
108use worktree::{Snapshot, Traversal};
109
110pub use fs::*;
111pub use language::Location;
112#[cfg(any(test, feature = "test-support"))]
113pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
114#[cfg(feature = "test-support")]
115pub use task_inventory::test_inventory::*;
116pub use task_inventory::{Inventory, TaskSourceKind};
117pub use worktree::{
118 DiagnosticSummary, Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId,
119 RepositoryEntry, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId,
120 WorktreeSettings, FS_WATCH_LATENCY,
121};
122
const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
pub const SERVER_PROGRESS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);

const MAX_PROJECT_SEARCH_HISTORY_SIZE: usize = 500;
129
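/// An item that can be opened from a [`ProjectPath`] within a project (for example, a buffer).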
130pub trait Item {
131 fn try_open(
132 project: &Model<Project>,
133 path: &ProjectPath,
134 cx: &mut AppContext,
135 ) -> Option<Task<Result<Model<Self>>>>
136 where
137 Self: Sized;
138 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
139 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
140}
141
142#[derive(Clone)]
143pub enum OpenedBufferEvent {
144 Disconnected,
145 Ok(BufferId),
146 Err(BufferId, Arc<anyhow::Error>),
147}
148
/// A semantics-aware entity spanning one or more [`Worktree`]s and the files they contain.
/// `Project` is responsible for tasks, LSP and collab queries, keeping worktree states synchronized accordingly.
/// It maps [`Worktree`] entries with its own logic, using the [`ProjectEntryId`] and [`ProjectPath`] structs.
///
/// A project can be either local (opened on the same host) or remote (a collab project browsed by multiple remote users).
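///
/// An illustrative sketch of opening a local project and a buffer (not a doctest; assumes
/// `client`, `node`, `user_store`, `languages`, `fs`, and an `&mut AppContext` named `cx`
/// are already available):
///
/// ```ignore
/// let project = Project::local(client, node, user_store, languages, fs, cx);
/// // `open_local_buffer` returns a task that resolves to a `Model<Buffer>`.
/// let buffer_task = project.update(cx, |project, cx| {
///     project.open_local_buffer("/path/to/file.rs", cx)
/// });
/// ```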
154pub struct Project {
155 worktrees: Vec<WorktreeHandle>,
156 active_entry: Option<ProjectEntryId>,
157 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
158 pending_language_server_update: Option<BufferOrderedMessage>,
159 flush_language_server_update: Option<Task<()>>,
160
161 languages: Arc<LanguageRegistry>,
162 supplementary_language_servers:
163 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
164 language_servers: HashMap<LanguageServerId, LanguageServerState>,
165 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
166 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
167 last_formatting_failure: Option<String>,
168 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
169 language_server_watched_paths: HashMap<LanguageServerId, HashMap<WorktreeId, GlobSet>>,
170 client: Arc<client::Client>,
171 next_entry_id: Arc<AtomicUsize>,
172 join_project_response_message_id: u32,
173 next_diagnostic_group_id: usize,
174 user_store: Model<UserStore>,
175 fs: Arc<dyn Fs>,
176 client_state: ProjectClientState,
177 collaborators: HashMap<proto::PeerId, Collaborator>,
178 client_subscriptions: Vec<client::Subscription>,
179 _subscriptions: Vec<gpui::Subscription>,
180 next_buffer_id: BufferId,
181 loading_buffers: HashMap<BufferId, Vec<oneshot::Sender<Result<Model<Buffer>, anyhow::Error>>>>,
182 incomplete_remote_buffers: HashMap<BufferId, Model<Buffer>>,
183 shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
184 #[allow(clippy::type_complexity)]
185 loading_buffers_by_path: HashMap<
186 ProjectPath,
187 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
188 >,
189 #[allow(clippy::type_complexity)]
190 loading_local_worktrees:
191 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
192 opened_buffers: HashMap<BufferId, OpenBuffer>,
193 local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
194 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
195 buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
196 buffers_being_formatted: HashSet<BufferId>,
197 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
198 git_diff_debouncer: DebouncedDelay,
199 nonce: u128,
200 _maintain_buffer_languages: Task<()>,
201 _maintain_workspace_config: Task<Result<()>>,
202 terminals: Terminals,
203 copilot_lsp_subscription: Option<gpui::Subscription>,
204 copilot_log_subscription: Option<lsp::Subscription>,
205 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
206 node: Option<Arc<dyn NodeRuntime>>,
207 default_prettier: DefaultPrettier,
208 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
209 prettier_instances: HashMap<PathBuf, PrettierInstance>,
210 tasks: Model<Inventory>,
211 hosted_project_id: Option<ProjectId>,
212 search_history: SearchHistory,
213}
214
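/// Selects which language server a request should be sent to: the primary server
/// for a buffer, or a specific server identified by its id.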
215pub enum LanguageServerToQuery {
216 Primary,
217 Other(LanguageServerId),
218}
219
220struct LspBufferSnapshot {
221 version: i32,
222 snapshot: TextBufferSnapshot,
223}
224
/// A message whose delivery is ordered with respect to buffer operations.
226#[derive(Debug)]
227enum BufferOrderedMessage {
228 Operation {
229 buffer_id: BufferId,
230 operation: proto::Operation,
231 },
232 LanguageServerUpdate {
233 language_server_id: LanguageServerId,
234 message: proto::update_language_server::Variant,
235 },
236 Resync,
237}
238
239enum LocalProjectUpdate {
240 WorktreesChanged,
241 CreateBufferForPeer {
242 peer_id: proto::PeerId,
243 buffer_id: BufferId,
244 },
245}
246
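/// How the project retains an open buffer: strongly (keeping the buffer alive), weakly, or,
/// when no buffer handle is available, as a queue of pending operations.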
247enum OpenBuffer {
248 Strong(Model<Buffer>),
249 Weak(WeakModel<Buffer>),
250 Operations(Vec<Operation>),
251}
252
253#[derive(Clone)]
254enum WorktreeHandle {
255 Strong(Model<Worktree>),
256 Weak(WeakModel<Worktree>),
257}
258
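/// Whether the project is local-only, shared with collaborators from this host, or a remote
/// project joined from another host.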
259#[derive(Debug)]
260enum ProjectClientState {
261 Local,
262 Shared {
263 remote_id: u64,
264 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
265 _send_updates: Task<Result<()>>,
266 },
267 Remote {
268 sharing_has_stopped: bool,
269 capability: Capability,
270 remote_id: u64,
271 replica_id: ReplicaId,
272 },
273}
274
/// A prompt requested by an LSP server.
276#[derive(Clone, Debug)]
277pub struct LanguageServerPromptRequest {
278 pub level: PromptLevel,
279 pub message: String,
280 pub actions: Vec<MessageActionItem>,
281 pub lsp_name: String,
282 response_channel: Sender<MessageActionItem>,
283}
284
285impl LanguageServerPromptRequest {
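    /// Sends the action at `index` back to the language server that raised this prompt.
    /// Returns `None` if there is no action at that index or if the response channel is closed.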
286 pub async fn respond(self, index: usize) -> Option<()> {
287 if let Some(response) = self.actions.into_iter().nth(index) {
288 self.response_channel.send(response).await.ok()
289 } else {
290 None
291 }
292 }
293}
294impl PartialEq for LanguageServerPromptRequest {
295 fn eq(&self, other: &Self) -> bool {
296 self.message == other.message && self.actions == other.actions
297 }
298}
299
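/// Events emitted by a [`Project`], covering worktree, language server, diagnostics, and collaborator changes.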
300#[derive(Clone, Debug, PartialEq)]
301pub enum Event {
302 LanguageServerAdded(LanguageServerId),
303 LanguageServerRemoved(LanguageServerId),
304 LanguageServerLog(LanguageServerId, String),
305 Notification(String),
306 LanguageServerPrompt(LanguageServerPromptRequest),
307 LanguageNotFound(Model<Buffer>),
308 ActiveEntryChanged(Option<ProjectEntryId>),
309 ActivateProjectPanel,
310 WorktreeAdded,
311 WorktreeRemoved(WorktreeId),
312 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
313 WorktreeUpdatedGitRepositories,
314 DiskBasedDiagnosticsStarted {
315 language_server_id: LanguageServerId,
316 },
317 DiskBasedDiagnosticsFinished {
318 language_server_id: LanguageServerId,
319 },
320 DiagnosticsUpdated {
321 path: ProjectPath,
322 language_server_id: LanguageServerId,
323 },
324 RemoteIdChanged(Option<u64>),
325 DisconnectedFromHost,
326 Closed,
327 DeletedEntry(ProjectEntryId),
328 CollaboratorUpdated {
329 old_peer_id: proto::PeerId,
330 new_peer_id: proto::PeerId,
331 },
332 CollaboratorJoined(proto::PeerId),
333 CollaboratorLeft(proto::PeerId),
334 RefreshInlayHints,
335 RevealInProjectPanel(ProjectEntryId),
336}
337
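/// The lifecycle state of a language server: either still starting, or running with its
/// language and adapter attached.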
338pub enum LanguageServerState {
339 Starting(Task<Option<Arc<LanguageServer>>>),
340
341 Running {
342 language: Arc<Language>,
343 adapter: Arc<CachedLspAdapter>,
344 server: Arc<LanguageServer>,
345 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
346 },
347}
348
349#[derive(Serialize)]
350pub struct LanguageServerStatus {
351 pub name: String,
352 pub pending_work: BTreeMap<String, LanguageServerProgress>,
353 pub has_pending_diagnostic_updates: bool,
354 progress_tokens: HashSet<String>,
355}
356
357#[derive(Clone, Debug, Serialize)]
358pub struct LanguageServerProgress {
359 pub message: Option<String>,
360 pub percentage: Option<usize>,
361 #[serde(skip_serializing)]
362 pub last_update_at: Instant,
363}
364
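/// A path to an entry, relative to the root of the worktree identified by `worktree_id`.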
365#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
366pub struct ProjectPath {
367 pub worktree_id: WorktreeId,
368 pub path: Arc<Path>,
369}
370
371#[derive(Debug, Clone, PartialEq, Eq)]
372pub struct InlayHint {
373 pub position: language::Anchor,
374 pub label: InlayHintLabel,
375 pub kind: Option<InlayHintKind>,
376 pub padding_left: bool,
377 pub padding_right: bool,
378 pub tooltip: Option<InlayHintTooltip>,
379 pub resolve_state: ResolveState,
380}
381
382#[derive(Debug, Clone, PartialEq, Eq)]
383pub enum ResolveState {
384 Resolved,
385 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
386 Resolving,
387}
388
389impl InlayHint {
390 pub fn text(&self) -> String {
391 match &self.label {
392 InlayHintLabel::String(s) => s.to_owned(),
393 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
394 }
395 }
396}
397
398#[derive(Debug, Clone, PartialEq, Eq)]
399pub enum InlayHintLabel {
400 String(String),
401 LabelParts(Vec<InlayHintLabelPart>),
402}
403
404#[derive(Debug, Clone, PartialEq, Eq)]
405pub struct InlayHintLabelPart {
406 pub value: String,
407 pub tooltip: Option<InlayHintLabelPartTooltip>,
408 pub location: Option<(LanguageServerId, lsp::Location)>,
409}
410
411#[derive(Debug, Clone, PartialEq, Eq)]
412pub enum InlayHintTooltip {
413 String(String),
414 MarkupContent(MarkupContent),
415}
416
417#[derive(Debug, Clone, PartialEq, Eq)]
418pub enum InlayHintLabelPartTooltip {
419 String(String),
420 MarkupContent(MarkupContent),
421}
422
423#[derive(Debug, Clone, PartialEq, Eq)]
424pub struct MarkupContent {
425 pub kind: HoverBlockKind,
426 pub value: String,
427}
428
429#[derive(Debug, Clone)]
430pub struct LocationLink {
431 pub origin: Option<Location>,
432 pub target: Location,
433}
434
435#[derive(Debug)]
436pub struct DocumentHighlight {
437 pub range: Range<language::Anchor>,
438 pub kind: DocumentHighlightKind,
439}
440
441#[derive(Clone, Debug)]
442pub struct Symbol {
443 pub language_server_name: LanguageServerName,
444 pub source_worktree_id: WorktreeId,
445 pub path: ProjectPath,
446 pub label: CodeLabel,
447 pub name: String,
448 pub kind: lsp::SymbolKind,
449 pub range: Range<Unclipped<PointUtf16>>,
450 pub signature: [u8; 32],
451}
452
453#[derive(Clone, Debug, PartialEq)]
454pub struct HoverBlock {
455 pub text: String,
456 pub kind: HoverBlockKind,
457}
458
459#[derive(Clone, Debug, PartialEq, Eq)]
460pub enum HoverBlockKind {
461 PlainText,
462 Markdown,
463 Code { language: String },
464}
465
466#[derive(Debug, Clone)]
467pub struct Hover {
468 pub contents: Vec<HoverBlock>,
469 pub range: Option<Range<language::Anchor>>,
470 pub language: Option<Arc<Language>>,
471}
472
473impl Hover {
474 pub fn is_empty(&self) -> bool {
475 self.contents.iter().all(|block| block.text.is_empty())
476 }
477}
478
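/// Edits grouped per buffer, as produced by operations that may touch multiple buffers
/// (for example, workspace edits applied by a language server).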
479#[derive(Default)]
480pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
481
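/// What caused a formatting operation to run: saving the buffer, or an explicit manual request.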
482#[derive(Debug, Clone, Copy, PartialEq, Eq)]
483pub enum FormatTrigger {
484 Save,
485 Manual,
486}
487
488// Currently, formatting operations are represented differently depending on
489// whether they come from a language server or an external command.
490enum FormatOperation {
491 Lsp(Vec<(Range<Anchor>, String)>),
492 External(Diff),
493 Prettier(Diff),
494}
495
496impl FormatTrigger {
497 fn from_proto(value: i32) -> FormatTrigger {
498 match value {
499 0 => FormatTrigger::Save,
500 1 => FormatTrigger::Manual,
501 _ => FormatTrigger::Save,
502 }
503 }
504}
505
506#[derive(Clone, Debug, PartialEq)]
507enum SearchMatchCandidate {
508 OpenBuffer {
509 buffer: Model<Buffer>,
        // This might be an unnamed file without a representation on the filesystem
511 path: Option<Arc<Path>>,
512 },
513 Path {
514 worktree_id: WorktreeId,
515 is_ignored: bool,
516 path: Arc<Path>,
517 },
518}
519
520impl SearchMatchCandidate {
521 fn path(&self) -> Option<Arc<Path>> {
522 match self {
523 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
524 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
525 }
526 }
527
528 fn is_ignored(&self) -> bool {
529 matches!(
530 self,
531 SearchMatchCandidate::Path {
532 is_ignored: true,
533 ..
534 }
535 )
536 }
537}
538
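/// A single result of a project-wide search: either a buffer together with the ranges that
/// matched, or a marker that the search hit its result limit.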
539pub enum SearchResult {
540 Buffer {
541 buffer: Model<Buffer>,
542 ranges: Vec<Range<Anchor>>,
543 },
544 LimitReached,
545}
546
547impl Project {
548 pub fn init_settings(cx: &mut AppContext) {
549 WorktreeSettings::register(cx);
550 ProjectSettings::register(cx);
551 }
552
553 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
554 Self::init_settings(cx);
555
556 client.add_model_message_handler(Self::handle_add_collaborator);
557 client.add_model_message_handler(Self::handle_update_project_collaborator);
558 client.add_model_message_handler(Self::handle_remove_collaborator);
559 client.add_model_message_handler(Self::handle_buffer_reloaded);
560 client.add_model_message_handler(Self::handle_buffer_saved);
561 client.add_model_message_handler(Self::handle_start_language_server);
562 client.add_model_message_handler(Self::handle_update_language_server);
563 client.add_model_message_handler(Self::handle_update_project);
564 client.add_model_message_handler(Self::handle_unshare_project);
565 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
566 client.add_model_message_handler(Self::handle_update_buffer_file);
567 client.add_model_request_handler(Self::handle_update_buffer);
568 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
569 client.add_model_message_handler(Self::handle_update_worktree);
570 client.add_model_message_handler(Self::handle_update_worktree_settings);
571 client.add_model_request_handler(Self::handle_create_project_entry);
572 client.add_model_request_handler(Self::handle_rename_project_entry);
573 client.add_model_request_handler(Self::handle_copy_project_entry);
574 client.add_model_request_handler(Self::handle_delete_project_entry);
575 client.add_model_request_handler(Self::handle_expand_project_entry);
576 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
577 client.add_model_request_handler(Self::handle_resolve_completion_documentation);
578 client.add_model_request_handler(Self::handle_apply_code_action);
579 client.add_model_request_handler(Self::handle_on_type_formatting);
580 client.add_model_request_handler(Self::handle_inlay_hints);
581 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
582 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
583 client.add_model_request_handler(Self::handle_reload_buffers);
584 client.add_model_request_handler(Self::handle_synchronize_buffers);
585 client.add_model_request_handler(Self::handle_format_buffers);
586 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
587 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
588 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
589 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
590 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
591 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
592 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
593 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
594 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
595 client.add_model_request_handler(Self::handle_search_project);
596 client.add_model_request_handler(Self::handle_get_project_symbols);
597 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
598 client.add_model_request_handler(Self::handle_open_buffer_by_id);
599 client.add_model_request_handler(Self::handle_open_buffer_by_path);
600 client.add_model_request_handler(Self::handle_save_buffer);
601 client.add_model_message_handler(Self::handle_update_diff_base);
602 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
603 client.add_model_request_handler(Self::handle_blame_buffer);
604 client.add_model_request_handler(Self::handle_multi_lsp_query);
605 }
606
607 pub fn local(
608 client: Arc<Client>,
609 node: Arc<dyn NodeRuntime>,
610 user_store: Model<UserStore>,
611 languages: Arc<LanguageRegistry>,
612 fs: Arc<dyn Fs>,
613 cx: &mut AppContext,
614 ) -> Model<Self> {
615 cx.new_model(|cx: &mut ModelContext<Self>| {
616 let (tx, rx) = mpsc::unbounded();
617 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
618 .detach();
619 let copilot_lsp_subscription =
620 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
621 let tasks = Inventory::new(cx);
622
623 Self {
624 worktrees: Vec::new(),
625 buffer_ordered_messages_tx: tx,
626 flush_language_server_update: None,
627 pending_language_server_update: None,
628 collaborators: Default::default(),
629 next_buffer_id: BufferId::new(1).unwrap(),
630 opened_buffers: Default::default(),
631 shared_buffers: Default::default(),
632 loading_buffers_by_path: Default::default(),
633 loading_local_worktrees: Default::default(),
634 local_buffer_ids_by_path: Default::default(),
635 local_buffer_ids_by_entry_id: Default::default(),
636 buffer_snapshots: Default::default(),
637 join_project_response_message_id: 0,
638 client_state: ProjectClientState::Local,
639 loading_buffers: HashMap::default(),
640 incomplete_remote_buffers: HashMap::default(),
641 client_subscriptions: Vec::new(),
642 _subscriptions: vec![
643 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
644 cx.on_release(Self::release),
645 cx.on_app_quit(Self::shutdown_language_servers),
646 ],
647 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
648 _maintain_workspace_config: Self::maintain_workspace_config(cx),
649 active_entry: None,
650 languages,
651 client,
652 user_store,
653 fs,
654 next_entry_id: Default::default(),
655 next_diagnostic_group_id: Default::default(),
656 supplementary_language_servers: HashMap::default(),
657 language_servers: Default::default(),
658 language_server_ids: HashMap::default(),
659 language_server_statuses: Default::default(),
660 last_formatting_failure: None,
661 last_workspace_edits_by_language_server: Default::default(),
662 language_server_watched_paths: HashMap::default(),
663 buffers_being_formatted: Default::default(),
664 buffers_needing_diff: Default::default(),
665 git_diff_debouncer: DebouncedDelay::new(),
666 nonce: StdRng::from_entropy().gen(),
667 terminals: Terminals {
668 local_handles: Vec::new(),
669 },
670 copilot_lsp_subscription,
671 copilot_log_subscription: None,
672 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
673 node: Some(node),
674 default_prettier: DefaultPrettier::default(),
675 prettiers_per_worktree: HashMap::default(),
676 prettier_instances: HashMap::default(),
677 tasks,
678 hosted_project_id: None,
679 search_history: Self::new_search_history(),
680 }
681 })
682 }
683
684 pub async fn remote(
685 remote_id: u64,
686 client: Arc<Client>,
687 user_store: Model<UserStore>,
688 languages: Arc<LanguageRegistry>,
689 fs: Arc<dyn Fs>,
690 cx: AsyncAppContext,
691 ) -> Result<Model<Self>> {
692 client.authenticate_and_connect(true, &cx).await?;
693
694 let subscription = client.subscribe_to_entity(remote_id)?;
695 let response = client
696 .request_envelope(proto::JoinProject {
697 project_id: remote_id,
698 })
699 .await?;
700 Self::from_join_project_response(
701 response,
702 subscription,
703 client,
704 user_store,
705 languages,
706 fs,
707 cx,
708 )
709 .await
710 }
711 async fn from_join_project_response(
712 response: TypedEnvelope<proto::JoinProjectResponse>,
713 subscription: PendingEntitySubscription<Project>,
714 client: Arc<Client>,
715 user_store: Model<UserStore>,
716 languages: Arc<LanguageRegistry>,
717 fs: Arc<dyn Fs>,
718 mut cx: AsyncAppContext,
719 ) -> Result<Model<Self>> {
720 let remote_id = response.payload.project_id;
721 let role = response.payload.role();
722 let this = cx.new_model(|cx| {
723 let replica_id = response.payload.replica_id as ReplicaId;
724 let tasks = Inventory::new(cx);
            // BIG CAUTION NOTE: The order in which fields are initialized here matters, and it should match what's done in Self::local.
            // Otherwise, you might run into issues where a worktree's id on the remote side differs from the one on the local host.
            // That's because a Worktree's identifier is its entity id, which should probably be changed.
728 let mut worktrees = Vec::new();
729 for worktree in response.payload.worktrees {
730 let worktree =
731 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
732 worktrees.push(worktree);
733 }
734
735 let (tx, rx) = mpsc::unbounded();
736 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
737 .detach();
738 let copilot_lsp_subscription =
739 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
740 let mut this = Self {
741 worktrees: Vec::new(),
742 buffer_ordered_messages_tx: tx,
743 pending_language_server_update: None,
744 flush_language_server_update: None,
745 loading_buffers_by_path: Default::default(),
746 next_buffer_id: BufferId::new(1).unwrap(),
747 loading_buffers: Default::default(),
748 shared_buffers: Default::default(),
749 incomplete_remote_buffers: Default::default(),
750 loading_local_worktrees: Default::default(),
751 local_buffer_ids_by_path: Default::default(),
752 local_buffer_ids_by_entry_id: Default::default(),
753 active_entry: None,
754 collaborators: Default::default(),
755 join_project_response_message_id: response.message_id,
756 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
757 _maintain_workspace_config: Self::maintain_workspace_config(cx),
758 languages,
759 user_store: user_store.clone(),
760 fs,
761 next_entry_id: Default::default(),
762 next_diagnostic_group_id: Default::default(),
763 client_subscriptions: Default::default(),
764 _subscriptions: vec![
765 cx.on_release(Self::release),
766 cx.on_app_quit(Self::shutdown_language_servers),
767 ],
768 client: client.clone(),
769 client_state: ProjectClientState::Remote {
770 sharing_has_stopped: false,
771 capability: Capability::ReadWrite,
772 remote_id,
773 replica_id,
774 },
775 supplementary_language_servers: HashMap::default(),
776 language_servers: Default::default(),
777 language_server_ids: HashMap::default(),
778 language_server_statuses: response
779 .payload
780 .language_servers
781 .into_iter()
782 .map(|server| {
783 (
784 LanguageServerId(server.id as usize),
785 LanguageServerStatus {
786 name: server.name,
787 pending_work: Default::default(),
788 has_pending_diagnostic_updates: false,
789 progress_tokens: Default::default(),
790 },
791 )
792 })
793 .collect(),
794 last_formatting_failure: None,
795 last_workspace_edits_by_language_server: Default::default(),
796 language_server_watched_paths: HashMap::default(),
797 opened_buffers: Default::default(),
798 buffers_being_formatted: Default::default(),
799 buffers_needing_diff: Default::default(),
800 git_diff_debouncer: DebouncedDelay::new(),
801 buffer_snapshots: Default::default(),
802 nonce: StdRng::from_entropy().gen(),
803 terminals: Terminals {
804 local_handles: Vec::new(),
805 },
806 copilot_lsp_subscription,
807 copilot_log_subscription: None,
808 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
809 node: None,
810 default_prettier: DefaultPrettier::default(),
811 prettiers_per_worktree: HashMap::default(),
812 prettier_instances: HashMap::default(),
813 tasks,
814 hosted_project_id: None,
815 search_history: Self::new_search_history(),
816 };
817 this.set_role(role, cx);
818 for worktree in worktrees {
819 let _ = this.add_worktree(&worktree, cx);
820 }
821 this
822 })?;
823 let subscription = subscription.set_model(&this, &mut cx);
824
825 let user_ids = response
826 .payload
827 .collaborators
828 .iter()
829 .map(|peer| peer.user_id)
830 .collect();
831 user_store
832 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
833 .await?;
834
835 this.update(&mut cx, |this, cx| {
836 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
837 this.client_subscriptions.push(subscription);
838 anyhow::Ok(())
839 })??;
840
841 Ok(this)
842 }
843
844 pub async fn hosted(
845 remote_id: ProjectId,
846 user_store: Model<UserStore>,
847 client: Arc<Client>,
848 languages: Arc<LanguageRegistry>,
849 fs: Arc<dyn Fs>,
850 cx: AsyncAppContext,
851 ) -> Result<Model<Self>> {
852 client.authenticate_and_connect(true, &cx).await?;
853
854 let subscription = client.subscribe_to_entity(remote_id.0)?;
855 let response = client
856 .request_envelope(proto::JoinHostedProject {
857 project_id: remote_id.0,
858 })
859 .await?;
860 Self::from_join_project_response(
861 response,
862 subscription,
863 client,
864 user_store,
865 languages,
866 fs,
867 cx,
868 )
869 .await
870 }
871
872 fn new_search_history() -> SearchHistory {
873 SearchHistory::new(
874 Some(MAX_PROJECT_SEARCH_HISTORY_SIZE),
875 search_history::QueryInsertionBehavior::AlwaysInsert,
876 )
877 }
878
879 fn release(&mut self, cx: &mut AppContext) {
880 match &self.client_state {
881 ProjectClientState::Local => {}
882 ProjectClientState::Shared { .. } => {
883 let _ = self.unshare_internal(cx);
884 }
885 ProjectClientState::Remote { remote_id, .. } => {
886 let _ = self.client.send(proto::LeaveProject {
887 project_id: *remote_id,
888 });
889 self.disconnected_from_host_internal(cx);
890 }
891 }
892 }
893
894 fn shutdown_language_servers(
895 &mut self,
896 _cx: &mut ModelContext<Self>,
897 ) -> impl Future<Output = ()> {
898 let shutdown_futures = self
899 .language_servers
900 .drain()
901 .map(|(_, server_state)| async {
902 use LanguageServerState::*;
903 match server_state {
904 Running { server, .. } => server.shutdown()?.await,
905 Starting(task) => task.await?.shutdown()?.await,
906 }
907 })
908 .collect::<Vec<_>>();
909
910 async move {
911 futures::future::join_all(shutdown_futures).await;
912 }
913 }
914
915 #[cfg(any(test, feature = "test-support"))]
916 pub async fn test(
917 fs: Arc<dyn Fs>,
918 root_paths: impl IntoIterator<Item = &Path>,
919 cx: &mut gpui::TestAppContext,
920 ) -> Model<Project> {
921 use clock::FakeSystemClock;
922
923 let languages = LanguageRegistry::test(cx.executor());
924 let clock = Arc::new(FakeSystemClock::default());
925 let http_client = util::http::FakeHttpClient::with_404_response();
926 let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
927 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
928 let project = cx.update(|cx| {
929 Project::local(
930 client,
931 node_runtime::FakeNodeRuntime::new(),
932 user_store,
933 Arc::new(languages),
934 fs,
935 cx,
936 )
937 });
938 for path in root_paths {
939 let (tree, _) = project
940 .update(cx, |project, cx| {
941 project.find_or_create_local_worktree(path, true, cx)
942 })
943 .await
944 .unwrap();
945 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
946 .await;
947 }
948 project
949 }
950
951 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
952 let mut language_servers_to_start = Vec::new();
953 let mut language_formatters_to_check = Vec::new();
954 for buffer in self.opened_buffers.values() {
955 if let Some(buffer) = buffer.upgrade() {
956 let buffer = buffer.read(cx);
957 let buffer_file = File::from_dyn(buffer.file());
958 let buffer_language = buffer.language();
959 let settings = language_settings(buffer_language, buffer.file(), cx);
960 if let Some(language) = buffer_language {
961 if settings.enable_language_server {
962 if let Some(file) = buffer_file {
963 language_servers_to_start
964 .push((file.worktree.clone(), Arc::clone(language)));
965 }
966 }
967 language_formatters_to_check.push((
968 buffer_file.map(|f| f.worktree_id(cx)),
969 Arc::clone(language),
970 settings.clone(),
971 ));
972 }
973 }
974 }
975
976 let mut language_servers_to_stop = Vec::new();
977 let mut language_servers_to_restart = Vec::new();
978 let languages = self.languages.to_vec();
979
980 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
981 let current_lsp_settings = &self.current_lsp_settings;
982 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
983 let language = languages.iter().find_map(|l| {
984 let adapter = self
985 .languages
986 .lsp_adapters(l)
987 .iter()
988 .find(|adapter| &adapter.name == started_lsp_name)?
989 .clone();
990 Some((l, adapter))
991 });
992 if let Some((language, adapter)) = language {
993 let worktree = self.worktree_for_id(*worktree_id, cx);
994 let file = worktree.as_ref().and_then(|tree| {
995 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
996 });
997 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
998 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
999 } else if let Some(worktree) = worktree {
1000 let server_name = &adapter.name.0;
1001 match (
1002 current_lsp_settings.get(server_name),
1003 new_lsp_settings.get(server_name),
1004 ) {
1005 (None, None) => {}
1006 (Some(_), None) | (None, Some(_)) => {
1007 language_servers_to_restart.push((worktree, Arc::clone(language)));
1008 }
1009 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
1010 if current_lsp_settings != new_lsp_settings {
1011 language_servers_to_restart.push((worktree, Arc::clone(language)));
1012 }
1013 }
1014 }
1015 }
1016 }
1017 }
1018 self.current_lsp_settings = new_lsp_settings;
1019
1020 // Stop all newly-disabled language servers.
1021 for (worktree_id, adapter_name) in language_servers_to_stop {
1022 self.stop_language_server(worktree_id, adapter_name, cx)
1023 .detach();
1024 }
1025
1026 let mut prettier_plugins_by_worktree = HashMap::default();
1027 for (worktree, language, settings) in language_formatters_to_check {
1028 if let Some(plugins) =
1029 prettier_support::prettier_plugins_for_language(&language, &settings)
1030 {
1031 prettier_plugins_by_worktree
1032 .entry(worktree)
1033 .or_insert_with(|| HashSet::default())
1034 .extend(plugins.iter().cloned());
1035 }
1036 }
1037 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
1038 self.install_default_prettier(worktree, prettier_plugins.into_iter(), cx);
1039 }
1040
1041 // Start all the newly-enabled language servers.
1042 for (worktree, language) in language_servers_to_start {
1043 self.start_language_servers(&worktree, language, cx);
1044 }
1045
1046 // Restart all language servers with changed initialization options.
1047 for (worktree, language) in language_servers_to_restart {
1048 self.restart_language_servers(worktree, language, cx);
1049 }
1050
1051 if self.copilot_lsp_subscription.is_none() {
1052 if let Some(copilot) = Copilot::global(cx) {
1053 for buffer in self.opened_buffers.values() {
1054 if let Some(buffer) = buffer.upgrade() {
1055 self.register_buffer_with_copilot(&buffer, cx);
1056 }
1057 }
1058 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
1059 }
1060 }
1061
1062 cx.notify();
1063 }
1064
1065 pub fn buffer_for_id(&self, remote_id: BufferId) -> Option<Model<Buffer>> {
1066 self.opened_buffers
1067 .get(&remote_id)
1068 .and_then(|buffer| buffer.upgrade())
1069 }
1070
1071 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1072 &self.languages
1073 }
1074
1075 pub fn client(&self) -> Arc<Client> {
1076 self.client.clone()
1077 }
1078
1079 pub fn user_store(&self) -> Model<UserStore> {
1080 self.user_store.clone()
1081 }
1082
1083 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1084 self.opened_buffers
1085 .values()
1086 .filter_map(|b| b.upgrade())
1087 .collect()
1088 }
1089
1090 #[cfg(any(test, feature = "test-support"))]
1091 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1092 let path = path.into();
1093 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1094 self.opened_buffers.iter().any(|(_, buffer)| {
1095 if let Some(buffer) = buffer.upgrade() {
1096 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1097 if file.worktree == worktree && file.path() == &path.path {
1098 return true;
1099 }
1100 }
1101 }
1102 false
1103 })
1104 } else {
1105 false
1106 }
1107 }
1108
1109 pub fn fs(&self) -> &Arc<dyn Fs> {
1110 &self.fs
1111 }
1112
1113 pub fn remote_id(&self) -> Option<u64> {
1114 match self.client_state {
1115 ProjectClientState::Local => None,
1116 ProjectClientState::Shared { remote_id, .. }
1117 | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
1118 }
1119 }
1120
1121 pub fn hosted_project_id(&self) -> Option<ProjectId> {
1122 self.hosted_project_id
1123 }
1124
1125 pub fn replica_id(&self) -> ReplicaId {
1126 match self.client_state {
1127 ProjectClientState::Remote { replica_id, .. } => replica_id,
1128 _ => 0,
1129 }
1130 }
1131
1132 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1133 if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
1134 updates_tx
1135 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1136 .ok();
1137 }
1138 cx.notify();
1139 }
1140
1141 pub fn task_inventory(&self) -> &Model<Inventory> {
1142 &self.tasks
1143 }
1144
1145 pub fn search_history(&self) -> &SearchHistory {
1146 &self.search_history
1147 }
1148
1149 pub fn search_history_mut(&mut self) -> &mut SearchHistory {
1150 &mut self.search_history
1151 }
1152
1153 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1154 &self.collaborators
1155 }
1156
1157 pub fn host(&self) -> Option<&Collaborator> {
1158 self.collaborators.values().find(|c| c.replica_id == 0)
1159 }
1160
1161 /// Collect all worktrees, including ones that don't appear in the project panel
1162 pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
1163 self.worktrees
1164 .iter()
1165 .filter_map(move |worktree| worktree.upgrade())
1166 }
1167
1168 /// Collect all user-visible worktrees, the ones that appear in the project panel
1169 pub fn visible_worktrees<'a>(
1170 &'a self,
1171 cx: &'a AppContext,
1172 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1173 self.worktrees.iter().filter_map(|worktree| {
1174 worktree.upgrade().and_then(|worktree| {
1175 if worktree.read(cx).is_visible() {
1176 Some(worktree)
1177 } else {
1178 None
1179 }
1180 })
1181 })
1182 }
1183
1184 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1185 self.visible_worktrees(cx)
1186 .map(|tree| tree.read(cx).root_name())
1187 }
1188
1189 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1190 self.worktrees()
1191 .find(|worktree| worktree.read(cx).id() == id)
1192 }
1193
1194 pub fn worktree_for_entry(
1195 &self,
1196 entry_id: ProjectEntryId,
1197 cx: &AppContext,
1198 ) -> Option<Model<Worktree>> {
1199 self.worktrees()
1200 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1201 }
1202
1203 pub fn worktree_id_for_entry(
1204 &self,
1205 entry_id: ProjectEntryId,
1206 cx: &AppContext,
1207 ) -> Option<WorktreeId> {
1208 self.worktree_for_entry(entry_id, cx)
1209 .map(|worktree| worktree.read(cx).id())
1210 }
1211
1212 pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option<bool> {
1213 paths
1214 .iter()
1215 .map(|path| self.visibility_for_path(path, cx))
1216 .max()
1217 .flatten()
1218 }
1219
1220 pub fn visibility_for_path(&self, path: &Path, cx: &AppContext) -> Option<bool> {
1221 self.worktrees()
1222 .filter_map(|worktree| {
1223 let worktree = worktree.read(cx);
1224 worktree
1225 .as_local()?
1226 .contains_abs_path(path)
1227 .then(|| worktree.is_visible())
1228 })
1229 .max()
1230 }
1231
1232 pub fn create_entry(
1233 &mut self,
1234 project_path: impl Into<ProjectPath>,
1235 is_directory: bool,
1236 cx: &mut ModelContext<Self>,
1237 ) -> Task<Result<Option<Entry>>> {
1238 let project_path = project_path.into();
1239 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1240 return Task::ready(Ok(None));
1241 };
1242 if self.is_local() {
1243 worktree.update(cx, |worktree, cx| {
1244 worktree
1245 .as_local_mut()
1246 .unwrap()
1247 .create_entry(project_path.path, is_directory, cx)
1248 })
1249 } else {
1250 let client = self.client.clone();
1251 let project_id = self.remote_id().unwrap();
1252 cx.spawn(move |_, mut cx| async move {
1253 let response = client
1254 .request(proto::CreateProjectEntry {
1255 worktree_id: project_path.worktree_id.to_proto(),
1256 project_id,
1257 path: project_path.path.to_string_lossy().into(),
1258 is_directory,
1259 })
1260 .await?;
1261 match response.entry {
1262 Some(entry) => worktree
1263 .update(&mut cx, |worktree, cx| {
1264 worktree.as_remote_mut().unwrap().insert_entry(
1265 entry,
1266 response.worktree_scan_id as usize,
1267 cx,
1268 )
1269 })?
1270 .await
1271 .map(Some),
1272 None => Ok(None),
1273 }
1274 })
1275 }
1276 }
1277
1278 pub fn copy_entry(
1279 &mut self,
1280 entry_id: ProjectEntryId,
1281 new_path: impl Into<Arc<Path>>,
1282 cx: &mut ModelContext<Self>,
1283 ) -> Task<Result<Option<Entry>>> {
1284 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1285 return Task::ready(Ok(None));
1286 };
1287 let new_path = new_path.into();
1288 if self.is_local() {
1289 worktree.update(cx, |worktree, cx| {
1290 worktree
1291 .as_local_mut()
1292 .unwrap()
1293 .copy_entry(entry_id, new_path, cx)
1294 })
1295 } else {
1296 let client = self.client.clone();
1297 let project_id = self.remote_id().unwrap();
1298
1299 cx.spawn(move |_, mut cx| async move {
1300 let response = client
1301 .request(proto::CopyProjectEntry {
1302 project_id,
1303 entry_id: entry_id.to_proto(),
1304 new_path: new_path.to_string_lossy().into(),
1305 })
1306 .await?;
1307 match response.entry {
1308 Some(entry) => worktree
1309 .update(&mut cx, |worktree, cx| {
1310 worktree.as_remote_mut().unwrap().insert_entry(
1311 entry,
1312 response.worktree_scan_id as usize,
1313 cx,
1314 )
1315 })?
1316 .await
1317 .map(Some),
1318 None => Ok(None),
1319 }
1320 })
1321 }
1322 }
1323
1324 pub fn rename_entry(
1325 &mut self,
1326 entry_id: ProjectEntryId,
1327 new_path: impl Into<Arc<Path>>,
1328 cx: &mut ModelContext<Self>,
1329 ) -> Task<Result<Option<Entry>>> {
1330 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1331 return Task::ready(Ok(None));
1332 };
1333 let new_path = new_path.into();
1334 if self.is_local() {
1335 worktree.update(cx, |worktree, cx| {
1336 worktree
1337 .as_local_mut()
1338 .unwrap()
1339 .rename_entry(entry_id, new_path, cx)
1340 })
1341 } else {
1342 let client = self.client.clone();
1343 let project_id = self.remote_id().unwrap();
1344
1345 cx.spawn(move |_, mut cx| async move {
1346 let response = client
1347 .request(proto::RenameProjectEntry {
1348 project_id,
1349 entry_id: entry_id.to_proto(),
1350 new_path: new_path.to_string_lossy().into(),
1351 })
1352 .await?;
1353 match response.entry {
1354 Some(entry) => worktree
1355 .update(&mut cx, |worktree, cx| {
1356 worktree.as_remote_mut().unwrap().insert_entry(
1357 entry,
1358 response.worktree_scan_id as usize,
1359 cx,
1360 )
1361 })?
1362 .await
1363 .map(Some),
1364 None => Ok(None),
1365 }
1366 })
1367 }
1368 }
1369
1370 pub fn delete_entry(
1371 &mut self,
1372 entry_id: ProjectEntryId,
1373 cx: &mut ModelContext<Self>,
1374 ) -> Option<Task<Result<()>>> {
1375 let worktree = self.worktree_for_entry(entry_id, cx)?;
1376
1377 cx.emit(Event::DeletedEntry(entry_id));
1378
1379 if self.is_local() {
1380 worktree.update(cx, |worktree, cx| {
1381 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1382 })
1383 } else {
1384 let client = self.client.clone();
1385 let project_id = self.remote_id().unwrap();
1386 Some(cx.spawn(move |_, mut cx| async move {
1387 let response = client
1388 .request(proto::DeleteProjectEntry {
1389 project_id,
1390 entry_id: entry_id.to_proto(),
1391 })
1392 .await?;
1393 worktree
1394 .update(&mut cx, move |worktree, cx| {
1395 worktree.as_remote_mut().unwrap().delete_entry(
1396 entry_id,
1397 response.worktree_scan_id as usize,
1398 cx,
1399 )
1400 })?
1401 .await
1402 }))
1403 }
1404 }
1405
1406 pub fn expand_entry(
1407 &mut self,
1408 worktree_id: WorktreeId,
1409 entry_id: ProjectEntryId,
1410 cx: &mut ModelContext<Self>,
1411 ) -> Option<Task<Result<()>>> {
1412 let worktree = self.worktree_for_id(worktree_id, cx)?;
1413 if self.is_local() {
1414 worktree.update(cx, |worktree, cx| {
1415 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1416 })
1417 } else {
1418 let worktree = worktree.downgrade();
1419 let request = self.client.request(proto::ExpandProjectEntry {
1420 project_id: self.remote_id().unwrap(),
1421 entry_id: entry_id.to_proto(),
1422 });
1423 Some(cx.spawn(move |_, mut cx| async move {
1424 let response = request.await?;
1425 if let Some(worktree) = worktree.upgrade() {
1426 worktree
1427 .update(&mut cx, |worktree, _| {
1428 worktree
1429 .as_remote_mut()
1430 .unwrap()
1431 .wait_for_snapshot(response.worktree_scan_id as usize)
1432 })?
1433 .await?;
1434 }
1435 Ok(())
1436 }))
1437 }
1438 }
1439
1440 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1441 if !matches!(self.client_state, ProjectClientState::Local) {
1442 return Err(anyhow!("project was already shared"));
1443 }
1444 self.client_subscriptions.push(
1445 self.client
1446 .subscribe_to_entity(project_id)?
1447 .set_model(&cx.handle(), &mut cx.to_async()),
1448 );
1449
1450 for open_buffer in self.opened_buffers.values_mut() {
1451 match open_buffer {
1452 OpenBuffer::Strong(_) => {}
1453 OpenBuffer::Weak(buffer) => {
1454 if let Some(buffer) = buffer.upgrade() {
1455 *open_buffer = OpenBuffer::Strong(buffer);
1456 }
1457 }
1458 OpenBuffer::Operations(_) => unreachable!(),
1459 }
1460 }
1461
1462 for worktree_handle in self.worktrees.iter_mut() {
1463 match worktree_handle {
1464 WorktreeHandle::Strong(_) => {}
1465 WorktreeHandle::Weak(worktree) => {
1466 if let Some(worktree) = worktree.upgrade() {
1467 *worktree_handle = WorktreeHandle::Strong(worktree);
1468 }
1469 }
1470 }
1471 }
1472
1473 for (server_id, status) in &self.language_server_statuses {
1474 self.client
1475 .send(proto::StartLanguageServer {
1476 project_id,
1477 server: Some(proto::LanguageServer {
1478 id: server_id.0 as u64,
1479 name: status.name.clone(),
1480 }),
1481 })
1482 .log_err();
1483 }
1484
1485 let store = cx.global::<SettingsStore>();
1486 for worktree in self.worktrees() {
1487 let worktree_id = worktree.read(cx).id().to_proto();
1488 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1489 self.client
1490 .send(proto::UpdateWorktreeSettings {
1491 project_id,
1492 worktree_id,
1493 path: path.to_string_lossy().into(),
1494 content: Some(content),
1495 })
1496 .log_err();
1497 }
1498 }
1499
1500 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1501 let client = self.client.clone();
1502 self.client_state = ProjectClientState::Shared {
1503 remote_id: project_id,
1504 updates_tx,
1505 _send_updates: cx.spawn(move |this, mut cx| async move {
1506 while let Some(update) = updates_rx.next().await {
1507 match update {
1508 LocalProjectUpdate::WorktreesChanged => {
1509 let worktrees = this.update(&mut cx, |this, _cx| {
1510 this.worktrees().collect::<Vec<_>>()
1511 })?;
1512 let update_project = this
1513 .update(&mut cx, |this, cx| {
1514 this.client.request(proto::UpdateProject {
1515 project_id,
1516 worktrees: this.worktree_metadata_protos(cx),
1517 })
1518 })?
1519 .await;
1520 if update_project.is_ok() {
1521 for worktree in worktrees {
1522 worktree.update(&mut cx, |worktree, cx| {
1523 let worktree = worktree.as_local_mut().unwrap();
1524 worktree.share(project_id, cx).detach_and_log_err(cx)
1525 })?;
1526 }
1527 }
1528 }
1529 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1530 let buffer = this.update(&mut cx, |this, _| {
1531 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1532 let shared_buffers =
1533 this.shared_buffers.entry(peer_id).or_default();
1534 if shared_buffers.insert(buffer_id) {
1535 if let OpenBuffer::Strong(buffer) = buffer {
1536 Some(buffer.clone())
1537 } else {
1538 None
1539 }
1540 } else {
1541 None
1542 }
1543 })?;
1544
1545 let Some(buffer) = buffer else { continue };
1546 let operations =
1547 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1548 let operations = operations.await;
1549 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1550
1551 let initial_state = proto::CreateBufferForPeer {
1552 project_id,
1553 peer_id: Some(peer_id),
1554 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1555 };
1556 if client.send(initial_state).log_err().is_some() {
1557 let client = client.clone();
1558 cx.background_executor()
1559 .spawn(async move {
1560 let mut chunks = split_operations(operations).peekable();
1561 while let Some(chunk) = chunks.next() {
1562 let is_last = chunks.peek().is_none();
1563 client.send(proto::CreateBufferForPeer {
1564 project_id,
1565 peer_id: Some(peer_id),
1566 variant: Some(
1567 proto::create_buffer_for_peer::Variant::Chunk(
1568 proto::BufferChunk {
1569 buffer_id: buffer_id.into(),
1570 operations: chunk,
1571 is_last,
1572 },
1573 ),
1574 ),
1575 })?;
1576 }
1577 anyhow::Ok(())
1578 })
1579 .await
1580 .log_err();
1581 }
1582 }
1583 }
1584 }
1585 Ok(())
1586 }),
1587 };
1588
1589 self.metadata_changed(cx);
1590 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1591 cx.notify();
1592 Ok(())
1593 }
1594
1595 pub fn reshared(
1596 &mut self,
1597 message: proto::ResharedProject,
1598 cx: &mut ModelContext<Self>,
1599 ) -> Result<()> {
1600 self.shared_buffers.clear();
1601 self.set_collaborators_from_proto(message.collaborators, cx)?;
1602 self.metadata_changed(cx);
1603 Ok(())
1604 }
1605
1606 pub fn rejoined(
1607 &mut self,
1608 message: proto::RejoinedProject,
1609 message_id: u32,
1610 cx: &mut ModelContext<Self>,
1611 ) -> Result<()> {
1612 cx.update_global::<SettingsStore, _>(|store, cx| {
1613 for worktree in &self.worktrees {
1614 store
1615 .clear_local_settings(worktree.handle_id(), cx)
1616 .log_err();
1617 }
1618 });
1619
1620 self.join_project_response_message_id = message_id;
1621 self.set_worktrees_from_proto(message.worktrees, cx)?;
1622 self.set_collaborators_from_proto(message.collaborators, cx)?;
1623 self.language_server_statuses = message
1624 .language_servers
1625 .into_iter()
1626 .map(|server| {
1627 (
1628 LanguageServerId(server.id as usize),
1629 LanguageServerStatus {
1630 name: server.name,
1631 pending_work: Default::default(),
1632 has_pending_diagnostic_updates: false,
1633 progress_tokens: Default::default(),
1634 },
1635 )
1636 })
1637 .collect();
1638 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
1639 .unwrap();
1640 cx.notify();
1641 Ok(())
1642 }
1643
1644 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1645 self.unshare_internal(cx)?;
1646 self.metadata_changed(cx);
1647 cx.notify();
1648 Ok(())
1649 }
1650
1651 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1652 if self.is_remote() {
1653 return Err(anyhow!("attempted to unshare a remote project"));
1654 }
1655
1656 if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
1657 self.client_state = ProjectClientState::Local;
1658 self.collaborators.clear();
1659 self.shared_buffers.clear();
1660 self.client_subscriptions.clear();
1661
1662 for worktree_handle in self.worktrees.iter_mut() {
1663 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1664 let is_visible = worktree.update(cx, |worktree, _| {
1665 worktree.as_local_mut().unwrap().unshare();
1666 worktree.is_visible()
1667 });
1668 if !is_visible {
1669 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1670 }
1671 }
1672 }
1673
1674 for open_buffer in self.opened_buffers.values_mut() {
1675 // Wake up any tasks waiting for peers' edits to this buffer.
1676 if let Some(buffer) = open_buffer.upgrade() {
1677 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1678 }
1679
1680 if let OpenBuffer::Strong(buffer) = open_buffer {
1681 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1682 }
1683 }
1684
1685 self.client.send(proto::UnshareProject {
1686 project_id: remote_id,
1687 })?;
1688
1689 Ok(())
1690 } else {
1691 Err(anyhow!("attempted to unshare an unshared project"))
1692 }
1693 }
1694
1695 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1696 self.disconnected_from_host_internal(cx);
1697 cx.emit(Event::DisconnectedFromHost);
1698 cx.notify();
1699 }
1700
1701 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1702 let new_capability =
1703 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1704 Capability::ReadWrite
1705 } else {
1706 Capability::ReadOnly
1707 };
1708 if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
1709 if *capability == new_capability {
1710 return;
1711 }
1712
1713 *capability = new_capability;
1714 for buffer in self.opened_buffers() {
1715 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1716 }
1717 }
1718 }
1719
1720 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1721 if let ProjectClientState::Remote {
1722 sharing_has_stopped,
1723 ..
1724 } = &mut self.client_state
1725 {
1726 *sharing_has_stopped = true;
1727
1728 self.collaborators.clear();
1729
1730 for worktree in &self.worktrees {
1731 if let Some(worktree) = worktree.upgrade() {
1732 worktree.update(cx, |worktree, _| {
1733 if let Some(worktree) = worktree.as_remote_mut() {
1734 worktree.disconnected_from_host();
1735 }
1736 });
1737 }
1738 }
1739
1740 for open_buffer in self.opened_buffers.values_mut() {
1741 // Wake up any tasks waiting for peers' edits to this buffer.
1742 if let Some(buffer) = open_buffer.upgrade() {
1743 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1744 }
1745
1746 if let OpenBuffer::Strong(buffer) = open_buffer {
1747 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1748 }
1749 }
1750
1751 // Wake up all futures currently waiting on a buffer to get opened,
1752 // to give them a chance to fail now that we've disconnected.
1753 self.loading_buffers.clear();
1754 // self.opened_buffer.send(OpenedBufferEvent::Disconnected);
1755 }
1756 }
1757
1758 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1759 cx.emit(Event::Closed);
1760 }
1761
1762 pub fn is_disconnected(&self) -> bool {
1763 match &self.client_state {
1764 ProjectClientState::Remote {
1765 sharing_has_stopped,
1766 ..
1767 } => *sharing_has_stopped,
1768 _ => false,
1769 }
1770 }
1771
1772 pub fn capability(&self) -> Capability {
1773 match &self.client_state {
1774 ProjectClientState::Remote { capability, .. } => *capability,
1775 ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
1776 }
1777 }
1778
1779 pub fn is_read_only(&self) -> bool {
1780 self.is_disconnected() || self.capability() == Capability::ReadOnly
1781 }
1782
1783 pub fn is_local(&self) -> bool {
1784 match &self.client_state {
1785 ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
1786 ProjectClientState::Remote { .. } => false,
1787 }
1788 }
1789
1790 pub fn is_remote(&self) -> bool {
1791 !self.is_local()
1792 }
1793
1794 pub fn create_buffer(
1795 &mut self,
1796 text: &str,
1797 language: Option<Arc<Language>>,
1798 cx: &mut ModelContext<Self>,
1799 ) -> Result<Model<Buffer>> {
1800 if self.is_remote() {
1801 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1802 }
1803 let id = self.next_buffer_id.next();
1804 let buffer = cx.new_model(|cx| {
1805 Buffer::new(self.replica_id(), id, text)
1806 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1807 });
1808 self.register_buffer(&buffer, cx)?;
1809 Ok(buffer)
1810 }
1811
1812 pub fn open_path(
1813 &mut self,
1814 path: ProjectPath,
1815 cx: &mut ModelContext<Self>,
1816 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1817 let task = self.open_buffer(path.clone(), cx);
1818 cx.spawn(move |_, cx| async move {
1819 let buffer = task.await?;
1820 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1821 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1822 })?;
1823
1824 let buffer: &AnyModel = &buffer;
1825 Ok((project_entry_id, buffer.clone()))
1826 })
1827 }
1828
1829 pub fn open_local_buffer(
1830 &mut self,
1831 abs_path: impl AsRef<Path>,
1832 cx: &mut ModelContext<Self>,
1833 ) -> Task<Result<Model<Buffer>>> {
1834 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1835 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1836 } else {
1837 Task::ready(Err(anyhow!("no such path")))
1838 }
1839 }
1840
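    /// Opens a buffer for the given project path, reusing an already-open buffer or
    /// an in-flight load when possible; otherwise the load is started locally or
    /// requested from the host, depending on the worktree.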
1841 pub fn open_buffer(
1842 &mut self,
1843 path: impl Into<ProjectPath>,
1844 cx: &mut ModelContext<Self>,
1845 ) -> Task<Result<Model<Buffer>>> {
1846 let project_path = path.into();
1847 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1848 worktree
1849 } else {
1850 return Task::ready(Err(anyhow!("no such worktree")));
1851 };
1852
1853 // If there is already a buffer for the given path, then return it.
1854 let existing_buffer = self.get_open_buffer(&project_path, cx);
1855 if let Some(existing_buffer) = existing_buffer {
1856 return Task::ready(Ok(existing_buffer));
1857 }
1858
1859 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1860 // If the given path is already being loaded, then wait for that existing
1861 // task to complete and return the same buffer.
1862 hash_map::Entry::Occupied(e) => e.get().clone(),
1863
1864 // Otherwise, record the fact that this path is now being loaded.
1865 hash_map::Entry::Vacant(entry) => {
1866 let (mut tx, rx) = postage::watch::channel();
1867 entry.insert(rx.clone());
1868
1869 let project_path = project_path.clone();
1870 let load_buffer = if worktree.read(cx).is_local() {
1871 self.open_local_buffer_internal(project_path.path.clone(), worktree, cx)
1872 } else {
1873 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1874 };
1875
1876 cx.spawn(move |this, mut cx| async move {
1877 let load_result = load_buffer.await;
1878 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1879 // Record the fact that the buffer is no longer loading.
1880 this.loading_buffers_by_path.remove(&project_path);
1881 let buffer = load_result.map_err(Arc::new)?;
1882 Ok(buffer)
1883 })?);
1884 anyhow::Ok(())
1885 })
1886 .detach();
1887 rx
1888 }
1889 };
1890
1891 cx.background_executor().spawn(async move {
1892 wait_for_loading_buffer(loading_watch)
1893 .await
1894 .map_err(|e| e.cloned())
1895 })
1896 }
1897
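    /// Loads a buffer from the local worktree; if the file does not exist yet, a new
    /// empty buffer associated with that path is created instead.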
1898 fn open_local_buffer_internal(
1899 &mut self,
1900 path: Arc<Path>,
1901 worktree: Model<Worktree>,
1902 cx: &mut ModelContext<Self>,
1903 ) -> Task<Result<Model<Buffer>>> {
1904 let buffer_id = self.next_buffer_id.next();
1905 let load_buffer = worktree.update(cx, |worktree, cx| {
1906 let worktree = worktree.as_local_mut().unwrap();
1907 worktree.load_buffer(buffer_id, &path, cx)
1908 });
1909 fn is_not_found_error(error: &anyhow::Error) -> bool {
1910 error
1911 .root_cause()
1912 .downcast_ref::<io::Error>()
1913 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
1914 }
1915 cx.spawn(move |this, mut cx| async move {
1916 let buffer = match load_buffer.await {
1917 Ok(buffer) => Ok(buffer),
1918 Err(error) if is_not_found_error(&error) => {
1919 worktree.update(&mut cx, |worktree, cx| {
1920 let worktree = worktree.as_local_mut().unwrap();
1921 worktree.new_buffer(buffer_id, path, cx)
1922 })
1923 }
1924 Err(e) => Err(e),
1925 }?;
1926 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1927 Ok(buffer)
1928 })
1929 }
1930
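    /// Asks the host to open the buffer over RPC, then waits for the buffer's state
    /// to be replicated to this client.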
1931 fn open_remote_buffer_internal(
1932 &mut self,
1933 path: &Arc<Path>,
1934 worktree: &Model<Worktree>,
1935 cx: &mut ModelContext<Self>,
1936 ) -> Task<Result<Model<Buffer>>> {
1937 let rpc = self.client.clone();
1938 let project_id = self.remote_id().unwrap();
1939 let remote_worktree_id = worktree.read(cx).id();
1940 let path = path.clone();
1941 let path_string = path.to_string_lossy().to_string();
1942 cx.spawn(move |this, mut cx| async move {
1943 let response = rpc
1944 .request(proto::OpenBufferByPath {
1945 project_id,
1946 worktree_id: remote_worktree_id.to_proto(),
1947 path: path_string,
1948 })
1949 .await?;
1950 let buffer_id = BufferId::new(response.buffer_id)?;
1951 this.update(&mut cx, |this, cx| {
1952 this.wait_for_remote_buffer(buffer_id, cx)
1953 })?
1954 .await
1955 })
1956 }
1957
    /// `LanguageServerName` is taken by value because it is inserted into a map.
1959 pub fn open_local_buffer_via_lsp(
1960 &mut self,
1961 abs_path: lsp::Url,
1962 language_server_id: LanguageServerId,
1963 language_server_name: LanguageServerName,
1964 cx: &mut ModelContext<Self>,
1965 ) -> Task<Result<Model<Buffer>>> {
1966 cx.spawn(move |this, mut cx| async move {
1967 let abs_path = abs_path
1968 .to_file_path()
1969 .map_err(|_| anyhow!("can't convert URI to path"))?;
1970 let (worktree, relative_path) = if let Some(result) =
1971 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1972 {
1973 result
1974 } else {
1975 let worktree = this
1976 .update(&mut cx, |this, cx| {
1977 this.create_local_worktree(&abs_path, false, cx)
1978 })?
1979 .await?;
1980 this.update(&mut cx, |this, cx| {
1981 this.language_server_ids.insert(
1982 (worktree.read(cx).id(), language_server_name),
1983 language_server_id,
1984 );
1985 })
1986 .ok();
1987 (worktree, PathBuf::new())
1988 };
1989
1990 let project_path = ProjectPath {
1991 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1992 path: relative_path.into(),
1993 };
1994 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1995 .await
1996 })
1997 }
1998
1999 pub fn open_buffer_by_id(
2000 &mut self,
2001 id: BufferId,
2002 cx: &mut ModelContext<Self>,
2003 ) -> Task<Result<Model<Buffer>>> {
2004 if let Some(buffer) = self.buffer_for_id(id) {
2005 Task::ready(Ok(buffer))
2006 } else if self.is_local() {
2007 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
2008 } else if let Some(project_id) = self.remote_id() {
2009 let request = self.client.request(proto::OpenBufferById {
2010 project_id,
2011 id: id.into(),
2012 });
2013 cx.spawn(move |this, mut cx| async move {
2014 let buffer_id = BufferId::new(request.await?.buffer_id)?;
2015 this.update(&mut cx, |this, cx| {
2016 this.wait_for_remote_buffer(buffer_id, cx)
2017 })?
2018 .await
2019 })
2020 } else {
2021 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
2022 }
2023 }
2024
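    /// Saves all of the given buffers, failing if any individual save fails.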
2025 pub fn save_buffers(
2026 &self,
2027 buffers: HashSet<Model<Buffer>>,
2028 cx: &mut ModelContext<Self>,
2029 ) -> Task<Result<()>> {
2030 cx.spawn(move |this, mut cx| async move {
2031 let save_tasks = buffers.into_iter().filter_map(|buffer| {
2032 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
2033 .ok()
2034 });
2035 try_join_all(save_tasks).await?;
2036 Ok(())
2037 })
2038 }
2039
2040 pub fn save_buffer(
2041 &self,
2042 buffer: Model<Buffer>,
2043 cx: &mut ModelContext<Self>,
2044 ) -> Task<Result<()>> {
2045 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2046 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
2047 };
2048 let worktree = file.worktree.clone();
2049 let path = file.path.clone();
2050 worktree.update(cx, |worktree, cx| match worktree {
2051 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
2052 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
2053 })
2054 }
2055
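    /// Saves the buffer to a new absolute path: the buffer is unregistered from the
    /// language servers under its old file, written via a (possibly newly created)
    /// local worktree, and then re-registered after its language is re-detected.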
2056 pub fn save_buffer_as(
2057 &mut self,
2058 buffer: Model<Buffer>,
2059 abs_path: PathBuf,
2060 cx: &mut ModelContext<Self>,
2061 ) -> Task<Result<()>> {
2062 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
2063 let old_file = File::from_dyn(buffer.read(cx).file())
2064 .filter(|f| f.is_local())
2065 .cloned();
2066 cx.spawn(move |this, mut cx| async move {
2067 if let Some(old_file) = &old_file {
2068 this.update(&mut cx, |this, cx| {
2069 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
2070 })?;
2071 }
2072 let (worktree, path) = worktree_task.await?;
2073 worktree
2074 .update(&mut cx, |worktree, cx| match worktree {
2075 Worktree::Local(worktree) => {
2076 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
2077 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
2079 })?
2080 .await?;
2081
2082 this.update(&mut cx, |this, cx| {
2083 this.detect_language_for_buffer(&buffer, cx);
2084 this.register_buffer_with_language_servers(&buffer, cx);
2085 })?;
2086 Ok(())
2087 })
2088 }
2089
2090 pub fn get_open_buffer(
2091 &mut self,
2092 path: &ProjectPath,
2093 cx: &mut ModelContext<Self>,
2094 ) -> Option<Model<Buffer>> {
2095 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
2096 self.opened_buffers.values().find_map(|buffer| {
2097 let buffer = buffer.upgrade()?;
2098 let file = File::from_dyn(buffer.read(cx).file())?;
2099 if file.worktree == worktree && file.path() == &path.path {
2100 Some(buffer)
2101 } else {
2102 None
2103 }
2104 })
2105 }
2106
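    /// Wires a newly opened buffer into the project: schedules a diff recalculation,
    /// tracks the buffer (strongly while shared or remote, weakly otherwise),
    /// subscribes to its events, and registers it with language servers and Copilot.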
2107 fn register_buffer(
2108 &mut self,
2109 buffer: &Model<Buffer>,
2110 cx: &mut ModelContext<Self>,
2111 ) -> Result<()> {
2112 self.request_buffer_diff_recalculation(buffer, cx);
2113 buffer.update(cx, |buffer, _| {
2114 buffer.set_language_registry(self.languages.clone())
2115 });
2116
2117 let remote_id = buffer.read(cx).remote_id();
2118 let is_remote = self.is_remote();
2119 let open_buffer = if is_remote || self.is_shared() {
2120 OpenBuffer::Strong(buffer.clone())
2121 } else {
2122 OpenBuffer::Weak(buffer.downgrade())
2123 };
2124
2125 match self.opened_buffers.entry(remote_id) {
2126 hash_map::Entry::Vacant(entry) => {
2127 entry.insert(open_buffer);
2128 }
2129 hash_map::Entry::Occupied(mut entry) => {
2130 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2131 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2132 } else if entry.get().upgrade().is_some() {
2133 if is_remote {
2134 return Ok(());
2135 } else {
2136 debug_panic!("buffer {} was already registered", remote_id);
2137 Err(anyhow!("buffer {} was already registered", remote_id))?;
2138 }
2139 }
2140 entry.insert(open_buffer);
2141 }
2142 }
2143 cx.subscribe(buffer, |this, buffer, event, cx| {
2144 this.on_buffer_event(buffer, event, cx);
2145 })
2146 .detach();
2147
2148 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2149 if file.is_local {
2150 self.local_buffer_ids_by_path.insert(
2151 ProjectPath {
2152 worktree_id: file.worktree_id(cx),
2153 path: file.path.clone(),
2154 },
2155 remote_id,
2156 );
2157
2158 if let Some(entry_id) = file.entry_id {
2159 self.local_buffer_ids_by_entry_id
2160 .insert(entry_id, remote_id);
2161 }
2162 }
2163 }
2164
2165 self.detect_language_for_buffer(buffer, cx);
2166 self.register_buffer_with_language_servers(buffer, cx);
2167 self.register_buffer_with_copilot(buffer, cx);
2168 cx.observe_release(buffer, |this, buffer, cx| {
2169 if let Some(file) = File::from_dyn(buffer.file()) {
2170 if file.is_local() {
2171 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2172 for server in this.language_servers_for_buffer(buffer, cx) {
2173 server
2174 .1
2175 .notify::<lsp::notification::DidCloseTextDocument>(
2176 lsp::DidCloseTextDocumentParams {
2177 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2178 },
2179 )
2180 .log_err();
2181 }
2182 }
2183 }
2184 })
2185 .detach();
2186
2187 if let Some(senders) = self.loading_buffers.remove(&remote_id) {
2188 for sender in senders {
2189 sender.send(Ok(buffer.clone())).ok();
2190 }
2191 }
2192 Ok(())
2193 }
2194
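    /// For local files, sends `textDocument/didOpen` to every running language server
    /// that applies to the buffer's language and worktree, seeds stored diagnostics
    /// and completion triggers, and records an initial snapshot for incremental sync.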
2195 fn register_buffer_with_language_servers(
2196 &mut self,
2197 buffer_handle: &Model<Buffer>,
2198 cx: &mut ModelContext<Self>,
2199 ) {
2200 let buffer = buffer_handle.read(cx);
2201 let buffer_id = buffer.remote_id();
2202
2203 if let Some(file) = File::from_dyn(buffer.file()) {
2204 if !file.is_local() {
2205 return;
2206 }
2207
2208 let abs_path = file.abs_path(cx);
2209 let uri = lsp::Url::from_file_path(&abs_path)
2210 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2211 let initial_snapshot = buffer.text_snapshot();
2212 let language = buffer.language().cloned();
2213 let worktree_id = file.worktree_id(cx);
2214
2215 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2216 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2217 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2218 .log_err();
2219 }
2220 }
2221
2222 if let Some(language) = language {
2223 for adapter in self.languages.lsp_adapters(&language) {
2224 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2225 let server = self
2226 .language_server_ids
2227 .get(&(worktree_id, adapter.name.clone()))
2228 .and_then(|id| self.language_servers.get(id))
2229 .and_then(|server_state| {
2230 if let LanguageServerState::Running { server, .. } = server_state {
2231 Some(server.clone())
2232 } else {
2233 None
2234 }
2235 });
2236 let server = match server {
2237 Some(server) => server,
2238 None => continue,
2239 };
2240
2241 server
2242 .notify::<lsp::notification::DidOpenTextDocument>(
2243 lsp::DidOpenTextDocumentParams {
2244 text_document: lsp::TextDocumentItem::new(
2245 uri.clone(),
2246 language_id.unwrap_or_default(),
2247 0,
2248 initial_snapshot.text(),
2249 ),
2250 },
2251 )
2252 .log_err();
2253
2254 buffer_handle.update(cx, |buffer, cx| {
2255 buffer.set_completion_triggers(
2256 server
2257 .capabilities()
2258 .completion_provider
2259 .as_ref()
2260 .and_then(|provider| provider.trigger_characters.clone())
2261 .unwrap_or_default(),
2262 cx,
2263 );
2264 });
2265
2266 let snapshot = LspBufferSnapshot {
2267 version: 0,
2268 snapshot: initial_snapshot.clone(),
2269 };
2270 self.buffer_snapshots
2271 .entry(buffer_id)
2272 .or_default()
2273 .insert(server.server_id(), vec![snapshot]);
2274 }
2275 }
2276 }
2277 }
2278
2279 fn unregister_buffer_from_language_servers(
2280 &mut self,
2281 buffer: &Model<Buffer>,
2282 old_file: &File,
2283 cx: &mut ModelContext<Self>,
2284 ) {
2285 let old_path = match old_file.as_local() {
2286 Some(local) => local.abs_path(cx),
2287 None => return,
2288 };
2289
2290 buffer.update(cx, |buffer, cx| {
2291 let worktree_id = old_file.worktree_id(cx);
2292 let ids = &self.language_server_ids;
2293
2294 if let Some(language) = buffer.language().cloned() {
2295 for adapter in self.languages.lsp_adapters(&language) {
2296 if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) {
2297 buffer.update_diagnostics(*server_id, Default::default(), cx);
2298 }
2299 }
2300 }
2301
2302 self.buffer_snapshots.remove(&buffer.remote_id());
2303 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2304 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2305 language_server
2306 .notify::<lsp::notification::DidCloseTextDocument>(
2307 lsp::DidCloseTextDocumentParams {
2308 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2309 },
2310 )
2311 .log_err();
2312 }
2313 });
2314 }
2315
2316 fn register_buffer_with_copilot(
2317 &self,
2318 buffer_handle: &Model<Buffer>,
2319 cx: &mut ModelContext<Self>,
2320 ) {
2321 if let Some(copilot) = Copilot::global(cx) {
2322 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2323 }
2324 }
2325
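    /// Background loop that batches outgoing buffer operations and language server
    /// updates, flushing pending operations over RPC before each language server
    /// update so that peers observe them in a consistent order. If sending fails
    /// while this project is a guest, a resync with the host is requested.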
2326 async fn send_buffer_ordered_messages(
2327 this: WeakModel<Self>,
2328 rx: UnboundedReceiver<BufferOrderedMessage>,
2329 mut cx: AsyncAppContext,
2330 ) -> Result<()> {
2331 const MAX_BATCH_SIZE: usize = 128;
2332
2333 let mut operations_by_buffer_id = HashMap::default();
2334 async fn flush_operations(
2335 this: &WeakModel<Project>,
2336 operations_by_buffer_id: &mut HashMap<BufferId, Vec<proto::Operation>>,
2337 needs_resync_with_host: &mut bool,
2338 is_local: bool,
2339 cx: &mut AsyncAppContext,
2340 ) -> Result<()> {
2341 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2342 let request = this.update(cx, |this, _| {
2343 let project_id = this.remote_id()?;
2344 Some(this.client.request(proto::UpdateBuffer {
2345 buffer_id: buffer_id.into(),
2346 project_id,
2347 operations,
2348 }))
2349 })?;
2350 if let Some(request) = request {
2351 if request.await.is_err() && !is_local {
2352 *needs_resync_with_host = true;
2353 break;
2354 }
2355 }
2356 }
2357 Ok(())
2358 }
2359
2360 let mut needs_resync_with_host = false;
2361 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2362
2363 while let Some(changes) = changes.next().await {
2364 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2365
2366 for change in changes {
2367 match change {
2368 BufferOrderedMessage::Operation {
2369 buffer_id,
2370 operation,
2371 } => {
2372 if needs_resync_with_host {
2373 continue;
2374 }
2375
2376 operations_by_buffer_id
2377 .entry(buffer_id)
2378 .or_insert(Vec::new())
2379 .push(operation);
2380 }
2381
2382 BufferOrderedMessage::Resync => {
2383 operations_by_buffer_id.clear();
2384 if this
2385 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2386 .await
2387 .is_ok()
2388 {
2389 needs_resync_with_host = false;
2390 }
2391 }
2392
2393 BufferOrderedMessage::LanguageServerUpdate {
2394 language_server_id,
2395 message,
2396 } => {
2397 flush_operations(
2398 &this,
2399 &mut operations_by_buffer_id,
2400 &mut needs_resync_with_host,
2401 is_local,
2402 &mut cx,
2403 )
2404 .await?;
2405
2406 this.update(&mut cx, |this, _| {
2407 if let Some(project_id) = this.remote_id() {
2408 this.client
2409 .send(proto::UpdateLanguageServer {
2410 project_id,
2411 language_server_id: language_server_id.0 as u64,
2412 variant: Some(message),
2413 })
2414 .log_err();
2415 }
2416 })?;
2417 }
2418 }
2419 }
2420
2421 flush_operations(
2422 &this,
2423 &mut operations_by_buffer_id,
2424 &mut needs_resync_with_host,
2425 is_local,
2426 &mut cx,
2427 )
2428 .await?;
2429 }
2430
2431 Ok(())
2432 }
2433
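    /// Reacts to buffer events: forwards edit operations to collaborators, sends
    /// `didChange`/`didSave` notifications to language servers, updates the path and
    /// entry-id indices when a buffer's file changes, and schedules git diff updates.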
2434 fn on_buffer_event(
2435 &mut self,
2436 buffer: Model<Buffer>,
2437 event: &BufferEvent,
2438 cx: &mut ModelContext<Self>,
2439 ) -> Option<()> {
2440 if matches!(
2441 event,
2442 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2443 ) {
2444 self.request_buffer_diff_recalculation(&buffer, cx);
2445 }
2446
2447 match event {
2448 BufferEvent::Operation(operation) => {
2449 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation {
2450 buffer_id: buffer.read(cx).remote_id(),
2451 operation: language::proto::serialize_operation(operation),
2452 })
2453 .ok();
2454 }
2455
2456 BufferEvent::Edited { .. } => {
2457 let buffer = buffer.read(cx);
2458 let file = File::from_dyn(buffer.file())?;
2459 let abs_path = file.as_local()?.abs_path(cx);
2460 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2461 let next_snapshot = buffer.text_snapshot();
2462
2463 let language_servers: Vec<_> = self
2464 .language_servers_for_buffer(buffer, cx)
2465 .map(|i| i.1.clone())
2466 .collect();
2467
2468 for language_server in language_servers {
2469 let language_server = language_server.clone();
2470
2471 let buffer_snapshots = self
2472 .buffer_snapshots
2473 .get_mut(&buffer.remote_id())
2474 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2475 let previous_snapshot = buffer_snapshots.last()?;
2476
2477 let build_incremental_change = || {
2478 buffer
2479 .edits_since::<(PointUtf16, usize)>(
2480 previous_snapshot.snapshot.version(),
2481 )
2482 .map(|edit| {
2483 let edit_start = edit.new.start.0;
2484 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2485 let new_text = next_snapshot
2486 .text_for_range(edit.new.start.1..edit.new.end.1)
2487 .collect();
2488 lsp::TextDocumentContentChangeEvent {
2489 range: Some(lsp::Range::new(
2490 point_to_lsp(edit_start),
2491 point_to_lsp(edit_end),
2492 )),
2493 range_length: None,
2494 text: new_text,
2495 }
2496 })
2497 .collect()
2498 };
2499
2500 let document_sync_kind = language_server
2501 .capabilities()
2502 .text_document_sync
2503 .as_ref()
2504 .and_then(|sync| match sync {
2505 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2506 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2507 });
2508
2509 let content_changes: Vec<_> = match document_sync_kind {
2510 Some(lsp::TextDocumentSyncKind::FULL) => {
2511 vec![lsp::TextDocumentContentChangeEvent {
2512 range: None,
2513 range_length: None,
2514 text: next_snapshot.text(),
2515 }]
2516 }
2517 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2518 _ => {
2519 #[cfg(any(test, feature = "test-support"))]
2520 {
2521 build_incremental_change()
2522 }
2523
2524 #[cfg(not(any(test, feature = "test-support")))]
2525 {
2526 continue;
2527 }
2528 }
2529 };
2530
2531 let next_version = previous_snapshot.version + 1;
2532
2533 buffer_snapshots.push(LspBufferSnapshot {
2534 version: next_version,
2535 snapshot: next_snapshot.clone(),
2536 });
2537
2538 language_server
2539 .notify::<lsp::notification::DidChangeTextDocument>(
2540 lsp::DidChangeTextDocumentParams {
2541 text_document: lsp::VersionedTextDocumentIdentifier::new(
2542 uri.clone(),
2543 next_version,
2544 ),
2545 content_changes,
2546 },
2547 )
2548 .log_err();
2549 }
2550 }
2551
2552 BufferEvent::Saved => {
2553 let file = File::from_dyn(buffer.read(cx).file())?;
2554 let worktree_id = file.worktree_id(cx);
2555 let abs_path = file.as_local()?.abs_path(cx);
2556 let text_document = lsp::TextDocumentIdentifier {
2557 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2558 };
2559
2560 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2561 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2562
2563 server
2564 .notify::<lsp::notification::DidSaveTextDocument>(
2565 lsp::DidSaveTextDocumentParams {
2566 text_document: text_document.clone(),
2567 text,
2568 },
2569 )
2570 .log_err();
2571 }
2572
2573 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2574 for language_server_id in language_server_ids {
2575 if let Some(LanguageServerState::Running {
2576 adapter,
2577 simulate_disk_based_diagnostics_completion,
2578 ..
2579 }) = self.language_servers.get_mut(&language_server_id)
2580 {
2581 // After saving a buffer using a language server that doesn't provide
2582 // a disk-based progress token, kick off a timer that will reset every
2583 // time the buffer is saved. If the timer eventually fires, simulate
2584 // disk-based diagnostics being finished so that other pieces of UI
2585 // (e.g., project diagnostics view, diagnostic status bar) can update.
2586 // We don't emit an event right away because the language server might take
2587 // some time to publish diagnostics.
2588 if adapter.disk_based_diagnostics_progress_token.is_none() {
2589 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2590 Duration::from_secs(1);
2591
                            let task = cx.spawn(move |this, mut cx| async move {
                                cx.background_executor()
                                    .timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE)
                                    .await;
                                if let Some(this) = this.upgrade() {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(language_server_id, cx);
                                        this.enqueue_buffer_ordered_message(
                                            BufferOrderedMessage::LanguageServerUpdate {
                                                language_server_id,
                                                message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                                                    Default::default(),
                                                ),
                                            },
                                        )
                                        .ok();
                                    })
                                    .ok();
                                }
                            });
2610 *simulate_disk_based_diagnostics_completion = Some(task);
2611 }
2612 }
2613 }
2614 }
2615 BufferEvent::FileHandleChanged => {
2616 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2617 return None;
2618 };
2619
2620 let remote_id = buffer.read(cx).remote_id();
2621 if let Some(entry_id) = file.entry_id {
2622 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2623 Some(_) => {
2624 return None;
2625 }
2626 None => {
2627 self.local_buffer_ids_by_entry_id
2628 .insert(entry_id, remote_id);
2629 }
2630 }
2631 };
2632 self.local_buffer_ids_by_path.insert(
2633 ProjectPath {
2634 worktree_id: file.worktree_id(cx),
2635 path: file.path.clone(),
2636 },
2637 remote_id,
2638 );
2639 }
2640 _ => {}
2641 }
2642
2643 None
2644 }
2645
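    /// Marks the buffer as needing a git diff recalculation. With no
    /// `git.gutter_debounce` setting the recalculation runs on the next effect cycle;
    /// otherwise it is debounced by the configured delay (at least 50ms).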
2646 fn request_buffer_diff_recalculation(
2647 &mut self,
2648 buffer: &Model<Buffer>,
2649 cx: &mut ModelContext<Self>,
2650 ) {
2651 self.buffers_needing_diff.insert(buffer.downgrade());
2652 let first_insertion = self.buffers_needing_diff.len() == 1;
2653
2654 let settings = ProjectSettings::get_global(cx);
2655 let delay = if let Some(delay) = settings.git.gutter_debounce {
2656 delay
2657 } else {
2658 if first_insertion {
2659 let this = cx.weak_model();
2660 cx.defer(move |cx| {
2661 if let Some(this) = this.upgrade() {
2662 this.update(cx, |this, cx| {
2663 this.recalculate_buffer_diffs(cx).detach();
2664 });
2665 }
2666 });
2667 }
2668 return;
2669 };
2670
2671 const MIN_DELAY: u64 = 50;
2672 let delay = delay.max(MIN_DELAY);
2673 let duration = Duration::from_millis(delay);
2674
2675 self.git_diff_debouncer
2676 .fire_new(duration, cx, move |this, cx| {
2677 this.recalculate_buffer_diffs(cx)
2678 });
2679 }
2680
2681 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2682 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2683 cx.spawn(move |this, mut cx| async move {
2684 let tasks: Vec<_> = buffers
2685 .iter()
2686 .filter_map(|buffer| {
2687 let buffer = buffer.upgrade()?;
2688 buffer
2689 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2690 .ok()
2691 .flatten()
2692 })
2693 .collect();
2694
2695 futures::future::join_all(tasks).await;
2696
2697 this.update(&mut cx, |this, cx| {
2698 if !this.buffers_needing_diff.is_empty() {
2699 this.recalculate_buffer_diffs(cx).detach();
2700 } else {
2701 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2702 for buffer in buffers {
2703 if let Some(buffer) = buffer.upgrade() {
2704 buffer.update(cx, |_, cx| cx.notify());
2705 }
2706 }
2707 }
2708 })
2709 .ok();
2710 })
2711 }
2712
2713 fn language_servers_for_worktree(
2714 &self,
2715 worktree_id: WorktreeId,
2716 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2717 self.language_server_ids
2718 .iter()
2719 .filter_map(move |((language_server_worktree_id, _), id)| {
2720 if *language_server_worktree_id == worktree_id {
2721 if let Some(LanguageServerState::Running {
2722 adapter,
2723 language,
2724 server,
2725 ..
2726 }) = self.language_servers.get(id)
2727 {
2728 return Some((adapter, language, server));
2729 }
2730 }
2731 None
2732 })
2733 }
2734
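    /// Background task that reacts to language registry changes: when the registry is
    /// reloaded it clears languages on open buffers, then re-detects languages for
    /// plain-text buffers and reparses buffers containing unknown injections.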
2735 fn maintain_buffer_languages(
2736 languages: Arc<LanguageRegistry>,
2737 cx: &mut ModelContext<Project>,
2738 ) -> Task<()> {
2739 let mut subscription = languages.subscribe();
2740 let mut prev_reload_count = languages.reload_count();
2741 cx.spawn(move |project, mut cx| async move {
2742 while let Some(()) = subscription.next().await {
2743 if let Some(project) = project.upgrade() {
2744 // If the language registry has been reloaded, then remove and
2745 // re-assign the languages on all open buffers.
2746 let reload_count = languages.reload_count();
2747 if reload_count > prev_reload_count {
2748 prev_reload_count = reload_count;
2749 project
2750 .update(&mut cx, |this, cx| {
2751 let buffers = this
2752 .opened_buffers
2753 .values()
2754 .filter_map(|b| b.upgrade())
2755 .collect::<Vec<_>>();
2756 for buffer in buffers {
2757 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2758 {
2759 this.unregister_buffer_from_language_servers(
2760 &buffer, &f, cx,
2761 );
2762 buffer
2763 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2764 }
2765 }
2766 })
2767 .ok();
2768 }
2769
2770 project
2771 .update(&mut cx, |project, cx| {
2772 let mut plain_text_buffers = Vec::new();
2773 let mut buffers_with_unknown_injections = Vec::new();
2774 for buffer in project.opened_buffers.values() {
2775 if let Some(handle) = buffer.upgrade() {
2776 let buffer = &handle.read(cx);
2777 if buffer.language().is_none()
2778 || buffer.language() == Some(&*language::PLAIN_TEXT)
2779 {
2780 plain_text_buffers.push(handle);
2781 } else if buffer.contains_unknown_injections() {
2782 buffers_with_unknown_injections.push(handle);
2783 }
2784 }
2785 }
2786
2787 for buffer in plain_text_buffers {
2788 project.detect_language_for_buffer(&buffer, cx);
2789 project.register_buffer_with_language_servers(&buffer, cx);
2790 }
2791
2792 for buffer in buffers_with_unknown_injections {
2793 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2794 }
2795 })
2796 .ok();
2797 }
2798 }
2799 })
2800 }
2801
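    /// Background task that pushes `workspace/didChangeConfiguration` to every running
    /// language server whenever the global settings change.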
2802 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2803 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2804 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2805
2806 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2807 *settings_changed_tx.borrow_mut() = ();
2808 });
2809
2810 cx.spawn(move |this, mut cx| async move {
2811 while let Some(()) = settings_changed_rx.next().await {
2812 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2813 this.language_servers
2814 .values()
2815 .filter_map(|state| match state {
2816 LanguageServerState::Starting(_) => None,
2817 LanguageServerState::Running {
2818 adapter, server, ..
2819 } => Some((adapter.clone(), server.clone())),
2820 })
2821 .collect()
2822 })?;
2823
2824 for (adapter, server) in servers {
2825 let settings =
2826 cx.update(|cx| adapter.workspace_configuration(server.root_path(), cx))?;
2827
2828 server
2829 .notify::<lsp::notification::DidChangeConfiguration>(
2830 lsp::DidChangeConfigurationParams { settings },
2831 )
2832 .ok();
2833 }
2834 }
2835
2836 drop(settings_observation);
2837 anyhow::Ok(())
2838 })
2839 }
2840
2841 fn detect_language_for_buffer(
2842 &mut self,
2843 buffer_handle: &Model<Buffer>,
2844 cx: &mut ModelContext<Self>,
2845 ) {
2846 // If the buffer has a language, set it and start the language server if we haven't already.
2847 let buffer = buffer_handle.read(cx);
2848 let Some(file) = buffer.file() else {
2849 return;
2850 };
2851 let content = buffer.as_rope();
2852 let Some(new_language_result) = self
2853 .languages
2854 .language_for_file(file, Some(content), cx)
2855 .now_or_never()
2856 else {
2857 return;
2858 };
2859
2860 match new_language_result {
2861 Err(e) => {
2862 if e.is::<language::LanguageNotFound>() {
2863 cx.emit(Event::LanguageNotFound(buffer_handle.clone()))
2864 }
2865 }
2866 Ok(new_language) => {
2867 self.set_language_for_buffer(buffer_handle, new_language, cx);
2868 }
2869 };
2870 }
2871
2872 pub fn set_language_for_buffer(
2873 &mut self,
2874 buffer: &Model<Buffer>,
2875 new_language: Arc<Language>,
2876 cx: &mut ModelContext<Self>,
2877 ) {
2878 buffer.update(cx, |buffer, cx| {
2879 if buffer.language().map_or(true, |old_language| {
2880 !Arc::ptr_eq(old_language, &new_language)
2881 }) {
2882 buffer.set_language(Some(new_language.clone()), cx);
2883 }
2884 });
2885
2886 let buffer_file = buffer.read(cx).file().cloned();
2887 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2888 let buffer_file = File::from_dyn(buffer_file.as_ref());
2889 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2890 if let Some(prettier_plugins) =
2891 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2892 {
2893 self.install_default_prettier(worktree, prettier_plugins.iter().cloned(), cx);
2894 };
2895 if let Some(file) = buffer_file {
2896 let worktree = file.worktree.clone();
2897 if worktree.read(cx).is_local() {
2898 self.start_language_servers(&worktree, new_language, cx);
2899 }
2900 }
2901 }
2902
2903 fn start_language_servers(
2904 &mut self,
2905 worktree: &Model<Worktree>,
2906 language: Arc<Language>,
2907 cx: &mut ModelContext<Self>,
2908 ) {
2909 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2910 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2911 if !settings.enable_language_server {
2912 return;
2913 }
2914
2915 for adapter in self.languages.clone().lsp_adapters(&language) {
2916 self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
2917 }
2918 }
2919
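    /// Starts a language server for the given worktree and adapter, unless one is
    /// already registered for that key or the adapter has exceeded its reinstall
    /// attempts. Any `initialization_options` from the user's LSP settings are merged
    /// in, and a failed launch schedules a validation/reinstall check.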
2920 fn start_language_server(
2921 &mut self,
2922 worktree_handle: &Model<Worktree>,
2923 adapter: Arc<CachedLspAdapter>,
2924 language: Arc<Language>,
2925 cx: &mut ModelContext<Self>,
2926 ) {
2927 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2928 return;
2929 }
2930
2931 let worktree = worktree_handle.read(cx);
2932 let worktree_id = worktree.id();
2933 let worktree_path = worktree.abs_path();
2934 let key = (worktree_id, adapter.name.clone());
2935 if self.language_server_ids.contains_key(&key) {
2936 return;
2937 }
2938
2939 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2940 let pending_server = match self.languages.create_pending_language_server(
2941 stderr_capture.clone(),
2942 language.clone(),
2943 adapter.clone(),
2944 Arc::clone(&worktree_path),
2945 ProjectLspAdapterDelegate::new(self, worktree_handle, cx),
2946 cx,
2947 ) {
2948 Some(pending_server) => pending_server,
2949 None => return,
2950 };
2951
2952 let project_settings = ProjectSettings::get(
2953 Some(SettingsLocation {
2954 worktree_id: worktree_id.to_proto() as usize,
2955 path: Path::new(""),
2956 }),
2957 cx,
2958 );
2959 let lsp = project_settings.lsp.get(&adapter.name.0);
2960 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2961
2962 let server_id = pending_server.server_id;
2963 let container_dir = pending_server.container_dir.clone();
2964 let state = LanguageServerState::Starting({
2965 let adapter = adapter.clone();
2966 let server_name = adapter.name.0.clone();
2967 let language = language.clone();
2968 let key = key.clone();
2969
2970 cx.spawn(move |this, mut cx| async move {
2971 let result = Self::setup_and_insert_language_server(
2972 this.clone(),
2973 &worktree_path,
2974 override_options,
2975 pending_server,
2976 adapter.clone(),
2977 language.clone(),
2978 server_id,
2979 key,
2980 &mut cx,
2981 )
2982 .await;
2983
2984 match result {
2985 Ok(server) => {
2986 stderr_capture.lock().take();
2987 server
2988 }
2989
2990 Err(err) => {
2991 log::error!("failed to start language server {server_name:?}: {err}");
2992 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2993
2994 let this = this.upgrade()?;
2995 let container_dir = container_dir?;
2996
2997 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2998 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2999 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
3000 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
3001 return None;
3002 }
3003
3004 log::info!(
3005 "retrying installation of language server {server_name:?} in {}s",
3006 SERVER_REINSTALL_DEBOUNCE_TIMEOUT.as_secs()
3007 );
3008 cx.background_executor()
3009 .timer(SERVER_REINSTALL_DEBOUNCE_TIMEOUT)
3010 .await;
3011
3012 let installation_test_binary = adapter
3013 .installation_test_binary(container_dir.to_path_buf())
3014 .await;
3015
3016 this.update(&mut cx, |_, cx| {
3017 Self::check_errored_server(
3018 language,
3019 adapter,
3020 server_id,
3021 installation_test_binary,
3022 cx,
3023 )
3024 })
3025 .ok();
3026
3027 None
3028 }
3029 }
3030 })
3031 });
3032
3033 self.language_servers.insert(server_id, state);
3034 self.language_server_ids.insert(key, server_id);
3035 }
3036
3037 fn reinstall_language_server(
3038 &mut self,
3039 language: Arc<Language>,
3040 adapter: Arc<CachedLspAdapter>,
3041 server_id: LanguageServerId,
3042 cx: &mut ModelContext<Self>,
3043 ) -> Option<Task<()>> {
3044 log::info!("beginning to reinstall server");
3045
3046 let existing_server = match self.language_servers.remove(&server_id) {
3047 Some(LanguageServerState::Running { server, .. }) => Some(server),
3048 _ => None,
3049 };
3050
3051 for worktree in &self.worktrees {
3052 if let Some(worktree) = worktree.upgrade() {
3053 let key = (worktree.read(cx).id(), adapter.name.clone());
3054 self.language_server_ids.remove(&key);
3055 }
3056 }
3057
3058 Some(cx.spawn(move |this, mut cx| async move {
3059 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
3060 log::info!("shutting down existing server");
3061 task.await;
3062 }
3063
            // TODO: This is race-safe with regard to preventing new instances from
            // starting while the container is being deleted, but existing instances in
            // other projects will be left in a broken state.
3067 let Some(task) = this
3068 .update(&mut cx, |this, cx| {
3069 this.languages.delete_server_container(adapter.clone(), cx)
3070 })
3071 .log_err()
3072 else {
3073 return;
3074 };
3075 task.await;
3076
3077 this.update(&mut cx, |this, cx| {
3078 let worktrees = this.worktrees.clone();
3079 for worktree in worktrees {
3080 if let Some(worktree) = worktree.upgrade() {
3081 this.start_language_server(
3082 &worktree,
3083 adapter.clone(),
3084 language.clone(),
3085 cx,
3086 );
3087 }
3088 }
3089 })
3090 .ok();
3091 }))
3092 }
3093
3094 #[allow(clippy::too_many_arguments)]
3095 async fn setup_and_insert_language_server(
3096 this: WeakModel<Self>,
3097 worktree_path: &Path,
3098 override_initialization_options: Option<serde_json::Value>,
3099 pending_server: PendingLanguageServer,
3100 adapter: Arc<CachedLspAdapter>,
3101 language: Arc<Language>,
3102 server_id: LanguageServerId,
3103 key: (WorktreeId, LanguageServerName),
3104 cx: &mut AsyncAppContext,
3105 ) -> Result<Option<Arc<LanguageServer>>> {
3106 let language_server = Self::setup_pending_language_server(
3107 this.clone(),
3108 override_initialization_options,
3109 pending_server,
3110 worktree_path,
3111 adapter.clone(),
3112 server_id,
3113 cx,
3114 )
3115 .await?;
3116
3117 let this = match this.upgrade() {
3118 Some(this) => this,
3119 None => return Err(anyhow!("failed to upgrade project handle")),
3120 };
3121
3122 this.update(cx, |this, cx| {
3123 this.insert_newly_running_language_server(
3124 language,
3125 adapter,
3126 language_server.clone(),
3127 server_id,
3128 key,
3129 cx,
3130 )
3131 })??;
3132
3133 Ok(Some(language_server))
3134 }
3135
3136 async fn setup_pending_language_server(
3137 this: WeakModel<Self>,
3138 override_options: Option<serde_json::Value>,
3139 pending_server: PendingLanguageServer,
3140 worktree_path: &Path,
3141 adapter: Arc<CachedLspAdapter>,
3142 server_id: LanguageServerId,
3143 cx: &mut AsyncAppContext,
3144 ) -> Result<Arc<LanguageServer>> {
3145 let workspace_config =
3146 cx.update(|cx| adapter.workspace_configuration(worktree_path, cx))?;
3147 let (language_server, mut initialization_options) = pending_server.task.await?;
3148
3149 let name = language_server.name();
3150 language_server
3151 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3152 let adapter = adapter.clone();
3153 let this = this.clone();
3154 move |mut params, mut cx| {
3155 let adapter = adapter.clone();
3156 if let Some(this) = this.upgrade() {
3157 adapter.process_diagnostics(&mut params);
3158 this.update(&mut cx, |this, cx| {
3159 this.update_diagnostics(
3160 server_id,
3161 params,
3162 &adapter.disk_based_diagnostic_sources,
3163 cx,
3164 )
3165 .log_err();
3166 })
3167 .ok();
3168 }
3169 }
3170 })
3171 .detach();
3172
3173 language_server
3174 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3175 let adapter = adapter.clone();
3176 let worktree_path = worktree_path.to_path_buf();
3177 move |params, cx| {
3178 let adapter = adapter.clone();
3179 let worktree_path = worktree_path.clone();
3180 async move {
3181 let workspace_config =
3182 cx.update(|cx| adapter.workspace_configuration(&worktree_path, cx))?;
3183 Ok(params
3184 .items
3185 .into_iter()
3186 .map(|item| {
3187 if let Some(section) = &item.section {
3188 workspace_config
3189 .get(section)
3190 .cloned()
3191 .unwrap_or(serde_json::Value::Null)
3192 } else {
3193 workspace_config.clone()
3194 }
3195 })
3196 .collect())
3197 }
3198 }
3199 })
3200 .detach();
3201
3202 // Even though we don't have handling for these requests, respond to them to
3203 // avoid stalling any language server like `gopls` which waits for a response
3204 // to these requests when initializing.
3205 language_server
3206 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3207 let this = this.clone();
3208 move |params, mut cx| {
3209 let this = this.clone();
3210 async move {
3211 this.update(&mut cx, |this, _| {
3212 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3213 {
3214 if let lsp::NumberOrString::String(token) = params.token {
3215 status.progress_tokens.insert(token);
3216 }
3217 }
3218 })?;
3219
3220 Ok(())
3221 }
3222 }
3223 })
3224 .detach();
3225
3226 language_server
3227 .on_request::<lsp::request::RegisterCapability, _, _>({
3228 let this = this.clone();
3229 move |params, mut cx| {
3230 let this = this.clone();
3231 async move {
3232 for reg in params.registrations {
3233 if reg.method == "workspace/didChangeWatchedFiles" {
3234 if let Some(options) = reg.register_options {
3235 let options = serde_json::from_value(options)?;
3236 this.update(&mut cx, |this, cx| {
3237 this.on_lsp_did_change_watched_files(
3238 server_id, options, cx,
3239 );
3240 })?;
3241 }
3242 }
3243 }
3244 Ok(())
3245 }
3246 }
3247 })
3248 .detach();
3249
3250 language_server
3251 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3252 let adapter = adapter.clone();
3253 let this = this.clone();
3254 move |params, cx| {
3255 Self::on_lsp_workspace_edit(
3256 this.clone(),
3257 params,
3258 server_id,
3259 adapter.clone(),
3260 cx,
3261 )
3262 }
3263 })
3264 .detach();
3265
3266 language_server
3267 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3268 let this = this.clone();
3269 move |(), mut cx| {
3270 let this = this.clone();
3271 async move {
3272 this.update(&mut cx, |project, cx| {
3273 cx.emit(Event::RefreshInlayHints);
3274 project.remote_id().map(|project_id| {
3275 project.client.send(proto::RefreshInlayHints { project_id })
3276 })
3277 })?
3278 .transpose()?;
3279 Ok(())
3280 }
3281 }
3282 })
3283 .detach();
3284
3285 language_server
3286 .on_request::<lsp::request::ShowMessageRequest, _, _>({
3287 let this = this.clone();
3288 let name = name.to_string();
3289 move |params, mut cx| {
3290 let this = this.clone();
3291 let name = name.to_string();
3292 async move {
3293 if let Some(actions) = params.actions {
3294 let (tx, mut rx) = smol::channel::bounded(1);
3295 let request = LanguageServerPromptRequest {
3296 level: match params.typ {
3297 lsp::MessageType::ERROR => PromptLevel::Critical,
3298 lsp::MessageType::WARNING => PromptLevel::Warning,
3299 _ => PromptLevel::Info,
3300 },
3301 message: params.message,
3302 actions,
3303 response_channel: tx,
3304 lsp_name: name.clone(),
3305 };
3306
                            if this
                                .update(&mut cx, |_, cx| {
                                    cx.emit(Event::LanguageServerPrompt(request));
                                })
                                .is_ok()
                            {
3310 let response = rx.next().await;
3311
3312 Ok(response)
3313 } else {
3314 Ok(None)
3315 }
3316 } else {
3317 Ok(None)
3318 }
3319 }
3320 }
3321 })
3322 .detach();
3323
3324 let disk_based_diagnostics_progress_token =
3325 adapter.disk_based_diagnostics_progress_token.clone();
3326
3327 language_server
3328 .on_notification::<ServerStatus, _>({
3329 let this = this.clone();
3330 let name = name.to_string();
3331 move |params, mut cx| {
3332 let this = this.clone();
3333 let name = name.to_string();
3334 if let Some(ref message) = params.message {
3335 let message = message.trim();
3336 if !message.is_empty() {
3337 let formatted_message = format!(
3338 "Language server {name} (id {server_id}) status update: {message}"
3339 );
3340 match params.health {
3341 ServerHealthStatus::Ok => log::info!("{}", formatted_message),
3342 ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
3343 ServerHealthStatus::Error => {
3344 log::error!("{}", formatted_message);
3345 let (tx, _rx) = smol::channel::bounded(1);
3346 let request = LanguageServerPromptRequest {
3347 level: PromptLevel::Critical,
3348 message: params.message.unwrap_or_default(),
3349 actions: Vec::new(),
3350 response_channel: tx,
3351 lsp_name: name.clone(),
3352 };
3353 let _ = this
3354 .update(&mut cx, |_, cx| {
3355 cx.emit(Event::LanguageServerPrompt(request));
3356 })
3357 .ok();
3358 }
3359 ServerHealthStatus::Other(status) => {
3360 log::info!(
3361 "Unknown server health: {status}\n{formatted_message}"
3362 )
3363 }
3364 }
3365 }
3366 }
3367 }
3368 })
3369 .detach();
3370
3371 language_server
3372 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3373 if let Some(this) = this.upgrade() {
3374 this.update(&mut cx, |this, cx| {
3375 this.on_lsp_progress(
3376 params,
3377 server_id,
3378 disk_based_diagnostics_progress_token.clone(),
3379 cx,
3380 );
3381 })
3382 .ok();
3383 }
3384 })
3385 .detach();
3386
3387 match (&mut initialization_options, override_options) {
3388 (Some(initialization_options), Some(override_options)) => {
3389 merge_json_value_into(override_options, initialization_options);
3390 }
3391 (None, override_options) => initialization_options = override_options,
3392 _ => {}
3393 }
3394 let language_server = cx
3395 .update(|cx| language_server.initialize(initialization_options, cx))?
3396 .await?;
3397
3398 language_server
3399 .notify::<lsp::notification::DidChangeConfiguration>(
3400 lsp::DidChangeConfigurationParams {
3401 settings: workspace_config,
3402 },
3403 )
3404 .ok();
3405
3406 Ok(language_server)
3407 }
3408
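    /// Finishes bringing a language server online: records it as running, reports it
    /// to collaborators, and sends `didOpen` for every open buffer in the worktree
    /// that matches the server's language.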
3409 fn insert_newly_running_language_server(
3410 &mut self,
3411 language: Arc<Language>,
3412 adapter: Arc<CachedLspAdapter>,
3413 language_server: Arc<LanguageServer>,
3414 server_id: LanguageServerId,
3415 key: (WorktreeId, LanguageServerName),
3416 cx: &mut ModelContext<Self>,
3417 ) -> Result<()> {
        // If the language server registered for this key doesn't match the given server id,
        // don't store the server; dropping it here will kill the process.
3420 if self
3421 .language_server_ids
3422 .get(&key)
3423 .map(|id| id != &server_id)
3424 .unwrap_or(false)
3425 {
3426 return Ok(());
3427 }
3428
        // Update the language_servers collection with the Running variant of
        // LanguageServerState, indicating that the server is up and ready.
3431 self.language_servers.insert(
3432 server_id,
3433 LanguageServerState::Running {
3434 adapter: adapter.clone(),
3435 language: language.clone(),
3436 server: language_server.clone(),
3437 simulate_disk_based_diagnostics_completion: None,
3438 },
3439 );
3440
3441 self.language_server_statuses.insert(
3442 server_id,
3443 LanguageServerStatus {
3444 name: language_server.name().to_string(),
3445 pending_work: Default::default(),
3446 has_pending_diagnostic_updates: false,
3447 progress_tokens: Default::default(),
3448 },
3449 );
3450
3451 cx.emit(Event::LanguageServerAdded(server_id));
3452
3453 if let Some(project_id) = self.remote_id() {
3454 self.client.send(proto::StartLanguageServer {
3455 project_id,
3456 server: Some(proto::LanguageServer {
3457 id: server_id.0 as u64,
3458 name: language_server.name().to_string(),
3459 }),
3460 })?;
3461 }
3462
3463 // Tell the language server about every open buffer in the worktree that matches the language.
3464 for buffer in self.opened_buffers.values() {
3465 if let Some(buffer_handle) = buffer.upgrade() {
3466 let buffer = buffer_handle.read(cx);
3467 let file = match File::from_dyn(buffer.file()) {
3468 Some(file) => file,
3469 None => continue,
3470 };
3471 let language = match buffer.language() {
3472 Some(language) => language,
3473 None => continue,
3474 };
3475
3476 if file.worktree.read(cx).id() != key.0
3477 || !self
3478 .languages
3479 .lsp_adapters(&language)
3480 .iter()
3481 .any(|a| a.name == key.1)
3482 {
3483 continue;
3484 }
3485
3486 let file = match file.as_local() {
3487 Some(file) => file,
3488 None => continue,
3489 };
3490
3491 let versions = self
3492 .buffer_snapshots
3493 .entry(buffer.remote_id())
3494 .or_default()
3495 .entry(server_id)
3496 .or_insert_with(|| {
3497 vec![LspBufferSnapshot {
3498 version: 0,
3499 snapshot: buffer.text_snapshot(),
3500 }]
3501 });
3502
3503 let snapshot = versions.last().unwrap();
3504 let version = snapshot.version;
3505 let initial_snapshot = &snapshot.snapshot;
3506 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3507 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3508 lsp::DidOpenTextDocumentParams {
3509 text_document: lsp::TextDocumentItem::new(
3510 uri,
3511 adapter
3512 .language_ids
3513 .get(language.name().as_ref())
3514 .cloned()
3515 .unwrap_or_default(),
3516 version,
3517 initial_snapshot.text(),
3518 ),
3519 },
3520 )?;
3521
3522 buffer_handle.update(cx, |buffer, cx| {
3523 buffer.set_completion_triggers(
3524 language_server
3525 .capabilities()
3526 .completion_provider
3527 .as_ref()
3528 .and_then(|provider| provider.trigger_characters.clone())
3529 .unwrap_or_default(),
3530 cx,
3531 )
3532 });
3533 }
3534 }
3535
3536 cx.notify();
3537 Ok(())
3538 }
3539
    // Returns the IDs of all worktrees that no longer have a language server for the
    // given adapter once this server has been stopped.
3542 fn stop_language_server(
3543 &mut self,
3544 worktree_id: WorktreeId,
3545 adapter_name: LanguageServerName,
3546 cx: &mut ModelContext<Self>,
3547 ) -> Task<Vec<WorktreeId>> {
3548 let key = (worktree_id, adapter_name);
3549 if let Some(server_id) = self.language_server_ids.remove(&key) {
3550 let name = key.1 .0;
3551 log::info!("stopping language server {name}");
3552
3553 // Remove other entries for this language server as well
3554 let mut orphaned_worktrees = vec![worktree_id];
3555 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3556 for other_key in other_keys {
3557 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3558 self.language_server_ids.remove(&other_key);
3559 orphaned_worktrees.push(other_key.0);
3560 }
3561 }
3562
3563 for buffer in self.opened_buffers.values() {
3564 if let Some(buffer) = buffer.upgrade() {
3565 buffer.update(cx, |buffer, cx| {
3566 buffer.update_diagnostics(server_id, Default::default(), cx);
3567 });
3568 }
3569 }
3570 for worktree in &self.worktrees {
3571 if let Some(worktree) = worktree.upgrade() {
3572 worktree.update(cx, |worktree, cx| {
3573 if let Some(worktree) = worktree.as_local_mut() {
3574 worktree.clear_diagnostics_for_language_server(server_id, cx);
3575 }
3576 });
3577 }
3578 }
3579
3580 self.language_server_watched_paths.remove(&server_id);
3581 self.language_server_statuses.remove(&server_id);
3582 cx.notify();
3583
3584 let server_state = self.language_servers.remove(&server_id);
3585 cx.emit(Event::LanguageServerRemoved(server_id));
3586 cx.spawn(move |_, cx| async move {
3587 Self::shutdown_language_server(server_state, name, cx).await;
3588 orphaned_worktrees
3589 })
3590 } else {
3591 Task::ready(Vec::new())
3592 }
3593 }
3594
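    /// Shuts a language server down gracefully. If the server is still launching,
    /// waits (up to a timeout) for the launch to finish before requesting shutdown.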
3595 async fn shutdown_language_server(
3596 server_state: Option<LanguageServerState>,
3597 name: Arc<str>,
3598 cx: AsyncAppContext,
3599 ) {
3600 let server = match server_state {
3601 Some(LanguageServerState::Starting(task)) => {
3602 let mut timer = cx
3603 .background_executor()
3604 .timer(SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT)
3605 .fuse();
3606
3607 select! {
3608 server = task.fuse() => server,
3609 _ = timer => {
3610 log::info!(
3611 "timeout waiting for language server {} to finish launching before stopping",
3612 name
3613 );
3614 None
3615 },
3616 }
3617 }
3618
3619 Some(LanguageServerState::Running { server, .. }) => Some(server),
3620
3621 None => None,
3622 };
3623
3624 if let Some(server) = server {
3625 if let Some(shutdown) = server.shutdown() {
3626 shutdown.await;
3627 }
3628 }
3629 }
3630
3631 pub fn restart_language_servers_for_buffers(
3632 &mut self,
3633 buffers: impl IntoIterator<Item = Model<Buffer>>,
3634 cx: &mut ModelContext<Self>,
3635 ) -> Option<()> {
3636 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3637 .into_iter()
3638 .filter_map(|buffer| {
3639 let buffer = buffer.read(cx);
3640 let file = buffer.file()?;
3641 let worktree = File::from_dyn(Some(file))?.worktree.clone();
3642 let language = self
3643 .languages
3644 .language_for_file(file, Some(buffer.as_rope()), cx)
3645 .now_or_never()?
3646 .ok()?;
3647 Some((worktree, language))
3648 })
3649 .collect();
3650 for (worktree, language) in language_server_lookup_info {
3651 self.restart_language_servers(worktree, language, cx);
3652 }
3653
3654 None
3655 }
3656
3657 fn restart_language_servers(
3658 &mut self,
3659 worktree: Model<Worktree>,
3660 language: Arc<Language>,
3661 cx: &mut ModelContext<Self>,
3662 ) {
3663 let worktree_id = worktree.read(cx).id();
3664
3665 let stop_tasks = self
3666 .languages
3667 .clone()
3668 .lsp_adapters(&language)
3669 .iter()
3670 .map(|adapter| {
3671 let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
3672 (stop_task, adapter.name.clone())
3673 })
3674 .collect::<Vec<_>>();
3675 if stop_tasks.is_empty() {
3676 return;
3677 }
3678
3679 cx.spawn(move |this, mut cx| async move {
3680 // For each stopped language server, record all of the worktrees with which
3681 // it was associated.
3682 let mut affected_worktrees = Vec::new();
3683 for (stop_task, language_server_name) in stop_tasks {
3684 for affected_worktree_id in stop_task.await {
3685 affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
3686 }
3687 }
3688
3689 this.update(&mut cx, |this, cx| {
3690 // Restart the language server for the given worktree.
3691 this.start_language_servers(&worktree, language.clone(), cx);
3692
                // Look up the new server ids and set them for each of the orphaned worktrees
3694 for (affected_worktree_id, language_server_name) in affected_worktrees {
3695 if let Some(new_server_id) = this
3696 .language_server_ids
3697 .get(&(worktree_id, language_server_name.clone()))
3698 .cloned()
3699 {
3700 this.language_server_ids
3701 .insert((affected_worktree_id, language_server_name), new_server_id);
3702 }
3703 }
3704 })
3705 .ok();
3706 })
3707 .detach();
3708 }
3709
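    /// Validates a misbehaving language server by running its installation test
    /// binary with a timeout; if the binary is missing, fails to launch, or exits
    /// unsuccessfully, the server is reinstalled. A timeout counts as success.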
3710 fn check_errored_server(
3711 language: Arc<Language>,
3712 adapter: Arc<CachedLspAdapter>,
3713 server_id: LanguageServerId,
3714 installation_test_binary: Option<LanguageServerBinary>,
3715 cx: &mut ModelContext<Self>,
3716 ) {
3717 if !adapter.can_be_reinstalled() {
3718 log::info!(
3719 "Validation check requested for {:?} but it cannot be reinstalled",
3720 adapter.name.0
3721 );
3722 return;
3723 }
3724
3725 cx.spawn(move |this, mut cx| async move {
3726 log::info!("About to spawn test binary");
3727
3728 // A lack of test binary counts as a failure
3729 let process = installation_test_binary.and_then(|binary| {
3730 smol::process::Command::new(&binary.path)
3731 .current_dir(&binary.path)
3732 .args(binary.arguments)
3733 .stdin(Stdio::piped())
3734 .stdout(Stdio::piped())
3735 .stderr(Stdio::inherit())
3736 .kill_on_drop(true)
3737 .spawn()
3738 .ok()
3739 });
3740
3741 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3742 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3743
3744 let mut errored = false;
3745 if let Some(mut process) = process {
3746 futures::select! {
3747 status = process.status().fuse() => match status {
3748 Ok(status) => errored = !status.success(),
3749 Err(_) => errored = true,
3750 },
3751
3752 _ = timeout => {
                        log::info!("test binary timed out; this counts as a success");
3754 _ = process.kill();
3755 }
3756 }
3757 } else {
3758 log::warn!("test binary failed to launch");
3759 errored = true;
3760 }
3761
3762 if errored {
3763 log::warn!("test binary check failed");
3764 let task = this
3765 .update(&mut cx, move |this, cx| {
3766 this.reinstall_language_server(language, adapter, server_id, cx)
3767 })
3768 .ok()
3769 .flatten();
3770
3771 if let Some(task) = task {
3772 task.await;
3773 }
3774 }
3775 })
3776 .detach();
3777 }
3778
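    /// Coalesces bursty language server progress updates: only the most recent pending
    /// update is kept, and it is flushed after a debounce interval or whenever another
    /// buffer-ordered message is enqueued.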
3779 fn enqueue_language_server_progress(
3780 &mut self,
3781 message: BufferOrderedMessage,
3782 cx: &mut ModelContext<Self>,
3783 ) {
3784 self.pending_language_server_update.replace(message);
3785 self.flush_language_server_update.get_or_insert_with(|| {
3786 cx.spawn(|this, mut cx| async move {
3787 cx.background_executor()
3788 .timer(SERVER_PROGRESS_DEBOUNCE_TIMEOUT)
3789 .await;
3790 this.update(&mut cx, |this, _| {
3791 this.flush_language_server_update.take();
3792 if let Some(update) = this.pending_language_server_update.take() {
3793 this.enqueue_buffer_ordered_message(update).ok();
3794 }
3795 })
3796 .ok();
3797 })
3798 });
3799 }
3800
3801 fn enqueue_buffer_ordered_message(&mut self, message: BufferOrderedMessage) -> Result<()> {
3802 if let Some(pending_message) = self.pending_language_server_update.take() {
3803 self.flush_language_server_update.take();
3804 self.buffer_ordered_messages_tx
3805 .unbounded_send(pending_message)
3806 .map_err(|e| anyhow!(e))?;
3807 }
3808 self.buffer_ordered_messages_tx
3809 .unbounded_send(message)
3810 .map_err(|e| anyhow!(e))
3811 }
3812
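/// Handles `$/progress` notifications from a language server. Numeric tokens are skipped,
/// and only tokens the server has registered are handled. Tokens matching the disk-based
/// diagnostics token are reported as disk-based diagnostic updates; all other tokens are
/// translated into work start/progress/end events and forwarded as buffer-ordered
/// language server updates.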
3813 fn on_lsp_progress(
3814 &mut self,
3815 progress: lsp::ProgressParams,
3816 language_server_id: LanguageServerId,
3817 disk_based_diagnostics_progress_token: Option<String>,
3818 cx: &mut ModelContext<Self>,
3819 ) {
3820 let token = match progress.token {
3821 lsp::NumberOrString::String(token) => token,
3822 lsp::NumberOrString::Number(token) => {
3823 log::info!("skipping numeric progress token {}", token);
3824 return;
3825 }
3826 };
3827 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3828 let language_server_status =
3829 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3830 status
3831 } else {
3832 return;
3833 };
3834
3835 if !language_server_status.progress_tokens.contains(&token) {
3836 return;
3837 }
3838
3839 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3840 .as_ref()
3841 .map_or(false, |disk_based_token| {
3842 token.starts_with(disk_based_token)
3843 });
3844
3845 match progress {
3846 lsp::WorkDoneProgress::Begin(report) => {
3847 if is_disk_based_diagnostics_progress {
3848 language_server_status.has_pending_diagnostic_updates = true;
3849 self.disk_based_diagnostics_started(language_server_id, cx);
3850 self.enqueue_buffer_ordered_message(BufferOrderedMessage::LanguageServerUpdate {
3851 language_server_id,
3852 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3853 })
3854 .ok();
3855 } else {
3856 self.on_lsp_work_start(
3857 language_server_id,
3858 token.clone(),
3859 LanguageServerProgress {
3860 message: report.message.clone(),
3861 percentage: report.percentage.map(|p| p as usize),
3862 last_update_at: Instant::now(),
3863 },
3864 cx,
3865 );
3866 self.enqueue_buffer_ordered_message(
3867 BufferOrderedMessage::LanguageServerUpdate {
3868 language_server_id,
3869 message: proto::update_language_server::Variant::WorkStart(
3870 proto::LspWorkStart {
3871 token,
3872 message: report.message,
3873 percentage: report.percentage,
3874 },
3875 ),
3876 },
3877 )
3878 .ok();
3879 }
3880 }
3881 lsp::WorkDoneProgress::Report(report) => {
3882 if !is_disk_based_diagnostics_progress {
3883 self.on_lsp_work_progress(
3884 language_server_id,
3885 token.clone(),
3886 LanguageServerProgress {
3887 message: report.message.clone(),
3888 percentage: report.percentage.map(|p| p as usize),
3889 last_update_at: Instant::now(),
3890 },
3891 cx,
3892 );
3893 self.enqueue_language_server_progress(
3894 BufferOrderedMessage::LanguageServerUpdate {
3895 language_server_id,
3896 message: proto::update_language_server::Variant::WorkProgress(
3897 proto::LspWorkProgress {
3898 token,
3899 message: report.message,
3900 percentage: report.percentage,
3901 },
3902 ),
3903 },
3904 cx,
3905 );
3906 }
3907 }
3908 lsp::WorkDoneProgress::End(_) => {
3909 language_server_status.progress_tokens.remove(&token);
3910
3911 if is_disk_based_diagnostics_progress {
3912 language_server_status.has_pending_diagnostic_updates = false;
3913 self.disk_based_diagnostics_finished(language_server_id, cx);
3914 self.enqueue_buffer_ordered_message(
3915 BufferOrderedMessage::LanguageServerUpdate {
3916 language_server_id,
3917 message:
3918 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3919 Default::default(),
3920 ),
3921 },
3922 )
3923 .ok();
3924 } else {
3925 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3926 self.enqueue_buffer_ordered_message(
3927 BufferOrderedMessage::LanguageServerUpdate {
3928 language_server_id,
3929 message: proto::update_language_server::Variant::WorkEnd(
3930 proto::LspWorkEnd { token },
3931 ),
3932 },
3933 )
3934 .ok();
3935 }
3936 }
3937 }
3938 }
3939
3940 fn on_lsp_work_start(
3941 &mut self,
3942 language_server_id: LanguageServerId,
3943 token: String,
3944 progress: LanguageServerProgress,
3945 cx: &mut ModelContext<Self>,
3946 ) {
3947 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3948 status.pending_work.insert(token, progress);
3949 cx.notify();
3950 }
3951 }
3952
3953 fn on_lsp_work_progress(
3954 &mut self,
3955 language_server_id: LanguageServerId,
3956 token: String,
3957 progress: LanguageServerProgress,
3958 cx: &mut ModelContext<Self>,
3959 ) {
3960 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3961 let entry = status
3962 .pending_work
3963 .entry(token)
3964 .or_insert(LanguageServerProgress {
3965 message: Default::default(),
3966 percentage: Default::default(),
3967 last_update_at: progress.last_update_at,
3968 });
3969 if progress.message.is_some() {
3970 entry.message = progress.message;
3971 }
3972 if progress.percentage.is_some() {
3973 entry.percentage = progress.percentage;
3974 }
3975 entry.last_update_at = progress.last_update_at;
3976 cx.notify();
3977 }
3978 }
3979
3980 fn on_lsp_work_end(
3981 &mut self,
3982 language_server_id: LanguageServerId,
3983 token: String,
3984 cx: &mut ModelContext<Self>,
3985 ) {
3986 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3987 cx.emit(Event::RefreshInlayHints);
3988 status.pending_work.remove(&token);
3989 cx.notify();
3990 }
3991 }
3992
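/// Registers the file watchers requested by a language server, converting each glob into a
/// worktree-relative pattern and collecting one `GlobSet` per worktree. Literal glob prefixes
/// are also added to the worktree scan so the watched paths are picked up.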
3993 fn on_lsp_did_change_watched_files(
3994 &mut self,
3995 language_server_id: LanguageServerId,
3996 params: DidChangeWatchedFilesRegistrationOptions,
3997 cx: &mut ModelContext<Self>,
3998 ) {
3999 let watched_paths = self
4000 .language_server_watched_paths
4001 .entry(language_server_id)
4002 .or_default();
4003
4004 let mut builders = HashMap::default();
4005 for watcher in params.watchers {
4006 for worktree in &self.worktrees {
4007 if let Some(worktree) = worktree.upgrade() {
4008 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
4009 if let Some(abs_path) = tree.abs_path().to_str() {
4010 let relative_glob_pattern = match &watcher.glob_pattern {
4011 lsp::GlobPattern::String(s) => Some(
4012 s.strip_prefix(abs_path)
4013 .unwrap_or(s)
4014 .strip_prefix(std::path::MAIN_SEPARATOR)
4015 .unwrap_or(s),
4016 ),
4017 lsp::GlobPattern::Relative(rp) => {
4018 let base_uri = match &rp.base_uri {
4019 lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
4020 lsp::OneOf::Right(base_uri) => base_uri,
4021 };
4022 base_uri.to_file_path().ok().and_then(|file_path| {
4023 (file_path.to_str() == Some(abs_path))
4024 .then_some(rp.pattern.as_str())
4025 })
4026 }
4027 };
4028 if let Some(relative_glob_pattern) = relative_glob_pattern {
4029 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
4030 tree.as_local_mut()
4031 .unwrap()
4032 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
4033 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
4034 builders
4035 .entry(tree.id())
4036 .or_insert_with(GlobSetBuilder::new)
4037 .add(glob);
4038 }
4039 return true;
4040 }
4041 }
4042 false
4043 });
4044 if glob_is_inside_worktree {
4045 break;
4046 }
4047 }
4048 }
4049 }
4050
4051 watched_paths.clear();
4052 for (worktree_id, builder) in builders {
4053 if let Ok(globset) = builder.build() {
4054 watched_paths.insert(worktree_id, globset);
4055 }
4056 }
4057
4058 cx.notify();
4059 }
4060
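/// Handles a `workspace/applyEdit` request from a language server by applying the edit to the
/// affected buffers and recording the resulting transaction for the server, so that a later
/// command response can report it.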
4061 async fn on_lsp_workspace_edit(
4062 this: WeakModel<Self>,
4063 params: lsp::ApplyWorkspaceEditParams,
4064 server_id: LanguageServerId,
4065 adapter: Arc<CachedLspAdapter>,
4066 mut cx: AsyncAppContext,
4067 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
4068 let this = this
4069 .upgrade()
4070 .ok_or_else(|| anyhow!("project project closed"))?;
4071 let language_server = this
4072 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
4073 .ok_or_else(|| anyhow!("language server not found"))?;
4074 let transaction = Self::deserialize_workspace_edit(
4075 this.clone(),
4076 params.edit,
4077 true,
4078 adapter.clone(),
4079 language_server.clone(),
4080 &mut cx,
4081 )
4082 .await
4083 .log_err();
4084 this.update(&mut cx, |this, _| {
4085 if let Some(transaction) = transaction {
4086 this.last_workspace_edits_by_language_server
4087 .insert(server_id, transaction);
4088 }
4089 })?;
4090 Ok(lsp::ApplyWorkspaceEditResponse {
4091 applied: true,
4092 failed_change: None,
4093 failure_reason: None,
4094 })
4095 }
4096
4097 pub fn language_server_statuses(
4098 &self,
4099 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
4100 self.language_server_statuses.values()
4101 }
4102
4103 pub fn last_formatting_failure(&self) -> Option<&str> {
4104 self.last_formatting_failure.as_deref()
4105 }
4106
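/// Converts a `textDocument/publishDiagnostics` notification into grouped diagnostic entries,
/// one primary diagnostic per group plus entries for its related information, and forwards
/// them to `update_diagnostic_entries`.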
4107 pub fn update_diagnostics(
4108 &mut self,
4109 language_server_id: LanguageServerId,
4110 mut params: lsp::PublishDiagnosticsParams,
4111 disk_based_sources: &[String],
4112 cx: &mut ModelContext<Self>,
4113 ) -> Result<()> {
4114 let abs_path = params
4115 .uri
4116 .to_file_path()
4117 .map_err(|_| anyhow!("URI is not a file"))?;
4118 let mut diagnostics = Vec::default();
4119 let mut primary_diagnostic_group_ids = HashMap::default();
4120 let mut sources_by_group_id = HashMap::default();
4121 let mut supporting_diagnostics = HashMap::default();
4122
4123 // Ensure that primary diagnostics are always the most severe
4124 params.diagnostics.sort_by_key(|item| item.severity);
4125
4126 for diagnostic in &params.diagnostics {
4127 let source = diagnostic.source.as_ref();
4128 let code = diagnostic.code.as_ref().map(|code| match code {
4129 lsp::NumberOrString::Number(code) => code.to_string(),
4130 lsp::NumberOrString::String(code) => code.clone(),
4131 });
4132 let range = range_from_lsp(diagnostic.range);
4133 let is_supporting = diagnostic
4134 .related_information
4135 .as_ref()
4136 .map_or(false, |infos| {
4137 infos.iter().any(|info| {
4138 primary_diagnostic_group_ids.contains_key(&(
4139 source,
4140 code.clone(),
4141 range_from_lsp(info.location.range),
4142 ))
4143 })
4144 });
4145
4146 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
4147 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
4148 });
4149
4150 if is_supporting {
4151 supporting_diagnostics.insert(
4152 (source, code.clone(), range),
4153 (diagnostic.severity, is_unnecessary),
4154 );
4155 } else {
4156 let group_id = post_inc(&mut self.next_diagnostic_group_id);
4157 let is_disk_based =
4158 source.map_or(false, |source| disk_based_sources.contains(source));
4159
4160 sources_by_group_id.insert(group_id, source);
4161 primary_diagnostic_group_ids
4162 .insert((source, code.clone(), range.clone()), group_id);
4163
4164 diagnostics.push(DiagnosticEntry {
4165 range,
4166 diagnostic: Diagnostic {
4167 source: diagnostic.source.clone(),
4168 code: code.clone(),
4169 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
4170 message: diagnostic.message.trim().to_string(),
4171 group_id,
4172 is_primary: true,
4173 is_disk_based,
4174 is_unnecessary,
4175 },
4176 });
4177 if let Some(infos) = &diagnostic.related_information {
4178 for info in infos {
4179 if info.location.uri == params.uri && !info.message.is_empty() {
4180 let range = range_from_lsp(info.location.range);
4181 diagnostics.push(DiagnosticEntry {
4182 range,
4183 diagnostic: Diagnostic {
4184 source: diagnostic.source.clone(),
4185 code: code.clone(),
4186 severity: DiagnosticSeverity::INFORMATION,
4187 message: info.message.trim().to_string(),
4188 group_id,
4189 is_primary: false,
4190 is_disk_based,
4191 is_unnecessary: false,
4192 },
4193 });
4194 }
4195 }
4196 }
4197 }
4198 }
4199
4200 for entry in &mut diagnostics {
4201 let diagnostic = &mut entry.diagnostic;
4202 if !diagnostic.is_primary {
4203 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
4204 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
4205 source,
4206 diagnostic.code.clone(),
4207 entry.range.clone(),
4208 )) {
4209 if let Some(severity) = severity {
4210 diagnostic.severity = severity;
4211 }
4212 diagnostic.is_unnecessary = is_unnecessary;
4213 }
4214 }
4215 }
4216
4217 self.update_diagnostic_entries(
4218 language_server_id,
4219 abs_path,
4220 params.version,
4221 diagnostics,
4222 cx,
4223 )?;
4224 Ok(())
4225 }
4226
4227 pub fn update_diagnostic_entries(
4228 &mut self,
4229 server_id: LanguageServerId,
4230 abs_path: PathBuf,
4231 version: Option<i32>,
4232 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4233 cx: &mut ModelContext<Project>,
4234 ) -> Result<(), anyhow::Error> {
4235 let (worktree, relative_path) = self
4236 .find_local_worktree(&abs_path, cx)
4237 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
4238
4239 let project_path = ProjectPath {
4240 worktree_id: worktree.read(cx).id(),
4241 path: relative_path.into(),
4242 };
4243
4244 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
4245 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
4246 }
4247
4248 let updated = worktree.update(cx, |worktree, cx| {
4249 worktree
4250 .as_local_mut()
4251 .ok_or_else(|| anyhow!("not a local worktree"))?
4252 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
4253 })?;
4254 if updated {
4255 cx.emit(Event::DiagnosticsUpdated {
4256 language_server_id: server_id,
4257 path: project_path,
4258 });
4259 }
4260 Ok(())
4261 }
4262
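/// Applies a new set of diagnostics to an open buffer. Disk-based diagnostics are shifted
/// through the edits made since the last save, all ranges are clipped to the buffer, and
/// empty ranges are expanded to cover at least one character where possible.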
4263 fn update_buffer_diagnostics(
4264 &mut self,
4265 buffer: &Model<Buffer>,
4266 server_id: LanguageServerId,
4267 version: Option<i32>,
4268 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4269 cx: &mut ModelContext<Self>,
4270 ) -> Result<()> {
4271 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4272 Ordering::Equal
4273 .then_with(|| b.is_primary.cmp(&a.is_primary))
4274 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4275 .then_with(|| a.severity.cmp(&b.severity))
4276 .then_with(|| a.message.cmp(&b.message))
4277 }
4278
4279 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4280
4281 diagnostics.sort_unstable_by(|a, b| {
4282 Ordering::Equal
4283 .then_with(|| a.range.start.cmp(&b.range.start))
4284 .then_with(|| b.range.end.cmp(&a.range.end))
4285 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4286 });
4287
4288 let mut sanitized_diagnostics = Vec::new();
4289 let edits_since_save = Patch::new(
4290 snapshot
4291 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4292 .collect(),
4293 );
4294 for entry in diagnostics {
4295 let start;
4296 let end;
4297 if entry.diagnostic.is_disk_based {
4298 // Some diagnostics are based on files on disk instead of buffers'
4299 // current contents. Adjust these diagnostics' ranges to reflect
4300 // any unsaved edits.
4301 start = edits_since_save.old_to_new(entry.range.start);
4302 end = edits_since_save.old_to_new(entry.range.end);
4303 } else {
4304 start = entry.range.start;
4305 end = entry.range.end;
4306 }
4307
4308 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4309 ..snapshot.clip_point_utf16(end, Bias::Right);
4310
4311 // Expand empty ranges by one codepoint
4312 if range.start == range.end {
4313 // Clipping will move the end to the next valid boundary
4314 range.end.column += 1;
4315 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4316 if range.start == range.end && range.end.column > 0 {
4317 range.start.column -= 1;
4318 range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left);
4319 }
4320 }
4321
4322 sanitized_diagnostics.push(DiagnosticEntry {
4323 range,
4324 diagnostic: entry.diagnostic,
4325 });
4326 }
4327 drop(edits_since_save);
4328
4329 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4330 buffer.update(cx, |buffer, cx| {
4331 buffer.update_diagnostics(server_id, set, cx)
4332 });
4333 Ok(())
4334 }
4335
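/// Reloads the given buffers from disk, dropping any unsaved edits. Dirty remote buffers are
/// reloaded through the host via RPC, dirty local buffers directly, and the resulting
/// transactions are collected into a single `ProjectTransaction`.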
4336 pub fn reload_buffers(
4337 &self,
4338 buffers: HashSet<Model<Buffer>>,
4339 push_to_history: bool,
4340 cx: &mut ModelContext<Self>,
4341 ) -> Task<Result<ProjectTransaction>> {
4342 let mut local_buffers = Vec::new();
4343 let mut remote_buffers = None;
4344 for buffer_handle in buffers {
4345 let buffer = buffer_handle.read(cx);
4346 if buffer.is_dirty() {
4347 if let Some(file) = File::from_dyn(buffer.file()) {
4348 if file.is_local() {
4349 local_buffers.push(buffer_handle);
4350 } else {
4351 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4352 }
4353 }
4354 }
4355 }
4356
4357 let remote_buffers = self.remote_id().zip(remote_buffers);
4358 let client = self.client.clone();
4359
4360 cx.spawn(move |this, mut cx| async move {
4361 let mut project_transaction = ProjectTransaction::default();
4362
4363 if let Some((project_id, remote_buffers)) = remote_buffers {
4364 let response = client
4365 .request(proto::ReloadBuffers {
4366 project_id,
4367 buffer_ids: remote_buffers
4368 .iter()
4369 .filter_map(|buffer| {
4370 buffer
4371 .update(&mut cx, |buffer, _| buffer.remote_id().into())
4372 .ok()
4373 })
4374 .collect(),
4375 })
4376 .await?
4377 .transaction
4378 .ok_or_else(|| anyhow!("missing transaction"))?;
4379 project_transaction = this
4380 .update(&mut cx, |this, cx| {
4381 this.deserialize_project_transaction(response, push_to_history, cx)
4382 })?
4383 .await?;
4384 }
4385
4386 for buffer in local_buffers {
4387 let transaction = buffer
4388 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4389 .await?;
4390 buffer.update(&mut cx, |buffer, cx| {
4391 if let Some(transaction) = transaction {
4392 if !push_to_history {
4393 buffer.forget_transaction(transaction.id);
4394 }
4395 project_transaction.0.insert(cx.handle(), transaction);
4396 }
4397 })?;
4398 }
4399
4400 Ok(project_transaction)
4401 })
4402 }
4403
4404 pub fn format(
4405 &mut self,
4406 buffers: HashSet<Model<Buffer>>,
4407 push_to_history: bool,
4408 trigger: FormatTrigger,
4409 cx: &mut ModelContext<Project>,
4410 ) -> Task<anyhow::Result<ProjectTransaction>> {
4411 if self.is_local() {
4412 let buffers_with_paths = buffers
4413 .into_iter()
4414 .filter_map(|buffer_handle| {
4415 let buffer = buffer_handle.read(cx);
4416 let file = File::from_dyn(buffer.file())?;
4417 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4418 Some((buffer_handle, buffer_abs_path))
4419 })
4420 .collect::<Vec<_>>();
4421
4422 cx.spawn(move |project, mut cx| async move {
4423 let result = Self::format_locally(
4424 project.clone(),
4425 buffers_with_paths,
4426 push_to_history,
4427 trigger,
4428 cx.clone(),
4429 )
4430 .await;
4431
4432 project.update(&mut cx, |project, _| match &result {
4433 Ok(_) => project.last_formatting_failure = None,
4434 Err(error) => {
4435 project.last_formatting_failure.replace(error.to_string());
4436 }
4437 })?;
4438
4439 result
4440 })
4441 } else {
4442 let remote_id = self.remote_id();
4443 let client = self.client.clone();
4444 cx.spawn(move |this, mut cx| async move {
4445 let mut project_transaction = ProjectTransaction::default();
4446 if let Some(project_id) = remote_id {
4447 let response = client
4448 .request(proto::FormatBuffers {
4449 project_id,
4450 trigger: trigger as i32,
4451 buffer_ids: buffers
4452 .iter()
4453 .map(|buffer| {
4454 buffer.update(&mut cx, |buffer, _| buffer.remote_id().into())
4455 })
4456 .collect::<Result<_>>()?,
4457 })
4458 .await?
4459 .transaction
4460 .ok_or_else(|| anyhow!("missing transaction"))?;
4461 project_transaction = this
4462 .update(&mut cx, |this, cx| {
4463 this.deserialize_project_transaction(response, push_to_history, cx)
4464 })?
4465 .await?;
4466 }
4467 Ok(project_transaction)
4468 })
4469 }
4470 }
4471
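/// Formats the given buffers on the local host. For each buffer this trims trailing
/// whitespace and ensures a final newline according to the settings, runs any configured
/// `code_actions_on_format`, then applies the configured formatter (language server, external
/// command, or prettier) and groups the results into a single undoable transaction.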
4472 async fn format_locally(
4473 project: WeakModel<Project>,
4474 mut buffers_with_paths: Vec<(Model<Buffer>, Option<PathBuf>)>,
4475 push_to_history: bool,
4476 trigger: FormatTrigger,
4477 mut cx: AsyncAppContext,
4478 ) -> anyhow::Result<ProjectTransaction> {
4479 // Do not allow multiple concurrent formatting requests for the
4480 // same buffer.
4481 project.update(&mut cx, |this, cx| {
4482 buffers_with_paths.retain(|(buffer, _)| {
4483 this.buffers_being_formatted
4484 .insert(buffer.read(cx).remote_id())
4485 });
4486 })?;
4487
4488 let _cleanup = defer({
4489 let this = project.clone();
4490 let mut cx = cx.clone();
4491 let buffers = &buffers_with_paths;
4492 move || {
4493 this.update(&mut cx, |this, cx| {
4494 for (buffer, _) in buffers {
4495 this.buffers_being_formatted
4496 .remove(&buffer.read(cx).remote_id());
4497 }
4498 })
4499 .ok();
4500 }
4501 });
4502
4503 let mut project_transaction = ProjectTransaction::default();
4504 for (buffer, buffer_abs_path) in &buffers_with_paths {
4505 let adapters_and_servers: Vec<_> = project.update(&mut cx, |project, cx| {
4506 project
4507 .language_servers_for_buffer(&buffer.read(cx), cx)
4508 .map(|(adapter, lsp)| (adapter.clone(), lsp.clone()))
4509 .collect()
4510 })?;
4511
4512 let settings = buffer.update(&mut cx, |buffer, cx| {
4513 language_settings(buffer.language(), buffer.file(), cx).clone()
4514 })?;
4515
4516 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4517 let ensure_final_newline = settings.ensure_final_newline_on_save;
4518 let tab_size = settings.tab_size;
4519
4520 // First, format the buffer's whitespace according to the settings.
4521 let trailing_whitespace_diff = if remove_trailing_whitespace {
4522 Some(
4523 buffer
4524 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4525 .await,
4526 )
4527 } else {
4528 None
4529 };
4530 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4531 buffer.finalize_last_transaction();
4532 buffer.start_transaction();
4533 if let Some(diff) = trailing_whitespace_diff {
4534 buffer.apply_diff(diff, cx);
4535 }
4536 if ensure_final_newline {
4537 buffer.ensure_final_newline(cx);
4538 }
4539 buffer.end_transaction(cx)
4540 })?;
4541
4542 // Apply the `code_actions_on_format` before we run the formatter.
4543 let code_actions = deserialize_code_actions(&settings.code_actions_on_format);
4544 #[allow(clippy::nonminimal_bool)]
4545 if !code_actions.is_empty()
4546 && !(trigger == FormatTrigger::Save && settings.format_on_save == FormatOnSave::Off)
4547 {
4548 Self::execute_code_actions_on_servers(
4549 &project,
4550 &adapters_and_servers,
4551 code_actions,
4552 buffer,
4553 push_to_history,
4554 &mut project_transaction,
4555 &mut cx,
4556 )
4557 .await?;
4558 }
4559
4560 // Apply language-specific formatting using either the primary language server
4561 // or an external command. Code actions are the exception: they are applied
4562 // via all connected language servers.
4563 let primary_language_server = adapters_and_servers
4564 .first()
4565 .cloned()
4566 .map(|(_, lsp)| lsp.clone());
4567 let server_and_buffer = primary_language_server
4568 .as_ref()
4569 .zip(buffer_abs_path.as_ref());
4570
4571 let mut format_operation = None;
4572 match (&settings.formatter, &settings.format_on_save) {
4573 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4574
4575 (Formatter::CodeActions(code_actions), FormatOnSave::On | FormatOnSave::Off)
4576 | (_, FormatOnSave::CodeActions(code_actions)) => {
4577 let code_actions = deserialize_code_actions(code_actions);
4578 if !code_actions.is_empty() {
4579 Self::execute_code_actions_on_servers(
4580 &project,
4581 &adapters_and_servers,
4582 code_actions,
4583 buffer,
4584 push_to_history,
4585 &mut project_transaction,
4586 &mut cx,
4587 )
4588 .await?;
4589 }
4590 }
4591 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4592 | (_, FormatOnSave::LanguageServer) => {
4593 if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4594 format_operation = Some(FormatOperation::Lsp(
4595 Self::format_via_lsp(
4596 &project,
4597 buffer,
4598 buffer_abs_path,
4599 language_server,
4600 tab_size,
4601 &mut cx,
4602 )
4603 .await
4604 .context("failed to format via language server")?,
4605 ));
4606 }
4607 }
4608
4609 (
4610 Formatter::External { command, arguments },
4611 FormatOnSave::On | FormatOnSave::Off,
4612 )
4613 | (_, FormatOnSave::External { command, arguments }) => {
4614 if let Some(buffer_abs_path) = buffer_abs_path {
4615 format_operation = Self::format_via_external_command(
4616 buffer,
4617 buffer_abs_path,
4618 command,
4619 arguments,
4620 &mut cx,
4621 )
4622 .await
4623 .context(format!(
4624 "failed to format via external command {:?}",
4625 command
4626 ))?
4627 .map(FormatOperation::External);
4628 }
4629 }
4630 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4631 let prettier =
4632 prettier_support::format_with_prettier(&project, buffer, &mut cx).await;
4633
4634 if let Some(operation) = prettier {
4635 format_operation = Some(operation?);
4636 } else if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4637 format_operation = Some(FormatOperation::Lsp(
4638 Self::format_via_lsp(
4639 &project,
4640 buffer,
4641 buffer_abs_path,
4642 language_server,
4643 tab_size,
4644 &mut cx,
4645 )
4646 .await
4647 .context("failed to format via language server")?,
4648 ));
4649 }
4650 }
4651 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4652 let prettier =
4653 prettier_support::format_with_prettier(&project, buffer, &mut cx).await;
4654
4655 if let Some(operation) = prettier {
4656 format_operation = Some(operation?);
4657 }
4658 }
4659 };
4660
4661 buffer.update(&mut cx, |b, cx| {
4662 // If the buffer had its whitespace formatted and was edited while the language-specific
4663 // formatting was being computed, avoid applying the language-specific formatting, because
4664 // it can't be grouped with the whitespace formatting in the undo history.
4665 if let Some(transaction_id) = whitespace_transaction_id {
4666 if b.peek_undo_stack()
4667 .map_or(true, |e| e.transaction_id() != transaction_id)
4668 {
4669 format_operation.take();
4670 }
4671 }
4672
4673 // Apply any language-specific formatting, and group the two formatting operations
4674 // in the buffer's undo history.
4675 if let Some(operation) = format_operation {
4676 match operation {
4677 FormatOperation::Lsp(edits) => {
4678 b.edit(edits, None, cx);
4679 }
4680 FormatOperation::External(diff) => {
4681 b.apply_diff(diff, cx);
4682 }
4683 FormatOperation::Prettier(diff) => {
4684 b.apply_diff(diff, cx);
4685 }
4686 }
4687
4688 if let Some(transaction_id) = whitespace_transaction_id {
4689 b.group_until_transaction(transaction_id);
4690 } else if let Some(transaction) = project_transaction.0.get(buffer) {
4691 b.group_until_transaction(transaction.id)
4692 }
4693 }
4694
4695 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4696 if !push_to_history {
4697 b.forget_transaction(transaction.id);
4698 }
4699 project_transaction.0.insert(buffer.clone(), transaction);
4700 }
4701 })?;
4702 }
4703
4704 Ok(project_transaction)
4705 }
4706
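/// Requests formatting edits from a language server, preferring `textDocument/formatting` and
/// falling back to `textDocument/rangeFormatting` over the entire buffer when only range
/// formatting is supported.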
4707 async fn format_via_lsp(
4708 this: &WeakModel<Self>,
4709 buffer: &Model<Buffer>,
4710 abs_path: &Path,
4711 language_server: &Arc<LanguageServer>,
4712 tab_size: NonZeroU32,
4713 cx: &mut AsyncAppContext,
4714 ) -> Result<Vec<(Range<Anchor>, String)>> {
4715 let uri = lsp::Url::from_file_path(abs_path)
4716 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4717 let text_document = lsp::TextDocumentIdentifier::new(uri);
4718 let capabilities = &language_server.capabilities();
4719
4720 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4721 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4722
4723 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4724 language_server
4725 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4726 text_document,
4727 options: lsp_command::lsp_formatting_options(tab_size.get()),
4728 work_done_progress_params: Default::default(),
4729 })
4730 .await?
4731 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4732 let buffer_start = lsp::Position::new(0, 0);
4733 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4734
4735 language_server
4736 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4737 text_document,
4738 range: lsp::Range::new(buffer_start, buffer_end),
4739 options: lsp_command::lsp_formatting_options(tab_size.get()),
4740 work_done_progress_params: Default::default(),
4741 })
4742 .await?
4743 } else {
4744 None
4745 };
4746
4747 if let Some(lsp_edits) = lsp_edits {
4748 this.update(cx, |this, cx| {
4749 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4750 })?
4751 .await
4752 } else {
4753 Ok(Vec::new())
4754 }
4755 }
4756
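/// Formats a buffer by piping its contents through an external command and diffing the
/// command's stdout against the buffer. Occurrences of `{buffer_path}` in the arguments are
/// replaced with the buffer's absolute path, and the command runs in the buffer's worktree
/// directory.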
4757 async fn format_via_external_command(
4758 buffer: &Model<Buffer>,
4759 buffer_abs_path: &Path,
4760 command: &str,
4761 arguments: &[String],
4762 cx: &mut AsyncAppContext,
4763 ) -> Result<Option<Diff>> {
4764 let working_dir_path = buffer.update(cx, |buffer, cx| {
4765 let file = File::from_dyn(buffer.file())?;
4766 let worktree = file.worktree.read(cx).as_local()?;
4767 let mut worktree_path = worktree.abs_path().to_path_buf();
4768 if worktree.root_entry()?.is_file() {
4769 worktree_path.pop();
4770 }
4771 Some(worktree_path)
4772 })?;
4773
4774 if let Some(working_dir_path) = working_dir_path {
4775 let mut child =
4776 smol::process::Command::new(command)
4777 .args(arguments.iter().map(|arg| {
4778 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4779 }))
4780 .current_dir(&working_dir_path)
4781 .stdin(smol::process::Stdio::piped())
4782 .stdout(smol::process::Stdio::piped())
4783 .stderr(smol::process::Stdio::piped())
4784 .spawn()?;
4785 let stdin = child
4786 .stdin
4787 .as_mut()
4788 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4789 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4790 for chunk in text.chunks() {
4791 stdin.write_all(chunk.as_bytes()).await?;
4792 }
4793 stdin.flush().await?;
4794
4795 let output = child.output().await?;
4796 if !output.status.success() {
4797 return Err(anyhow!(
4798 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4799 output.status.code(),
4800 String::from_utf8_lossy(&output.stdout),
4801 String::from_utf8_lossy(&output.stderr),
4802 ));
4803 }
4804
4805 let stdout = String::from_utf8(output.stdout)?;
4806 Ok(Some(
4807 buffer
4808 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4809 .await,
4810 ))
4811 } else {
4812 Ok(None)
4813 }
4814 }
4815
4816 #[inline(never)]
4817 fn definition_impl(
4818 &self,
4819 buffer: &Model<Buffer>,
4820 position: PointUtf16,
4821 cx: &mut ModelContext<Self>,
4822 ) -> Task<Result<Vec<LocationLink>>> {
4823 self.request_lsp(
4824 buffer.clone(),
4825 LanguageServerToQuery::Primary,
4826 GetDefinition { position },
4827 cx,
4828 )
4829 }
4830 pub fn definition<T: ToPointUtf16>(
4831 &self,
4832 buffer: &Model<Buffer>,
4833 position: T,
4834 cx: &mut ModelContext<Self>,
4835 ) -> Task<Result<Vec<LocationLink>>> {
4836 let position = position.to_point_utf16(buffer.read(cx));
4837 self.definition_impl(buffer, position, cx)
4838 }
4839
4840 fn type_definition_impl(
4841 &self,
4842 buffer: &Model<Buffer>,
4843 position: PointUtf16,
4844 cx: &mut ModelContext<Self>,
4845 ) -> Task<Result<Vec<LocationLink>>> {
4846 self.request_lsp(
4847 buffer.clone(),
4848 LanguageServerToQuery::Primary,
4849 GetTypeDefinition { position },
4850 cx,
4851 )
4852 }
4853
4854 pub fn type_definition<T: ToPointUtf16>(
4855 &self,
4856 buffer: &Model<Buffer>,
4857 position: T,
4858 cx: &mut ModelContext<Self>,
4859 ) -> Task<Result<Vec<LocationLink>>> {
4860 let position = position.to_point_utf16(buffer.read(cx));
4861 self.type_definition_impl(buffer, position, cx)
4862 }
4863
4864 fn implementation_impl(
4865 &self,
4866 buffer: &Model<Buffer>,
4867 position: PointUtf16,
4868 cx: &mut ModelContext<Self>,
4869 ) -> Task<Result<Vec<LocationLink>>> {
4870 self.request_lsp(
4871 buffer.clone(),
4872 LanguageServerToQuery::Primary,
4873 GetImplementation { position },
4874 cx,
4875 )
4876 }
4877
4878 pub fn implementation<T: ToPointUtf16>(
4879 &self,
4880 buffer: &Model<Buffer>,
4881 position: T,
4882 cx: &mut ModelContext<Self>,
4883 ) -> Task<Result<Vec<LocationLink>>> {
4884 let position = position.to_point_utf16(buffer.read(cx));
4885 self.implementation_impl(buffer, position, cx)
4886 }
4887
4888 fn references_impl(
4889 &self,
4890 buffer: &Model<Buffer>,
4891 position: PointUtf16,
4892 cx: &mut ModelContext<Self>,
4893 ) -> Task<Result<Vec<Location>>> {
4894 self.request_lsp(
4895 buffer.clone(),
4896 LanguageServerToQuery::Primary,
4897 GetReferences { position },
4898 cx,
4899 )
4900 }
4901 pub fn references<T: ToPointUtf16>(
4902 &self,
4903 buffer: &Model<Buffer>,
4904 position: T,
4905 cx: &mut ModelContext<Self>,
4906 ) -> Task<Result<Vec<Location>>> {
4907 let position = position.to_point_utf16(buffer.read(cx));
4908 self.references_impl(buffer, position, cx)
4909 }
4910
4911 fn document_highlights_impl(
4912 &self,
4913 buffer: &Model<Buffer>,
4914 position: PointUtf16,
4915 cx: &mut ModelContext<Self>,
4916 ) -> Task<Result<Vec<DocumentHighlight>>> {
4917 self.request_lsp(
4918 buffer.clone(),
4919 LanguageServerToQuery::Primary,
4920 GetDocumentHighlights { position },
4921 cx,
4922 )
4923 }
4924
4925 pub fn document_highlights<T: ToPointUtf16>(
4926 &self,
4927 buffer: &Model<Buffer>,
4928 position: T,
4929 cx: &mut ModelContext<Self>,
4930 ) -> Task<Result<Vec<DocumentHighlight>>> {
4931 let position = position.to_point_utf16(buffer.read(cx));
4932 self.document_highlights_impl(buffer, position, cx)
4933 }
4934
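/// Searches for workspace symbols matching `query`. Locally, the request is fanned out to
/// every running language server across visible worktrees and each result is resolved to a
/// project path; remotely, the query is forwarded to the host.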
4935 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4936 if self.is_local() {
4937 let mut requests = Vec::new();
4938 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4939 let Some(worktree_handle) = self.worktree_for_id(*worktree_id, cx) else {
4940 continue;
4941 };
4942 let worktree = worktree_handle.read(cx);
4943 if !worktree.is_visible() {
4944 continue;
4945 }
4946 let Some(worktree) = worktree.as_local() else {
4947 continue;
4948 };
4949 let worktree_abs_path = worktree.abs_path().clone();
4950
4951 let (adapter, language, server) = match self.language_servers.get(server_id) {
4952 Some(LanguageServerState::Running {
4953 adapter,
4954 language,
4955 server,
4956 ..
4957 }) => (adapter.clone(), language.clone(), server),
4958
4959 _ => continue,
4960 };
4961
4962 requests.push(
4963 server
4964 .request::<lsp::request::WorkspaceSymbolRequest>(
4965 lsp::WorkspaceSymbolParams {
4966 query: query.to_string(),
4967 ..Default::default()
4968 },
4969 )
4970 .log_err()
4971 .map(move |response| {
4972 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4973 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4974 flat_responses.into_iter().map(|lsp_symbol| {
4975 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4976 }).collect::<Vec<_>>()
4977 }
4978 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4979 nested_responses.into_iter().filter_map(|lsp_symbol| {
4980 let location = match lsp_symbol.location {
4981 OneOf::Left(location) => location,
4982 OneOf::Right(_) => {
4983 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4984 return None
4985 }
4986 };
4987 Some((lsp_symbol.name, lsp_symbol.kind, location))
4988 }).collect::<Vec<_>>()
4989 }
4990 }).unwrap_or_default();
4991
4992 (
4993 adapter,
4994 language,
4995 worktree_handle.downgrade(),
4996 worktree_abs_path,
4997 lsp_symbols,
4998 )
4999 }),
5000 );
5001 }
5002
5003 cx.spawn(move |this, mut cx| async move {
5004 let responses = futures::future::join_all(requests).await;
5005 let this = match this.upgrade() {
5006 Some(this) => this,
5007 None => return Ok(Vec::new()),
5008 };
5009
5010 let symbols = this.update(&mut cx, |this, cx| {
5011 let mut symbols = Vec::new();
5012 for (
5013 adapter,
5014 adapter_language,
5015 source_worktree,
5016 worktree_abs_path,
5017 lsp_symbols,
5018 ) in responses
5019 {
5020 symbols.extend(lsp_symbols.into_iter().filter_map(
5021 |(symbol_name, symbol_kind, symbol_location)| {
5022 let abs_path = symbol_location.uri.to_file_path().ok()?;
5023 let source_worktree = source_worktree.upgrade()?;
5024 let source_worktree_id = source_worktree.read(cx).id();
5025
5026 let path;
5027 let worktree;
5028 if let Some((tree, rel_path)) =
5029 this.find_local_worktree(&abs_path, cx)
5030 {
5031 worktree = tree;
5032 path = rel_path;
5033 } else {
5034 worktree = source_worktree.clone();
5035 path = relativize_path(&worktree_abs_path, &abs_path);
5036 }
5037
5038 let worktree_id = worktree.read(cx).id();
5039 let project_path = ProjectPath {
5040 worktree_id,
5041 path: path.into(),
5042 };
5043 let signature = this.symbol_signature(&project_path);
5044 let adapter_language = adapter_language.clone();
5045 let language = this
5046 .languages
5047 .language_for_file_path(&project_path.path)
5048 .unwrap_or_else(move |_| adapter_language);
5049 let adapter = adapter.clone();
5050 Some(async move {
5051 let language = language.await;
5052 let label = adapter
5053 .label_for_symbol(&symbol_name, symbol_kind, &language)
5054 .await;
5055
5056 Symbol {
5057 language_server_name: adapter.name.clone(),
5058 source_worktree_id,
5059 path: project_path,
5060 label: label.unwrap_or_else(|| {
5061 CodeLabel::plain(symbol_name.clone(), None)
5062 }),
5063 kind: symbol_kind,
5064 name: symbol_name,
5065 range: range_from_lsp(symbol_location.range),
5066 signature,
5067 }
5068 })
5069 },
5070 ));
5071 }
5072
5073 symbols
5074 })?;
5075
5076 Ok(futures::future::join_all(symbols).await)
5077 })
5078 } else if let Some(project_id) = self.remote_id() {
5079 let request = self.client.request(proto::GetProjectSymbols {
5080 project_id,
5081 query: query.to_string(),
5082 });
5083 cx.spawn(move |this, mut cx| async move {
5084 let response = request.await?;
5085 let mut symbols = Vec::new();
5086 if let Some(this) = this.upgrade() {
5087 let new_symbols = this.update(&mut cx, |this, _| {
5088 response
5089 .symbols
5090 .into_iter()
5091 .map(|symbol| this.deserialize_symbol(symbol))
5092 .collect::<Vec<_>>()
5093 })?;
5094 symbols = futures::future::join_all(new_symbols)
5095 .await
5096 .into_iter()
5097 .filter_map(|symbol| symbol.log_err())
5098 .collect::<Vec<_>>();
5099 }
5100 Ok(symbols)
5101 })
5102 } else {
5103 Task::ready(Ok(Default::default()))
5104 }
5105 }
5106
5107 pub fn open_buffer_for_symbol(
5108 &mut self,
5109 symbol: &Symbol,
5110 cx: &mut ModelContext<Self>,
5111 ) -> Task<Result<Model<Buffer>>> {
5112 if self.is_local() {
5113 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
5114 symbol.source_worktree_id,
5115 symbol.language_server_name.clone(),
5116 )) {
5117 *id
5118 } else {
5119 return Task::ready(Err(anyhow!(
5120 "language server for worktree and language not found"
5121 )));
5122 };
5123
5124 let worktree_abs_path = if let Some(worktree_abs_path) = self
5125 .worktree_for_id(symbol.path.worktree_id, cx)
5126 .and_then(|worktree| worktree.read(cx).as_local())
5127 .map(|local_worktree| local_worktree.abs_path())
5128 {
5129 worktree_abs_path
5130 } else {
5131 return Task::ready(Err(anyhow!("worktree not found for symbol")));
5132 };
5133
5134 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
5135 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
5136 uri
5137 } else {
5138 return Task::ready(Err(anyhow!("invalid symbol path")));
5139 };
5140
5141 self.open_local_buffer_via_lsp(
5142 symbol_uri,
5143 language_server_id,
5144 symbol.language_server_name.clone(),
5145 cx,
5146 )
5147 } else if let Some(project_id) = self.remote_id() {
5148 let request = self.client.request(proto::OpenBufferForSymbol {
5149 project_id,
5150 symbol: Some(serialize_symbol(symbol)),
5151 });
5152 cx.spawn(move |this, mut cx| async move {
5153 let response = request.await?;
5154 let buffer_id = BufferId::new(response.buffer_id)?;
5155 this.update(&mut cx, |this, cx| {
5156 this.wait_for_remote_buffer(buffer_id, cx)
5157 })?
5158 .await
5159 })
5160 } else {
5161 Task::ready(Err(anyhow!("project does not have a remote id")))
5162 }
5163 }
5164
5165 fn hover_impl(
5166 &self,
5167 buffer: &Model<Buffer>,
5168 position: PointUtf16,
5169 cx: &mut ModelContext<Self>,
5170 ) -> Task<Vec<Hover>> {
5171 if self.is_local() {
5172 let all_hovers_task = self.request_multiple_lsp_locally(
5173 &buffer,
5174 Some(position),
5175 |server_capabilities| match server_capabilities.hover_provider {
5176 Some(lsp::HoverProviderCapability::Simple(enabled)) => enabled,
5177 Some(lsp::HoverProviderCapability::Options(_)) => true,
5178 None => false,
5179 },
5180 GetHover { position },
5181 cx,
5182 );
5183 cx.spawn(|_, _| async move {
5184 all_hovers_task
5185 .await
5186 .into_iter()
5187 .filter_map(|hover| remove_empty_hover_blocks(hover?))
5188 .collect()
5189 })
5190 } else if let Some(project_id) = self.remote_id() {
5191 let request_task = self.client().request(proto::MultiLspQuery {
5192 buffer_id: buffer.read(cx).remote_id().into(),
5193 version: serialize_version(&buffer.read(cx).version()),
5194 project_id,
5195 strategy: Some(proto::multi_lsp_query::Strategy::All(
5196 proto::AllLanguageServers {},
5197 )),
5198 request: Some(proto::multi_lsp_query::Request::GetHover(
5199 GetHover { position }.to_proto(project_id, buffer.read(cx)),
5200 )),
5201 });
5202 let buffer = buffer.clone();
5203 cx.spawn(|weak_project, cx| async move {
5204 let Some(project) = weak_project.upgrade() else {
5205 return Vec::new();
5206 };
5207 join_all(
5208 request_task
5209 .await
5210 .log_err()
5211 .map(|response| response.responses)
5212 .unwrap_or_default()
5213 .into_iter()
5214 .filter_map(|lsp_response| match lsp_response.response? {
5215 proto::lsp_response::Response::GetHoverResponse(response) => {
5216 Some(response)
5217 }
5218 unexpected => {
5219 debug_panic!("Unexpected response: {unexpected:?}");
5220 None
5221 }
5222 })
5223 .map(|hover_response| {
5224 let response = GetHover { position }.response_from_proto(
5225 hover_response,
5226 project.clone(),
5227 buffer.clone(),
5228 cx.clone(),
5229 );
5230 async move {
5231 response
5232 .await
5233 .log_err()
5234 .flatten()
5235 .and_then(remove_empty_hover_blocks)
5236 }
5237 }),
5238 )
5239 .await
5240 .into_iter()
5241 .flatten()
5242 .collect()
5243 })
5244 } else {
5245 log::error!("cannot show hovers: project does not have a remote id");
5246 Task::ready(Vec::new())
5247 }
5248 }
5249
5250 pub fn hover<T: ToPointUtf16>(
5251 &self,
5252 buffer: &Model<Buffer>,
5253 position: T,
5254 cx: &mut ModelContext<Self>,
5255 ) -> Task<Vec<Hover>> {
5256 let position = position.to_point_utf16(buffer.read(cx));
5257 self.hover_impl(buffer, position, cx)
5258 }
5259
5260 #[inline(never)]
5261 fn completions_impl(
5262 &self,
5263 buffer: &Model<Buffer>,
5264 position: PointUtf16,
5265 cx: &mut ModelContext<Self>,
5266 ) -> Task<Result<Vec<Completion>>> {
5267 if self.is_local() {
5268 let snapshot = buffer.read(cx).snapshot();
5269 let offset = position.to_offset(&snapshot);
5270 let scope = snapshot.language_scope_at(offset);
5271
5272 let server_ids: Vec<_> = self
5273 .language_servers_for_buffer(buffer.read(cx), cx)
5274 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
5275 .filter(|(adapter, _)| {
5276 scope
5277 .as_ref()
5278 .map(|scope| scope.language_allowed(&adapter.name))
5279 .unwrap_or(true)
5280 })
5281 .map(|(_, server)| server.server_id())
5282 .collect();
5283
5284 let buffer = buffer.clone();
5285 cx.spawn(move |this, mut cx| async move {
5286 let mut tasks = Vec::with_capacity(server_ids.len());
5287 this.update(&mut cx, |this, cx| {
5288 for server_id in server_ids {
5289 tasks.push(this.request_lsp(
5290 buffer.clone(),
5291 LanguageServerToQuery::Other(server_id),
5292 GetCompletions { position },
5293 cx,
5294 ));
5295 }
5296 })?;
5297
5298 let mut completions = Vec::new();
5299 for task in tasks {
5300 if let Ok(new_completions) = task.await {
5301 completions.extend_from_slice(&new_completions);
5302 }
5303 }
5304
5305 Ok(completions)
5306 })
5307 } else if let Some(project_id) = self.remote_id() {
5308 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
5309 } else {
5310 Task::ready(Ok(Default::default()))
5311 }
5312 }
5313 pub fn completions<T: ToOffset + ToPointUtf16>(
5314 &self,
5315 buffer: &Model<Buffer>,
5316 position: T,
5317 cx: &mut ModelContext<Self>,
5318 ) -> Task<Result<Vec<Completion>>> {
5319 let position = position.to_point_utf16(buffer.read(cx));
5320 self.completions_impl(buffer, position, cx)
5321 }
5322
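/// Lazily resolves documentation for the completions at the given indices, either via the
/// owning language server (locally) or via an RPC request to the host (remotely). Returns
/// true if any completion's documentation had to be resolved.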
5323 pub fn resolve_completions(
5324 &self,
5325 completion_indices: Vec<usize>,
5326 completions: Arc<RwLock<Box<[Completion]>>>,
5327 cx: &mut ModelContext<Self>,
5328 ) -> Task<Result<bool>> {
5329 let client = self.client();
5330 let language_registry = self.languages().clone();
5331
5332 let is_remote = self.is_remote();
5333 let project_id = self.remote_id();
5334
5335 cx.spawn(move |this, mut cx| async move {
5336 let mut did_resolve = false;
5337 if is_remote {
5338 let project_id =
5339 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
5340
5341 for completion_index in completion_indices {
5342 let (server_id, completion) = {
5343 let completions_guard = completions.read();
5344 let completion = &completions_guard[completion_index];
5345 if completion.documentation.is_some() {
5346 continue;
5347 }
5348
5349 did_resolve = true;
5350 let server_id = completion.server_id;
5351 let completion = completion.lsp_completion.clone();
5352
5353 (server_id, completion)
5354 };
5355
5356 Self::resolve_completion_documentation_remote(
5357 project_id,
5358 server_id,
5359 completions.clone(),
5360 completion_index,
5361 completion,
5362 client.clone(),
5363 language_registry.clone(),
5364 )
5365 .await;
5366 }
5367 } else {
5368 for completion_index in completion_indices {
5369 let (server_id, completion) = {
5370 let completions_guard = completions.read();
5371 let completion = &completions_guard[completion_index];
5372 if completion.documentation.is_some() {
5373 continue;
5374 }
5375
5376 let server_id = completion.server_id;
5377 let completion = completion.lsp_completion.clone();
5378
5379 (server_id, completion)
5380 };
5381
5382 let server = this
5383 .read_with(&mut cx, |project, _| {
5384 project.language_server_for_id(server_id)
5385 })
5386 .ok()
5387 .flatten();
5388 let Some(server) = server else {
5389 continue;
5390 };
5391
5392 did_resolve = true;
5393 Self::resolve_completion_documentation_local(
5394 server,
5395 completions.clone(),
5396 completion_index,
5397 completion,
5398 language_registry.clone(),
5399 )
5400 .await;
5401 }
5402 }
5403
5404 Ok(did_resolve)
5405 })
5406 }
5407
5408 async fn resolve_completion_documentation_local(
5409 server: Arc<lsp::LanguageServer>,
5410 completions: Arc<RwLock<Box<[Completion]>>>,
5411 completion_index: usize,
5412 completion: lsp::CompletionItem,
5413 language_registry: Arc<LanguageRegistry>,
5414 ) {
5415 let can_resolve = server
5416 .capabilities()
5417 .completion_provider
5418 .as_ref()
5419 .and_then(|options| options.resolve_provider)
5420 .unwrap_or(false);
5421 if !can_resolve {
5422 return;
5423 }
5424
5425 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
5426 let Some(completion_item) = request.await.log_err() else {
5427 return;
5428 };
5429
5430 if let Some(lsp_documentation) = completion_item.documentation {
5431 let documentation = language::prepare_completion_documentation(
5432 &lsp_documentation,
5433 &language_registry,
5434 None, // TODO: Try to reasonably work out which language the completion is for
5435 )
5436 .await;
5437
5438 let mut completions = completions.write();
5439 let completion = &mut completions[completion_index];
5440 completion.documentation = Some(documentation);
5441 } else {
5442 let mut completions = completions.write();
5443 let completion = &mut completions[completion_index];
5444 completion.documentation = Some(Documentation::Undocumented);
5445 }
5446 }
5447
5448 async fn resolve_completion_documentation_remote(
5449 project_id: u64,
5450 server_id: LanguageServerId,
5451 completions: Arc<RwLock<Box<[Completion]>>>,
5452 completion_index: usize,
5453 completion: lsp::CompletionItem,
5454 client: Arc<Client>,
5455 language_registry: Arc<LanguageRegistry>,
5456 ) {
5457 let request = proto::ResolveCompletionDocumentation {
5458 project_id,
5459 language_server_id: server_id.0 as u64,
5460 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
5461 };
5462
5463 let Some(response) = client
5464 .request(request)
5465 .await
5466 .context("completion documentation resolve proto request")
5467 .log_err()
5468 else {
5469 return;
5470 };
5471
5472 if response.text.is_empty() {
5473 let mut completions = completions.write();
5474 let completion = &mut completions[completion_index];
5475 completion.documentation = Some(Documentation::Undocumented);
// Return early so the `Undocumented` marker is not overwritten below.
return;
5476 }
5477
5478 let documentation = if response.is_markdown {
5479 Documentation::MultiLineMarkdown(
5480 markdown::parse_markdown(&response.text, &language_registry, None).await,
5481 )
5482 } else if response.text.lines().count() <= 1 {
5483 Documentation::SingleLine(response.text)
5484 } else {
5485 Documentation::MultiLinePlainText(response.text)
5486 };
5487
5488 let mut completions = completions.write();
5489 let completion = &mut completions[completion_index];
5490 completion.documentation = Some(documentation);
5491 }
5492
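/// Applies a completion's `additionalTextEdits`, resolving the completion item first when the
/// server supports it. Edits that overlap the primary completion edit are skipped, and the
/// applied edits are grouped into a single transaction.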
5493 pub fn apply_additional_edits_for_completion(
5494 &self,
5495 buffer_handle: Model<Buffer>,
5496 completion: Completion,
5497 push_to_history: bool,
5498 cx: &mut ModelContext<Self>,
5499 ) -> Task<Result<Option<Transaction>>> {
5500 let buffer = buffer_handle.read(cx);
5501 let buffer_id = buffer.remote_id();
5502
5503 if self.is_local() {
5504 let server_id = completion.server_id;
5505 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
5506 Some((_, server)) => server.clone(),
5507 _ => return Task::ready(Ok(Default::default())),
5508 };
5509
5510 cx.spawn(move |this, mut cx| async move {
5511 let can_resolve = lang_server
5512 .capabilities()
5513 .completion_provider
5514 .as_ref()
5515 .and_then(|options| options.resolve_provider)
5516 .unwrap_or(false);
5517 let additional_text_edits = if can_resolve {
5518 lang_server
5519 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
5520 .await?
5521 .additional_text_edits
5522 } else {
5523 completion.lsp_completion.additional_text_edits
5524 };
5525 if let Some(edits) = additional_text_edits {
5526 let edits = this
5527 .update(&mut cx, |this, cx| {
5528 this.edits_from_lsp(
5529 &buffer_handle,
5530 edits,
5531 lang_server.server_id(),
5532 None,
5533 cx,
5534 )
5535 })?
5536 .await?;
5537
5538 buffer_handle.update(&mut cx, |buffer, cx| {
5539 buffer.finalize_last_transaction();
5540 buffer.start_transaction();
5541
5542 for (range, text) in edits {
5543 let primary = &completion.old_range;
5544 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5545 && primary.end.cmp(&range.start, buffer).is_ge();
5546 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5547 && range.end.cmp(&primary.end, buffer).is_ge();
5548
5549 // Skip additional edits which overlap with the primary completion edit
5550 // https://github.com/zed-industries/zed/pull/1871
5551 if !start_within && !end_within {
5552 buffer.edit([(range, text)], None, cx);
5553 }
5554 }
5555
5556 let transaction = if buffer.end_transaction(cx).is_some() {
5557 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5558 if !push_to_history {
5559 buffer.forget_transaction(transaction.id);
5560 }
5561 Some(transaction)
5562 } else {
5563 None
5564 };
5565 Ok(transaction)
5566 })?
5567 } else {
5568 Ok(None)
5569 }
5570 })
5571 } else if let Some(project_id) = self.remote_id() {
5572 let client = self.client.clone();
5573 cx.spawn(move |_, mut cx| async move {
5574 let response = client
5575 .request(proto::ApplyCompletionAdditionalEdits {
5576 project_id,
5577 buffer_id: buffer_id.into(),
5578 completion: Some(language::proto::serialize_completion(&completion)),
5579 })
5580 .await?;
5581
5582 if let Some(transaction) = response.transaction {
5583 let transaction = language::proto::deserialize_transaction(transaction)?;
5584 buffer_handle
5585 .update(&mut cx, |buffer, _| {
5586 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5587 })?
5588 .await?;
5589 if push_to_history {
5590 buffer_handle.update(&mut cx, |buffer, _| {
5591 buffer.push_transaction(transaction.clone(), Instant::now());
5592 })?;
5593 }
5594 Ok(Some(transaction))
5595 } else {
5596 Ok(None)
5597 }
5598 })
5599 } else {
5600 Task::ready(Err(anyhow!("project does not have a remote id")))
5601 }
5602 }
5603
5604 fn code_actions_impl(
5605 &self,
5606 buffer_handle: &Model<Buffer>,
5607 range: Range<Anchor>,
5608 cx: &mut ModelContext<Self>,
5609 ) -> Task<Vec<CodeAction>> {
5610 if self.is_local() {
5611 let all_actions_task = self.request_multiple_lsp_locally(
5612 &buffer_handle,
5613 Some(range.start),
5614 GetCodeActions::supports_code_actions,
5615 GetCodeActions {
5616 range: range.clone(),
5617 kinds: None,
5618 },
5619 cx,
5620 );
5621 cx.spawn(|_, _| async move { all_actions_task.await.into_iter().flatten().collect() })
5622 } else if let Some(project_id) = self.remote_id() {
5623 let request_task = self.client().request(proto::MultiLspQuery {
5624 buffer_id: buffer_handle.read(cx).remote_id().into(),
5625 version: serialize_version(&buffer_handle.read(cx).version()),
5626 project_id,
5627 strategy: Some(proto::multi_lsp_query::Strategy::All(
5628 proto::AllLanguageServers {},
5629 )),
5630 request: Some(proto::multi_lsp_query::Request::GetCodeActions(
5631 GetCodeActions {
5632 range: range.clone(),
5633 kinds: None,
5634 }
5635 .to_proto(project_id, buffer_handle.read(cx)),
5636 )),
5637 });
5638 let buffer = buffer_handle.clone();
5639 cx.spawn(|weak_project, cx| async move {
5640 let Some(project) = weak_project.upgrade() else {
5641 return Vec::new();
5642 };
5643 join_all(
5644 request_task
5645 .await
5646 .log_err()
5647 .map(|response| response.responses)
5648 .unwrap_or_default()
5649 .into_iter()
5650 .filter_map(|lsp_response| match lsp_response.response? {
5651 proto::lsp_response::Response::GetCodeActionsResponse(response) => {
5652 Some(response)
5653 }
5654 unexpected => {
5655 debug_panic!("Unexpected response: {unexpected:?}");
5656 None
5657 }
5658 })
5659 .map(|code_actions_response| {
5660 let response = GetCodeActions {
5661 range: range.clone(),
5662 kinds: None,
5663 }
5664 .response_from_proto(
5665 code_actions_response,
5666 project.clone(),
5667 buffer.clone(),
5668 cx.clone(),
5669 );
5670 async move { response.await.log_err().unwrap_or_default() }
5671 }),
5672 )
5673 .await
5674 .into_iter()
5675 .flatten()
5676 .collect()
5677 })
5678 } else {
5679 log::error!("cannot fetch actions: project does not have a remote id");
5680 Task::ready(Vec::new())
5681 }
5682 }
5683
5684 pub fn code_actions<T: Clone + ToOffset>(
5685 &self,
5686 buffer_handle: &Model<Buffer>,
5687 range: Range<T>,
5688 cx: &mut ModelContext<Self>,
5689 ) -> Task<Vec<CodeAction>> {
5690 let buffer = buffer_handle.read(cx);
5691 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5692 self.code_actions_impl(buffer_handle, range, cx)
5693 }
5694
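/// Applies a code action: the action is resolved if necessary, then either its workspace edit
/// is applied directly, or its command is executed on the language server and any workspace
/// edits produced while the command runs are returned.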
5695 pub fn apply_code_action(
5696 &self,
5697 buffer_handle: Model<Buffer>,
5698 mut action: CodeAction,
5699 push_to_history: bool,
5700 cx: &mut ModelContext<Self>,
5701 ) -> Task<Result<ProjectTransaction>> {
5702 if self.is_local() {
5703 let buffer = buffer_handle.read(cx);
5704 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5705 self.language_server_for_buffer(buffer, action.server_id, cx)
5706 {
5707 (adapter.clone(), server.clone())
5708 } else {
5709 return Task::ready(Ok(Default::default()));
5710 };
5711 cx.spawn(move |this, mut cx| async move {
5712 Self::try_resolve_code_action(&lang_server, &mut action)
5713 .await
5714 .context("resolving a code action")?;
5715 if let Some(edit) = action.lsp_action.edit {
5716 if edit.changes.is_some() || edit.document_changes.is_some() {
5717 return Self::deserialize_workspace_edit(
5718 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5719 edit,
5720 push_to_history,
5721 lsp_adapter.clone(),
5722 lang_server.clone(),
5723 &mut cx,
5724 )
5725 .await;
5726 }
5727 }
5728
5729 if let Some(command) = action.lsp_action.command {
5730 this.update(&mut cx, |this, _| {
5731 this.last_workspace_edits_by_language_server
5732 .remove(&lang_server.server_id());
5733 })?;
5734
5735 let result = lang_server
5736 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5737 command: command.command,
5738 arguments: command.arguments.unwrap_or_default(),
5739 ..Default::default()
5740 })
5741 .await;
5742
                    if let Err(err) = result {
                        // TODO: handle language server command errors here (e.g. surface
                        // them to the user) instead of only returning them to the caller.
                        return Err(err);
                    }
5747
5748 return this.update(&mut cx, |this, _| {
5749 this.last_workspace_edits_by_language_server
5750 .remove(&lang_server.server_id())
5751 .unwrap_or_default()
5752 });
5753 }
5754
5755 Ok(ProjectTransaction::default())
5756 })
5757 } else if let Some(project_id) = self.remote_id() {
5758 let client = self.client.clone();
5759 let request = proto::ApplyCodeAction {
5760 project_id,
5761 buffer_id: buffer_handle.read(cx).remote_id().into(),
5762 action: Some(language::proto::serialize_code_action(&action)),
5763 };
5764 cx.spawn(move |this, mut cx| async move {
5765 let response = client
5766 .request(request)
5767 .await?
5768 .transaction
5769 .ok_or_else(|| anyhow!("missing transaction"))?;
5770 this.update(&mut cx, |this, cx| {
5771 this.deserialize_project_transaction(response, push_to_history, cx)
5772 })?
5773 .await
5774 })
5775 } else {
5776 Task::ready(Err(anyhow!("project does not have a remote id")))
5777 }
5778 }
5779
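    /// Applies on-type formatting at the given position once the edits producing that
    /// position have arrived, tracking the buffer as "being formatted" for the duration
    /// of the request so that concurrent formatting requests for the same buffer are
    /// avoided; remote projects forward the request to the host.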
5780 fn apply_on_type_formatting(
5781 &self,
5782 buffer: Model<Buffer>,
5783 position: Anchor,
5784 trigger: String,
5785 cx: &mut ModelContext<Self>,
5786 ) -> Task<Result<Option<Transaction>>> {
5787 if self.is_local() {
5788 cx.spawn(move |this, mut cx| async move {
5789 // Do not allow multiple concurrent formatting requests for the
5790 // same buffer.
5791 this.update(&mut cx, |this, cx| {
5792 this.buffers_being_formatted
5793 .insert(buffer.read(cx).remote_id())
5794 })?;
5795
5796 let _cleanup = defer({
5797 let this = this.clone();
5798 let mut cx = cx.clone();
5799 let closure_buffer = buffer.clone();
5800 move || {
5801 this.update(&mut cx, |this, cx| {
5802 this.buffers_being_formatted
5803 .remove(&closure_buffer.read(cx).remote_id());
5804 })
5805 .ok();
5806 }
5807 });
5808
5809 buffer
5810 .update(&mut cx, |buffer, _| {
5811 buffer.wait_for_edits(Some(position.timestamp))
5812 })?
5813 .await?;
5814 this.update(&mut cx, |this, cx| {
5815 let position = position.to_point_utf16(buffer.read(cx));
5816 this.on_type_format(buffer, position, trigger, false, cx)
5817 })?
5818 .await
5819 })
5820 } else if let Some(project_id) = self.remote_id() {
5821 let client = self.client.clone();
5822 let request = proto::OnTypeFormatting {
5823 project_id,
5824 buffer_id: buffer.read(cx).remote_id().into(),
5825 position: Some(serialize_anchor(&position)),
5826 trigger,
5827 version: serialize_version(&buffer.read(cx).version()),
5828 };
5829 cx.spawn(move |_, _| async move {
5830 client
5831 .request(request)
5832 .await?
5833 .transaction
5834 .map(language::proto::deserialize_transaction)
5835 .transpose()
5836 })
5837 } else {
5838 Task::ready(Err(anyhow!("project does not have a remote id")))
5839 }
5840 }
5841
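    /// Converts a list of LSP text edits into buffer edits and applies them in a single
    /// transaction, optionally keeping that transaction out of the undo history.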
5842 async fn deserialize_edits(
5843 this: Model<Self>,
5844 buffer_to_edit: Model<Buffer>,
5845 edits: Vec<lsp::TextEdit>,
5846 push_to_history: bool,
5847 _: Arc<CachedLspAdapter>,
5848 language_server: Arc<LanguageServer>,
5849 cx: &mut AsyncAppContext,
5850 ) -> Result<Option<Transaction>> {
5851 let edits = this
5852 .update(cx, |this, cx| {
5853 this.edits_from_lsp(
5854 &buffer_to_edit,
5855 edits,
5856 language_server.server_id(),
5857 None,
5858 cx,
5859 )
5860 })?
5861 .await?;
5862
5863 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5864 buffer.finalize_last_transaction();
5865 buffer.start_transaction();
5866 for (range, text) in edits {
5867 buffer.edit([(range, text)], None, cx);
5868 }
5869
5870 if buffer.end_transaction(cx).is_some() {
5871 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5872 if !push_to_history {
5873 buffer.forget_transaction(transaction.id);
5874 }
5875 Some(transaction)
5876 } else {
5877 None
5878 }
5879 })?;
5880
5881 Ok(transaction)
5882 }
5883
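    /// Applies an LSP `WorkspaceEdit` to the project: resource operations (create,
    /// rename, delete) are performed on the filesystem, and document edits are applied
    /// to the corresponding buffers, producing a `ProjectTransaction`.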
5884 async fn deserialize_workspace_edit(
5885 this: Model<Self>,
5886 edit: lsp::WorkspaceEdit,
5887 push_to_history: bool,
5888 lsp_adapter: Arc<CachedLspAdapter>,
5889 language_server: Arc<LanguageServer>,
5890 cx: &mut AsyncAppContext,
5891 ) -> Result<ProjectTransaction> {
5892 let fs = this.update(cx, |this, _| this.fs.clone())?;
5893 let mut operations = Vec::new();
5894 if let Some(document_changes) = edit.document_changes {
5895 match document_changes {
5896 lsp::DocumentChanges::Edits(edits) => {
5897 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5898 }
5899 lsp::DocumentChanges::Operations(ops) => operations = ops,
5900 }
5901 } else if let Some(changes) = edit.changes {
5902 operations.extend(changes.into_iter().map(|(uri, edits)| {
5903 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5904 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5905 uri,
5906 version: None,
5907 },
5908 edits: edits.into_iter().map(OneOf::Left).collect(),
5909 })
5910 }));
5911 }
5912
5913 let mut project_transaction = ProjectTransaction::default();
5914 for operation in operations {
5915 match operation {
5916 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5917 let abs_path = op
5918 .uri
5919 .to_file_path()
5920 .map_err(|_| anyhow!("can't convert URI to path"))?;
5921
5922 if let Some(parent_path) = abs_path.parent() {
5923 fs.create_dir(parent_path).await?;
5924 }
5925 if abs_path.ends_with("/") {
5926 fs.create_dir(&abs_path).await?;
5927 } else {
5928 fs.create_file(
5929 &abs_path,
5930 op.options
5931 .map(|options| fs::CreateOptions {
5932 overwrite: options.overwrite.unwrap_or(false),
5933 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5934 })
5935 .unwrap_or_default(),
5936 )
5937 .await?;
5938 }
5939 }
5940
5941 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5942 let source_abs_path = op
5943 .old_uri
5944 .to_file_path()
5945 .map_err(|_| anyhow!("can't convert URI to path"))?;
5946 let target_abs_path = op
5947 .new_uri
5948 .to_file_path()
5949 .map_err(|_| anyhow!("can't convert URI to path"))?;
5950 fs.rename(
5951 &source_abs_path,
5952 &target_abs_path,
5953 op.options
5954 .map(|options| fs::RenameOptions {
5955 overwrite: options.overwrite.unwrap_or(false),
5956 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5957 })
5958 .unwrap_or_default(),
5959 )
5960 .await?;
5961 }
5962
5963 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5964 let abs_path = op
5965 .uri
5966 .to_file_path()
5967 .map_err(|_| anyhow!("can't convert URI to path"))?;
5968 let options = op
5969 .options
5970 .map(|options| fs::RemoveOptions {
5971 recursive: options.recursive.unwrap_or(false),
5972 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5973 })
5974 .unwrap_or_default();
5975 if abs_path.ends_with("/") {
5976 fs.remove_dir(&abs_path, options).await?;
5977 } else {
5978 fs.remove_file(&abs_path, options).await?;
5979 }
5980 }
5981
5982 lsp::DocumentChangeOperation::Edit(op) => {
5983 let buffer_to_edit = this
5984 .update(cx, |this, cx| {
5985 this.open_local_buffer_via_lsp(
5986 op.text_document.uri,
5987 language_server.server_id(),
5988 lsp_adapter.name.clone(),
5989 cx,
5990 )
5991 })?
5992 .await?;
5993
5994 let edits = this
5995 .update(cx, |this, cx| {
5996 let edits = op.edits.into_iter().map(|edit| match edit {
5997 OneOf::Left(edit) => edit,
5998 OneOf::Right(edit) => edit.text_edit,
5999 });
6000 this.edits_from_lsp(
6001 &buffer_to_edit,
6002 edits,
6003 language_server.server_id(),
6004 op.text_document.version,
6005 cx,
6006 )
6007 })?
6008 .await?;
6009
6010 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
6011 buffer.finalize_last_transaction();
6012 buffer.start_transaction();
6013 for (range, text) in edits {
6014 buffer.edit([(range, text)], None, cx);
6015 }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        }
                    })?;
6028 if let Some(transaction) = transaction {
6029 project_transaction.0.insert(buffer_to_edit, transaction);
6030 }
6031 }
6032 }
6033 }
6034
6035 Ok(project_transaction)
6036 }
6037
6038 fn prepare_rename_impl(
6039 &self,
6040 buffer: Model<Buffer>,
6041 position: PointUtf16,
6042 cx: &mut ModelContext<Self>,
6043 ) -> Task<Result<Option<Range<Anchor>>>> {
6044 self.request_lsp(
6045 buffer,
6046 LanguageServerToQuery::Primary,
6047 PrepareRename { position },
6048 cx,
6049 )
    }

    pub fn prepare_rename<T: ToPointUtf16>(
6052 &self,
6053 buffer: Model<Buffer>,
6054 position: T,
6055 cx: &mut ModelContext<Self>,
6056 ) -> Task<Result<Option<Range<Anchor>>>> {
6057 let position = position.to_point_utf16(buffer.read(cx));
6058 self.prepare_rename_impl(buffer, position, cx)
6059 }
6060
6061 fn perform_rename_impl(
6062 &self,
6063 buffer: Model<Buffer>,
6064 position: PointUtf16,
6065 new_name: String,
6066 push_to_history: bool,
6067 cx: &mut ModelContext<Self>,
6068 ) -> Task<Result<ProjectTransaction>> {
6070 self.request_lsp(
6071 buffer,
6072 LanguageServerToQuery::Primary,
6073 PerformRename {
6074 position,
6075 new_name,
6076 push_to_history,
6077 },
6078 cx,
6079 )
    }

    pub fn perform_rename<T: ToPointUtf16>(
6082 &self,
6083 buffer: Model<Buffer>,
6084 position: T,
6085 new_name: String,
6086 push_to_history: bool,
6087 cx: &mut ModelContext<Self>,
6088 ) -> Task<Result<ProjectTransaction>> {
6089 let position = position.to_point_utf16(buffer.read(cx));
6090 self.perform_rename_impl(buffer, position, new_name, push_to_history, cx)
6091 }
6092
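    /// Sends an on-type formatting request to the buffer's primary language server,
    /// using the tab size from the buffer's language settings.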
6093 pub fn on_type_format_impl(
6094 &self,
6095 buffer: Model<Buffer>,
6096 position: PointUtf16,
6097 trigger: String,
6098 push_to_history: bool,
6099 cx: &mut ModelContext<Self>,
6100 ) -> Task<Result<Option<Transaction>>> {
6101 let tab_size = buffer.update(cx, |buffer, cx| {
6102 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx).tab_size
6103 });
6104 self.request_lsp(
6105 buffer.clone(),
6106 LanguageServerToQuery::Primary,
6107 OnTypeFormatting {
6108 position,
6109 trigger,
6110 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
6111 push_to_history,
6112 },
6113 cx,
6114 )
6115 }
6116
6117 pub fn on_type_format<T: ToPointUtf16>(
6118 &self,
6119 buffer: Model<Buffer>,
6120 position: T,
6121 trigger: String,
6122 push_to_history: bool,
6123 cx: &mut ModelContext<Self>,
6124 ) -> Task<Result<Option<Transaction>>> {
6125 let position = position.to_point_utf16(buffer.read(cx));
6126 self.on_type_format_impl(buffer, position, trigger, push_to_history, cx)
6127 }
6128
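    /// Fetches inlay hints for the given range of the buffer from its primary language
    /// server; remote projects request the hints from the host.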
6129 pub fn inlay_hints<T: ToOffset>(
6130 &self,
6131 buffer_handle: Model<Buffer>,
6132 range: Range<T>,
6133 cx: &mut ModelContext<Self>,
6134 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
6135 let buffer = buffer_handle.read(cx);
6136 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
6137 self.inlay_hints_impl(buffer_handle, range, cx)
    }

    fn inlay_hints_impl(
6140 &self,
6141 buffer_handle: Model<Buffer>,
6142 range: Range<Anchor>,
6143 cx: &mut ModelContext<Self>,
6144 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
6145 let buffer = buffer_handle.read(cx);
6146 let range_start = range.start;
6147 let range_end = range.end;
6148 let buffer_id = buffer.remote_id().into();
6149 let lsp_request = InlayHints { range };
6150
6151 if self.is_local() {
6152 let lsp_request_task = self.request_lsp(
6153 buffer_handle.clone(),
6154 LanguageServerToQuery::Primary,
6155 lsp_request,
6156 cx,
6157 );
6158 cx.spawn(move |_, mut cx| async move {
6159 buffer_handle
6160 .update(&mut cx, |buffer, _| {
6161 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
6162 })?
6163 .await
6164 .context("waiting for inlay hint request range edits")?;
6165 lsp_request_task.await.context("inlay hints LSP request")
6166 })
6167 } else if let Some(project_id) = self.remote_id() {
6168 let client = self.client.clone();
6169 let request = proto::InlayHints {
6170 project_id,
6171 buffer_id,
6172 start: Some(serialize_anchor(&range_start)),
6173 end: Some(serialize_anchor(&range_end)),
6174 version: serialize_version(&buffer_handle.read(cx).version()),
6175 };
6176 cx.spawn(move |project, cx| async move {
6177 let response = client
6178 .request(request)
6179 .await
6180 .context("inlay hints proto request")?;
6181 LspCommand::response_from_proto(
6182 lsp_request,
6183 response,
6184 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
6185 buffer_handle.clone(),
6186 cx.clone(),
6187 )
6188 .await
6189 .context("inlay hints proto response conversion")
6190 })
6191 } else {
6192 Task::ready(Err(anyhow!("project does not have a remote id")))
6193 }
6194 }
6195
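    /// Resolves additional data for an inlay hint via the owning language server's
    /// resolve request, returning the hint unchanged when no server is available or the
    /// server does not support inlay hint resolution.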
6196 pub fn resolve_inlay_hint(
6197 &self,
6198 hint: InlayHint,
6199 buffer_handle: Model<Buffer>,
6200 server_id: LanguageServerId,
6201 cx: &mut ModelContext<Self>,
6202 ) -> Task<anyhow::Result<InlayHint>> {
6203 if self.is_local() {
6204 let buffer = buffer_handle.read(cx);
6205 let (_, lang_server) = if let Some((adapter, server)) =
6206 self.language_server_for_buffer(buffer, server_id, cx)
6207 {
6208 (adapter.clone(), server.clone())
6209 } else {
6210 return Task::ready(Ok(hint));
6211 };
6212 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
6213 return Task::ready(Ok(hint));
6214 }
6215
6216 let buffer_snapshot = buffer.snapshot();
6217 cx.spawn(move |_, mut cx| async move {
6218 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
6219 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
6220 );
6221 let resolved_hint = resolve_task
6222 .await
6223 .context("inlay hint resolve LSP request")?;
6224 let resolved_hint = InlayHints::lsp_to_project_hint(
6225 resolved_hint,
6226 &buffer_handle,
6227 server_id,
6228 ResolveState::Resolved,
6229 false,
6230 &mut cx,
6231 )
6232 .await?;
6233 Ok(resolved_hint)
6234 })
6235 } else if let Some(project_id) = self.remote_id() {
6236 let client = self.client.clone();
6237 let request = proto::ResolveInlayHint {
6238 project_id,
6239 buffer_id: buffer_handle.read(cx).remote_id().into(),
6240 language_server_id: server_id.0 as u64,
6241 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
6242 };
6243 cx.spawn(move |_, _| async move {
6244 let response = client
6245 .request(request)
6246 .await
6247 .context("inlay hints proto request")?;
6248 match response.hint {
6249 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
6250 .context("inlay hints proto resolve response conversion"),
6251 None => Ok(hint),
6252 }
6253 })
6254 } else {
6255 Task::ready(Err(anyhow!("project does not have a remote id")))
6256 }
6257 }
6258
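    /// Searches the project for the given query. Local projects scan worktrees and open
    /// buffers directly; remote projects forward the query to the host and stream the
    /// results back over RPC.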
6259 #[allow(clippy::type_complexity)]
6260 pub fn search(
6261 &self,
6262 query: SearchQuery,
6263 cx: &mut ModelContext<Self>,
6264 ) -> Receiver<SearchResult> {
6265 if self.is_local() {
6266 self.search_local(query, cx)
6267 } else if let Some(project_id) = self.remote_id() {
6268 let (tx, rx) = smol::channel::unbounded();
6269 let request = self.client.request(query.to_proto(project_id));
6270 cx.spawn(move |this, mut cx| async move {
6271 let response = request.await?;
6272 let mut result = HashMap::default();
6273 for location in response.locations {
6274 let buffer_id = BufferId::new(location.buffer_id)?;
6275 let target_buffer = this
6276 .update(&mut cx, |this, cx| {
6277 this.wait_for_remote_buffer(buffer_id, cx)
6278 })?
6279 .await?;
6280 let start = location
6281 .start
6282 .and_then(deserialize_anchor)
6283 .ok_or_else(|| anyhow!("missing target start"))?;
6284 let end = location
6285 .end
6286 .and_then(deserialize_anchor)
6287 .ok_or_else(|| anyhow!("missing target end"))?;
6288 result
6289 .entry(target_buffer)
6290 .or_insert(Vec::new())
6291 .push(start..end)
6292 }
6293 for (buffer, ranges) in result {
6294 let _ = tx.send(SearchResult::Buffer { buffer, ranges }).await;
6295 }
6296
6297 if response.limit_reached {
6298 let _ = tx.send(SearchResult::LimitReached).await;
6299 }
6300
6301 Result::<(), anyhow::Error>::Ok(())
6302 })
6303 .detach_and_log_err(cx);
6304 rx
6305 } else {
6306 unimplemented!();
6307 }
6308 }
6309
6310 pub fn search_local(
6311 &self,
6312 query: SearchQuery,
6313 cx: &mut ModelContext<Self>,
6314 ) -> Receiver<SearchResult> {
        // Local search is split into several phases.
        // In short, we make two passes: an initial pass that picks the files containing at least
        // one match, and a second pass that finds the positions of every match within those
        // candidate files.
        // The receiver returned from this function yields matches sorted by buffer path; files
        // without a buffer path are reported first.
        //
        // It gets a bit hairy, because we must also account for files that have no persistent
        // representation on the filesystem: untitled buffers and buffers with unsaved changes
        // need to be scanned too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it
        //    (== unsaved files / untitled buffers).
        //    Then we walk each worktree and check which files match the query's predicate. If a
        //    file has an opened version, we skip scanning its filesystem version altogether -
        //    what we have in memory is more up to date than what is on disk.
        // 2. At this point we have a list of all potentially matching buffers/files.
        //    We sort that list by buffer path - this list is retained for later use.
        //    We ensure that all buffers are now opened and available in the project.
        // 3. We scan all the candidate buffers on multiple background threads.
        //    We cannot assume that there will even be a match - while at least one match is
        //    guaranteed for files obtained from the filesystem, the buffers we got from memory
        //    (unsaved files / unnamed buffers) might not contain a match at all.
        //    There is also an auxiliary background thread responsible for gathering results.
        //    This is where the sorted list of buffers comes into play: the thread accepts match
        //    notifications (buffer has / doesn't have matches) in any order and keeps them around.
        //    As soon as the match info for the next position in sorted order becomes available,
        //    it reports it (if it is a match) or skips to the next entry - which might already be
        //    available thanks to the out-of-order processing.
        //
        // We could also report matches fully out of order, without maintaining a sorted list of
        // matching paths. That, however, would mean that project search (the main user of this
        // function) would have to do the sorting itself, on the go, which isn't as simple as an
        // insertion sort, sadly, and it would also have to keep the match index stable in the
        // face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since matches are already
        // reported in sorted order.
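        //
        // As a hypothetical example: searching for "foo" with one unsaved buffer and two matching
        // files "a.rs" and "b.rs" yields three candidates; the gathering thread reports the
        // unsaved buffer first (it has no path), then "a.rs", then "b.rs", even if the scan of
        // "b.rs" happens to finish before the scan of "a.rs".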
6343 let snapshots = self
6344 .visible_worktrees(cx)
6345 .filter_map(|tree| {
6346 let tree = tree.read(cx).as_local()?;
6347 Some(tree.snapshot())
6348 })
6349 .collect::<Vec<_>>();
6350 let include_root = snapshots.len() > 1;
6351
6352 let background = cx.background_executor().clone();
6353 let path_count: usize = snapshots
6354 .iter()
6355 .map(|s| {
6356 if query.include_ignored() {
6357 s.file_count()
6358 } else {
6359 s.visible_file_count()
6360 }
6361 })
6362 .sum();
6363 if path_count == 0 {
6364 let (_, rx) = smol::channel::bounded(1024);
6365 return rx;
6366 }
6367 let workers = background.num_cpus().min(path_count);
6368 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
6369 let mut unnamed_files = vec![];
6370 let opened_buffers = self
6371 .opened_buffers
6372 .iter()
6373 .filter_map(|(_, b)| {
6374 let buffer = b.upgrade()?;
6375 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
6376 let is_ignored = buffer
6377 .project_path(cx)
6378 .and_then(|path| self.entry_for_path(&path, cx))
6379 .map_or(false, |entry| entry.is_ignored);
6380 (is_ignored, buffer.snapshot())
6381 });
6382 if is_ignored && !query.include_ignored() {
6383 return None;
6384 } else if let Some(file) = snapshot.file() {
6385 let matched_path = if include_root {
6386 query.file_matches(Some(&file.full_path(cx)))
6387 } else {
6388 query.file_matches(Some(file.path()))
6389 };
6390
6391 if matched_path {
6392 Some((file.path().clone(), (buffer, snapshot)))
6393 } else {
6394 None
6395 }
6396 } else {
6397 unnamed_files.push(buffer);
6398 None
6399 }
6400 })
6401 .collect();
6402 cx.background_executor()
6403 .spawn(Self::background_search(
6404 unnamed_files,
6405 opened_buffers,
6406 cx.background_executor().clone(),
6407 self.fs.clone(),
6408 workers,
6409 query.clone(),
6410 include_root,
6411 path_count,
6412 snapshots,
6413 matching_paths_tx,
6414 ))
6415 .detach();
6416
6417 let (result_tx, result_rx) = smol::channel::bounded(1024);
6418
6419 cx.spawn(|this, mut cx| async move {
6420 const MAX_SEARCH_RESULT_FILES: usize = 5_000;
6421 const MAX_SEARCH_RESULT_RANGES: usize = 10_000;
6422
6423 let mut matching_paths = matching_paths_rx
6424 .take(MAX_SEARCH_RESULT_FILES + 1)
6425 .collect::<Vec<_>>()
6426 .await;
6427 let mut limit_reached = if matching_paths.len() > MAX_SEARCH_RESULT_FILES {
6428 matching_paths.pop();
6429 true
6430 } else {
6431 false
6432 };
6433 matching_paths.sort_by_key(|candidate| (candidate.is_ignored(), candidate.path()));
6434
6435 let mut range_count = 0;
6436 let query = Arc::new(query);
6437
6438 // Now that we know what paths match the query, we will load at most
6439 // 64 buffers at a time to avoid overwhelming the main thread. For each
6440 // opened buffer, we will spawn a background task that retrieves all the
6441 // ranges in the buffer matched by the query.
6442 'outer: for matching_paths_chunk in matching_paths.chunks(64) {
6443 let mut chunk_results = Vec::new();
6444 for matching_path in matching_paths_chunk {
6445 let query = query.clone();
6446 let buffer = match matching_path {
6447 SearchMatchCandidate::OpenBuffer { buffer, .. } => {
6448 Task::ready(Ok(buffer.clone()))
6449 }
6450 SearchMatchCandidate::Path {
6451 worktree_id, path, ..
6452 } => this.update(&mut cx, |this, cx| {
6453 this.open_buffer((*worktree_id, path.clone()), cx)
6454 })?,
6455 };
6456
6457 chunk_results.push(cx.spawn(|cx| async move {
6458 let buffer = buffer.await?;
6459 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
6460 let ranges = cx
6461 .background_executor()
6462 .spawn(async move {
6463 query
6464 .search(&snapshot, None)
6465 .await
6466 .iter()
6467 .map(|range| {
6468 snapshot.anchor_before(range.start)
6469 ..snapshot.anchor_after(range.end)
6470 })
6471 .collect::<Vec<_>>()
6472 })
6473 .await;
6474 anyhow::Ok((buffer, ranges))
6475 }));
6476 }
6477
6478 let chunk_results = futures::future::join_all(chunk_results).await;
6479 for result in chunk_results {
6480 if let Some((buffer, ranges)) = result.log_err() {
6481 range_count += ranges.len();
6482 result_tx
6483 .send(SearchResult::Buffer { buffer, ranges })
6484 .await?;
6485 if range_count > MAX_SEARCH_RESULT_RANGES {
6486 limit_reached = true;
6487 break 'outer;
6488 }
6489 }
6490 }
6491 }
6492
6493 if limit_reached {
6494 result_tx.send(SearchResult::LimitReached).await?;
6495 }
6496
6497 anyhow::Ok(())
6498 })
6499 .detach();
6500
6501 result_rx
6502 }
6503
    /// Pick paths that might contain a match for a given search query.
6505 #[allow(clippy::too_many_arguments)]
6506 async fn background_search(
6507 unnamed_buffers: Vec<Model<Buffer>>,
6508 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
6509 executor: BackgroundExecutor,
6510 fs: Arc<dyn Fs>,
6511 workers: usize,
6512 query: SearchQuery,
6513 include_root: bool,
6514 path_count: usize,
6515 snapshots: Vec<LocalSnapshot>,
6516 matching_paths_tx: Sender<SearchMatchCandidate>,
6517 ) {
6518 let fs = &fs;
6519 let query = &query;
6520 let matching_paths_tx = &matching_paths_tx;
6521 let snapshots = &snapshots;
6522 for buffer in unnamed_buffers {
6523 matching_paths_tx
6524 .send(SearchMatchCandidate::OpenBuffer {
6525 buffer: buffer.clone(),
6526 path: None,
6527 })
6528 .await
6529 .log_err();
6530 }
6531 for (path, (buffer, _)) in opened_buffers.iter() {
6532 matching_paths_tx
6533 .send(SearchMatchCandidate::OpenBuffer {
6534 buffer: buffer.clone(),
6535 path: Some(path.clone()),
6536 })
6537 .await
6538 .log_err();
6539 }
6540
6541 let paths_per_worker = (path_count + workers - 1) / workers;
6542
6543 executor
6544 .scoped(|scope| {
6545 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
6546
6547 for worker_ix in 0..workers {
6548 let worker_start_ix = worker_ix * paths_per_worker;
6549 let worker_end_ix = worker_start_ix + paths_per_worker;
6550 let opened_buffers = opened_buffers.clone();
6551 let limiter = Arc::clone(&max_concurrent_workers);
6552 scope.spawn({
6553 async move {
6554 let _guard = limiter.acquire().await;
6555 search_snapshots(
6556 snapshots,
6557 worker_start_ix,
6558 worker_end_ix,
6559 query,
6560 matching_paths_tx,
6561 &opened_buffers,
6562 include_root,
6563 fs,
6564 )
6565 .await;
6566 }
6567 });
6568 }
6569
6570 if query.include_ignored() {
6571 for snapshot in snapshots {
6572 for ignored_entry in snapshot.entries(true).filter(|e| e.is_ignored) {
6573 let limiter = Arc::clone(&max_concurrent_workers);
6574 scope.spawn(async move {
6575 let _guard = limiter.acquire().await;
6576 search_ignored_entry(
6577 snapshot,
6578 ignored_entry,
6579 fs,
6580 query,
6581 matching_paths_tx,
6582 )
6583 .await;
6584 });
6585 }
6586 }
6587 }
6588 })
6589 .await;
6590 }
6591
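    /// Sends an LSP request to a language server for the given buffer and converts the
    /// response into this crate's types. Remote projects proxy the request through the
    /// host; when no matching server is available or the server lacks the required
    /// capability, a default response is returned.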
6592 pub fn request_lsp<R: LspCommand>(
6593 &self,
6594 buffer_handle: Model<Buffer>,
6595 server: LanguageServerToQuery,
6596 request: R,
6597 cx: &mut ModelContext<Self>,
6598 ) -> Task<Result<R::Response>>
6599 where
6600 <R::LspRequest as lsp::request::Request>::Result: Send,
6601 <R::LspRequest as lsp::request::Request>::Params: Send,
6602 {
6603 let buffer = buffer_handle.read(cx);
6604 if self.is_local() {
6605 let language_server = match server {
6606 LanguageServerToQuery::Primary => {
6607 match self.primary_language_server_for_buffer(buffer, cx) {
6608 Some((_, server)) => Some(Arc::clone(server)),
6609 None => return Task::ready(Ok(Default::default())),
6610 }
6611 }
6612 LanguageServerToQuery::Other(id) => self
6613 .language_server_for_buffer(buffer, id, cx)
6614 .map(|(_, server)| Arc::clone(server)),
6615 };
6616 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6617 if let (Some(file), Some(language_server)) = (file, language_server) {
6618 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6619 return cx.spawn(move |this, cx| async move {
6620 if !request.check_capabilities(language_server.capabilities()) {
6621 return Ok(Default::default());
6622 }
6623
6624 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6625 let response = match result {
6626 Ok(response) => response,
6627
6628 Err(err) => {
6629 log::warn!(
6630 "Generic lsp request to {} failed: {}",
6631 language_server.name(),
6632 err
6633 );
6634 return Err(err);
6635 }
6636 };
6637
6638 request
6639 .response_from_lsp(
6640 response,
6641 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6642 buffer_handle,
6643 language_server.server_id(),
6644 cx,
6645 )
6646 .await
6647 });
6648 }
6649 } else if let Some(project_id) = self.remote_id() {
6650 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6651 }
6652
6653 Task::ready(Ok(Default::default()))
6654 }
6655
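    /// Issues the same LSP request to every local language server attached to the buffer
    /// that advertises the required capability (and is allowed for the language scope at
    /// the given position), collecting all successful responses.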
6656 fn request_multiple_lsp_locally<P, R>(
6657 &self,
6658 buffer: &Model<Buffer>,
6659 position: Option<P>,
6660 server_capabilities_check: fn(&ServerCapabilities) -> bool,
6661 request: R,
6662 cx: &mut ModelContext<'_, Self>,
6663 ) -> Task<Vec<R::Response>>
6664 where
6665 P: ToOffset,
6666 R: LspCommand + Clone,
6667 <R::LspRequest as lsp::request::Request>::Result: Send,
6668 <R::LspRequest as lsp::request::Request>::Params: Send,
6669 {
6670 if !self.is_local() {
6671 debug_panic!("Should not request multiple lsp commands in non-local project");
6672 return Task::ready(Vec::new());
6673 }
6674 let snapshot = buffer.read(cx).snapshot();
6675 let scope = position.and_then(|position| snapshot.language_scope_at(position));
6676 let mut response_results = self
6677 .language_servers_for_buffer(buffer.read(cx), cx)
6678 .filter(|(_, server)| server_capabilities_check(server.capabilities()))
6679 .filter(|(adapter, _)| {
6680 scope
6681 .as_ref()
6682 .map(|scope| scope.language_allowed(&adapter.name))
6683 .unwrap_or(true)
6684 })
6685 .map(|(_, server)| server.server_id())
6686 .map(|server_id| {
6687 self.request_lsp(
6688 buffer.clone(),
6689 LanguageServerToQuery::Other(server_id),
6690 request.clone(),
6691 cx,
6692 )
6693 })
6694 .collect::<FuturesUnordered<_>>();
6695
6696 return cx.spawn(|_, _| async move {
6697 let mut responses = Vec::with_capacity(response_results.len());
6698 while let Some(response_result) = response_results.next().await {
6699 if let Some(response) = response_result.log_err() {
6700 responses.push(response);
6701 }
6702 }
6703 responses
6704 });
6705 }
6706
6707 fn send_lsp_proto_request<R: LspCommand>(
6708 &self,
6709 buffer: Model<Buffer>,
6710 project_id: u64,
6711 request: R,
6712 cx: &mut ModelContext<'_, Project>,
6713 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6714 let rpc = self.client.clone();
6715 let message = request.to_proto(project_id, buffer.read(cx));
6716 cx.spawn(move |this, mut cx| async move {
6717 // Ensure the project is still alive by the time the task
6718 // is scheduled.
6719 this.upgrade().context("project dropped")?;
6720 let response = rpc.request(message).await?;
6721 let this = this.upgrade().context("project dropped")?;
6722 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6723 Err(anyhow!("disconnected before completing request"))
6724 } else {
6725 request
6726 .response_from_proto(response, this, buffer, cx)
6727 .await
6728 }
6729 })
6730 }
6731
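    /// Returns the worktree containing `abs_path` together with the path relative to that
    /// worktree, creating a new local worktree if no existing one contains it.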
6732 pub fn find_or_create_local_worktree(
6733 &mut self,
6734 abs_path: impl AsRef<Path>,
6735 visible: bool,
6736 cx: &mut ModelContext<Self>,
6737 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6738 let abs_path = abs_path.as_ref();
6739 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6740 Task::ready(Ok((tree, relative_path)))
6741 } else {
6742 let worktree = self.create_local_worktree(abs_path, visible, cx);
6743 cx.background_executor()
6744 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6745 }
6746 }
6747
6748 pub fn find_local_worktree(
6749 &self,
6750 abs_path: &Path,
6751 cx: &AppContext,
6752 ) -> Option<(Model<Worktree>, PathBuf)> {
6753 for tree in &self.worktrees {
6754 if let Some(tree) = tree.upgrade() {
6755 if let Some(relative_path) = tree
6756 .read(cx)
6757 .as_local()
6758 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6759 {
6760 return Some((tree.clone(), relative_path.into()));
6761 }
6762 }
6763 }
6764 None
6765 }
6766
6767 pub fn is_shared(&self) -> bool {
6768 match &self.client_state {
6769 ProjectClientState::Shared { .. } => true,
6770 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6771 }
6772 }
6773
6774 fn create_local_worktree(
6775 &mut self,
6776 abs_path: impl AsRef<Path>,
6777 visible: bool,
6778 cx: &mut ModelContext<Self>,
6779 ) -> Task<Result<Model<Worktree>>> {
6780 let fs = self.fs.clone();
6781 let client = self.client.clone();
6782 let next_entry_id = self.next_entry_id.clone();
6783 let path: Arc<Path> = abs_path.as_ref().into();
6784 let task = self
6785 .loading_local_worktrees
6786 .entry(path.clone())
6787 .or_insert_with(|| {
6788 cx.spawn(move |project, mut cx| {
6789 async move {
6790 let worktree = Worktree::local(
6791 client.clone(),
6792 path.clone(),
6793 visible,
6794 fs,
6795 next_entry_id,
6796 &mut cx,
6797 )
6798 .await;
6799
6800 project.update(&mut cx, |project, _| {
6801 project.loading_local_worktrees.remove(&path);
6802 })?;
6803
6804 let worktree = worktree?;
6805 project
6806 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6807
6808 if visible {
6809 cx.update(|cx| {
6810 cx.add_recent_document(&path);
6811 })
6812 .log_err();
6813 }
6814
6815 Ok(worktree)
6816 }
6817 .map_err(Arc::new)
6818 })
6819 .shared()
6820 })
6821 .clone();
6822 cx.background_executor().spawn(async move {
6823 match task.await {
6824 Ok(worktree) => Ok(worktree),
6825 Err(err) => Err(anyhow!("{}", err)),
6826 }
6827 })
6828 }
6829
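    /// Removes a worktree from the project, shutting down language servers and prettier
    /// instances that are no longer used by any remaining worktree and dropping the
    /// worktree's task sources.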
6830 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6831 let mut servers_to_remove = HashMap::default();
6832 let mut servers_to_preserve = HashSet::default();
6833 for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
6834 if worktree_id == &id_to_remove {
6835 servers_to_remove.insert(server_id, server_name.clone());
6836 } else {
6837 servers_to_preserve.insert(server_id);
6838 }
6839 }
6840 servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id));
6841 for (server_id_to_remove, server_name) in servers_to_remove {
6842 self.language_server_ids
6843 .remove(&(id_to_remove, server_name));
6844 self.language_server_statuses.remove(&server_id_to_remove);
6845 self.language_server_watched_paths
6846 .remove(&server_id_to_remove);
6847 self.last_workspace_edits_by_language_server
6848 .remove(&server_id_to_remove);
6849 self.language_servers.remove(&server_id_to_remove);
6850 cx.emit(Event::LanguageServerRemoved(server_id_to_remove));
6851 }
6852
6853 let mut prettier_instances_to_clean = FuturesUnordered::new();
6854 if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) {
6855 for path in prettier_paths.iter().flatten() {
6856 if let Some(prettier_instance) = self.prettier_instances.remove(path) {
6857 prettier_instances_to_clean.push(async move {
6858 prettier_instance
6859 .server()
6860 .await
6861 .map(|server| server.server_id())
6862 });
6863 }
6864 }
6865 }
6866 cx.spawn(|project, mut cx| async move {
6867 while let Some(prettier_server_id) = prettier_instances_to_clean.next().await {
6868 if let Some(prettier_server_id) = prettier_server_id {
6869 project
6870 .update(&mut cx, |project, cx| {
6871 project
6872 .supplementary_language_servers
6873 .remove(&prettier_server_id);
6874 cx.emit(Event::LanguageServerRemoved(prettier_server_id));
6875 })
6876 .ok();
6877 }
6878 }
6879 })
6880 .detach();
6881
6882 self.task_inventory().update(cx, |inventory, _| {
6883 inventory.remove_worktree_sources(id_to_remove);
6884 });
6885
6886 self.worktrees.retain(|worktree| {
6887 if let Some(worktree) = worktree.upgrade() {
6888 let id = worktree.read(cx).id();
6889 if id == id_to_remove {
6890 cx.emit(Event::WorktreeRemoved(id));
6891 false
6892 } else {
6893 true
6894 }
6895 } else {
6896 false
6897 }
6898 });
6899 self.metadata_changed(cx);
6900 }
6901
6902 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6903 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6904 cx.subscribe(worktree, |this, worktree, event, cx| {
6905 let is_local = worktree.read(cx).is_local();
6906 match event {
6907 worktree::Event::UpdatedEntries(changes) => {
6908 if is_local {
6909 this.update_local_worktree_buffers(&worktree, changes, cx);
6910 this.update_local_worktree_language_servers(&worktree, changes, cx);
6911 this.update_local_worktree_settings(&worktree, changes, cx);
6912 this.update_prettier_settings(&worktree, changes, cx);
6913 }
6914
6915 cx.emit(Event::WorktreeUpdatedEntries(
6916 worktree.read(cx).id(),
6917 changes.clone(),
6918 ));
6919 }
6920 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6921 if is_local {
6922 this.update_local_worktree_buffers_git_repos(
6923 worktree.clone(),
6924 updated_repos,
6925 cx,
6926 )
6927 }
6928 cx.emit(Event::WorktreeUpdatedGitRepositories);
6929 }
6930 }
6931 })
6932 .detach();
6933
6934 let push_strong_handle = {
6935 let worktree = worktree.read(cx);
6936 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6937 };
6938 if push_strong_handle {
6939 self.worktrees
6940 .push(WorktreeHandle::Strong(worktree.clone()));
6941 } else {
6942 self.worktrees
6943 .push(WorktreeHandle::Weak(worktree.downgrade()));
6944 }
6945
6946 let handle_id = worktree.entity_id();
6947 cx.observe_release(worktree, move |this, worktree, cx| {
6948 let _ = this.remove_worktree(worktree.id(), cx);
6949 cx.update_global::<SettingsStore, _>(|store, cx| {
6950 store
6951 .clear_local_settings(handle_id.as_u64() as usize, cx)
6952 .log_err()
6953 });
6954 })
6955 .detach();
6956
6957 cx.emit(Event::WorktreeAdded);
6958 self.metadata_changed(cx);
6959 }
6960
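    /// Reconciles open buffers with filesystem changes in a local worktree, updating each
    /// affected buffer's file metadata and re-registering renamed buffers with their
    /// language servers.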
6961 fn update_local_worktree_buffers(
6962 &mut self,
6963 worktree_handle: &Model<Worktree>,
6964 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6965 cx: &mut ModelContext<Self>,
6966 ) {
6967 let snapshot = worktree_handle.read(cx).snapshot();
6968
6969 let mut renamed_buffers = Vec::new();
6970 for (path, entry_id, _) in changes {
6971 let worktree_id = worktree_handle.read(cx).id();
6972 let project_path = ProjectPath {
6973 worktree_id,
6974 path: path.clone(),
6975 };
6976
6977 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6978 Some(&buffer_id) => buffer_id,
6979 None => match self.local_buffer_ids_by_path.get(&project_path) {
6980 Some(&buffer_id) => buffer_id,
6981 None => {
6982 continue;
6983 }
6984 },
6985 };
6986
6987 let open_buffer = self.opened_buffers.get(&buffer_id);
6988 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6989 buffer
6990 } else {
6991 self.opened_buffers.remove(&buffer_id);
6992 self.local_buffer_ids_by_path.remove(&project_path);
6993 self.local_buffer_ids_by_entry_id.remove(entry_id);
6994 continue;
6995 };
6996
6997 buffer.update(cx, |buffer, cx| {
6998 if let Some(old_file) = File::from_dyn(buffer.file()) {
6999 if old_file.worktree != *worktree_handle {
7000 return;
7001 }
7002
7003 let new_file = if let Some(entry) = old_file
7004 .entry_id
7005 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
7006 {
7007 File {
7008 is_local: true,
7009 entry_id: Some(entry.id),
7010 mtime: entry.mtime,
7011 path: entry.path.clone(),
7012 worktree: worktree_handle.clone(),
7013 is_deleted: false,
7014 is_private: entry.is_private,
7015 }
7016 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
7017 File {
7018 is_local: true,
7019 entry_id: Some(entry.id),
7020 mtime: entry.mtime,
7021 path: entry.path.clone(),
7022 worktree: worktree_handle.clone(),
7023 is_deleted: false,
7024 is_private: entry.is_private,
7025 }
7026 } else {
7027 File {
7028 is_local: true,
7029 entry_id: old_file.entry_id,
7030 path: old_file.path().clone(),
7031 mtime: old_file.mtime(),
7032 worktree: worktree_handle.clone(),
7033 is_deleted: true,
7034 is_private: old_file.is_private,
7035 }
7036 };
7037
7038 let old_path = old_file.abs_path(cx);
7039 if new_file.abs_path(cx) != old_path {
7040 renamed_buffers.push((cx.handle(), old_file.clone()));
7041 self.local_buffer_ids_by_path.remove(&project_path);
7042 self.local_buffer_ids_by_path.insert(
7043 ProjectPath {
7044 worktree_id,
7045 path: path.clone(),
7046 },
7047 buffer_id,
7048 );
7049 }
7050
7051 if new_file.entry_id != Some(*entry_id) {
7052 self.local_buffer_ids_by_entry_id.remove(entry_id);
7053 if let Some(entry_id) = new_file.entry_id {
7054 self.local_buffer_ids_by_entry_id
7055 .insert(entry_id, buffer_id);
7056 }
7057 }
7058
7059 if new_file != *old_file {
7060 if let Some(project_id) = self.remote_id() {
7061 self.client
7062 .send(proto::UpdateBufferFile {
7063 project_id,
7064 buffer_id: buffer_id.into(),
7065 file: Some(new_file.to_proto()),
7066 })
7067 .log_err();
7068 }
7069
7070 buffer.file_updated(Arc::new(new_file), cx);
7071 }
7072 }
7073 });
7074 }
7075
7076 for (buffer, old_file) in renamed_buffers {
7077 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
7078 self.detect_language_for_buffer(&buffer, cx);
7079 self.register_buffer_with_language_servers(&buffer, cx);
7080 }
7081 }
7082
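    /// Notifies language servers attached to this worktree about filesystem changes that
    /// match the file-watch patterns they registered.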
7083 fn update_local_worktree_language_servers(
7084 &mut self,
7085 worktree_handle: &Model<Worktree>,
7086 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
7087 cx: &mut ModelContext<Self>,
7088 ) {
7089 if changes.is_empty() {
7090 return;
7091 }
7092
7093 let worktree_id = worktree_handle.read(cx).id();
7094 let mut language_server_ids = self
7095 .language_server_ids
7096 .iter()
7097 .filter_map(|((server_worktree_id, _), server_id)| {
7098 (*server_worktree_id == worktree_id).then_some(*server_id)
7099 })
7100 .collect::<Vec<_>>();
7101 language_server_ids.sort();
7102 language_server_ids.dedup();
7103
7104 let abs_path = worktree_handle.read(cx).abs_path();
7105 for server_id in &language_server_ids {
7106 if let Some(LanguageServerState::Running { server, .. }) =
7107 self.language_servers.get(server_id)
7108 {
7109 if let Some(watched_paths) = self
7110 .language_server_watched_paths
                    .get(server_id)
7112 .and_then(|paths| paths.get(&worktree_id))
7113 {
7114 let params = lsp::DidChangeWatchedFilesParams {
7115 changes: changes
7116 .iter()
7117 .filter_map(|(path, _, change)| {
7118 if !watched_paths.is_match(&path) {
7119 return None;
7120 }
7121 let typ = match change {
7122 PathChange::Loaded => return None,
7123 PathChange::Added => lsp::FileChangeType::CREATED,
7124 PathChange::Removed => lsp::FileChangeType::DELETED,
7125 PathChange::Updated => lsp::FileChangeType::CHANGED,
7126 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
7127 };
7128 Some(lsp::FileEvent {
7129 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
7130 typ,
7131 })
7132 })
7133 .collect(),
7134 };
7135 if !params.changes.is_empty() {
7136 server
7137 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
7138 .log_err();
7139 }
7140 }
7141 }
7142 }
7143 }
7144
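    /// Reloads the git diff base for every open (or currently loading) buffer whose
    /// containing repository has changed, and propagates the new diff bases to remote
    /// collaborators.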
7145 fn update_local_worktree_buffers_git_repos(
7146 &mut self,
7147 worktree_handle: Model<Worktree>,
7148 changed_repos: &UpdatedGitRepositoriesSet,
7149 cx: &mut ModelContext<Self>,
7150 ) {
7151 debug_assert!(worktree_handle.read(cx).is_local());
7152
        // Identify the loading buffers whose containing repository has changed.
7154 let future_buffers = self
7155 .loading_buffers_by_path
7156 .iter()
7157 .filter_map(|(project_path, receiver)| {
7158 if project_path.worktree_id != worktree_handle.read(cx).id() {
7159 return None;
7160 }
7161 let path = &project_path.path;
7162 changed_repos
7163 .iter()
7164 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7165 let receiver = receiver.clone();
7166 let path = path.clone();
7167 let abs_path = worktree_handle.read(cx).absolutize(&path).ok()?;
7168 Some(async move {
7169 wait_for_loading_buffer(receiver)
7170 .await
7171 .ok()
7172 .map(|buffer| (buffer, path, abs_path))
7173 })
7174 })
7175 .collect::<FuturesUnordered<_>>();
7176
7177 // Identify the current buffers whose containing repository has changed.
7178 let current_buffers = self
7179 .opened_buffers
7180 .values()
7181 .filter_map(|buffer| {
7182 let buffer = buffer.upgrade()?;
7183 let file = File::from_dyn(buffer.read(cx).file())?;
7184 if file.worktree != worktree_handle {
7185 return None;
7186 }
7187 let path = file.path();
7188 changed_repos
7189 .iter()
7190 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7191 Some((buffer, path.clone(), file.abs_path(cx)))
7192 })
7193 .collect::<Vec<_>>();
7194
7195 if future_buffers.len() + current_buffers.len() == 0 {
7196 return;
7197 }
7198
7199 let remote_id = self.remote_id();
7200 let client = self.client.clone();
7201 let fs = self.fs.clone();
7202 cx.spawn(move |_, mut cx| async move {
7203 // Wait for all of the buffers to load.
7204 let future_buffers = future_buffers.collect::<Vec<_>>().await;
7205
7206 // Reload the diff base for every buffer whose containing git repository has changed.
7207 let snapshot =
7208 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
7209 let diff_bases_by_buffer = cx
7210 .background_executor()
7211 .spawn(async move {
7212 let mut diff_base_tasks = future_buffers
7213 .into_iter()
7214 .flatten()
7215 .chain(current_buffers)
7216 .filter_map(|(buffer, path, abs_path)| {
7217 let (work_directory, repo) =
7218 snapshot.repository_and_work_directory_for_path(&path)?;
7219 let repo_entry = snapshot.get_local_repo(&repo)?;
7220 Some((buffer, path, abs_path, work_directory, repo_entry))
7221 })
7222 .map(|(buffer, path, abs_path, work_directory, repo_entry)| {
7223 let fs = fs.clone();
7224 async move {
7225 let abs_path_metadata = fs
7226 .metadata(&abs_path)
7227 .await
7228 .with_context(|| {
7229 format!("loading file and FS metadata for {path:?}")
7230 })
7231 .log_err()
7232 .flatten()?;
7233 let base_text = if abs_path_metadata.is_dir
7234 || abs_path_metadata.is_symlink
7235 {
7236 None
7237 } else {
7238 let relative_path = path.strip_prefix(&work_directory).ok()?;
7239 repo_entry.repo().lock().load_index_text(relative_path)
7240 };
7241 Some((buffer, base_text))
7242 }
7243 })
7244 .collect::<FuturesUnordered<_>>();
7245
7246 let mut diff_bases = Vec::with_capacity(diff_base_tasks.len());
7247 while let Some(diff_base) = diff_base_tasks.next().await {
7248 if let Some(diff_base) = diff_base {
7249 diff_bases.push(diff_base);
7250 }
7251 }
7252 diff_bases
7253 })
7254 .await;
7255
7256 // Assign the new diff bases on all of the buffers.
7257 for (buffer, diff_base) in diff_bases_by_buffer {
7258 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
7259 buffer.set_diff_base(diff_base.clone(), cx);
7260 buffer.remote_id().into()
7261 })?;
7262 if let Some(project_id) = remote_id {
7263 client
7264 .send(proto::UpdateDiffBase {
7265 project_id,
7266 buffer_id,
7267 diff_base,
7268 })
7269 .log_err();
7270 }
7271 }
7272
7273 anyhow::Ok(())
7274 })
7275 .detach();
7276 }
7277
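    /// Reacts to changes to local settings and task configuration files within a
    /// worktree, reloading local settings and (re)registering static task sources.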
7278 fn update_local_worktree_settings(
7279 &mut self,
7280 worktree: &Model<Worktree>,
7281 changes: &UpdatedEntriesSet,
7282 cx: &mut ModelContext<Self>,
7283 ) {
7284 if worktree.read(cx).as_local().is_none() {
7285 return;
7286 }
7287 let project_id = self.remote_id();
7288 let worktree_id = worktree.entity_id();
7289 let remote_worktree_id = worktree.read(cx).id();
7290
7291 let mut settings_contents = Vec::new();
7292 for (path, _, change) in changes.iter() {
7293 let removed = change == &PathChange::Removed;
7294 let abs_path = match worktree.read(cx).absolutize(path) {
7295 Ok(abs_path) => abs_path,
7296 Err(e) => {
7297 log::warn!("Cannot absolutize {path:?} received as {change:?} FS change: {e}");
7298 continue;
7299 }
7300 };
7301
7302 if abs_path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
7303 let settings_dir = Arc::from(
7304 path.ancestors()
7305 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
7306 .unwrap(),
7307 );
7308 let fs = self.fs.clone();
7309 settings_contents.push(async move {
7310 (
7311 settings_dir,
7312 if removed {
7313 None
7314 } else {
                            Some(fs.load(&abs_path).await)
7316 },
7317 )
7318 });
7319 } else if abs_path.ends_with(&*LOCAL_TASKS_RELATIVE_PATH) {
7320 self.task_inventory().update(cx, |task_inventory, cx| {
7321 if removed {
7322 task_inventory.remove_local_static_source(&abs_path);
7323 } else {
7324 let fs = self.fs.clone();
7325 let task_abs_path = abs_path.clone();
7326 task_inventory.add_source(
7327 TaskSourceKind::Worktree {
7328 id: remote_worktree_id,
7329 abs_path,
7330 },
7331 |cx| {
7332 let tasks_file_rx =
7333 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7334 StaticSource::new(
7335 format!("local_tasks_for_workspace_{remote_worktree_id}"),
7336 TrackedFile::new(tasks_file_rx, cx),
7337 cx,
7338 )
7339 },
7340 cx,
7341 );
7342 }
7343 })
7344 } else if abs_path.ends_with(&*LOCAL_VSCODE_TASKS_RELATIVE_PATH) {
7345 self.task_inventory().update(cx, |task_inventory, cx| {
7346 if removed {
7347 task_inventory.remove_local_static_source(&abs_path);
7348 } else {
7349 let fs = self.fs.clone();
7350 let task_abs_path = abs_path.clone();
7351 task_inventory.add_source(
7352 TaskSourceKind::Worktree {
7353 id: remote_worktree_id,
7354 abs_path,
7355 },
7356 |cx| {
7357 let tasks_file_rx =
7358 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7359 StaticSource::new(
7360 format!(
7361 "local_vscode_tasks_for_workspace_{remote_worktree_id}"
7362 ),
7363 TrackedFile::new_convertible::<task::VsCodeTaskFile>(
7364 tasks_file_rx,
7365 cx,
7366 ),
7367 cx,
7368 )
7369 },
7370 cx,
7371 );
7372 }
7373 })
7374 }
7375 }
7376
7377 if settings_contents.is_empty() {
7378 return;
7379 }
7380
7381 let client = self.client.clone();
7382 cx.spawn(move |_, cx| async move {
7383 let settings_contents: Vec<(Arc<Path>, _)> =
7384 futures::future::join_all(settings_contents).await;
7385 cx.update(|cx| {
7386 cx.update_global::<SettingsStore, _>(|store, cx| {
7387 for (directory, file_content) in settings_contents {
7388 let file_content = file_content.and_then(|content| content.log_err());
7389 store
7390 .set_local_settings(
7391 worktree_id.as_u64() as usize,
7392 directory.clone(),
7393 file_content.as_deref(),
7394 cx,
7395 )
7396 .log_err();
7397 if let Some(remote_id) = project_id {
7398 client
7399 .send(proto::UpdateWorktreeSettings {
7400 project_id: remote_id,
7401 worktree_id: remote_worktree_id.to_proto(),
7402 path: directory.to_string_lossy().into_owned(),
7403 content: file_content,
7404 })
7405 .log_err();
7406 }
7407 }
7408 });
7409 })
7410 .ok();
7411 })
7412 .detach();
7413 }
7414
7415 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
7416 let new_active_entry = entry.and_then(|project_path| {
7417 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
7418 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
7419 Some(entry.id)
7420 });
7421 if new_active_entry != self.active_entry {
7422 self.active_entry = new_active_entry;
7423 cx.emit(Event::ActiveEntryChanged(new_active_entry));
7424 }
7425 }
7426
7427 pub fn language_servers_running_disk_based_diagnostics(
7428 &self,
7429 ) -> impl Iterator<Item = LanguageServerId> + '_ {
7430 self.language_server_statuses
7431 .iter()
7432 .filter_map(|(id, status)| {
7433 if status.has_pending_diagnostic_updates {
7434 Some(*id)
7435 } else {
7436 None
7437 }
7438 })
7439 }
7440
7441 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
7442 let mut summary = DiagnosticSummary::default();
7443 for (_, _, path_summary) in
7444 self.diagnostic_summaries(include_ignored, cx)
7445 .filter(|(path, _, _)| {
7446 let worktree = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
7447 include_ignored || worktree == Some(false)
7448 })
7449 {
7450 summary.error_count += path_summary.error_count;
7451 summary.warning_count += path_summary.warning_count;
7452 }
7453 summary
7454 }
7455
7456 pub fn diagnostic_summaries<'a>(
7457 &'a self,
7458 include_ignored: bool,
7459 cx: &'a AppContext,
7460 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
7461 self.visible_worktrees(cx)
7462 .flat_map(move |worktree| {
7463 let worktree = worktree.read(cx);
7464 let worktree_id = worktree.id();
7465 worktree
7466 .diagnostic_summaries()
7467 .map(move |(path, server_id, summary)| {
7468 (ProjectPath { worktree_id, path }, server_id, summary)
7469 })
7470 })
7471 .filter(move |(path, _, _)| {
7472 let worktree = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
7473 include_ignored || worktree == Some(false)
7474 })
7475 }
7476
7477 pub fn disk_based_diagnostics_started(
7478 &mut self,
7479 language_server_id: LanguageServerId,
7480 cx: &mut ModelContext<Self>,
7481 ) {
7482 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
7483 }
7484
7485 pub fn disk_based_diagnostics_finished(
7486 &mut self,
7487 language_server_id: LanguageServerId,
7488 cx: &mut ModelContext<Self>,
7489 ) {
7490 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
7491 }
7492
7493 pub fn active_entry(&self) -> Option<ProjectEntryId> {
7494 self.active_entry
7495 }
7496
7497 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
7498 self.worktree_for_id(path.worktree_id, cx)?
7499 .read(cx)
7500 .entry_for_path(&path.path)
7501 .cloned()
7502 }
7503
7504 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
7505 let worktree = self.worktree_for_entry(entry_id, cx)?;
7506 let worktree = worktree.read(cx);
7507 let worktree_id = worktree.id();
7508 let path = worktree.entry_for_id(entry_id)?.path.clone();
7509 Some(ProjectPath { worktree_id, path })
7510 }
7511
7512 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
7513 let workspace_root = self
7514 .worktree_for_id(project_path.worktree_id, cx)?
7515 .read(cx)
7516 .abs_path();
7517 let project_path = project_path.path.as_ref();
7518
7519 Some(if project_path == Path::new("") {
7520 workspace_root.to_path_buf()
7521 } else {
7522 workspace_root.join(project_path)
7523 })
7524 }
7525
7526 pub fn get_workspace_root(
7527 &self,
7528 project_path: &ProjectPath,
7529 cx: &AppContext,
7530 ) -> Option<PathBuf> {
7531 Some(
7532 self.worktree_for_id(project_path.worktree_id, cx)?
7533 .read(cx)
7534 .abs_path()
7535 .to_path_buf(),
7536 )
7537 }
7538
7539 pub fn get_repo(
7540 &self,
7541 project_path: &ProjectPath,
7542 cx: &AppContext,
7543 ) -> Option<Arc<Mutex<dyn GitRepository>>> {
7544 self.worktree_for_id(project_path.worktree_id, cx)?
7545 .read(cx)
7546 .as_local()?
7547 .snapshot()
7548 .local_git_repo(&project_path.path)
7549 }
7550
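    /// Computes a git blame for the buffer, using the buffer's current (or the given
    /// versioned) content against its repository; remote projects request the blame from
    /// the host.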
7551 pub fn blame_buffer(
7552 &self,
7553 buffer: &Model<Buffer>,
7554 version: Option<clock::Global>,
7555 cx: &AppContext,
7556 ) -> Task<Result<Blame>> {
7557 if self.is_local() {
7558 let blame_params = maybe!({
7559 let buffer = buffer.read(cx);
7560 let buffer_project_path = buffer
7561 .project_path(cx)
7562 .context("failed to get buffer project path")?;
7563
7564 let worktree = self
7565 .worktree_for_id(buffer_project_path.worktree_id, cx)
7566 .context("failed to get worktree")?
7567 .read(cx)
7568 .as_local()
7569 .context("worktree was not local")?
7570 .snapshot();
7571 let (work_directory, repo) = worktree
7572 .repository_and_work_directory_for_path(&buffer_project_path.path)
7573 .context("failed to get repo for blamed buffer")?;
7574
7575 let repo_entry = worktree
7576 .get_local_repo(&repo)
7577 .context("failed to get repo for blamed buffer")?;
7578
7579 let relative_path = buffer_project_path
7580 .path
7581 .strip_prefix(&work_directory)?
7582 .to_path_buf();
7583
7584 let content = match version {
7585 Some(version) => buffer.rope_for_version(&version).clone(),
7586 None => buffer.as_rope().clone(),
7587 };
7588 let repo = repo_entry.repo().clone();
7589
7590 anyhow::Ok((repo, relative_path, content))
7591 });
7592
7593 cx.background_executor().spawn(async move {
7594 let (repo, relative_path, content) = blame_params?;
7595 let lock = repo.lock();
7596 lock.blame(&relative_path, content)
7597 })
7598 } else {
7599 let project_id = self.remote_id();
7600 let buffer_id = buffer.read(cx).remote_id();
7601 let client = self.client.clone();
7602 let version = buffer.read(cx).version();
7603
7604 cx.spawn(|_| async move {
7605 let project_id = project_id.context("unable to get project id for buffer")?;
7606 let response = client
7607 .request(proto::BlameBuffer {
7608 project_id,
7609 buffer_id: buffer_id.into(),
7610 version: serialize_version(&version),
7611 })
7612 .await?;
7613
7614 Ok(deserialize_blame_buffer_response(response))
7615 })
7616 }
7617 }
7618
7619 // RPC message handlers
7620
7621 async fn handle_blame_buffer(
7622 this: Model<Self>,
7623 envelope: TypedEnvelope<proto::BlameBuffer>,
7624 _: Arc<Client>,
7625 mut cx: AsyncAppContext,
7626 ) -> Result<proto::BlameBufferResponse> {
7627 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
7628 let version = deserialize_version(&envelope.payload.version);
7629
7630 let buffer = this.update(&mut cx, |this, _cx| {
7631 this.opened_buffers
7632 .get(&buffer_id)
7633 .and_then(|buffer| buffer.upgrade())
7634 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7635 })??;
7636
7637 buffer
7638 .update(&mut cx, |buffer, _| {
7639 buffer.wait_for_version(version.clone())
7640 })?
7641 .await?;
7642
7643 let blame = this
7644 .update(&mut cx, |this, cx| {
7645 this.blame_buffer(&buffer, Some(version), cx)
7646 })?
7647 .await?;
7648
7649 Ok(serialize_blame_buffer_response(blame))
7650 }
7651
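    /// Handles a `MultiLspQuery` request by running the query against every local
    /// language server whose capabilities support it, collecting one response per
    /// server.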
7652 async fn handle_multi_lsp_query(
7653 project: Model<Self>,
7654 envelope: TypedEnvelope<proto::MultiLspQuery>,
7655 _: Arc<Client>,
7656 mut cx: AsyncAppContext,
7657 ) -> Result<proto::MultiLspQueryResponse> {
7658 let sender_id = envelope.original_sender_id()?;
7659 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
7660 let version = deserialize_version(&envelope.payload.version);
7661 let buffer = project.update(&mut cx, |project, _cx| {
7662 project
7663 .opened_buffers
7664 .get(&buffer_id)
7665 .and_then(|buffer| buffer.upgrade())
7666 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7667 })??;
7668 buffer
7669 .update(&mut cx, |buffer, _| {
7670 buffer.wait_for_version(version.clone())
7671 })?
7672 .await?;
7673 let buffer_version = buffer.update(&mut cx, |buffer, _| buffer.version())?;
7674 match envelope
7675 .payload
7676 .strategy
7677 .context("invalid request without the strategy")?
7678 {
7679 proto::multi_lsp_query::Strategy::All(_) => {
7680                 // Currently there is only one strategy for querying multiple language servers,
7681                 // so just ensure it's specified correctly.
7682 }
7683 }
7684 match envelope.payload.request {
7685 Some(proto::multi_lsp_query::Request::GetHover(get_hover)) => {
7686 let get_hover =
7687 GetHover::from_proto(get_hover, project.clone(), buffer.clone(), cx.clone())
7688 .await?;
7689 let all_hovers = project
7690 .update(&mut cx, |project, cx| {
7691 project.request_multiple_lsp_locally(
7692 &buffer,
7693 Some(get_hover.position),
7694 |server_capabilities| match server_capabilities.hover_provider {
7695 Some(lsp::HoverProviderCapability::Simple(enabled)) => enabled,
7696 Some(lsp::HoverProviderCapability::Options(_)) => true,
7697 None => false,
7698 },
7699 get_hover,
7700 cx,
7701 )
7702 })?
7703 .await
7704 .into_iter()
7705 .filter_map(|hover| remove_empty_hover_blocks(hover?));
7706 project.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
7707 responses: all_hovers
7708 .map(|hover| proto::LspResponse {
7709 response: Some(proto::lsp_response::Response::GetHoverResponse(
7710 GetHover::response_to_proto(
7711 Some(hover),
7712 project,
7713 sender_id,
7714 &buffer_version,
7715 cx,
7716 ),
7717 )),
7718 })
7719 .collect(),
7720 })
7721 }
7722 Some(proto::multi_lsp_query::Request::GetCodeActions(get_code_actions)) => {
7723 let get_code_actions = GetCodeActions::from_proto(
7724 get_code_actions,
7725 project.clone(),
7726 buffer.clone(),
7727 cx.clone(),
7728 )
7729 .await?;
7730
7731 let all_actions = project
7732 .update(&mut cx, |project, cx| {
7733 project.request_multiple_lsp_locally(
7734 &buffer,
7735 Some(get_code_actions.range.start),
7736 GetCodeActions::supports_code_actions,
7737 get_code_actions,
7738 cx,
7739 )
7740 })?
7741 .await
7742 .into_iter();
7743
7744 project.update(&mut cx, |project, cx| proto::MultiLspQueryResponse {
7745 responses: all_actions
7746 .map(|code_actions| proto::LspResponse {
7747 response: Some(proto::lsp_response::Response::GetCodeActionsResponse(
7748 GetCodeActions::response_to_proto(
7749 code_actions,
7750 project,
7751 sender_id,
7752 &buffer_version,
7753 cx,
7754 ),
7755 )),
7756 })
7757 .collect(),
7758 })
7759 }
7760 None => anyhow::bail!("empty multi lsp query request"),
7761 }
7762 }
7763
7764 async fn handle_unshare_project(
7765 this: Model<Self>,
7766 _: TypedEnvelope<proto::UnshareProject>,
7767 _: Arc<Client>,
7768 mut cx: AsyncAppContext,
7769 ) -> Result<()> {
7770 this.update(&mut cx, |this, cx| {
7771 if this.is_local() {
7772 this.unshare(cx)?;
7773 } else {
7774 this.disconnected_from_host(cx);
7775 }
7776 Ok(())
7777 })?
7778 }
7779
7780 async fn handle_add_collaborator(
7781 this: Model<Self>,
7782 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
7783 _: Arc<Client>,
7784 mut cx: AsyncAppContext,
7785 ) -> Result<()> {
7786 let collaborator = envelope
7787 .payload
7788 .collaborator
7789 .take()
7790 .ok_or_else(|| anyhow!("empty collaborator"))?;
7791
7792 let collaborator = Collaborator::from_proto(collaborator)?;
7793 this.update(&mut cx, |this, cx| {
7794 this.shared_buffers.remove(&collaborator.peer_id);
7795 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
7796 this.collaborators
7797 .insert(collaborator.peer_id, collaborator);
7798 cx.notify();
7799 })?;
7800
7801 Ok(())
7802 }
7803
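    /// Handles a collaborator's peer id changing: re-keys the collaborator and any
    /// buffers shared with them under the new id, and schedules a buffer resync if
    /// that collaborator is the host.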
7804 async fn handle_update_project_collaborator(
7805 this: Model<Self>,
7806 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
7807 _: Arc<Client>,
7808 mut cx: AsyncAppContext,
7809 ) -> Result<()> {
7810 let old_peer_id = envelope
7811 .payload
7812 .old_peer_id
7813 .ok_or_else(|| anyhow!("missing old peer id"))?;
7814 let new_peer_id = envelope
7815 .payload
7816 .new_peer_id
7817 .ok_or_else(|| anyhow!("missing new peer id"))?;
7818 this.update(&mut cx, |this, cx| {
7819 let collaborator = this
7820 .collaborators
7821 .remove(&old_peer_id)
7822 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
7823 let is_host = collaborator.replica_id == 0;
7824 this.collaborators.insert(new_peer_id, collaborator);
7825
7826 let buffers = this.shared_buffers.remove(&old_peer_id);
7827 log::info!(
7828 "peer {} became {}. moving buffers {:?}",
7829 old_peer_id,
7830 new_peer_id,
7831 &buffers
7832 );
7833 if let Some(buffers) = buffers {
7834 this.shared_buffers.insert(new_peer_id, buffers);
7835 }
7836
7837 if is_host {
7838 this.opened_buffers
7839 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
7840 this.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
7841 .unwrap();
7842 }
7843
7844 cx.emit(Event::CollaboratorUpdated {
7845 old_peer_id,
7846 new_peer_id,
7847 });
7848 cx.notify();
7849 Ok(())
7850 })?
7851 }
7852
7853 async fn handle_remove_collaborator(
7854 this: Model<Self>,
7855 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7856 _: Arc<Client>,
7857 mut cx: AsyncAppContext,
7858 ) -> Result<()> {
7859 this.update(&mut cx, |this, cx| {
7860 let peer_id = envelope
7861 .payload
7862 .peer_id
7863 .ok_or_else(|| anyhow!("invalid peer id"))?;
7864 let replica_id = this
7865 .collaborators
7866 .remove(&peer_id)
7867 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7868 .replica_id;
7869 for buffer in this.opened_buffers.values() {
7870 if let Some(buffer) = buffer.upgrade() {
7871 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7872 }
7873 }
7874 this.shared_buffers.remove(&peer_id);
7875
7876 cx.emit(Event::CollaboratorLeft(peer_id));
7877 cx.notify();
7878 Ok(())
7879 })?
7880 }
7881
7882 async fn handle_update_project(
7883 this: Model<Self>,
7884 envelope: TypedEnvelope<proto::UpdateProject>,
7885 _: Arc<Client>,
7886 mut cx: AsyncAppContext,
7887 ) -> Result<()> {
7888 this.update(&mut cx, |this, cx| {
7889             // Ignore messages that were sent before the response to our request to join the project
7890 if envelope.message_id > this.join_project_response_message_id {
7891 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7892 }
7893 Ok(())
7894 })?
7895 }
7896
7897 async fn handle_update_worktree(
7898 this: Model<Self>,
7899 envelope: TypedEnvelope<proto::UpdateWorktree>,
7900 _: Arc<Client>,
7901 mut cx: AsyncAppContext,
7902 ) -> Result<()> {
7903 this.update(&mut cx, |this, cx| {
7904 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7905 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7906 worktree.update(cx, |worktree, _| {
7907 let worktree = worktree.as_remote_mut().unwrap();
7908 worktree.update_from_remote(envelope.payload);
7909 });
7910 }
7911 Ok(())
7912 })?
7913 }
7914
7915 async fn handle_update_worktree_settings(
7916 this: Model<Self>,
7917 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7918 _: Arc<Client>,
7919 mut cx: AsyncAppContext,
7920 ) -> Result<()> {
7921 this.update(&mut cx, |this, cx| {
7922 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7923 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7924 cx.update_global::<SettingsStore, _>(|store, cx| {
7925 store
7926 .set_local_settings(
7927 worktree.entity_id().as_u64() as usize,
7928 PathBuf::from(&envelope.payload.path).into(),
7929 envelope.payload.content.as_deref(),
7930 cx,
7931 )
7932 .log_err();
7933 });
7934 }
7935 Ok(())
7936 })?
7937 }
7938
7939 async fn handle_create_project_entry(
7940 this: Model<Self>,
7941 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7942 _: Arc<Client>,
7943 mut cx: AsyncAppContext,
7944 ) -> Result<proto::ProjectEntryResponse> {
7945 let worktree = this.update(&mut cx, |this, cx| {
7946 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7947 this.worktree_for_id(worktree_id, cx)
7948 .ok_or_else(|| anyhow!("worktree not found"))
7949 })??;
7950 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7951 let entry = worktree
7952 .update(&mut cx, |worktree, cx| {
7953 let worktree = worktree.as_local_mut().unwrap();
7954 let path = PathBuf::from(envelope.payload.path);
7955 worktree.create_entry(path, envelope.payload.is_directory, cx)
7956 })?
7957 .await?;
7958 Ok(proto::ProjectEntryResponse {
7959 entry: entry.as_ref().map(|e| e.into()),
7960 worktree_scan_id: worktree_scan_id as u64,
7961 })
7962 }
7963
7964 async fn handle_rename_project_entry(
7965 this: Model<Self>,
7966 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7967 _: Arc<Client>,
7968 mut cx: AsyncAppContext,
7969 ) -> Result<proto::ProjectEntryResponse> {
7970 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7971 let worktree = this.update(&mut cx, |this, cx| {
7972 this.worktree_for_entry(entry_id, cx)
7973 .ok_or_else(|| anyhow!("worktree not found"))
7974 })??;
7975 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7976 let entry = worktree
7977 .update(&mut cx, |worktree, cx| {
7978 let new_path = PathBuf::from(envelope.payload.new_path);
7979 worktree
7980 .as_local_mut()
7981 .unwrap()
7982 .rename_entry(entry_id, new_path, cx)
7983 })?
7984 .await?;
7985 Ok(proto::ProjectEntryResponse {
7986 entry: entry.as_ref().map(|e| e.into()),
7987 worktree_scan_id: worktree_scan_id as u64,
7988 })
7989 }
7990
7991 async fn handle_copy_project_entry(
7992 this: Model<Self>,
7993 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7994 _: Arc<Client>,
7995 mut cx: AsyncAppContext,
7996 ) -> Result<proto::ProjectEntryResponse> {
7997 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7998 let worktree = this.update(&mut cx, |this, cx| {
7999 this.worktree_for_entry(entry_id, cx)
8000 .ok_or_else(|| anyhow!("worktree not found"))
8001 })??;
8002 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
8003 let entry = worktree
8004 .update(&mut cx, |worktree, cx| {
8005 let new_path = PathBuf::from(envelope.payload.new_path);
8006 worktree
8007 .as_local_mut()
8008 .unwrap()
8009 .copy_entry(entry_id, new_path, cx)
8010 })?
8011 .await?;
8012 Ok(proto::ProjectEntryResponse {
8013 entry: entry.as_ref().map(|e| e.into()),
8014 worktree_scan_id: worktree_scan_id as u64,
8015 })
8016 }
8017
8018 async fn handle_delete_project_entry(
8019 this: Model<Self>,
8020 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
8021 _: Arc<Client>,
8022 mut cx: AsyncAppContext,
8023 ) -> Result<proto::ProjectEntryResponse> {
8024 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
8025
8026 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
8027
8028 let worktree = this.update(&mut cx, |this, cx| {
8029 this.worktree_for_entry(entry_id, cx)
8030 .ok_or_else(|| anyhow!("worktree not found"))
8031 })??;
8032 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
8033 worktree
8034 .update(&mut cx, |worktree, cx| {
8035 worktree
8036 .as_local_mut()
8037 .unwrap()
8038 .delete_entry(entry_id, cx)
8039 .ok_or_else(|| anyhow!("invalid entry"))
8040 })??
8041 .await?;
8042 Ok(proto::ProjectEntryResponse {
8043 entry: None,
8044 worktree_scan_id: worktree_scan_id as u64,
8045 })
8046 }
8047
8048 async fn handle_expand_project_entry(
8049 this: Model<Self>,
8050 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
8051 _: Arc<Client>,
8052 mut cx: AsyncAppContext,
8053 ) -> Result<proto::ExpandProjectEntryResponse> {
8054 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
8055 let worktree = this
8056 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
8057 .ok_or_else(|| anyhow!("invalid request"))?;
8058 worktree
8059 .update(&mut cx, |worktree, cx| {
8060 worktree
8061 .as_local_mut()
8062 .unwrap()
8063 .expand_entry(entry_id, cx)
8064 .ok_or_else(|| anyhow!("invalid entry"))
8065 })??
8066 .await?;
8067 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
8068 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
8069 }
8070
8071 async fn handle_update_diagnostic_summary(
8072 this: Model<Self>,
8073 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
8074 _: Arc<Client>,
8075 mut cx: AsyncAppContext,
8076 ) -> Result<()> {
8077 this.update(&mut cx, |this, cx| {
8078 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8079 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
8080 if let Some(summary) = envelope.payload.summary {
8081 let project_path = ProjectPath {
8082 worktree_id,
8083 path: Path::new(&summary.path).into(),
8084 };
8085 worktree.update(cx, |worktree, _| {
8086 worktree
8087 .as_remote_mut()
8088 .unwrap()
8089 .update_diagnostic_summary(project_path.path.clone(), &summary);
8090 });
8091 cx.emit(Event::DiagnosticsUpdated {
8092 language_server_id: LanguageServerId(summary.language_server_id as usize),
8093 path: project_path,
8094 });
8095 }
8096 }
8097 Ok(())
8098 })?
8099 }
8100
8101 async fn handle_start_language_server(
8102 this: Model<Self>,
8103 envelope: TypedEnvelope<proto::StartLanguageServer>,
8104 _: Arc<Client>,
8105 mut cx: AsyncAppContext,
8106 ) -> Result<()> {
8107 let server = envelope
8108 .payload
8109 .server
8110 .ok_or_else(|| anyhow!("invalid server"))?;
8111 this.update(&mut cx, |this, cx| {
8112 this.language_server_statuses.insert(
8113 LanguageServerId(server.id as usize),
8114 LanguageServerStatus {
8115 name: server.name,
8116 pending_work: Default::default(),
8117 has_pending_diagnostic_updates: false,
8118 progress_tokens: Default::default(),
8119 },
8120 );
8121 cx.notify();
8122 })?;
8123 Ok(())
8124 }
8125
8126 async fn handle_update_language_server(
8127 this: Model<Self>,
8128 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
8129 _: Arc<Client>,
8130 mut cx: AsyncAppContext,
8131 ) -> Result<()> {
8132 this.update(&mut cx, |this, cx| {
8133 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
8134
8135 match envelope
8136 .payload
8137 .variant
8138 .ok_or_else(|| anyhow!("invalid variant"))?
8139 {
8140 proto::update_language_server::Variant::WorkStart(payload) => {
8141 this.on_lsp_work_start(
8142 language_server_id,
8143 payload.token,
8144 LanguageServerProgress {
8145 message: payload.message,
8146 percentage: payload.percentage.map(|p| p as usize),
8147 last_update_at: Instant::now(),
8148 },
8149 cx,
8150 );
8151 }
8152
8153 proto::update_language_server::Variant::WorkProgress(payload) => {
8154 this.on_lsp_work_progress(
8155 language_server_id,
8156 payload.token,
8157 LanguageServerProgress {
8158 message: payload.message,
8159 percentage: payload.percentage.map(|p| p as usize),
8160 last_update_at: Instant::now(),
8161 },
8162 cx,
8163 );
8164 }
8165
8166 proto::update_language_server::Variant::WorkEnd(payload) => {
8167 this.on_lsp_work_end(language_server_id, payload.token, cx);
8168 }
8169
8170 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
8171 this.disk_based_diagnostics_started(language_server_id, cx);
8172 }
8173
8174 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
8175 this.disk_based_diagnostics_finished(language_server_id, cx)
8176 }
8177 }
8178
8179 Ok(())
8180 })?
8181 }
8182
8183 async fn handle_update_buffer(
8184 this: Model<Self>,
8185 envelope: TypedEnvelope<proto::UpdateBuffer>,
8186 _: Arc<Client>,
8187 mut cx: AsyncAppContext,
8188 ) -> Result<proto::Ack> {
8189 this.update(&mut cx, |this, cx| {
8190 let payload = envelope.payload.clone();
8191 let buffer_id = BufferId::new(payload.buffer_id)?;
8192 let ops = payload
8193 .operations
8194 .into_iter()
8195 .map(language::proto::deserialize_operation)
8196 .collect::<Result<Vec<_>, _>>()?;
8197 let is_remote = this.is_remote();
8198 match this.opened_buffers.entry(buffer_id) {
8199 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
8200 OpenBuffer::Strong(buffer) => {
8201 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
8202 }
8203 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
8204 OpenBuffer::Weak(_) => {}
8205 },
8206 hash_map::Entry::Vacant(e) => {
8207 assert!(
8208 is_remote,
8209 "received buffer update from {:?}",
8210 envelope.original_sender_id
8211 );
8212 e.insert(OpenBuffer::Operations(ops));
8213 }
8214 }
8215 Ok(proto::Ack {})
8216 })?
8217 }
8218
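    /// Handles the host sending us a buffer, either as an initial `State` message
    /// or as a follow-up `Chunk` of operations. The buffer is held in
    /// `incomplete_remote_buffers` until the final chunk arrives, at which point
    /// it is registered with the project.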
8219 async fn handle_create_buffer_for_peer(
8220 this: Model<Self>,
8221 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
8222 _: Arc<Client>,
8223 mut cx: AsyncAppContext,
8224 ) -> Result<()> {
8225 this.update(&mut cx, |this, cx| {
8226 match envelope
8227 .payload
8228 .variant
8229 .ok_or_else(|| anyhow!("missing variant"))?
8230 {
8231 proto::create_buffer_for_peer::Variant::State(mut state) => {
8232 let buffer_id = BufferId::new(state.id)?;
8233
8234 let buffer_result = maybe!({
8235 let mut buffer_file = None;
8236 if let Some(file) = state.file.take() {
8237 let worktree_id = WorktreeId::from_proto(file.worktree_id);
8238 let worktree =
8239 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
8240 anyhow!("no worktree found for id {}", file.worktree_id)
8241 })?;
8242 buffer_file =
8243 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
8244 as Arc<dyn language::File>);
8245 }
8246 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
8247 });
8248
8249 match buffer_result {
8250 Ok(buffer) => {
8251 let buffer = cx.new_model(|_| buffer);
8252 this.incomplete_remote_buffers.insert(buffer_id, buffer);
8253 }
8254 Err(error) => {
8255 if let Some(listeners) = this.loading_buffers.remove(&buffer_id) {
8256 for listener in listeners {
8257 listener.send(Err(anyhow!(error.cloned()))).ok();
8258 }
8259 }
8260 }
8261 };
8262 }
8263 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
8264 let buffer_id = BufferId::new(chunk.buffer_id)?;
8265 let buffer = this
8266 .incomplete_remote_buffers
8267 .get(&buffer_id)
8268 .cloned()
8269 .ok_or_else(|| {
8270 anyhow!(
8271 "received chunk for buffer {} without initial state",
8272 chunk.buffer_id
8273 )
8274 })?;
8275
8276 let result = maybe!({
8277 let operations = chunk
8278 .operations
8279 .into_iter()
8280 .map(language::proto::deserialize_operation)
8281 .collect::<Result<Vec<_>>>()?;
8282 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))
8283 });
8284
8285 if let Err(error) = result {
8286 this.incomplete_remote_buffers.remove(&buffer_id);
8287 if let Some(listeners) = this.loading_buffers.remove(&buffer_id) {
8288 for listener in listeners {
8289 listener.send(Err(error.cloned())).ok();
8290 }
8291 }
8292                     } else if chunk.is_last {
8293                         this.incomplete_remote_buffers.remove(&buffer_id);
8294                         this.register_buffer(&buffer, cx)?;
8295                     }
8298 }
8299 }
8300
8301 Ok(())
8302 })?
8303 }
8304
8305 async fn handle_update_diff_base(
8306 this: Model<Self>,
8307 envelope: TypedEnvelope<proto::UpdateDiffBase>,
8308 _: Arc<Client>,
8309 mut cx: AsyncAppContext,
8310 ) -> Result<()> {
8311 this.update(&mut cx, |this, cx| {
8312 let buffer_id = envelope.payload.buffer_id;
8313 let buffer_id = BufferId::new(buffer_id)?;
8314 let diff_base = envelope.payload.diff_base;
8315 if let Some(buffer) = this
8316 .opened_buffers
8317 .get_mut(&buffer_id)
8318 .and_then(|b| b.upgrade())
8319 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned())
8320 {
8321 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
8322 }
8323 Ok(())
8324 })?
8325 }
8326
8327 async fn handle_update_buffer_file(
8328 this: Model<Self>,
8329 envelope: TypedEnvelope<proto::UpdateBufferFile>,
8330 _: Arc<Client>,
8331 mut cx: AsyncAppContext,
8332 ) -> Result<()> {
8333 let buffer_id = envelope.payload.buffer_id;
8334 let buffer_id = BufferId::new(buffer_id)?;
8335
8336 this.update(&mut cx, |this, cx| {
8337 let payload = envelope.payload.clone();
8338 if let Some(buffer) = this
8339 .opened_buffers
8340 .get(&buffer_id)
8341 .and_then(|b| b.upgrade())
8342 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned())
8343 {
8344 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
8345 let worktree = this
8346 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
8347 .ok_or_else(|| anyhow!("no such worktree"))?;
8348 let file = File::from_proto(file, worktree, cx)?;
8349 buffer.update(cx, |buffer, cx| {
8350 buffer.file_updated(Arc::new(file), cx);
8351 });
8352 this.detect_language_for_buffer(&buffer, cx);
8353 }
8354 Ok(())
8355 })?
8356 }
8357
8358 async fn handle_save_buffer(
8359 this: Model<Self>,
8360 envelope: TypedEnvelope<proto::SaveBuffer>,
8361 _: Arc<Client>,
8362 mut cx: AsyncAppContext,
8363 ) -> Result<proto::BufferSaved> {
8364 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8365 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
8366 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
8367 let buffer = this
8368 .opened_buffers
8369 .get(&buffer_id)
8370 .and_then(|buffer| buffer.upgrade())
8371 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8372 anyhow::Ok((project_id, buffer))
8373 })??;
8374 buffer
8375 .update(&mut cx, |buffer, _| {
8376 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8377 })?
8378 .await?;
8379 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
8380
8381 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
8382 .await?;
8383 buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
8384 project_id,
8385 buffer_id: buffer_id.into(),
8386 version: serialize_version(buffer.saved_version()),
8387 mtime: buffer.saved_mtime().map(|time| time.into()),
8388 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
8389 })
8390 }
8391
8392 async fn handle_reload_buffers(
8393 this: Model<Self>,
8394 envelope: TypedEnvelope<proto::ReloadBuffers>,
8395 _: Arc<Client>,
8396 mut cx: AsyncAppContext,
8397 ) -> Result<proto::ReloadBuffersResponse> {
8398 let sender_id = envelope.original_sender_id()?;
8399 let reload = this.update(&mut cx, |this, cx| {
8400 let mut buffers = HashSet::default();
8401 for buffer_id in &envelope.payload.buffer_ids {
8402 let buffer_id = BufferId::new(*buffer_id)?;
8403 buffers.insert(
8404 this.opened_buffers
8405 .get(&buffer_id)
8406 .and_then(|buffer| buffer.upgrade())
8407 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8408 );
8409 }
8410 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
8411 })??;
8412
8413 let project_transaction = reload.await?;
8414 let project_transaction = this.update(&mut cx, |this, cx| {
8415 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8416 })?;
8417 Ok(proto::ReloadBuffersResponse {
8418 transaction: Some(project_transaction),
8419 })
8420 }
8421
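    /// Handles a guest's `SynchronizeBuffers` request: replies with the host's
    /// version of each shared buffer and, in the background, sends the guest each
    /// buffer's file metadata, diff base, saved state, and any operations newer
    /// than the version the guest reported.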
8422 async fn handle_synchronize_buffers(
8423 this: Model<Self>,
8424 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
8425 _: Arc<Client>,
8426 mut cx: AsyncAppContext,
8427 ) -> Result<proto::SynchronizeBuffersResponse> {
8428 let project_id = envelope.payload.project_id;
8429 let mut response = proto::SynchronizeBuffersResponse {
8430 buffers: Default::default(),
8431 };
8432
8433 this.update(&mut cx, |this, cx| {
8434 let Some(guest_id) = envelope.original_sender_id else {
8435 error!("missing original_sender_id on SynchronizeBuffers request");
8436 bail!("missing original_sender_id on SynchronizeBuffers request");
8437 };
8438
8439 this.shared_buffers.entry(guest_id).or_default().clear();
8440 for buffer in envelope.payload.buffers {
8441 let buffer_id = BufferId::new(buffer.id)?;
8442 let remote_version = language::proto::deserialize_version(&buffer.version);
8443 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8444 this.shared_buffers
8445 .entry(guest_id)
8446 .or_default()
8447 .insert(buffer_id);
8448
8449 let buffer = buffer.read(cx);
8450 response.buffers.push(proto::BufferVersion {
8451 id: buffer_id.into(),
8452 version: language::proto::serialize_version(&buffer.version),
8453 });
8454
8455 let operations = buffer.serialize_ops(Some(remote_version), cx);
8456 let client = this.client.clone();
8457 if let Some(file) = buffer.file() {
8458 client
8459 .send(proto::UpdateBufferFile {
8460 project_id,
8461 buffer_id: buffer_id.into(),
8462 file: Some(file.to_proto()),
8463 })
8464 .log_err();
8465 }
8466
8467 client
8468 .send(proto::UpdateDiffBase {
8469 project_id,
8470 buffer_id: buffer_id.into(),
8471 diff_base: buffer.diff_base().map(Into::into),
8472 })
8473 .log_err();
8474
8475 client
8476 .send(proto::BufferReloaded {
8477 project_id,
8478 buffer_id: buffer_id.into(),
8479 version: language::proto::serialize_version(buffer.saved_version()),
8480 mtime: buffer.saved_mtime().map(|time| time.into()),
8481 fingerprint: language::proto::serialize_fingerprint(
8482 buffer.saved_version_fingerprint(),
8483 ),
8484 line_ending: language::proto::serialize_line_ending(
8485 buffer.line_ending(),
8486 ) as i32,
8487 })
8488 .log_err();
8489
8490 cx.background_executor()
8491 .spawn(
8492 async move {
8493 let operations = operations.await;
8494 for chunk in split_operations(operations) {
8495 client
8496 .request(proto::UpdateBuffer {
8497 project_id,
8498 buffer_id: buffer_id.into(),
8499 operations: chunk,
8500 })
8501 .await?;
8502 }
8503 anyhow::Ok(())
8504 }
8505 .log_err(),
8506 )
8507 .detach();
8508 }
8509 }
8510 Ok(())
8511 })??;
8512
8513 Ok(response)
8514 }
8515
8516 async fn handle_format_buffers(
8517 this: Model<Self>,
8518 envelope: TypedEnvelope<proto::FormatBuffers>,
8519 _: Arc<Client>,
8520 mut cx: AsyncAppContext,
8521 ) -> Result<proto::FormatBuffersResponse> {
8522 let sender_id = envelope.original_sender_id()?;
8523 let format = this.update(&mut cx, |this, cx| {
8524 let mut buffers = HashSet::default();
8525 for buffer_id in &envelope.payload.buffer_ids {
8526 let buffer_id = BufferId::new(*buffer_id)?;
8527 buffers.insert(
8528 this.opened_buffers
8529 .get(&buffer_id)
8530 .and_then(|buffer| buffer.upgrade())
8531 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8532 );
8533 }
8534 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
8535 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
8536 })??;
8537
8538 let project_transaction = format.await?;
8539 let project_transaction = this.update(&mut cx, |this, cx| {
8540 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8541 })?;
8542 Ok(proto::FormatBuffersResponse {
8543 transaction: Some(project_transaction),
8544 })
8545 }
8546
8547 async fn handle_apply_additional_edits_for_completion(
8548 this: Model<Self>,
8549 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
8550 _: Arc<Client>,
8551 mut cx: AsyncAppContext,
8552 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
8553 let languages = this.update(&mut cx, |this, _| this.languages.clone())?;
8554 let (buffer, completion) = this.update(&mut cx, |this, cx| {
8555 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8556 let buffer = this
8557 .opened_buffers
8558 .get(&buffer_id)
8559 .and_then(|buffer| buffer.upgrade())
8560 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8561 let language = buffer.read(cx).language();
8562 let completion = language::proto::deserialize_completion(
8563 envelope
8564 .payload
8565 .completion
8566 .ok_or_else(|| anyhow!("invalid completion"))?,
8567 language.cloned(),
8568 &languages,
8569 );
8570 Ok::<_, anyhow::Error>((buffer, completion))
8571 })??;
8572
8573 let completion = completion.await?;
8574
8575 let apply_additional_edits = this.update(&mut cx, |this, cx| {
8576 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
8577 })?;
8578
8579 Ok(proto::ApplyCompletionAdditionalEditsResponse {
8580 transaction: apply_additional_edits
8581 .await?
8582 .as_ref()
8583 .map(language::proto::serialize_transaction),
8584 })
8585 }
8586
8587 async fn handle_resolve_completion_documentation(
8588 this: Model<Self>,
8589 envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
8590 _: Arc<Client>,
8591 mut cx: AsyncAppContext,
8592 ) -> Result<proto::ResolveCompletionDocumentationResponse> {
8593 let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
8594
8595 let completion = this
8596 .read_with(&mut cx, |this, _| {
8597 let id = LanguageServerId(envelope.payload.language_server_id as usize);
8598 let Some(server) = this.language_server_for_id(id) else {
8599 return Err(anyhow!("No language server {id}"));
8600 };
8601
8602 Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
8603 })??
8604 .await?;
8605
8606 let mut is_markdown = false;
8607 let text = match completion.documentation {
8608 Some(lsp::Documentation::String(text)) => text,
8609
8610 Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
8611 is_markdown = kind == lsp::MarkupKind::Markdown;
8612 value
8613 }
8614
8615 _ => String::new(),
8616 };
8617
8618 Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
8619 }
8620
8621 async fn handle_apply_code_action(
8622 this: Model<Self>,
8623 envelope: TypedEnvelope<proto::ApplyCodeAction>,
8624 _: Arc<Client>,
8625 mut cx: AsyncAppContext,
8626 ) -> Result<proto::ApplyCodeActionResponse> {
8627 let sender_id = envelope.original_sender_id()?;
8628 let action = language::proto::deserialize_code_action(
8629 envelope
8630 .payload
8631 .action
8632 .ok_or_else(|| anyhow!("invalid action"))?,
8633 )?;
8634 let apply_code_action = this.update(&mut cx, |this, cx| {
8635 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8636 let buffer = this
8637 .opened_buffers
8638 .get(&buffer_id)
8639 .and_then(|buffer| buffer.upgrade())
8640 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
8641 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
8642 })??;
8643
8644 let project_transaction = apply_code_action.await?;
8645 let project_transaction = this.update(&mut cx, |this, cx| {
8646 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8647 })?;
8648 Ok(proto::ApplyCodeActionResponse {
8649 transaction: Some(project_transaction),
8650 })
8651 }
8652
8653 async fn handle_on_type_formatting(
8654 this: Model<Self>,
8655 envelope: TypedEnvelope<proto::OnTypeFormatting>,
8656 _: Arc<Client>,
8657 mut cx: AsyncAppContext,
8658 ) -> Result<proto::OnTypeFormattingResponse> {
8659 let on_type_formatting = this.update(&mut cx, |this, cx| {
8660 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8661 let buffer = this
8662 .opened_buffers
8663 .get(&buffer_id)
8664 .and_then(|buffer| buffer.upgrade())
8665 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8666 let position = envelope
8667 .payload
8668 .position
8669 .and_then(deserialize_anchor)
8670 .ok_or_else(|| anyhow!("invalid position"))?;
8671 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
8672 buffer,
8673 position,
8674 envelope.payload.trigger.clone(),
8675 cx,
8676 ))
8677 })??;
8678
8679 let transaction = on_type_formatting
8680 .await?
8681 .as_ref()
8682 .map(language::proto::serialize_transaction);
8683 Ok(proto::OnTypeFormattingResponse { transaction })
8684 }
8685
8686 async fn handle_inlay_hints(
8687 this: Model<Self>,
8688 envelope: TypedEnvelope<proto::InlayHints>,
8689 _: Arc<Client>,
8690 mut cx: AsyncAppContext,
8691 ) -> Result<proto::InlayHintsResponse> {
8692 let sender_id = envelope.original_sender_id()?;
8693 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8694 let buffer = this.update(&mut cx, |this, _| {
8695 this.opened_buffers
8696 .get(&buffer_id)
8697 .and_then(|buffer| buffer.upgrade())
8698 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
8699 })??;
8700 buffer
8701 .update(&mut cx, |buffer, _| {
8702 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8703 })?
8704 .await
8705 .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?;
8706
8707 let start = envelope
8708 .payload
8709 .start
8710 .and_then(deserialize_anchor)
8711 .context("missing range start")?;
8712 let end = envelope
8713 .payload
8714 .end
8715 .and_then(deserialize_anchor)
8716 .context("missing range end")?;
8717 let buffer_hints = this
8718 .update(&mut cx, |project, cx| {
8719 project.inlay_hints(buffer.clone(), start..end, cx)
8720 })?
8721 .await
8722 .context("inlay hints fetch")?;
8723
8724 this.update(&mut cx, |project, cx| {
8725 InlayHints::response_to_proto(
8726 buffer_hints,
8727 project,
8728 sender_id,
8729 &buffer.read(cx).version(),
8730 cx,
8731 )
8732 })
8733 }
8734
8735 async fn handle_resolve_inlay_hint(
8736 this: Model<Self>,
8737 envelope: TypedEnvelope<proto::ResolveInlayHint>,
8738 _: Arc<Client>,
8739 mut cx: AsyncAppContext,
8740 ) -> Result<proto::ResolveInlayHintResponse> {
8741 let proto_hint = envelope
8742 .payload
8743 .hint
8744             .context("incorrect protobuf resolve inlay hint message: missing the inlay hint")?;
8745 let hint = InlayHints::proto_to_project_hint(proto_hint)
8746 .context("resolved proto inlay hint conversion")?;
8747 let buffer = this.update(&mut cx, |this, _cx| {
8748 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8749 this.opened_buffers
8750 .get(&buffer_id)
8751 .and_then(|buffer| buffer.upgrade())
8752 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8753 })??;
8754 let response_hint = this
8755 .update(&mut cx, |project, cx| {
8756 project.resolve_inlay_hint(
8757 hint,
8758 buffer,
8759 LanguageServerId(envelope.payload.language_server_id as usize),
8760 cx,
8761 )
8762 })?
8763 .await
8764 .context("inlay hints fetch")?;
8765 Ok(proto::ResolveInlayHintResponse {
8766 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
8767 })
8768 }
8769
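    /// Resolves a code action via `codeAction/resolve` when the server supports it
    /// and the action carries resolve data but is still missing its command or its
    /// edit.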
8770 async fn try_resolve_code_action(
8771 lang_server: &LanguageServer,
8772 action: &mut CodeAction,
8773 ) -> anyhow::Result<()> {
8774 if GetCodeActions::can_resolve_actions(&lang_server.capabilities()) {
8775 if action.lsp_action.data.is_some()
8776 && (action.lsp_action.command.is_none() || action.lsp_action.edit.is_none())
8777 {
8778 action.lsp_action = lang_server
8779 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action.clone())
8780 .await?;
8781 }
8782 }
8783
8784 anyhow::Ok(())
8785 }
8786
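    /// Requests the given kinds of code actions for the entire buffer from each of
    /// the provided language servers, resolves them, and applies their workspace
    /// edits and commands, accumulating the resulting buffer transactions into
    /// `project_transaction`.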
8787 async fn execute_code_actions_on_servers(
8788 project: &WeakModel<Project>,
8789 adapters_and_servers: &Vec<(Arc<CachedLspAdapter>, Arc<LanguageServer>)>,
8790 code_actions: Vec<lsp::CodeActionKind>,
8791 buffer: &Model<Buffer>,
8792 push_to_history: bool,
8793 project_transaction: &mut ProjectTransaction,
8794 cx: &mut AsyncAppContext,
8795 ) -> Result<(), anyhow::Error> {
8796 for (lsp_adapter, language_server) in adapters_and_servers.iter() {
8797 let code_actions = code_actions.clone();
8798
8799 let actions = project
8800 .update(cx, move |this, cx| {
8801 let request = GetCodeActions {
8802 range: text::Anchor::MIN..text::Anchor::MAX,
8803 kinds: Some(code_actions),
8804 };
8805 let server = LanguageServerToQuery::Other(language_server.server_id());
8806 this.request_lsp(buffer.clone(), server, request, cx)
8807 })?
8808 .await?;
8809
8810 for mut action in actions {
8811 Self::try_resolve_code_action(&language_server, &mut action)
8812 .await
8813 .context("resolving a formatting code action")?;
8814
8815 if let Some(edit) = action.lsp_action.edit {
8816 if edit.changes.is_none() && edit.document_changes.is_none() {
8817 continue;
8818 }
8819
8820 let new = Self::deserialize_workspace_edit(
8821 project
8822 .upgrade()
8823 .ok_or_else(|| anyhow!("project dropped"))?,
8824 edit,
8825 push_to_history,
8826 lsp_adapter.clone(),
8827 language_server.clone(),
8828 cx,
8829 )
8830 .await?;
8831 project_transaction.0.extend(new.0);
8832 }
8833
8834 if let Some(command) = action.lsp_action.command {
8835 project.update(cx, |this, _| {
8836 this.last_workspace_edits_by_language_server
8837 .remove(&language_server.server_id());
8838 })?;
8839
8840 language_server
8841 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
8842 command: command.command,
8843 arguments: command.arguments.unwrap_or_default(),
8844 ..Default::default()
8845 })
8846 .await?;
8847
8848 project.update(cx, |this, _| {
8849 project_transaction.0.extend(
8850 this.last_workspace_edits_by_language_server
8851 .remove(&language_server.server_id())
8852 .unwrap_or_default()
8853 .0,
8854 )
8855 })?;
8856 }
8857 }
8858 }
8859
8860 Ok(())
8861 }
8862
8863 async fn handle_refresh_inlay_hints(
8864 this: Model<Self>,
8865 _: TypedEnvelope<proto::RefreshInlayHints>,
8866 _: Arc<Client>,
8867 mut cx: AsyncAppContext,
8868 ) -> Result<proto::Ack> {
8869 this.update(&mut cx, |_, cx| {
8870 cx.emit(Event::RefreshInlayHints);
8871 })?;
8872 Ok(proto::Ack {})
8873 }
8874
8875 async fn handle_lsp_command<T: LspCommand>(
8876 this: Model<Self>,
8877 envelope: TypedEnvelope<T::ProtoRequest>,
8878 _: Arc<Client>,
8879 mut cx: AsyncAppContext,
8880 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
8881 where
8882 <T::LspRequest as lsp::request::Request>::Params: Send,
8883 <T::LspRequest as lsp::request::Request>::Result: Send,
8884 {
8885 let sender_id = envelope.original_sender_id()?;
8886 let buffer_id = T::buffer_id_from_proto(&envelope.payload)?;
8887 let buffer_handle = this.update(&mut cx, |this, _cx| {
8888 this.opened_buffers
8889 .get(&buffer_id)
8890 .and_then(|buffer| buffer.upgrade())
8891 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8892 })??;
8893 let request = T::from_proto(
8894 envelope.payload,
8895 this.clone(),
8896 buffer_handle.clone(),
8897 cx.clone(),
8898 )
8899 .await?;
8900 let response = this
8901 .update(&mut cx, |this, cx| {
8902 this.request_lsp(
8903 buffer_handle.clone(),
8904 LanguageServerToQuery::Primary,
8905 request,
8906 cx,
8907 )
8908 })?
8909 .await?;
8910 this.update(&mut cx, |this, cx| {
8911 Ok(T::response_to_proto(
8912 response,
8913 this,
8914 sender_id,
8915 &buffer_handle.read(cx).version(),
8916 cx,
8917 ))
8918 })?
8919 }
8920
8921 async fn handle_get_project_symbols(
8922 this: Model<Self>,
8923 envelope: TypedEnvelope<proto::GetProjectSymbols>,
8924 _: Arc<Client>,
8925 mut cx: AsyncAppContext,
8926 ) -> Result<proto::GetProjectSymbolsResponse> {
8927 let symbols = this
8928 .update(&mut cx, |this, cx| {
8929 this.symbols(&envelope.payload.query, cx)
8930 })?
8931 .await?;
8932
8933 Ok(proto::GetProjectSymbolsResponse {
8934 symbols: symbols.iter().map(serialize_symbol).collect(),
8935 })
8936 }
8937
8938 async fn handle_search_project(
8939 this: Model<Self>,
8940 envelope: TypedEnvelope<proto::SearchProject>,
8941 _: Arc<Client>,
8942 mut cx: AsyncAppContext,
8943 ) -> Result<proto::SearchProjectResponse> {
8944 let peer_id = envelope.original_sender_id()?;
8945 let query = SearchQuery::from_proto(envelope.payload)?;
8946 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
8947
8948 cx.spawn(move |mut cx| async move {
8949 let mut locations = Vec::new();
8950 let mut limit_reached = false;
8951 while let Some(result) = result.next().await {
8952 match result {
8953 SearchResult::Buffer { buffer, ranges } => {
8954 for range in ranges {
8955 let start = serialize_anchor(&range.start);
8956 let end = serialize_anchor(&range.end);
8957 let buffer_id = this.update(&mut cx, |this, cx| {
8958 this.create_buffer_for_peer(&buffer, peer_id, cx).into()
8959 })?;
8960 locations.push(proto::Location {
8961 buffer_id,
8962 start: Some(start),
8963 end: Some(end),
8964 });
8965 }
8966 }
8967 SearchResult::LimitReached => limit_reached = true,
8968 }
8969 }
8970 Ok(proto::SearchProjectResponse {
8971 locations,
8972 limit_reached,
8973 })
8974 })
8975 .await
8976 }
8977
8978 async fn handle_open_buffer_for_symbol(
8979 this: Model<Self>,
8980 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
8981 _: Arc<Client>,
8982 mut cx: AsyncAppContext,
8983 ) -> Result<proto::OpenBufferForSymbolResponse> {
8984 let peer_id = envelope.original_sender_id()?;
8985 let symbol = envelope
8986 .payload
8987 .symbol
8988 .ok_or_else(|| anyhow!("invalid symbol"))?;
8989 let symbol = this
8990 .update(&mut cx, |this, _cx| this.deserialize_symbol(symbol))?
8991 .await?;
8992 let symbol = this.update(&mut cx, |this, _| {
8993 let signature = this.symbol_signature(&symbol.path);
8994 if signature == symbol.signature {
8995 Ok(symbol)
8996 } else {
8997 Err(anyhow!("invalid symbol signature"))
8998 }
8999 })??;
9000 let buffer = this
9001 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
9002 .await?;
9003
9004 this.update(&mut cx, |this, cx| {
9005 let is_private = buffer
9006 .read(cx)
9007 .file()
9008 .map(|f| f.is_private())
9009 .unwrap_or_default();
9010 if is_private {
9011 Err(anyhow!(ErrorCode::UnsharedItem))
9012 } else {
9013 Ok(proto::OpenBufferForSymbolResponse {
9014 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
9015 })
9016 }
9017 })?
9018 }
9019
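    /// Computes a SHA-256 signature over a project path and this project's nonce.
    /// `handle_open_buffer_for_symbol` uses it to verify that a requested symbol
    /// was actually produced by this project.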
9020 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
9021 let mut hasher = Sha256::new();
9022 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
9023 hasher.update(project_path.path.to_string_lossy().as_bytes());
9024 hasher.update(self.nonce.to_be_bytes());
9025 hasher.finalize().as_slice().try_into().unwrap()
9026 }
9027
9028 async fn handle_open_buffer_by_id(
9029 this: Model<Self>,
9030 envelope: TypedEnvelope<proto::OpenBufferById>,
9031 _: Arc<Client>,
9032 mut cx: AsyncAppContext,
9033 ) -> Result<proto::OpenBufferResponse> {
9034 let peer_id = envelope.original_sender_id()?;
9035 let buffer_id = BufferId::new(envelope.payload.id)?;
9036 let buffer = this
9037 .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))?
9038 .await?;
9039 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
9040 }
9041
9042 async fn handle_open_buffer_by_path(
9043 this: Model<Self>,
9044 envelope: TypedEnvelope<proto::OpenBufferByPath>,
9045 _: Arc<Client>,
9046 mut cx: AsyncAppContext,
9047 ) -> Result<proto::OpenBufferResponse> {
9048 let peer_id = envelope.original_sender_id()?;
9049 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
9050 let open_buffer = this.update(&mut cx, |this, cx| {
9051 this.open_buffer(
9052 ProjectPath {
9053 worktree_id,
9054 path: PathBuf::from(envelope.payload.path).into(),
9055 },
9056 cx,
9057 )
9058 })?;
9059
9060 let buffer = open_buffer.await?;
9061 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
9062 }
9063
9064 fn respond_to_open_buffer_request(
9065 this: Model<Self>,
9066 buffer: Model<Buffer>,
9067 peer_id: proto::PeerId,
9068 cx: &mut AsyncAppContext,
9069 ) -> Result<proto::OpenBufferResponse> {
9070 this.update(cx, |this, cx| {
9071 let is_private = buffer
9072 .read(cx)
9073 .file()
9074 .map(|f| f.is_private())
9075 .unwrap_or_default();
9076 if is_private {
9077 Err(anyhow!(ErrorCode::UnsharedItem))
9078 } else {
9079 Ok(proto::OpenBufferResponse {
9080 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
9081 })
9082 }
9083 })?
9084 }
9085
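    /// Converts a [`ProjectTransaction`] into its protobuf representation for the
    /// given peer, registering each affected buffer with that peer so the buffer
    /// ids in the message are meaningful to them.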
9086 fn serialize_project_transaction_for_peer(
9087 &mut self,
9088 project_transaction: ProjectTransaction,
9089 peer_id: proto::PeerId,
9090 cx: &mut AppContext,
9091 ) -> proto::ProjectTransaction {
9092 let mut serialized_transaction = proto::ProjectTransaction {
9093 buffer_ids: Default::default(),
9094 transactions: Default::default(),
9095 };
9096 for (buffer, transaction) in project_transaction.0 {
9097 serialized_transaction
9098 .buffer_ids
9099 .push(self.create_buffer_for_peer(&buffer, peer_id, cx).into());
9100 serialized_transaction
9101 .transactions
9102 .push(language::proto::serialize_transaction(&transaction));
9103 }
9104 serialized_transaction
9105 }
9106
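    /// Reconstructs a [`ProjectTransaction`] from its protobuf representation,
    /// waiting for each referenced remote buffer and its edits to arrive, and
    /// optionally pushing the transactions onto the buffers' histories.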
9107 fn deserialize_project_transaction(
9108 &mut self,
9109 message: proto::ProjectTransaction,
9110 push_to_history: bool,
9111 cx: &mut ModelContext<Self>,
9112 ) -> Task<Result<ProjectTransaction>> {
9113 cx.spawn(move |this, mut cx| async move {
9114 let mut project_transaction = ProjectTransaction::default();
9115 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
9116 {
9117 let buffer_id = BufferId::new(buffer_id)?;
9118 let buffer = this
9119 .update(&mut cx, |this, cx| {
9120 this.wait_for_remote_buffer(buffer_id, cx)
9121 })?
9122 .await?;
9123 let transaction = language::proto::deserialize_transaction(transaction)?;
9124 project_transaction.0.insert(buffer, transaction);
9125 }
9126
9127 for (buffer, transaction) in &project_transaction.0 {
9128 buffer
9129 .update(&mut cx, |buffer, _| {
9130 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
9131 })?
9132 .await?;
9133
9134 if push_to_history {
9135 buffer.update(&mut cx, |buffer, _| {
9136 buffer.push_transaction(transaction.clone(), Instant::now());
9137 })?;
9138 }
9139 }
9140
9141 Ok(project_transaction)
9142 })
9143 }
9144
9145 fn create_buffer_for_peer(
9146 &mut self,
9147 buffer: &Model<Buffer>,
9148 peer_id: proto::PeerId,
9149 cx: &mut AppContext,
9150 ) -> BufferId {
9151 let buffer_id = buffer.read(cx).remote_id();
9152 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
9153 updates_tx
9154 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
9155 .ok();
9156 }
9157 buffer_id
9158 }
9159
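    /// Returns the buffer with the given id, waiting for the host to send it if it
    /// hasn't been received yet.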
9160 fn wait_for_remote_buffer(
9161 &mut self,
9162 id: BufferId,
9163 cx: &mut ModelContext<Self>,
9164 ) -> Task<Result<Model<Buffer>>> {
9165 let buffer = self
9166 .opened_buffers
9167 .get(&id)
9168 .and_then(|buffer| buffer.upgrade());
9169
9170 if let Some(buffer) = buffer {
9171 return Task::ready(Ok(buffer));
9172 }
9173
9174 let (tx, rx) = oneshot::channel();
9175 self.loading_buffers.entry(id).or_default().push(tx);
9176
9177 cx.background_executor().spawn(async move { rx.await? })
9178 }
9179
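    /// As a guest, reconciles all open buffers with the host: reports our buffer
    /// versions, pushes any local operations the host is missing, and re-requests
    /// buffers whose initial state never fully arrived.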
9180 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
9181 let project_id = match self.client_state {
9182 ProjectClientState::Remote {
9183 sharing_has_stopped,
9184 remote_id,
9185 ..
9186 } => {
9187 if sharing_has_stopped {
9188 return Task::ready(Err(anyhow!(
9189 "can't synchronize remote buffers on a readonly project"
9190 )));
9191 } else {
9192 remote_id
9193 }
9194 }
9195 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
9196 return Task::ready(Err(anyhow!(
9197 "can't synchronize remote buffers on a local project"
9198 )))
9199 }
9200 };
9201
9202 let client = self.client.clone();
9203 cx.spawn(move |this, mut cx| async move {
9204 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
9205 let buffers = this
9206 .opened_buffers
9207 .iter()
9208 .filter_map(|(id, buffer)| {
9209 let buffer = buffer.upgrade()?;
9210 Some(proto::BufferVersion {
9211 id: (*id).into(),
9212 version: language::proto::serialize_version(&buffer.read(cx).version),
9213 })
9214 })
9215 .collect();
9216 let incomplete_buffer_ids = this
9217 .incomplete_remote_buffers
9218 .keys()
9219 .copied()
9220 .collect::<Vec<_>>();
9221
9222 (buffers, incomplete_buffer_ids)
9223 })?;
9224 let response = client
9225 .request(proto::SynchronizeBuffers {
9226 project_id,
9227 buffers,
9228 })
9229 .await?;
9230
9231 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
9232 response
9233 .buffers
9234 .into_iter()
9235 .map(|buffer| {
9236 let client = client.clone();
9237 let buffer_id = match BufferId::new(buffer.id) {
9238 Ok(id) => id,
9239 Err(e) => {
9240 return Task::ready(Err(e));
9241 }
9242 };
9243 let remote_version = language::proto::deserialize_version(&buffer.version);
9244 if let Some(buffer) = this.buffer_for_id(buffer_id) {
9245 let operations =
9246 buffer.read(cx).serialize_ops(Some(remote_version), cx);
9247 cx.background_executor().spawn(async move {
9248 let operations = operations.await;
9249 for chunk in split_operations(operations) {
9250 client
9251 .request(proto::UpdateBuffer {
9252 project_id,
9253 buffer_id: buffer_id.into(),
9254 operations: chunk,
9255 })
9256 .await?;
9257 }
9258 anyhow::Ok(())
9259 })
9260 } else {
9261 Task::ready(Ok(()))
9262 }
9263 })
9264 .collect::<Vec<_>>()
9265 })?;
9266
9267             // Any incomplete buffers have open requests waiting. Ask the host to re-send
9268             // the create messages for these buffers, so that any waiting futures are unblocked.
9269 for id in incomplete_buffer_ids {
9270 cx.background_executor()
9271 .spawn(client.request(proto::OpenBufferById {
9272 project_id,
9273 id: id.into(),
9274 }))
9275 .detach();
9276 }
9277
9278 futures::future::join_all(send_updates_for_buffers)
9279 .await
9280 .into_iter()
9281 .collect()
9282 })
9283 }
9284
9285 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
9286 self.worktrees()
9287 .map(|worktree| {
9288 let worktree = worktree.read(cx);
9289 proto::WorktreeMetadata {
9290 id: worktree.id().to_proto(),
9291 root_name: worktree.root_name().into(),
9292 visible: worktree.is_visible(),
9293 abs_path: worktree.abs_path().to_string_lossy().into(),
9294 }
9295 })
9296 .collect()
9297 }
9298
9299 fn set_worktrees_from_proto(
9300 &mut self,
9301 worktrees: Vec<proto::WorktreeMetadata>,
9302 cx: &mut ModelContext<Project>,
9303 ) -> Result<()> {
9304 let replica_id = self.replica_id();
9305 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
9306
9307 let mut old_worktrees_by_id = self
9308 .worktrees
9309 .drain(..)
9310 .filter_map(|worktree| {
9311 let worktree = worktree.upgrade()?;
9312 Some((worktree.read(cx).id(), worktree))
9313 })
9314 .collect::<HashMap<_, _>>();
9315
9316 for worktree in worktrees {
9317 if let Some(old_worktree) =
9318 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
9319 {
9320 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
9321 } else {
9322 let worktree =
9323 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
9324 let _ = self.add_worktree(&worktree, cx);
9325 }
9326 }
9327
9328 self.metadata_changed(cx);
9329 for id in old_worktrees_by_id.keys() {
9330 cx.emit(Event::WorktreeRemoved(*id));
9331 }
9332
9333 Ok(())
9334 }
9335
9336 fn set_collaborators_from_proto(
9337 &mut self,
9338 messages: Vec<proto::Collaborator>,
9339 cx: &mut ModelContext<Self>,
9340 ) -> Result<()> {
9341 let mut collaborators = HashMap::default();
9342 for message in messages {
9343 let collaborator = Collaborator::from_proto(message)?;
9344 collaborators.insert(collaborator.peer_id, collaborator);
9345 }
9346 for old_peer_id in self.collaborators.keys() {
9347 if !collaborators.contains_key(old_peer_id) {
9348 cx.emit(Event::CollaboratorLeft(*old_peer_id));
9349 }
9350 }
9351 self.collaborators = collaborators;
9352 Ok(())
9353 }
9354
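    /// Reconstructs a [`Symbol`] from its protobuf representation, resolving the
    /// language and LSP adapter for its path in order to compute a display label.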
9355 fn deserialize_symbol(
9356 &self,
9357 serialized_symbol: proto::Symbol,
9358 ) -> impl Future<Output = Result<Symbol>> {
9359 let languages = self.languages.clone();
9360 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
9361 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
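        // Note: this transmute assumes the serialized `kind` is a valid
        // discriminant of the symbol-kind enum it is converted into.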
9362 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
9363 let path = ProjectPath {
9364 worktree_id,
9365 path: PathBuf::from(serialized_symbol.path).into(),
9366 };
9367 let language = languages.language_for_file_path(&path.path);
9368
9369 async move {
9370 let language = language.await.log_err();
9371 let adapter = language
9372 .as_ref()
9373 .and_then(|language| languages.lsp_adapters(language).first().cloned());
9374 let start = serialized_symbol
9375 .start
9376 .ok_or_else(|| anyhow!("invalid start"))?;
9377 let end = serialized_symbol
9378 .end
9379 .ok_or_else(|| anyhow!("invalid end"))?;
9380 Ok(Symbol {
9381 language_server_name: LanguageServerName(
9382 serialized_symbol.language_server_name.into(),
9383 ),
9384 source_worktree_id,
9385 path,
9386 label: {
9387 match language.as_ref().zip(adapter.as_ref()) {
9388 Some((language, adapter)) => {
9389 adapter
9390 .label_for_symbol(&serialized_symbol.name, kind, language)
9391 .await
9392 }
9393 None => None,
9394 }
9395 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
9396 },
9397
9398 name: serialized_symbol.name,
9399 range: Unclipped(PointUtf16::new(start.row, start.column))
9400 ..Unclipped(PointUtf16::new(end.row, end.column)),
9401 kind,
9402 signature: serialized_symbol
9403 .signature
9404 .try_into()
9405 .map_err(|_| anyhow!("invalid signature"))?,
9406 })
9407 }
9408 }
9409
9410 async fn handle_buffer_saved(
9411 this: Model<Self>,
9412 envelope: TypedEnvelope<proto::BufferSaved>,
9413 _: Arc<Client>,
9414 mut cx: AsyncAppContext,
9415 ) -> Result<()> {
9416         let fingerprint = RopeFingerprint::default();
9417 let version = deserialize_version(&envelope.payload.version);
9418 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
9419 let mtime = envelope.payload.mtime.map(|time| time.into());
9420
9421 this.update(&mut cx, |this, cx| {
9422 let buffer = this
9423 .opened_buffers
9424 .get(&buffer_id)
9425 .and_then(|buffer| buffer.upgrade())
9426 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned());
9427 if let Some(buffer) = buffer {
9428 buffer.update(cx, |buffer, cx| {
9429 buffer.did_save(version, fingerprint, mtime, cx);
9430 });
9431 }
9432 Ok(())
9433 })?
9434 }
9435
9436 async fn handle_buffer_reloaded(
9437 this: Model<Self>,
9438 envelope: TypedEnvelope<proto::BufferReloaded>,
9439 _: Arc<Client>,
9440 mut cx: AsyncAppContext,
9441 ) -> Result<()> {
9442 let payload = envelope.payload;
9443 let version = deserialize_version(&payload.version);
9444 let fingerprint = RopeFingerprint::default();
9445 let line_ending = deserialize_line_ending(
9446 proto::LineEnding::from_i32(payload.line_ending)
9447 .ok_or_else(|| anyhow!("missing line ending"))?,
9448 );
9449 let mtime = payload.mtime.map(|time| time.into());
9450 let buffer_id = BufferId::new(payload.buffer_id)?;
9451 this.update(&mut cx, |this, cx| {
9452 let buffer = this
9453 .opened_buffers
9454 .get(&buffer_id)
9455 .and_then(|buffer| buffer.upgrade())
9456 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned());
9457 if let Some(buffer) = buffer {
9458 buffer.update(cx, |buffer, cx| {
9459 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
9460 });
9461 }
9462 Ok(())
9463 })?
9464 }
9465
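    /// Converts a set of LSP text edits into anchor-ranged edits against the
    /// buffer snapshot corresponding to the given LSP document version. Adjacent
    /// or newline-separated edits are merged, and multi-line edits are diffed
    /// against the old text so that anchors in unchanged regions are preserved.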
9466 #[allow(clippy::type_complexity)]
9467 fn edits_from_lsp(
9468 &mut self,
9469 buffer: &Model<Buffer>,
9470 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
9471 server_id: LanguageServerId,
9472 version: Option<i32>,
9473 cx: &mut ModelContext<Self>,
9474 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
9475 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
9476 cx.background_executor().spawn(async move {
9477 let snapshot = snapshot?;
9478 let mut lsp_edits = lsp_edits
9479 .into_iter()
9480 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
9481 .collect::<Vec<_>>();
9482 lsp_edits.sort_by_key(|(range, _)| range.start);
9483
9484 let mut lsp_edits = lsp_edits.into_iter().peekable();
9485 let mut edits = Vec::new();
9486 while let Some((range, mut new_text)) = lsp_edits.next() {
9487 // Clip invalid ranges provided by the language server.
9488 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
9489 ..snapshot.clip_point_utf16(range.end, Bias::Left);
9490
9491 // Combine any LSP edits that are adjacent.
9492 //
9493 // Also, combine LSP edits that are separated from each other by only
9494 // a newline. This is important because for some code actions,
9495 // Rust-analyzer rewrites the entire buffer via a series of edits that
9496 // are separated by unchanged newline characters.
9497 //
9498 // In order for the diffing logic below to work properly, any edits that
9499 // cancel each other out must be combined into one.
9500 while let Some((next_range, next_text)) = lsp_edits.peek() {
9501 if next_range.start.0 > range.end {
9502 if next_range.start.0.row > range.end.row + 1
9503 || next_range.start.0.column > 0
9504 || snapshot.clip_point_utf16(
9505 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
9506 Bias::Left,
9507 ) > range.end
9508 {
9509 break;
9510 }
9511 new_text.push('\n');
9512 }
9513 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
9514 new_text.push_str(next_text);
9515 lsp_edits.next();
9516 }
9517
9518 // For multiline edits, perform a diff of the old and new text so that
9519 // we can identify the changes more precisely, preserving the locations
9520 // of any anchors positioned in the unchanged regions.
9521 if range.end.row > range.start.row {
9522 let mut offset = range.start.to_offset(&snapshot);
9523 let old_text = snapshot.text_for_range(range).collect::<String>();
9524
9525 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
9526 let mut moved_since_edit = true;
9527 for change in diff.iter_all_changes() {
9528 let tag = change.tag();
9529 let value = change.value();
9530 match tag {
9531 ChangeTag::Equal => {
9532 offset += value.len();
9533 moved_since_edit = true;
9534 }
9535 ChangeTag::Delete => {
9536 let start = snapshot.anchor_after(offset);
9537 let end = snapshot.anchor_before(offset + value.len());
9538 if moved_since_edit {
9539 edits.push((start..end, String::new()));
9540 } else {
9541 edits.last_mut().unwrap().0.end = end;
9542 }
9543 offset += value.len();
9544 moved_since_edit = false;
9545 }
9546 ChangeTag::Insert => {
9547 if moved_since_edit {
9548 let anchor = snapshot.anchor_after(offset);
9549 edits.push((anchor..anchor, value.to_string()));
9550 } else {
9551 edits.last_mut().unwrap().1.push_str(value);
9552 }
9553 moved_since_edit = false;
9554 }
9555 }
9556 }
9557 } else if range.end == range.start {
9558 let anchor = snapshot.anchor_after(range.start);
9559 edits.push((anchor..anchor, new_text));
9560 } else {
9561 let edit_start = snapshot.anchor_after(range.start);
9562 let edit_end = snapshot.anchor_before(range.end);
9563 edits.push((edit_start..edit_end, new_text));
9564 }
9565 }
9566
9567 Ok(edits)
9568 })
9569 }
9570
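    /// Returns the buffer snapshot that was reported to the given language
    /// server at `version`, pruning snapshots that are more than
    /// `OLD_VERSIONS_TO_RETAIN` versions old. If no version is provided, the
    /// buffer's current text snapshot is returned.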
9571 fn buffer_snapshot_for_lsp_version(
9572 &mut self,
9573 buffer: &Model<Buffer>,
9574 server_id: LanguageServerId,
9575 version: Option<i32>,
9576 cx: &AppContext,
9577 ) -> Result<TextBufferSnapshot> {
9578 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
9579
9580 if let Some(version) = version {
9581 let buffer_id = buffer.read(cx).remote_id();
9582 let snapshots = self
9583 .buffer_snapshots
9584 .get_mut(&buffer_id)
9585 .and_then(|m| m.get_mut(&server_id))
9586 .ok_or_else(|| {
9587 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
9588 })?;
9589
9590 let found_snapshot = snapshots
9591 .binary_search_by_key(&version, |e| e.version)
9592 .map(|ix| snapshots[ix].snapshot.clone())
9593 .map_err(|_| {
9594 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
9595 })?;
9596
9597 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
9598 Ok(found_snapshot)
9599 } else {
            Ok(buffer.read(cx).text_snapshot())
9601 }
9602 }
9603
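    /// Returns an iterator over all language servers registered for this
    /// project, yielding each server's id and name along with the worktree it
    /// was started for.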
9604 pub fn language_servers(
9605 &self,
9606 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
9607 self.language_server_ids
9608 .iter()
9609 .map(|((worktree_id, server_name), server_id)| {
9610 (*server_id, server_name.clone(), *worktree_id)
9611 })
9612 }
9613
9614 pub fn supplementary_language_servers(
9615 &self,
9616 ) -> impl '_
9617 + Iterator<
9618 Item = (
9619 &LanguageServerId,
9620 &(LanguageServerName, Arc<LanguageServer>),
9621 ),
9622 > {
9623 self.supplementary_language_servers.iter()
9624 }
9625
9626 pub fn language_server_adapter_for_id(
9627 &self,
9628 id: LanguageServerId,
9629 ) -> Option<Arc<CachedLspAdapter>> {
9630 if let Some(LanguageServerState::Running { adapter, .. }) = self.language_servers.get(&id) {
9631 Some(adapter.clone())
9632 } else {
9633 None
9634 }
9635 }
9636
9637 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
9638 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
9639 Some(server.clone())
9640 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
9641 Some(Arc::clone(server))
9642 } else {
9643 None
9644 }
9645 }
9646
9647 pub fn language_servers_for_buffer(
9648 &self,
9649 buffer: &Buffer,
9650 cx: &AppContext,
9651 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9652 self.language_server_ids_for_buffer(buffer, cx)
9653 .into_iter()
9654 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
9655 LanguageServerState::Running {
9656 adapter, server, ..
9657 } => Some((adapter, server)),
9658 _ => None,
9659 })
9660 }
9661
9662 fn primary_language_server_for_buffer(
9663 &self,
9664 buffer: &Buffer,
9665 cx: &AppContext,
9666 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9667 self.language_servers_for_buffer(buffer, cx)
9668 .find(|s| s.0.is_primary)
9669 }
9670
9671 pub fn language_server_for_buffer(
9672 &self,
9673 buffer: &Buffer,
9674 server_id: LanguageServerId,
9675 cx: &AppContext,
9676 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9677 self.language_servers_for_buffer(buffer, cx)
9678 .find(|(_, s)| s.server_id() == server_id)
9679 }
9680
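    /// Returns the ids of the language servers that are registered for this
    /// buffer's language within its worktree.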
9681 fn language_server_ids_for_buffer(
9682 &self,
9683 buffer: &Buffer,
9684 cx: &AppContext,
9685 ) -> Vec<LanguageServerId> {
9686 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
9687 let worktree_id = file.worktree_id(cx);
9688 self.languages
9689 .lsp_adapters(&language)
9690 .iter()
9691 .flat_map(|adapter| {
9692 let key = (worktree_id, adapter.name.clone());
9693 self.language_server_ids.get(&key).copied()
9694 })
9695 .collect()
9696 } else {
9697 Vec::new()
9698 }
9699 }
9700}
9701
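/// Converts a map from code action kind to enabled flag into the list of
/// enabled `lsp::CodeActionKind`s.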
9702fn deserialize_code_actions(code_actions: &HashMap<String, bool>) -> Vec<lsp::CodeActionKind> {
9703 code_actions
9704 .iter()
9705 .flat_map(|(kind, enabled)| {
9706 if *enabled {
9707 Some(kind.clone().into())
9708 } else {
9709 None
9710 }
9711 })
9712 .collect()
9713}
9714
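/// Searches the slice of files assigned to this worker, identified by the
/// `worker_start_ix..worker_end_ix` range across the given snapshots. Files
/// that are already open in a buffer are skipped; candidate matches are sent
/// on `results_tx`.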
9715#[allow(clippy::too_many_arguments)]
9716async fn search_snapshots(
    snapshots: &[LocalSnapshot],
9718 worker_start_ix: usize,
9719 worker_end_ix: usize,
9720 query: &SearchQuery,
9721 results_tx: &Sender<SearchMatchCandidate>,
9722 opened_buffers: &HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
9723 include_root: bool,
9724 fs: &Arc<dyn Fs>,
9725) {
9726 let mut snapshot_start_ix = 0;
9727 let mut abs_path = PathBuf::new();
9728
9729 for snapshot in snapshots {
9730 let snapshot_end_ix = snapshot_start_ix
9731 + if query.include_ignored() {
9732 snapshot.file_count()
9733 } else {
9734 snapshot.visible_file_count()
9735 };
9736 if worker_end_ix <= snapshot_start_ix {
9737 break;
9738 } else if worker_start_ix > snapshot_end_ix {
9739 snapshot_start_ix = snapshot_end_ix;
9740 continue;
9741 } else {
9742 let start_in_snapshot = worker_start_ix.saturating_sub(snapshot_start_ix);
9743 let end_in_snapshot = cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
9744
9745 for entry in snapshot
9746 .files(false, start_in_snapshot)
9747 .take(end_in_snapshot - start_in_snapshot)
9748 {
9749 if results_tx.is_closed() {
9750 break;
9751 }
9752 if opened_buffers.contains_key(&entry.path) {
9753 continue;
9754 }
9755
9756 let matched_path = if include_root {
9757 let mut full_path = PathBuf::from(snapshot.root_name());
9758 full_path.push(&entry.path);
9759 query.file_matches(Some(&full_path))
9760 } else {
9761 query.file_matches(Some(&entry.path))
9762 };
9763
9764 let matches = if matched_path {
9765 abs_path.clear();
9766 abs_path.push(&snapshot.abs_path());
9767 abs_path.push(&entry.path);
9768 if let Some(file) = fs.open_sync(&abs_path).await.log_err() {
9769 query.detect(file).unwrap_or(false)
9770 } else {
9771 false
9772 }
9773 } else {
9774 false
9775 };
9776
9777 if matches {
9778 let project_path = SearchMatchCandidate::Path {
9779 worktree_id: snapshot.id(),
9780 path: entry.path.clone(),
9781 is_ignored: entry.is_ignored,
9782 };
9783 if results_tx.send(project_path).await.is_err() {
9784 return;
9785 }
9786 }
9787 }
9788
9789 snapshot_start_ix = snapshot_end_ix;
9790 }
9791 }
9792}
9793
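/// Recursively walks an ignored entry on disk, sending every file that
/// matches the query on `counter_tx`.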
9794async fn search_ignored_entry(
9795 snapshot: &LocalSnapshot,
9796 ignored_entry: &Entry,
9797 fs: &Arc<dyn Fs>,
9798 query: &SearchQuery,
9799 counter_tx: &Sender<SearchMatchCandidate>,
9800) {
9801 let mut ignored_paths_to_process =
9802 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
9803
9804 while let Some(ignored_abs_path) = ignored_paths_to_process.pop_front() {
9805 let metadata = fs
9806 .metadata(&ignored_abs_path)
9807 .await
9808 .with_context(|| format!("fetching fs metadata for {ignored_abs_path:?}"))
9809 .log_err()
9810 .flatten();
9811
9812 if let Some(fs_metadata) = metadata {
9813 if fs_metadata.is_dir {
9814 let files = fs
9815 .read_dir(&ignored_abs_path)
9816 .await
9817 .with_context(|| format!("listing ignored path {ignored_abs_path:?}"))
9818 .log_err();
9819
9820 if let Some(mut subfiles) = files {
9821 while let Some(subfile) = subfiles.next().await {
9822 if let Some(subfile) = subfile.log_err() {
9823 ignored_paths_to_process.push_back(subfile);
9824 }
9825 }
9826 }
9827 } else if !fs_metadata.is_symlink {
9828 if !query.file_matches(Some(&ignored_abs_path))
9829 || snapshot.is_path_excluded(ignored_entry.path.to_path_buf())
9830 {
9831 continue;
9832 }
9833 let matches = if let Some(file) = fs
9834 .open_sync(&ignored_abs_path)
9835 .await
9836 .with_context(|| format!("Opening ignored path {ignored_abs_path:?}"))
9837 .log_err()
9838 {
9839 query.detect(file).unwrap_or(false)
9840 } else {
9841 false
9842 };
9843
9844 if matches {
9845 let project_path = SearchMatchCandidate::Path {
9846 worktree_id: snapshot.id(),
9847 path: Arc::from(
9848 ignored_abs_path
9849 .strip_prefix(snapshot.abs_path())
9850 .expect("scanning worktree-related files"),
9851 ),
9852 is_ignored: true,
9853 };
9854 if counter_tx.send(project_path).await.is_err() {
9855 return;
9856 }
9857 }
9858 }
9859 }
9860 }
9861}
9862
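/// Subscribes to Copilot events so that, when the Copilot language server
/// starts, it is registered as a supplementary language server and its log
/// messages are forwarded as `Event::LanguageServerLog` events.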
9863fn subscribe_for_copilot_events(
9864 copilot: &Model<Copilot>,
9865 cx: &mut ModelContext<'_, Project>,
9866) -> gpui::Subscription {
9867 cx.subscribe(
9868 copilot,
9869 |project, copilot, copilot_event, cx| match copilot_event {
9870 copilot::Event::CopilotLanguageServerStarted => {
9871 match copilot.read(cx).language_server() {
9872 Some((name, copilot_server)) => {
                        // Another event may try to re-add a server that was already added and subscribed to; avoid doing it again.
9874 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
9875 let new_server_id = copilot_server.server_id();
9876 let weak_project = cx.weak_model();
9877 let copilot_log_subscription = copilot_server
9878 .on_notification::<copilot::request::LogMessage, _>(
9879 move |params, mut cx| {
9880 weak_project.update(&mut cx, |_, cx| {
9881 cx.emit(Event::LanguageServerLog(
9882 new_server_id,
9883 params.message,
9884 ));
9885 }).ok();
9886 },
9887 );
9888 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
9889 project.copilot_log_subscription = Some(copilot_log_subscription);
9890 cx.emit(Event::LanguageServerAdded(new_server_id));
9891 }
9892 }
9893 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
9894 }
9895 }
9896 },
9897 )
9898}
9899
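/// Returns the longest prefix of `glob` that consists only of literal path
/// components, i.e. components containing none of `*`, `?`, `{`, or `}`.
///
/// For example, with a Unix path separator,
/// `glob_literal_prefix("node_modules/**/*.js")` returns `"node_modules"`.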
9900fn glob_literal_prefix(glob: &str) -> &str {
9901 let mut literal_end = 0;
9902 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
9903 if part.contains(&['*', '?', '{', '}']) {
9904 break;
9905 } else {
9906 if i > 0 {
9907 // Account for separator prior to this part
9908 literal_end += path::MAIN_SEPARATOR.len_utf8();
9909 }
9910 literal_end += part.len();
9911 }
9912 }
9913 &glob[..literal_end]
9914}
9915
9916impl WorktreeHandle {
9917 pub fn upgrade(&self) -> Option<Model<Worktree>> {
9918 match self {
9919 WorktreeHandle::Strong(handle) => Some(handle.clone()),
9920 WorktreeHandle::Weak(handle) => handle.upgrade(),
9921 }
9922 }
9923
9924 pub fn handle_id(&self) -> usize {
9925 match self {
9926 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
9927 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
9928 }
9929 }
9930}
9931
9932impl OpenBuffer {
9933 pub fn upgrade(&self) -> Option<Model<Buffer>> {
9934 match self {
9935 OpenBuffer::Strong(handle) => Some(handle.clone()),
9936 OpenBuffer::Weak(handle) => handle.upgrade(),
9937 OpenBuffer::Operations(_) => None,
9938 }
9939 }
9940}
9941
9942pub struct PathMatchCandidateSet {
9943 pub snapshot: Snapshot,
9944 pub include_ignored: bool,
9945 pub include_root_name: bool,
9946}
9947
9948impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
9949 type Candidates = PathMatchCandidateSetIter<'a>;
9950
9951 fn id(&self) -> usize {
9952 self.snapshot.id().to_usize()
9953 }
9954
9955 fn len(&self) -> usize {
9956 if self.include_ignored {
9957 self.snapshot.file_count()
9958 } else {
9959 self.snapshot.visible_file_count()
9960 }
9961 }
9962
9963 fn prefix(&self) -> Arc<str> {
9964 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
9965 self.snapshot.root_name().into()
9966 } else if self.include_root_name {
9967 format!("{}/", self.snapshot.root_name()).into()
9968 } else {
9969 "".into()
9970 }
9971 }
9972
9973 fn candidates(&'a self, start: usize) -> Self::Candidates {
9974 PathMatchCandidateSetIter {
9975 traversal: self.snapshot.files(self.include_ignored, start),
9976 }
9977 }
9978}
9979
9980pub struct PathMatchCandidateSetIter<'a> {
9981 traversal: Traversal<'a>,
9982}
9983
9984impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
9985 type Item = fuzzy::PathMatchCandidate<'a>;
9986
9987 fn next(&mut self) -> Option<Self::Item> {
9988 self.traversal.next().map(|entry| {
9989 if let EntryKind::File(char_bag) = entry.kind {
9990 fuzzy::PathMatchCandidate {
9991 path: &entry.path,
9992 char_bag,
9993 }
9994 } else {
9995 unreachable!()
9996 }
9997 })
9998 }
9999}
10000
10001impl EventEmitter<Event> for Project {}
10002
10003impl<'a> Into<SettingsLocation<'a>> for &'a ProjectPath {
10004 fn into(self) -> SettingsLocation<'a> {
10005 SettingsLocation {
10006 worktree_id: self.worktree_id.to_usize(),
10007 path: self.path.as_ref(),
10008 }
10009 }
10010}
10011
10012impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
10013 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
10014 Self {
10015 worktree_id,
10016 path: path.as_ref().into(),
10017 }
10018 }
10019}
10020
10021struct ProjectLspAdapterDelegate {
10022 project: WeakModel<Project>,
10023 worktree: worktree::Snapshot,
10024 fs: Arc<dyn Fs>,
10025 http_client: Arc<dyn HttpClient>,
10026 language_registry: Arc<LanguageRegistry>,
10027 shell_env: Mutex<Option<HashMap<String, String>>>,
10028}
10029
10030impl ProjectLspAdapterDelegate {
10031 fn new(project: &Project, worktree: &Model<Worktree>, cx: &ModelContext<Project>) -> Arc<Self> {
10032 Arc::new(Self {
10033 project: cx.weak_model(),
10034 worktree: worktree.read(cx).snapshot(),
10035 fs: project.fs.clone(),
10036 http_client: project.client.http_client(),
10037 language_registry: project.languages.clone(),
10038 shell_env: Default::default(),
10039 })
10040 }
10041
10042 async fn load_shell_env(&self) {
10043 let worktree_abs_path = self.worktree.abs_path();
10044 let shell_env = load_shell_environment(&worktree_abs_path)
10045 .await
10046 .with_context(|| {
                format!("failed to load login shell environment in {worktree_abs_path:?}")
10048 })
10049 .log_err()
10050 .unwrap_or_default();
10051 *self.shell_env.lock() = Some(shell_env);
10052 }
10053}
10054
10055#[async_trait]
10056impl LspAdapterDelegate for ProjectLspAdapterDelegate {
10057 fn show_notification(&self, message: &str, cx: &mut AppContext) {
10058 self.project
10059 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())))
10060 .ok();
10061 }
10062
10063 fn http_client(&self) -> Arc<dyn HttpClient> {
10064 self.http_client.clone()
10065 }
10066
10067 async fn shell_env(&self) -> HashMap<String, String> {
10068 self.load_shell_env().await;
10069 self.shell_env.lock().as_ref().cloned().unwrap_or_default()
10070 }
10071
10072 #[cfg(not(target_os = "windows"))]
10073 async fn which(&self, command: &OsStr) -> Option<PathBuf> {
10074 let worktree_abs_path = self.worktree.abs_path();
10075 self.load_shell_env().await;
10076 let shell_path = self
10077 .shell_env
10078 .lock()
10079 .as_ref()
10080 .and_then(|shell_env| shell_env.get("PATH").cloned());
10081 which::which_in(command, shell_path.as_ref(), &worktree_abs_path).ok()
10082 }
10083
10084 #[cfg(target_os = "windows")]
10085 async fn which(&self, command: &OsStr) -> Option<PathBuf> {
        // todo(windows) Getting the shell env variables for the current directory on Windows is more complicated than on other platforms:
        // there isn't necessarily a 'default shell'. The closest equivalent is the default profile in Windows Terminal.
10088 // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup
10089 which::which(command).ok()
10090 }
10091
10092 fn update_status(
10093 &self,
10094 server_name: LanguageServerName,
10095 status: language::LanguageServerBinaryStatus,
10096 ) {
10097 self.language_registry
10098 .update_lsp_status(server_name, status);
10099 }
10100
10101 async fn read_text_file(&self, path: PathBuf) -> Result<String> {
10102 if self.worktree.entry_for_path(&path).is_none() {
10103 return Err(anyhow!("no such path {path:?}"));
10104 }
10105 let path = self.worktree.absolutize(path.as_ref())?;
10106 let content = self.fs.load(&path).await?;
10107 Ok(content)
10108 }
10109}
10110
10111fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
10112 proto::Symbol {
10113 language_server_name: symbol.language_server_name.0.to_string(),
10114 source_worktree_id: symbol.source_worktree_id.to_proto(),
10115 worktree_id: symbol.path.worktree_id.to_proto(),
10116 path: symbol.path.path.to_string_lossy().to_string(),
10117 name: symbol.name.clone(),
10118 kind: unsafe { mem::transmute(symbol.kind) },
10119 start: Some(proto::PointUtf16 {
10120 row: symbol.range.start.0.row,
10121 column: symbol.range.start.0.column,
10122 }),
10123 end: Some(proto::PointUtf16 {
10124 row: symbol.range.end.0.row,
10125 column: symbol.range.end.0.column,
10126 }),
10127 signature: symbol.signature.to_vec(),
10128 }
10129}
10130
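/// Computes the path of `path` relative to `base`, inserting `..` components
/// where needed.
///
/// For example, `relativize_path(Path::new("/a/b"), Path::new("/a/b/c/d"))`
/// yields `"c/d"`, and `relativize_path(Path::new("/a/b"), Path::new("/a/c"))`
/// yields `"../c"`.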
10131fn relativize_path(base: &Path, path: &Path) -> PathBuf {
10132 let mut path_components = path.components();
10133 let mut base_components = base.components();
10134 let mut components: Vec<Component> = Vec::new();
10135 loop {
10136 match (path_components.next(), base_components.next()) {
10137 (None, None) => break,
10138 (Some(a), None) => {
10139 components.push(a);
10140 components.extend(path_components.by_ref());
10141 break;
10142 }
10143 (None, _) => components.push(Component::ParentDir),
10144 (Some(a), Some(b)) if components.is_empty() && a == b => (),
10145 (Some(a), Some(Component::CurDir)) => components.push(a),
10146 (Some(a), Some(_)) => {
10147 components.push(Component::ParentDir);
10148 for _ in base_components {
10149 components.push(Component::ParentDir);
10150 }
10151 components.push(a);
10152 components.extend(path_components.by_ref());
10153 break;
10154 }
10155 }
10156 }
10157 components.iter().map(|c| c.as_os_str()).collect()
10158}
10159
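/// Resolves `path` against `base`, interpreting `.` and `..` components
/// lexically, without consulting the filesystem.
///
/// For example, `resolve_path(Path::new("/a/b"), Path::new("../c"))` yields
/// `"/a/c"`.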
10160fn resolve_path(base: &Path, path: &Path) -> PathBuf {
10161 let mut result = base.to_path_buf();
10162 for component in path.components() {
10163 match component {
10164 Component::ParentDir => {
10165 result.pop();
10166 }
10167 Component::CurDir => (),
10168 _ => result.push(component),
10169 }
10170 }
10171 result
10172}
10173
10174impl Item for Buffer {
10175 fn try_open(
10176 project: &Model<Project>,
10177 path: &ProjectPath,
10178 cx: &mut AppContext,
10179 ) -> Option<Task<Result<Model<Self>>>> {
10180 Some(project.update(cx, |project, cx| project.open_buffer(path.clone(), cx)))
10181 }
10182
10183 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
10184 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
10185 }
10186
10187 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
10188 File::from_dyn(self.file()).map(|file| ProjectPath {
10189 worktree_id: file.worktree_id(cx),
10190 path: file.path().clone(),
10191 })
10192 }
10193}
10194
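/// Waits until the watch channel reports that the buffer has finished
/// loading, returning the loaded buffer or the error that occurred while
/// loading it.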
10195async fn wait_for_loading_buffer(
10196 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
10197) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
10198 loop {
10199 if let Some(result) = receiver.borrow().as_ref() {
10200 match result {
10201 Ok(buffer) => return Ok(buffer.to_owned()),
10202 Err(e) => return Err(e.to_owned()),
10203 }
10204 }
10205 receiver.next().await;
10206 }
10207}
10208
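/// Returns whether the server's save options indicate that the full text of
/// a document should be included in `textDocument/didSave` notifications.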
10209fn include_text(server: &lsp::LanguageServer) -> bool {
10210 server
10211 .capabilities()
10212 .text_document_sync
10213 .as_ref()
10214 .and_then(|sync| match sync {
10215 lsp::TextDocumentSyncCapability::Kind(_) => None,
10216 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
10217 })
10218 .and_then(|save_options| match save_options {
10219 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
10220 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
10221 })
10222 .unwrap_or(false)
10223}
10224
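/// Spawns the user's shell, `cd`s into `dir`, and captures the resulting
/// environment variables by parsing the output of `/usr/bin/env -0`.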
10225async fn load_shell_environment(dir: &Path) -> Result<HashMap<String, String>> {
10226 let marker = "ZED_SHELL_START";
10227 let shell = env::var("SHELL").context(
10228 "SHELL environment variable is not assigned so we can't source login environment variables",
10229 )?;
10230
    // What we're doing here is spawning a shell and then `cd`-ing into
    // the project directory to pick up the environment there, as if the
    // user had `cd`'d into it themselves. We do that because tools like
    // direnv, asdf, ... hook into `cd` and only set up the env after that.
10235 //
10236 // In certain shells we need to execute additional_command in order to
10237 // trigger the behavior of direnv, etc.
    //
10240 // The `exit 0` is the result of hours of debugging, trying to find out
10241 // why running this command here, without `exit 0`, would mess
    // up signal handling for our process so that `ctrl-c` doesn't work
10243 // anymore.
10244 //
10245 // We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would
10246 // do that, but it does, and `exit 0` helps.
10247 let additional_command = PathBuf::from(&shell)
10248 .file_name()
10249 .and_then(|f| f.to_str())
10250 .and_then(|shell| match shell {
10251 "fish" => Some("emit fish_prompt;"),
10252 _ => None,
10253 });
10254
10255 let command = format!(
10256 "cd '{}';{} printf '%s' {marker}; /usr/bin/env -0; exit 0;",
10257 dir.display(),
10258 additional_command.unwrap_or("")
10259 );
10260
10261 let output = smol::process::Command::new(&shell)
10262 .args(["-i", "-c", &command])
10263 .output()
10264 .await
10265 .context("failed to spawn login shell to source login environment variables")?;
10266
10267 anyhow::ensure!(
10268 output.status.success(),
10269 "login shell exited with error {:?}",
10270 output.status
10271 );
10272
10273 let stdout = String::from_utf8_lossy(&output.stdout);
10274 let env_output_start = stdout.find(marker).ok_or_else(|| {
10275 anyhow!(
10276 "failed to parse output of `env` command in login shell: {}",
10277 stdout
10278 )
10279 })?;
10280
10281 let mut parsed_env = HashMap::default();
10282 let env_output = &stdout[env_output_start + marker.len()..];
10283 for line in env_output.split_terminator('\0') {
10284 if let Some(separator_index) = line.find('=') {
10285 let key = line[..separator_index].to_string();
10286 let value = line[separator_index + 1..].to_string();
10287 parsed_env.insert(key, value);
10288 }
10289 }
10290 Ok(parsed_env)
10291}
10292
10293fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBufferResponse {
10294 let entries = blame
10295 .entries
10296 .into_iter()
10297 .map(|entry| proto::BlameEntry {
10298 sha: entry.sha.as_bytes().into(),
10299 start_line: entry.range.start,
10300 end_line: entry.range.end,
10301 original_line_number: entry.original_line_number,
10302 author: entry.author.clone(),
10303 author_mail: entry.author_mail.clone(),
10304 author_time: entry.author_time,
10305 author_tz: entry.author_tz.clone(),
10306 committer: entry.committer.clone(),
10307 committer_mail: entry.committer_mail.clone(),
10308 committer_time: entry.committer_time,
10309 committer_tz: entry.committer_tz.clone(),
10310 summary: entry.summary.clone(),
10311 previous: entry.previous.clone(),
10312 filename: entry.filename.clone(),
10313 })
10314 .collect::<Vec<_>>();
10315
10316 let messages = blame
10317 .messages
10318 .into_iter()
10319 .map(|(oid, message)| proto::CommitMessage {
10320 oid: oid.as_bytes().into(),
10321 message,
10322 })
10323 .collect::<Vec<_>>();
10324
10325 let permalinks = blame
10326 .permalinks
10327 .into_iter()
10328 .map(|(oid, url)| proto::CommitPermalink {
10329 oid: oid.as_bytes().into(),
10330 permalink: url.to_string(),
10331 })
10332 .collect::<Vec<_>>();
10333
10334 proto::BlameBufferResponse {
10335 entries,
10336 messages,
10337 permalinks,
10338 }
10339}
10340
10341fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> git::blame::Blame {
10342 let entries = response
10343 .entries
10344 .into_iter()
10345 .filter_map(|entry| {
10346 Some(git::blame::BlameEntry {
10347 sha: git::Oid::from_bytes(&entry.sha).ok()?,
10348 range: entry.start_line..entry.end_line,
10349 original_line_number: entry.original_line_number,
10350 committer: entry.committer,
10351 committer_time: entry.committer_time,
10352 committer_tz: entry.committer_tz,
10353 committer_mail: entry.committer_mail,
10354 author: entry.author,
10355 author_mail: entry.author_mail,
10356 author_time: entry.author_time,
10357 author_tz: entry.author_tz,
10358 summary: entry.summary,
10359 previous: entry.previous,
10360 filename: entry.filename,
10361 })
10362 })
10363 .collect::<Vec<_>>();
10364
10365 let messages = response
10366 .messages
10367 .into_iter()
10368 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
10369 .collect::<HashMap<_, _>>();
10370
10371 let permalinks = response
10372 .permalinks
10373 .into_iter()
10374 .filter_map(|permalink| {
10375 Some((
10376 git::Oid::from_bytes(&permalink.oid).ok()?,
10377 Url::from_str(&permalink.permalink).ok()?,
10378 ))
10379 })
10380 .collect::<HashMap<_, _>>();
10381
10382 Blame {
10383 entries,
10384 permalinks,
10385 messages,
10386 }
10387}
10388
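/// Discards hover blocks whose text is empty or whitespace-only, returning
/// `None` if no blocks remain.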
10389fn remove_empty_hover_blocks(mut hover: Hover) -> Option<Hover> {
10390 hover
10391 .contents
10392 .retain(|hover_block| !hover_block.text.trim().is_empty());
10393 if hover.contents.is_empty() {
10394 None
10395 } else {
10396 Some(hover)
10397 }
10398}