1pub mod debounced_delay;
2pub mod lsp_command;
3pub mod lsp_ext_command;
4mod prettier_support;
5pub mod project_settings;
6pub mod search;
7mod task_inventory;
8pub mod terminals;
9
10#[cfg(test)]
11mod project_tests;
12
13use anyhow::{anyhow, bail, Context as _, Result};
14use async_trait::async_trait;
15use client::{
16 proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore,
17};
18use clock::ReplicaId;
19use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
20use copilot::Copilot;
21use debounced_delay::DebouncedDelay;
22use fs::repository::GitRepository;
23use futures::{
24 channel::{
25 mpsc::{self, UnboundedReceiver},
26 oneshot,
27 },
28 future::{try_join_all, Shared},
29 select,
30 stream::FuturesUnordered,
31 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
32};
33use git::blame::Blame;
34use globset::{Glob, GlobSet, GlobSetBuilder};
35use gpui::{
36 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, BorrowAppContext, Context, Entity,
37 EventEmitter, Model, ModelContext, PromptLevel, Task, WeakModel,
38};
39use itertools::Itertools;
40use language::{
41 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
42 markdown, point_to_lsp,
43 proto::{
44 deserialize_anchor, deserialize_line_ending, deserialize_version, serialize_anchor,
45 serialize_version, split_operations,
46 },
47 range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeAction,
48 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
49 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
50 LspAdapterDelegate, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
51 ToOffset, ToPointUtf16, Transaction, Unclipped,
52};
53use log::error;
54use lsp::{
55 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
56 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId,
57 MessageActionItem, OneOf, ServerHealthStatus, ServerStatus,
58};
59use lsp_command::*;
60use node_runtime::NodeRuntime;
61use parking_lot::{Mutex, RwLock};
62use postage::watch;
63use prettier_support::{DefaultPrettier, PrettierInstance};
64use project_settings::{LspSettings, ProjectSettings};
65use rand::prelude::*;
66use worktree::LocalSnapshot;
67
68use rpc::{ErrorCode, ErrorExt as _};
69use search::SearchQuery;
70use serde::Serialize;
71use settings::{watch_config_file, Settings, SettingsLocation, SettingsStore};
72use sha2::{Digest, Sha256};
73use similar::{ChangeTag, TextDiff};
74use smol::channel::{Receiver, Sender};
75use smol::lock::Semaphore;
76use std::{
77 cmp::{self, Ordering},
78 convert::TryInto,
79 env,
80 ffi::OsStr,
81 hash::Hash,
82 io, mem,
83 num::NonZeroU32,
84 ops::Range,
85 path::{self, Component, Path, PathBuf},
86 process::Stdio,
87 str::{self, FromStr},
88 sync::{
89 atomic::{AtomicUsize, Ordering::SeqCst},
90 Arc,
91 },
92 time::{Duration, Instant},
93};
94use task::static_source::{StaticSource, TrackedFile};
95use terminals::Terminals;
96use text::{Anchor, BufferId, RopeFingerprint};
97use util::{
98 debug_panic, defer,
99 http::{HttpClient, Url},
100 maybe, merge_json_value_into,
101 paths::{
102 LOCAL_SETTINGS_RELATIVE_PATH, LOCAL_TASKS_RELATIVE_PATH, LOCAL_VSCODE_TASKS_RELATIVE_PATH,
103 },
104 post_inc, ResultExt, TryFutureExt as _,
105};
106use worktree::{Snapshot, Traversal};
107
108pub use fs::*;
109pub use language::Location;
110#[cfg(any(test, feature = "test-support"))]
111pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
112#[cfg(feature = "test-support")]
113pub use task_inventory::test_inventory::*;
114pub use task_inventory::{Inventory, TaskSourceKind};
115pub use worktree::{
116 DiagnosticSummary, Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId,
117 RepositoryEntry, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId,
118 WorktreeSettings, FS_WATCH_LATENCY,
119};
120
121const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
122const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
123const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
124pub const SERVER_PROGRESS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);
125
126pub trait Item {
127 fn try_open(
128 project: &Model<Project>,
129 path: &ProjectPath,
130 cx: &mut AppContext,
131 ) -> Option<Task<Result<Model<Self>>>>
132 where
133 Self: Sized;
134 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
135 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
136}
137
138#[derive(Clone)]
139pub enum OpenedBufferEvent {
140 Disconnected,
141 Ok(BufferId),
142 Err(BufferId, Arc<anyhow::Error>),
143}
144
/// A semantics-aware entity that spans one or more [`Worktree`]s and their files.
/// `Project` is responsible for tasks, LSP and collab queries, keeping the worktree states synchronized accordingly.
/// It maps [`Worktree`] entries to its own identifiers using the [`ProjectEntryId`] and [`ProjectPath`] structs.
///
/// A project can be either local (opened on the same host) or remote (a collab project browsed by one or more remote users).
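///
/// A rough usage sketch for a local project (mirroring the `Project::test` helper
/// further below; `client`, `node`, `user_store`, `languages`, `fs`, and `abs_path`
/// are assumed to be constructed by the caller):
///
/// ```ignore
/// let project = cx.update(|cx| Project::local(client, node, user_store, languages, fs, cx));
/// let (worktree, relative_path) = project
///     .update(cx, |project, cx| {
///         project.find_or_create_local_worktree(abs_path, true, cx)
///     })
///     .await?;
/// let buffer = project
///     .update(cx, |project, cx| {
///         project.open_buffer((worktree.read(cx).id(), relative_path), cx)
///     })
///     .await?;
/// ```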
150pub struct Project {
151 worktrees: Vec<WorktreeHandle>,
152 active_entry: Option<ProjectEntryId>,
153 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
154 pending_language_server_update: Option<BufferOrderedMessage>,
155 flush_language_server_update: Option<Task<()>>,
156
157 languages: Arc<LanguageRegistry>,
158 supplementary_language_servers:
159 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
160 language_servers: HashMap<LanguageServerId, LanguageServerState>,
161 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
162 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
163 last_formatting_failure: Option<String>,
164 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
165 language_server_watched_paths: HashMap<LanguageServerId, HashMap<WorktreeId, GlobSet>>,
166 client: Arc<client::Client>,
167 next_entry_id: Arc<AtomicUsize>,
168 join_project_response_message_id: u32,
169 next_diagnostic_group_id: usize,
170 user_store: Model<UserStore>,
171 fs: Arc<dyn Fs>,
172 client_state: ProjectClientState,
173 collaborators: HashMap<proto::PeerId, Collaborator>,
174 client_subscriptions: Vec<client::Subscription>,
175 _subscriptions: Vec<gpui::Subscription>,
176 next_buffer_id: BufferId,
177 loading_buffers: HashMap<BufferId, Vec<oneshot::Sender<Result<Model<Buffer>, anyhow::Error>>>>,
178 incomplete_remote_buffers: HashMap<BufferId, Model<Buffer>>,
179 shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
180 #[allow(clippy::type_complexity)]
181 loading_buffers_by_path: HashMap<
182 ProjectPath,
183 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
184 >,
185 #[allow(clippy::type_complexity)]
186 loading_local_worktrees:
187 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
188 opened_buffers: HashMap<BufferId, OpenBuffer>,
189 local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
190 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
191 buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
192 buffers_being_formatted: HashSet<BufferId>,
193 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
194 git_diff_debouncer: DebouncedDelay,
195 nonce: u128,
196 _maintain_buffer_languages: Task<()>,
197 _maintain_workspace_config: Task<Result<()>>,
198 terminals: Terminals,
199 copilot_lsp_subscription: Option<gpui::Subscription>,
200 copilot_log_subscription: Option<lsp::Subscription>,
201 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
202 node: Option<Arc<dyn NodeRuntime>>,
203 default_prettier: DefaultPrettier,
204 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
205 prettier_instances: HashMap<PathBuf, PrettierInstance>,
206 tasks: Model<Inventory>,
207 hosted_project_id: Option<ProjectId>,
208}
209
210pub enum LanguageServerToQuery {
211 Primary,
212 Other(LanguageServerId),
213}
214
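/// A buffer's text as it was at a specific LSP document version, retained so that
/// positions in later server responses can be resolved against the text the server
/// actually saw.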
215struct LspBufferSnapshot {
216 version: i32,
217 snapshot: TextBufferSnapshot,
218}
219
/// A message that must be delivered in order with respect to buffer operations.
221#[derive(Debug)]
222enum BufferOrderedMessage {
223 Operation {
224 buffer_id: BufferId,
225 operation: proto::Operation,
226 },
227 LanguageServerUpdate {
228 language_server_id: LanguageServerId,
229 message: proto::update_language_server::Variant,
230 },
231 Resync,
232}
233
234enum LocalProjectUpdate {
235 WorktreesChanged,
236 CreateBufferForPeer {
237 peer_id: proto::PeerId,
238 buffer_id: BufferId,
239 },
240}
241
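/// How the project currently holds a buffer: strongly (keeping the model alive,
/// e.g. while the project is shared), weakly, or as a backlog of pending
/// operations when no buffer model is currently available.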
242enum OpenBuffer {
243 Strong(Model<Buffer>),
244 Weak(WeakModel<Buffer>),
245 Operations(Vec<Operation>),
246}
247
248#[derive(Clone)]
249enum WorktreeHandle {
250 Strong(Model<Worktree>),
251 Weak(WeakModel<Worktree>),
252}
253
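/// Whether this project is purely local, local and shared with collaborators, or
/// joined from a remote host.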
254#[derive(Debug)]
255enum ProjectClientState {
256 Local,
257 Shared {
258 remote_id: u64,
259 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
260 _send_updates: Task<Result<()>>,
261 },
262 Remote {
263 sharing_has_stopped: bool,
264 capability: Capability,
265 remote_id: u64,
266 replica_id: ReplicaId,
267 },
268}
269
/// A prompt requested by an LSP server.
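///
/// Consumers typically receive this via [`Event::LanguageServerPrompt`] and answer it
/// by index into `actions`. A rough sketch (the surrounding `event` match and the
/// `chosen_index`/`executor` values are assumed):
///
/// ```ignore
/// if let Event::LanguageServerPrompt(prompt) = event {
///     executor
///         .spawn(async move {
///             // `respond` consumes the request and sends the chosen action back.
///             prompt.respond(chosen_index).await;
///         })
///         .detach();
/// }
/// ```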
271#[derive(Clone, Debug)]
272pub struct LanguageServerPromptRequest {
273 pub level: PromptLevel,
274 pub message: String,
275 pub actions: Vec<MessageActionItem>,
276 pub lsp_name: String,
277 response_channel: Sender<MessageActionItem>,
278}
279
280impl LanguageServerPromptRequest {
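    /// Sends the action at `index` back to the language server that issued the
    /// prompt. Returns `None` if `index` is out of bounds for `actions` or if the
    /// response channel has already been closed.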
281 pub async fn respond(self, index: usize) -> Option<()> {
282 if let Some(response) = self.actions.into_iter().nth(index) {
283 self.response_channel.send(response).await.ok()
284 } else {
285 None
286 }
287 }
288}
289impl PartialEq for LanguageServerPromptRequest {
290 fn eq(&self, other: &Self) -> bool {
291 self.message == other.message && self.actions == other.actions
292 }
293}
294
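/// An event emitted by a [`Project`] and broadcast to its observers.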
295#[derive(Clone, Debug, PartialEq)]
296pub enum Event {
297 LanguageServerAdded(LanguageServerId),
298 LanguageServerRemoved(LanguageServerId),
299 LanguageServerLog(LanguageServerId, String),
300 Notification(String),
301 LanguageServerPrompt(LanguageServerPromptRequest),
302 LanguageNotFound(Model<Buffer>),
303 ActiveEntryChanged(Option<ProjectEntryId>),
304 ActivateProjectPanel,
305 WorktreeAdded,
306 WorktreeRemoved(WorktreeId),
307 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
308 WorktreeUpdatedGitRepositories,
309 DiskBasedDiagnosticsStarted {
310 language_server_id: LanguageServerId,
311 },
312 DiskBasedDiagnosticsFinished {
313 language_server_id: LanguageServerId,
314 },
315 DiagnosticsUpdated {
316 path: ProjectPath,
317 language_server_id: LanguageServerId,
318 },
319 RemoteIdChanged(Option<u64>),
320 DisconnectedFromHost,
321 Closed,
322 DeletedEntry(ProjectEntryId),
323 CollaboratorUpdated {
324 old_peer_id: proto::PeerId,
325 new_peer_id: proto::PeerId,
326 },
327 CollaboratorJoined(proto::PeerId),
328 CollaboratorLeft(proto::PeerId),
329 RefreshInlayHints,
330 RevealInProjectPanel(ProjectEntryId),
331}
332
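/// The lifecycle state of a language server: still being started, or running with
/// its language, adapter, and server handles.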
333pub enum LanguageServerState {
334 Starting(Task<Option<Arc<LanguageServer>>>),
335
336 Running {
337 language: Arc<Language>,
338 adapter: Arc<CachedLspAdapter>,
339 server: Arc<LanguageServer>,
340 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
341 },
342}
343
344#[derive(Serialize)]
345pub struct LanguageServerStatus {
346 pub name: String,
347 pub pending_work: BTreeMap<String, LanguageServerProgress>,
348 pub has_pending_diagnostic_updates: bool,
349 progress_tokens: HashSet<String>,
350}
351
352#[derive(Clone, Debug, Serialize)]
353pub struct LanguageServerProgress {
354 pub message: Option<String>,
355 pub percentage: Option<usize>,
356 #[serde(skip_serializing)]
357 pub last_update_at: Instant,
358}
359
360#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
361pub struct ProjectPath {
362 pub worktree_id: WorktreeId,
363 pub path: Arc<Path>,
364}
365
366#[derive(Debug, Clone, PartialEq, Eq)]
367pub struct InlayHint {
368 pub position: language::Anchor,
369 pub label: InlayHintLabel,
370 pub kind: Option<InlayHintKind>,
371 pub padding_left: bool,
372 pub padding_right: bool,
373 pub tooltip: Option<InlayHintTooltip>,
374 pub resolve_state: ResolveState,
375}
376
377#[derive(Debug, Clone, PartialEq, Eq)]
378pub enum ResolveState {
379 Resolved,
380 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
381 Resolving,
382}
383
384impl InlayHint {
385 pub fn text(&self) -> String {
386 match &self.label {
387 InlayHintLabel::String(s) => s.to_owned(),
388 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
389 }
390 }
391}
392
393#[derive(Debug, Clone, PartialEq, Eq)]
394pub enum InlayHintLabel {
395 String(String),
396 LabelParts(Vec<InlayHintLabelPart>),
397}
398
399#[derive(Debug, Clone, PartialEq, Eq)]
400pub struct InlayHintLabelPart {
401 pub value: String,
402 pub tooltip: Option<InlayHintLabelPartTooltip>,
403 pub location: Option<(LanguageServerId, lsp::Location)>,
404}
405
406#[derive(Debug, Clone, PartialEq, Eq)]
407pub enum InlayHintTooltip {
408 String(String),
409 MarkupContent(MarkupContent),
410}
411
412#[derive(Debug, Clone, PartialEq, Eq)]
413pub enum InlayHintLabelPartTooltip {
414 String(String),
415 MarkupContent(MarkupContent),
416}
417
418#[derive(Debug, Clone, PartialEq, Eq)]
419pub struct MarkupContent {
420 pub kind: HoverBlockKind,
421 pub value: String,
422}
423
424#[derive(Debug, Clone)]
425pub struct LocationLink {
426 pub origin: Option<Location>,
427 pub target: Location,
428}
429
430#[derive(Debug)]
431pub struct DocumentHighlight {
432 pub range: Range<language::Anchor>,
433 pub kind: DocumentHighlightKind,
434}
435
436#[derive(Clone, Debug)]
437pub struct Symbol {
438 pub language_server_name: LanguageServerName,
439 pub source_worktree_id: WorktreeId,
440 pub path: ProjectPath,
441 pub label: CodeLabel,
442 pub name: String,
443 pub kind: lsp::SymbolKind,
444 pub range: Range<Unclipped<PointUtf16>>,
445 pub signature: [u8; 32],
446}
447
448#[derive(Clone, Debug, PartialEq)]
449pub struct HoverBlock {
450 pub text: String,
451 pub kind: HoverBlockKind,
452}
453
454#[derive(Clone, Debug, PartialEq, Eq)]
455pub enum HoverBlockKind {
456 PlainText,
457 Markdown,
458 Code { language: String },
459}
460
461#[derive(Debug)]
462pub struct Hover {
463 pub contents: Vec<HoverBlock>,
464 pub range: Option<Range<language::Anchor>>,
465 pub language: Option<Arc<Language>>,
466}
467
468impl Hover {
469 pub fn is_empty(&self) -> bool {
470 self.contents.iter().all(|block| block.text.is_empty())
471 }
472}
473
474#[derive(Default)]
475pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
476
477#[derive(Debug, Clone, Copy, PartialEq, Eq)]
478pub enum FormatTrigger {
479 Save,
480 Manual,
481}
482
// Currently, formatting operations are represented differently depending on
// whether they come from a language server, an external command, or prettier.
485enum FormatOperation {
486 Lsp(Vec<(Range<Anchor>, String)>),
487 External(Diff),
488 Prettier(Diff),
489}
490
491impl FormatTrigger {
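    /// Converts the wire representation into a [`FormatTrigger`], treating any
    /// unrecognized value as [`FormatTrigger::Save`].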
492 fn from_proto(value: i32) -> FormatTrigger {
493 match value {
494 0 => FormatTrigger::Save,
495 1 => FormatTrigger::Manual,
496 _ => FormatTrigger::Save,
497 }
498 }
499}
500
501#[derive(Clone, Debug, PartialEq)]
502enum SearchMatchCandidate {
503 OpenBuffer {
504 buffer: Model<Buffer>,
        // This might be an unnamed file with no representation on the filesystem
506 path: Option<Arc<Path>>,
507 },
508 Path {
509 worktree_id: WorktreeId,
510 is_ignored: bool,
511 path: Arc<Path>,
512 },
513}
514
515impl SearchMatchCandidate {
516 fn path(&self) -> Option<Arc<Path>> {
517 match self {
518 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
519 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
520 }
521 }
522
523 fn is_ignored(&self) -> bool {
524 matches!(
525 self,
526 SearchMatchCandidate::Path {
527 is_ignored: true,
528 ..
529 }
530 )
531 }
532}
533
534pub enum SearchResult {
535 Buffer {
536 buffer: Model<Buffer>,
537 ranges: Vec<Range<Anchor>>,
538 },
539 LimitReached,
540}
541
542impl Project {
543 pub fn init_settings(cx: &mut AppContext) {
544 WorktreeSettings::register(cx);
545 ProjectSettings::register(cx);
546 }
547
548 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
549 Self::init_settings(cx);
550
551 client.add_model_message_handler(Self::handle_add_collaborator);
552 client.add_model_message_handler(Self::handle_update_project_collaborator);
553 client.add_model_message_handler(Self::handle_remove_collaborator);
554 client.add_model_message_handler(Self::handle_buffer_reloaded);
555 client.add_model_message_handler(Self::handle_buffer_saved);
556 client.add_model_message_handler(Self::handle_start_language_server);
557 client.add_model_message_handler(Self::handle_update_language_server);
558 client.add_model_message_handler(Self::handle_update_project);
559 client.add_model_message_handler(Self::handle_unshare_project);
560 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
561 client.add_model_message_handler(Self::handle_update_buffer_file);
562 client.add_model_request_handler(Self::handle_update_buffer);
563 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
564 client.add_model_message_handler(Self::handle_update_worktree);
565 client.add_model_message_handler(Self::handle_update_worktree_settings);
566 client.add_model_request_handler(Self::handle_create_project_entry);
567 client.add_model_request_handler(Self::handle_rename_project_entry);
568 client.add_model_request_handler(Self::handle_copy_project_entry);
569 client.add_model_request_handler(Self::handle_delete_project_entry);
570 client.add_model_request_handler(Self::handle_expand_project_entry);
571 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
572 client.add_model_request_handler(Self::handle_resolve_completion_documentation);
573 client.add_model_request_handler(Self::handle_apply_code_action);
574 client.add_model_request_handler(Self::handle_on_type_formatting);
575 client.add_model_request_handler(Self::handle_inlay_hints);
576 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
577 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
578 client.add_model_request_handler(Self::handle_reload_buffers);
579 client.add_model_request_handler(Self::handle_synchronize_buffers);
580 client.add_model_request_handler(Self::handle_format_buffers);
581 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
582 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
583 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
584 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
585 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
586 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
587 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
588 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
589 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
590 client.add_model_request_handler(Self::handle_search_project);
591 client.add_model_request_handler(Self::handle_get_project_symbols);
592 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
593 client.add_model_request_handler(Self::handle_open_buffer_by_id);
594 client.add_model_request_handler(Self::handle_open_buffer_by_path);
595 client.add_model_request_handler(Self::handle_save_buffer);
596 client.add_model_message_handler(Self::handle_update_diff_base);
597 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
598 client.add_model_request_handler(Self::handle_blame_buffer);
599 }
600
601 pub fn local(
602 client: Arc<Client>,
603 node: Arc<dyn NodeRuntime>,
604 user_store: Model<UserStore>,
605 languages: Arc<LanguageRegistry>,
606 fs: Arc<dyn Fs>,
607 cx: &mut AppContext,
608 ) -> Model<Self> {
609 cx.new_model(|cx: &mut ModelContext<Self>| {
610 let (tx, rx) = mpsc::unbounded();
611 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
612 .detach();
613 let copilot_lsp_subscription =
614 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
615 let tasks = Inventory::new(cx);
616
617 Self {
618 worktrees: Vec::new(),
619 buffer_ordered_messages_tx: tx,
620 flush_language_server_update: None,
621 pending_language_server_update: None,
622 collaborators: Default::default(),
623 next_buffer_id: BufferId::new(1).unwrap(),
624 opened_buffers: Default::default(),
625 shared_buffers: Default::default(),
626 loading_buffers_by_path: Default::default(),
627 loading_local_worktrees: Default::default(),
628 local_buffer_ids_by_path: Default::default(),
629 local_buffer_ids_by_entry_id: Default::default(),
630 buffer_snapshots: Default::default(),
631 join_project_response_message_id: 0,
632 client_state: ProjectClientState::Local,
633 loading_buffers: HashMap::default(),
634 incomplete_remote_buffers: HashMap::default(),
635 client_subscriptions: Vec::new(),
636 _subscriptions: vec![
637 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
638 cx.on_release(Self::release),
639 cx.on_app_quit(Self::shutdown_language_servers),
640 ],
641 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
642 _maintain_workspace_config: Self::maintain_workspace_config(cx),
643 active_entry: None,
644 languages,
645 client,
646 user_store,
647 fs,
648 next_entry_id: Default::default(),
649 next_diagnostic_group_id: Default::default(),
650 supplementary_language_servers: HashMap::default(),
651 language_servers: Default::default(),
652 language_server_ids: HashMap::default(),
653 language_server_statuses: Default::default(),
654 last_formatting_failure: None,
655 last_workspace_edits_by_language_server: Default::default(),
656 language_server_watched_paths: HashMap::default(),
657 buffers_being_formatted: Default::default(),
658 buffers_needing_diff: Default::default(),
659 git_diff_debouncer: DebouncedDelay::new(),
660 nonce: StdRng::from_entropy().gen(),
661 terminals: Terminals {
662 local_handles: Vec::new(),
663 },
664 copilot_lsp_subscription,
665 copilot_log_subscription: None,
666 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
667 node: Some(node),
668 default_prettier: DefaultPrettier::default(),
669 prettiers_per_worktree: HashMap::default(),
670 prettier_instances: HashMap::default(),
671 tasks,
672 hosted_project_id: None,
673 }
674 })
675 }
676
677 pub async fn remote(
678 remote_id: u64,
679 client: Arc<Client>,
680 user_store: Model<UserStore>,
681 languages: Arc<LanguageRegistry>,
682 fs: Arc<dyn Fs>,
683 cx: AsyncAppContext,
684 ) -> Result<Model<Self>> {
685 client.authenticate_and_connect(true, &cx).await?;
686
687 let subscription = client.subscribe_to_entity(remote_id)?;
688 let response = client
689 .request_envelope(proto::JoinProject {
690 project_id: remote_id,
691 })
692 .await?;
693 Self::from_join_project_response(
694 response,
695 subscription,
696 client,
697 user_store,
698 languages,
699 fs,
700 cx,
701 )
702 .await
703 }
704 async fn from_join_project_response(
705 response: TypedEnvelope<proto::JoinProjectResponse>,
706 subscription: PendingEntitySubscription<Project>,
707 client: Arc<Client>,
708 user_store: Model<UserStore>,
709 languages: Arc<LanguageRegistry>,
710 fs: Arc<dyn Fs>,
711 mut cx: AsyncAppContext,
712 ) -> Result<Model<Self>> {
713 let remote_id = response.payload.project_id;
714 let role = response.payload.role();
715 let this = cx.new_model(|cx| {
716 let replica_id = response.payload.replica_id as ReplicaId;
717 let tasks = Inventory::new(cx);
            // BIG CAUTION NOTE: The order in which we initialize fields here matters, and it should match what's done in Self::local.
            // Otherwise, you might run into issues where the worktree id on the remote differs from the one on the local host.
            // That's because a Worktree's identifier is its entity id, which should probably be changed.
721 let mut worktrees = Vec::new();
722 for worktree in response.payload.worktrees {
723 let worktree =
724 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
725 worktrees.push(worktree);
726 }
727
728 let (tx, rx) = mpsc::unbounded();
729 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
730 .detach();
731 let copilot_lsp_subscription =
732 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
733 let mut this = Self {
734 worktrees: Vec::new(),
735 buffer_ordered_messages_tx: tx,
736 pending_language_server_update: None,
737 flush_language_server_update: None,
738 loading_buffers_by_path: Default::default(),
739 next_buffer_id: BufferId::new(1).unwrap(),
740 loading_buffers: Default::default(),
741 shared_buffers: Default::default(),
742 incomplete_remote_buffers: Default::default(),
743 loading_local_worktrees: Default::default(),
744 local_buffer_ids_by_path: Default::default(),
745 local_buffer_ids_by_entry_id: Default::default(),
746 active_entry: None,
747 collaborators: Default::default(),
748 join_project_response_message_id: response.message_id,
749 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
750 _maintain_workspace_config: Self::maintain_workspace_config(cx),
751 languages,
752 user_store: user_store.clone(),
753 fs,
754 next_entry_id: Default::default(),
755 next_diagnostic_group_id: Default::default(),
756 client_subscriptions: Default::default(),
757 _subscriptions: vec![
758 cx.on_release(Self::release),
759 cx.on_app_quit(Self::shutdown_language_servers),
760 ],
761 client: client.clone(),
762 client_state: ProjectClientState::Remote {
763 sharing_has_stopped: false,
764 capability: Capability::ReadWrite,
765 remote_id,
766 replica_id,
767 },
768 supplementary_language_servers: HashMap::default(),
769 language_servers: Default::default(),
770 language_server_ids: HashMap::default(),
771 language_server_statuses: response
772 .payload
773 .language_servers
774 .into_iter()
775 .map(|server| {
776 (
777 LanguageServerId(server.id as usize),
778 LanguageServerStatus {
779 name: server.name,
780 pending_work: Default::default(),
781 has_pending_diagnostic_updates: false,
782 progress_tokens: Default::default(),
783 },
784 )
785 })
786 .collect(),
787 last_formatting_failure: None,
788 last_workspace_edits_by_language_server: Default::default(),
789 language_server_watched_paths: HashMap::default(),
790 opened_buffers: Default::default(),
791 buffers_being_formatted: Default::default(),
792 buffers_needing_diff: Default::default(),
793 git_diff_debouncer: DebouncedDelay::new(),
794 buffer_snapshots: Default::default(),
795 nonce: StdRng::from_entropy().gen(),
796 terminals: Terminals {
797 local_handles: Vec::new(),
798 },
799 copilot_lsp_subscription,
800 copilot_log_subscription: None,
801 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
802 node: None,
803 default_prettier: DefaultPrettier::default(),
804 prettiers_per_worktree: HashMap::default(),
805 prettier_instances: HashMap::default(),
806 tasks,
807 hosted_project_id: None,
808 };
809 this.set_role(role, cx);
810 for worktree in worktrees {
811 let _ = this.add_worktree(&worktree, cx);
812 }
813 this
814 })?;
815 let subscription = subscription.set_model(&this, &mut cx);
816
817 let user_ids = response
818 .payload
819 .collaborators
820 .iter()
821 .map(|peer| peer.user_id)
822 .collect();
823 user_store
824 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
825 .await?;
826
827 this.update(&mut cx, |this, cx| {
828 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
829 this.client_subscriptions.push(subscription);
830 anyhow::Ok(())
831 })??;
832
833 Ok(this)
834 }
835
836 pub async fn hosted(
837 remote_id: ProjectId,
838 user_store: Model<UserStore>,
839 client: Arc<Client>,
840 languages: Arc<LanguageRegistry>,
841 fs: Arc<dyn Fs>,
842 cx: AsyncAppContext,
843 ) -> Result<Model<Self>> {
844 client.authenticate_and_connect(true, &cx).await?;
845
846 let subscription = client.subscribe_to_entity(remote_id.0)?;
847 let response = client
848 .request_envelope(proto::JoinHostedProject {
849 project_id: remote_id.0,
850 })
851 .await?;
852 Self::from_join_project_response(
853 response,
854 subscription,
855 client,
856 user_store,
857 languages,
858 fs,
859 cx,
860 )
861 .await
862 }
863
864 fn release(&mut self, cx: &mut AppContext) {
865 match &self.client_state {
866 ProjectClientState::Local => {}
867 ProjectClientState::Shared { .. } => {
868 let _ = self.unshare_internal(cx);
869 }
870 ProjectClientState::Remote { remote_id, .. } => {
871 let _ = self.client.send(proto::LeaveProject {
872 project_id: *remote_id,
873 });
874 self.disconnected_from_host_internal(cx);
875 }
876 }
877 }
878
879 fn shutdown_language_servers(
880 &mut self,
881 _cx: &mut ModelContext<Self>,
882 ) -> impl Future<Output = ()> {
883 let shutdown_futures = self
884 .language_servers
885 .drain()
886 .map(|(_, server_state)| async {
887 use LanguageServerState::*;
888 match server_state {
889 Running { server, .. } => server.shutdown()?.await,
890 Starting(task) => task.await?.shutdown()?.await,
891 }
892 })
893 .collect::<Vec<_>>();
894
895 async move {
896 futures::future::join_all(shutdown_futures).await;
897 }
898 }
899
900 #[cfg(any(test, feature = "test-support"))]
901 pub async fn test(
902 fs: Arc<dyn Fs>,
903 root_paths: impl IntoIterator<Item = &Path>,
904 cx: &mut gpui::TestAppContext,
905 ) -> Model<Project> {
906 use clock::FakeSystemClock;
907
908 let languages = LanguageRegistry::test(cx.executor());
909 let clock = Arc::new(FakeSystemClock::default());
910 let http_client = util::http::FakeHttpClient::with_404_response();
911 let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
912 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
913 let project = cx.update(|cx| {
914 Project::local(
915 client,
916 node_runtime::FakeNodeRuntime::new(),
917 user_store,
918 Arc::new(languages),
919 fs,
920 cx,
921 )
922 });
923 for path in root_paths {
924 let (tree, _) = project
925 .update(cx, |project, cx| {
926 project.find_or_create_local_worktree(path, true, cx)
927 })
928 .await
929 .unwrap();
930 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
931 .await;
932 }
933 project
934 }
935
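    /// Reconciles the project with the current settings: figures out which language
    /// servers need to be started, stopped, or restarted for the open buffers and
    /// worktrees, and which default prettier plugins need to be installed, then
    /// applies those changes.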
936 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
937 let mut language_servers_to_start = Vec::new();
938 let mut language_formatters_to_check = Vec::new();
939 for buffer in self.opened_buffers.values() {
940 if let Some(buffer) = buffer.upgrade() {
941 let buffer = buffer.read(cx);
942 let buffer_file = File::from_dyn(buffer.file());
943 let buffer_language = buffer.language();
944 let settings = language_settings(buffer_language, buffer.file(), cx);
945 if let Some(language) = buffer_language {
946 if settings.enable_language_server {
947 if let Some(file) = buffer_file {
948 language_servers_to_start
949 .push((file.worktree.clone(), Arc::clone(language)));
950 }
951 }
952 language_formatters_to_check.push((
953 buffer_file.map(|f| f.worktree_id(cx)),
954 Arc::clone(language),
955 settings.clone(),
956 ));
957 }
958 }
959 }
960
961 let mut language_servers_to_stop = Vec::new();
962 let mut language_servers_to_restart = Vec::new();
963 let languages = self.languages.to_vec();
964
965 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
966 let current_lsp_settings = &self.current_lsp_settings;
967 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
968 let language = languages.iter().find_map(|l| {
969 let adapter = self
970 .languages
971 .lsp_adapters(l)
972 .iter()
973 .find(|adapter| &adapter.name == started_lsp_name)?
974 .clone();
975 Some((l, adapter))
976 });
977 if let Some((language, adapter)) = language {
978 let worktree = self.worktree_for_id(*worktree_id, cx);
979 let file = worktree.as_ref().and_then(|tree| {
980 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
981 });
982 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
983 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
984 } else if let Some(worktree) = worktree {
985 let server_name = &adapter.name.0;
986 match (
987 current_lsp_settings.get(server_name),
988 new_lsp_settings.get(server_name),
989 ) {
990 (None, None) => {}
991 (Some(_), None) | (None, Some(_)) => {
992 language_servers_to_restart.push((worktree, Arc::clone(language)));
993 }
994 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
995 if current_lsp_settings != new_lsp_settings {
996 language_servers_to_restart.push((worktree, Arc::clone(language)));
997 }
998 }
999 }
1000 }
1001 }
1002 }
1003 self.current_lsp_settings = new_lsp_settings;
1004
1005 // Stop all newly-disabled language servers.
1006 for (worktree_id, adapter_name) in language_servers_to_stop {
1007 self.stop_language_server(worktree_id, adapter_name, cx)
1008 .detach();
1009 }
1010
1011 let mut prettier_plugins_by_worktree = HashMap::default();
1012 for (worktree, language, settings) in language_formatters_to_check {
1013 if let Some(plugins) =
1014 prettier_support::prettier_plugins_for_language(&language, &settings)
1015 {
1016 prettier_plugins_by_worktree
1017 .entry(worktree)
1018 .or_insert_with(|| HashSet::default())
1019 .extend(plugins.iter().cloned());
1020 }
1021 }
1022 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
1023 self.install_default_prettier(worktree, prettier_plugins.into_iter(), cx);
1024 }
1025
1026 // Start all the newly-enabled language servers.
1027 for (worktree, language) in language_servers_to_start {
1028 self.start_language_servers(&worktree, language, cx);
1029 }
1030
1031 // Restart all language servers with changed initialization options.
1032 for (worktree, language) in language_servers_to_restart {
1033 self.restart_language_servers(worktree, language, cx);
1034 }
1035
1036 if self.copilot_lsp_subscription.is_none() {
1037 if let Some(copilot) = Copilot::global(cx) {
1038 for buffer in self.opened_buffers.values() {
1039 if let Some(buffer) = buffer.upgrade() {
1040 self.register_buffer_with_copilot(&buffer, cx);
1041 }
1042 }
1043 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
1044 }
1045 }
1046
1047 cx.notify();
1048 }
1049
1050 pub fn buffer_for_id(&self, remote_id: BufferId) -> Option<Model<Buffer>> {
1051 self.opened_buffers
1052 .get(&remote_id)
1053 .and_then(|buffer| buffer.upgrade())
1054 }
1055
1056 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1057 &self.languages
1058 }
1059
1060 pub fn client(&self) -> Arc<Client> {
1061 self.client.clone()
1062 }
1063
1064 pub fn user_store(&self) -> Model<UserStore> {
1065 self.user_store.clone()
1066 }
1067
1068 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1069 self.opened_buffers
1070 .values()
1071 .filter_map(|b| b.upgrade())
1072 .collect()
1073 }
1074
1075 #[cfg(any(test, feature = "test-support"))]
1076 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1077 let path = path.into();
1078 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1079 self.opened_buffers.iter().any(|(_, buffer)| {
1080 if let Some(buffer) = buffer.upgrade() {
1081 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1082 if file.worktree == worktree && file.path() == &path.path {
1083 return true;
1084 }
1085 }
1086 }
1087 false
1088 })
1089 } else {
1090 false
1091 }
1092 }
1093
1094 pub fn fs(&self) -> &Arc<dyn Fs> {
1095 &self.fs
1096 }
1097
1098 pub fn remote_id(&self) -> Option<u64> {
1099 match self.client_state {
1100 ProjectClientState::Local => None,
1101 ProjectClientState::Shared { remote_id, .. }
1102 | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
1103 }
1104 }
1105
1106 pub fn hosted_project_id(&self) -> Option<ProjectId> {
1107 self.hosted_project_id
1108 }
1109
1110 pub fn replica_id(&self) -> ReplicaId {
1111 match self.client_state {
1112 ProjectClientState::Remote { replica_id, .. } => replica_id,
1113 _ => 0,
1114 }
1115 }
1116
1117 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1118 if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
1119 updates_tx
1120 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1121 .ok();
1122 }
1123 cx.notify();
1124 }
1125
1126 pub fn task_inventory(&self) -> &Model<Inventory> {
1127 &self.tasks
1128 }
1129
1130 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1131 &self.collaborators
1132 }
1133
1134 pub fn host(&self) -> Option<&Collaborator> {
1135 self.collaborators.values().find(|c| c.replica_id == 0)
1136 }
1137
    /// Collects all worktrees, including ones that don't appear in the project panel.
1139 pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
1140 self.worktrees
1141 .iter()
1142 .filter_map(move |worktree| worktree.upgrade())
1143 }
1144
    /// Collects all user-visible worktrees, i.e. the ones that appear in the project panel.
1146 pub fn visible_worktrees<'a>(
1147 &'a self,
1148 cx: &'a AppContext,
1149 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1150 self.worktrees.iter().filter_map(|worktree| {
1151 worktree.upgrade().and_then(|worktree| {
1152 if worktree.read(cx).is_visible() {
1153 Some(worktree)
1154 } else {
1155 None
1156 }
1157 })
1158 })
1159 }
1160
1161 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1162 self.visible_worktrees(cx)
1163 .map(|tree| tree.read(cx).root_name())
1164 }
1165
1166 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1167 self.worktrees()
1168 .find(|worktree| worktree.read(cx).id() == id)
1169 }
1170
1171 pub fn worktree_for_entry(
1172 &self,
1173 entry_id: ProjectEntryId,
1174 cx: &AppContext,
1175 ) -> Option<Model<Worktree>> {
1176 self.worktrees()
1177 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1178 }
1179
1180 pub fn worktree_id_for_entry(
1181 &self,
1182 entry_id: ProjectEntryId,
1183 cx: &AppContext,
1184 ) -> Option<WorktreeId> {
1185 self.worktree_for_entry(entry_id, cx)
1186 .map(|worktree| worktree.read(cx).id())
1187 }
1188
1189 pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option<bool> {
1190 paths
1191 .iter()
1192 .map(|path| self.visibility_for_path(path, cx))
1193 .max()
1194 .flatten()
1195 }
1196
1197 pub fn visibility_for_path(&self, path: &Path, cx: &AppContext) -> Option<bool> {
1198 self.worktrees()
1199 .filter_map(|worktree| {
1200 let worktree = worktree.read(cx);
1201 worktree
1202 .as_local()?
1203 .contains_abs_path(path)
1204 .then(|| worktree.is_visible())
1205 })
1206 .max()
1207 }
1208
1209 pub fn create_entry(
1210 &mut self,
1211 project_path: impl Into<ProjectPath>,
1212 is_directory: bool,
1213 cx: &mut ModelContext<Self>,
1214 ) -> Task<Result<Option<Entry>>> {
1215 let project_path = project_path.into();
1216 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1217 return Task::ready(Ok(None));
1218 };
1219 if self.is_local() {
1220 worktree.update(cx, |worktree, cx| {
1221 worktree
1222 .as_local_mut()
1223 .unwrap()
1224 .create_entry(project_path.path, is_directory, cx)
1225 })
1226 } else {
1227 let client = self.client.clone();
1228 let project_id = self.remote_id().unwrap();
1229 cx.spawn(move |_, mut cx| async move {
1230 let response = client
1231 .request(proto::CreateProjectEntry {
1232 worktree_id: project_path.worktree_id.to_proto(),
1233 project_id,
1234 path: project_path.path.to_string_lossy().into(),
1235 is_directory,
1236 })
1237 .await?;
1238 match response.entry {
1239 Some(entry) => worktree
1240 .update(&mut cx, |worktree, cx| {
1241 worktree.as_remote_mut().unwrap().insert_entry(
1242 entry,
1243 response.worktree_scan_id as usize,
1244 cx,
1245 )
1246 })?
1247 .await
1248 .map(Some),
1249 None => Ok(None),
1250 }
1251 })
1252 }
1253 }
1254
1255 pub fn copy_entry(
1256 &mut self,
1257 entry_id: ProjectEntryId,
1258 new_path: impl Into<Arc<Path>>,
1259 cx: &mut ModelContext<Self>,
1260 ) -> Task<Result<Option<Entry>>> {
1261 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1262 return Task::ready(Ok(None));
1263 };
1264 let new_path = new_path.into();
1265 if self.is_local() {
1266 worktree.update(cx, |worktree, cx| {
1267 worktree
1268 .as_local_mut()
1269 .unwrap()
1270 .copy_entry(entry_id, new_path, cx)
1271 })
1272 } else {
1273 let client = self.client.clone();
1274 let project_id = self.remote_id().unwrap();
1275
1276 cx.spawn(move |_, mut cx| async move {
1277 let response = client
1278 .request(proto::CopyProjectEntry {
1279 project_id,
1280 entry_id: entry_id.to_proto(),
1281 new_path: new_path.to_string_lossy().into(),
1282 })
1283 .await?;
1284 match response.entry {
1285 Some(entry) => worktree
1286 .update(&mut cx, |worktree, cx| {
1287 worktree.as_remote_mut().unwrap().insert_entry(
1288 entry,
1289 response.worktree_scan_id as usize,
1290 cx,
1291 )
1292 })?
1293 .await
1294 .map(Some),
1295 None => Ok(None),
1296 }
1297 })
1298 }
1299 }
1300
1301 pub fn rename_entry(
1302 &mut self,
1303 entry_id: ProjectEntryId,
1304 new_path: impl Into<Arc<Path>>,
1305 cx: &mut ModelContext<Self>,
1306 ) -> Task<Result<Option<Entry>>> {
1307 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1308 return Task::ready(Ok(None));
1309 };
1310 let new_path = new_path.into();
1311 if self.is_local() {
1312 worktree.update(cx, |worktree, cx| {
1313 worktree
1314 .as_local_mut()
1315 .unwrap()
1316 .rename_entry(entry_id, new_path, cx)
1317 })
1318 } else {
1319 let client = self.client.clone();
1320 let project_id = self.remote_id().unwrap();
1321
1322 cx.spawn(move |_, mut cx| async move {
1323 let response = client
1324 .request(proto::RenameProjectEntry {
1325 project_id,
1326 entry_id: entry_id.to_proto(),
1327 new_path: new_path.to_string_lossy().into(),
1328 })
1329 .await?;
1330 match response.entry {
1331 Some(entry) => worktree
1332 .update(&mut cx, |worktree, cx| {
1333 worktree.as_remote_mut().unwrap().insert_entry(
1334 entry,
1335 response.worktree_scan_id as usize,
1336 cx,
1337 )
1338 })?
1339 .await
1340 .map(Some),
1341 None => Ok(None),
1342 }
1343 })
1344 }
1345 }
1346
1347 pub fn delete_entry(
1348 &mut self,
1349 entry_id: ProjectEntryId,
1350 cx: &mut ModelContext<Self>,
1351 ) -> Option<Task<Result<()>>> {
1352 let worktree = self.worktree_for_entry(entry_id, cx)?;
1353
1354 cx.emit(Event::DeletedEntry(entry_id));
1355
1356 if self.is_local() {
1357 worktree.update(cx, |worktree, cx| {
1358 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1359 })
1360 } else {
1361 let client = self.client.clone();
1362 let project_id = self.remote_id().unwrap();
1363 Some(cx.spawn(move |_, mut cx| async move {
1364 let response = client
1365 .request(proto::DeleteProjectEntry {
1366 project_id,
1367 entry_id: entry_id.to_proto(),
1368 })
1369 .await?;
1370 worktree
1371 .update(&mut cx, move |worktree, cx| {
1372 worktree.as_remote_mut().unwrap().delete_entry(
1373 entry_id,
1374 response.worktree_scan_id as usize,
1375 cx,
1376 )
1377 })?
1378 .await
1379 }))
1380 }
1381 }
1382
1383 pub fn expand_entry(
1384 &mut self,
1385 worktree_id: WorktreeId,
1386 entry_id: ProjectEntryId,
1387 cx: &mut ModelContext<Self>,
1388 ) -> Option<Task<Result<()>>> {
1389 let worktree = self.worktree_for_id(worktree_id, cx)?;
1390 if self.is_local() {
1391 worktree.update(cx, |worktree, cx| {
1392 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1393 })
1394 } else {
1395 let worktree = worktree.downgrade();
1396 let request = self.client.request(proto::ExpandProjectEntry {
1397 project_id: self.remote_id().unwrap(),
1398 entry_id: entry_id.to_proto(),
1399 });
1400 Some(cx.spawn(move |_, mut cx| async move {
1401 let response = request.await?;
1402 if let Some(worktree) = worktree.upgrade() {
1403 worktree
1404 .update(&mut cx, |worktree, _| {
1405 worktree
1406 .as_remote_mut()
1407 .unwrap()
1408 .wait_for_snapshot(response.worktree_scan_id as usize)
1409 })?
1410 .await?;
1411 }
1412 Ok(())
1413 }))
1414 }
1415 }
1416
1417 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1418 if !matches!(self.client_state, ProjectClientState::Local) {
1419 return Err(anyhow!("project was already shared"));
1420 }
1421 self.client_subscriptions.push(
1422 self.client
1423 .subscribe_to_entity(project_id)?
1424 .set_model(&cx.handle(), &mut cx.to_async()),
1425 );
1426
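        // While the project is shared, hold strong handles to open buffers and
        // worktrees so they stay alive for remote collaborators.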
1427 for open_buffer in self.opened_buffers.values_mut() {
1428 match open_buffer {
1429 OpenBuffer::Strong(_) => {}
1430 OpenBuffer::Weak(buffer) => {
1431 if let Some(buffer) = buffer.upgrade() {
1432 *open_buffer = OpenBuffer::Strong(buffer);
1433 }
1434 }
1435 OpenBuffer::Operations(_) => unreachable!(),
1436 }
1437 }
1438
1439 for worktree_handle in self.worktrees.iter_mut() {
1440 match worktree_handle {
1441 WorktreeHandle::Strong(_) => {}
1442 WorktreeHandle::Weak(worktree) => {
1443 if let Some(worktree) = worktree.upgrade() {
1444 *worktree_handle = WorktreeHandle::Strong(worktree);
1445 }
1446 }
1447 }
1448 }
1449
1450 for (server_id, status) in &self.language_server_statuses {
1451 self.client
1452 .send(proto::StartLanguageServer {
1453 project_id,
1454 server: Some(proto::LanguageServer {
1455 id: server_id.0 as u64,
1456 name: status.name.clone(),
1457 }),
1458 })
1459 .log_err();
1460 }
1461
1462 let store = cx.global::<SettingsStore>();
1463 for worktree in self.worktrees() {
1464 let worktree_id = worktree.read(cx).id().to_proto();
1465 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1466 self.client
1467 .send(proto::UpdateWorktreeSettings {
1468 project_id,
1469 worktree_id,
1470 path: path.to_string_lossy().into(),
1471 content: Some(content),
1472 })
1473 .log_err();
1474 }
1475 }
1476
1477 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1478 let client = self.client.clone();
1479 self.client_state = ProjectClientState::Shared {
1480 remote_id: project_id,
1481 updates_tx,
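            // Process queued local updates in the background: worktree metadata
            // changes are re-sent to the server and each worktree is (re)shared,
            // while newly added peers receive a buffer's full state followed by
            // its operations in chunks.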
1482 _send_updates: cx.spawn(move |this, mut cx| async move {
1483 while let Some(update) = updates_rx.next().await {
1484 match update {
1485 LocalProjectUpdate::WorktreesChanged => {
1486 let worktrees = this.update(&mut cx, |this, _cx| {
1487 this.worktrees().collect::<Vec<_>>()
1488 })?;
1489 let update_project = this
1490 .update(&mut cx, |this, cx| {
1491 this.client.request(proto::UpdateProject {
1492 project_id,
1493 worktrees: this.worktree_metadata_protos(cx),
1494 })
1495 })?
1496 .await;
1497 if update_project.is_ok() {
1498 for worktree in worktrees {
1499 worktree.update(&mut cx, |worktree, cx| {
1500 let worktree = worktree.as_local_mut().unwrap();
1501 worktree.share(project_id, cx).detach_and_log_err(cx)
1502 })?;
1503 }
1504 }
1505 }
1506 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1507 let buffer = this.update(&mut cx, |this, _| {
1508 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1509 let shared_buffers =
1510 this.shared_buffers.entry(peer_id).or_default();
1511 if shared_buffers.insert(buffer_id) {
1512 if let OpenBuffer::Strong(buffer) = buffer {
1513 Some(buffer.clone())
1514 } else {
1515 None
1516 }
1517 } else {
1518 None
1519 }
1520 })?;
1521
1522 let Some(buffer) = buffer else { continue };
1523 let operations =
1524 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1525 let operations = operations.await;
1526 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1527
1528 let initial_state = proto::CreateBufferForPeer {
1529 project_id,
1530 peer_id: Some(peer_id),
1531 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1532 };
1533 if client.send(initial_state).log_err().is_some() {
1534 let client = client.clone();
1535 cx.background_executor()
1536 .spawn(async move {
1537 let mut chunks = split_operations(operations).peekable();
1538 while let Some(chunk) = chunks.next() {
1539 let is_last = chunks.peek().is_none();
1540 client.send(proto::CreateBufferForPeer {
1541 project_id,
1542 peer_id: Some(peer_id),
1543 variant: Some(
1544 proto::create_buffer_for_peer::Variant::Chunk(
1545 proto::BufferChunk {
1546 buffer_id: buffer_id.into(),
1547 operations: chunk,
1548 is_last,
1549 },
1550 ),
1551 ),
1552 })?;
1553 }
1554 anyhow::Ok(())
1555 })
1556 .await
1557 .log_err();
1558 }
1559 }
1560 }
1561 }
1562 Ok(())
1563 }),
1564 };
1565
1566 self.metadata_changed(cx);
1567 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1568 cx.notify();
1569 Ok(())
1570 }
1571
1572 pub fn reshared(
1573 &mut self,
1574 message: proto::ResharedProject,
1575 cx: &mut ModelContext<Self>,
1576 ) -> Result<()> {
1577 self.shared_buffers.clear();
1578 self.set_collaborators_from_proto(message.collaborators, cx)?;
1579 self.metadata_changed(cx);
1580 Ok(())
1581 }
1582
1583 pub fn rejoined(
1584 &mut self,
1585 message: proto::RejoinedProject,
1586 message_id: u32,
1587 cx: &mut ModelContext<Self>,
1588 ) -> Result<()> {
1589 cx.update_global::<SettingsStore, _>(|store, cx| {
1590 for worktree in &self.worktrees {
1591 store
1592 .clear_local_settings(worktree.handle_id(), cx)
1593 .log_err();
1594 }
1595 });
1596
1597 self.join_project_response_message_id = message_id;
1598 self.set_worktrees_from_proto(message.worktrees, cx)?;
1599 self.set_collaborators_from_proto(message.collaborators, cx)?;
1600 self.language_server_statuses = message
1601 .language_servers
1602 .into_iter()
1603 .map(|server| {
1604 (
1605 LanguageServerId(server.id as usize),
1606 LanguageServerStatus {
1607 name: server.name,
1608 pending_work: Default::default(),
1609 has_pending_diagnostic_updates: false,
1610 progress_tokens: Default::default(),
1611 },
1612 )
1613 })
1614 .collect();
1615 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
1616 .unwrap();
1617 cx.notify();
1618 Ok(())
1619 }
1620
1621 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1622 self.unshare_internal(cx)?;
1623 self.metadata_changed(cx);
1624 cx.notify();
1625 Ok(())
1626 }
1627
1628 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1629 if self.is_remote() {
1630 return Err(anyhow!("attempted to unshare a remote project"));
1631 }
1632
1633 if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
1634 self.client_state = ProjectClientState::Local;
1635 self.collaborators.clear();
1636 self.shared_buffers.clear();
1637 self.client_subscriptions.clear();
1638
1639 for worktree_handle in self.worktrees.iter_mut() {
1640 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1641 let is_visible = worktree.update(cx, |worktree, _| {
1642 worktree.as_local_mut().unwrap().unshare();
1643 worktree.is_visible()
1644 });
1645 if !is_visible {
1646 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1647 }
1648 }
1649 }
1650
1651 for open_buffer in self.opened_buffers.values_mut() {
1652 // Wake up any tasks waiting for peers' edits to this buffer.
1653 if let Some(buffer) = open_buffer.upgrade() {
1654 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1655 }
1656
1657 if let OpenBuffer::Strong(buffer) = open_buffer {
1658 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1659 }
1660 }
1661
1662 self.client.send(proto::UnshareProject {
1663 project_id: remote_id,
1664 })?;
1665
1666 Ok(())
1667 } else {
1668 Err(anyhow!("attempted to unshare an unshared project"))
1669 }
1670 }
1671
1672 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1673 self.disconnected_from_host_internal(cx);
1674 cx.emit(Event::DisconnectedFromHost);
1675 cx.notify();
1676 }
1677
1678 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1679 let new_capability =
1680 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1681 Capability::ReadWrite
1682 } else {
1683 Capability::ReadOnly
1684 };
1685 if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
1686 if *capability == new_capability {
1687 return;
1688 }
1689
1690 *capability = new_capability;
1691 for buffer in self.opened_buffers() {
1692 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1693 }
1694 }
1695 }
1696
1697 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1698 if let ProjectClientState::Remote {
1699 sharing_has_stopped,
1700 ..
1701 } = &mut self.client_state
1702 {
1703 *sharing_has_stopped = true;
1704
1705 self.collaborators.clear();
1706
1707 for worktree in &self.worktrees {
1708 if let Some(worktree) = worktree.upgrade() {
1709 worktree.update(cx, |worktree, _| {
1710 if let Some(worktree) = worktree.as_remote_mut() {
1711 worktree.disconnected_from_host();
1712 }
1713 });
1714 }
1715 }
1716
1717 for open_buffer in self.opened_buffers.values_mut() {
1718 // Wake up any tasks waiting for peers' edits to this buffer.
1719 if let Some(buffer) = open_buffer.upgrade() {
1720 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1721 }
1722
1723 if let OpenBuffer::Strong(buffer) = open_buffer {
1724 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1725 }
1726 }
1727
1728 // Wake up all futures currently waiting on a buffer to get opened,
1729 // to give them a chance to fail now that we've disconnected.
1730 self.loading_buffers.clear();
1731 // self.opened_buffer.send(OpenedBufferEvent::Disconnected);
1732 }
1733 }
1734
1735 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1736 cx.emit(Event::Closed);
1737 }
1738
1739 pub fn is_disconnected(&self) -> bool {
1740 match &self.client_state {
1741 ProjectClientState::Remote {
1742 sharing_has_stopped,
1743 ..
1744 } => *sharing_has_stopped,
1745 _ => false,
1746 }
1747 }
1748
1749 pub fn capability(&self) -> Capability {
1750 match &self.client_state {
1751 ProjectClientState::Remote { capability, .. } => *capability,
1752 ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
1753 }
1754 }
1755
1756 pub fn is_read_only(&self) -> bool {
1757 self.is_disconnected() || self.capability() == Capability::ReadOnly
1758 }
1759
1760 pub fn is_local(&self) -> bool {
1761 match &self.client_state {
1762 ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
1763 ProjectClientState::Remote { .. } => false,
1764 }
1765 }
1766
1767 pub fn is_remote(&self) -> bool {
1768 !self.is_local()
1769 }
1770
1771 pub fn create_buffer(
1772 &mut self,
1773 text: &str,
1774 language: Option<Arc<Language>>,
1775 cx: &mut ModelContext<Self>,
1776 ) -> Result<Model<Buffer>> {
1777 if self.is_remote() {
1778 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1779 }
1780 let id = self.next_buffer_id.next();
1781 let buffer = cx.new_model(|cx| {
1782 Buffer::new(self.replica_id(), id, text)
1783 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1784 });
1785 self.register_buffer(&buffer, cx)?;
1786 Ok(buffer)
1787 }
1788
1789 pub fn open_path(
1790 &mut self,
1791 path: ProjectPath,
1792 cx: &mut ModelContext<Self>,
1793 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1794 let task = self.open_buffer(path.clone(), cx);
1795 cx.spawn(move |_, cx| async move {
1796 let buffer = task.await?;
1797 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1798 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1799 })?;
1800
1801 let buffer: &AnyModel = &buffer;
1802 Ok((project_entry_id, buffer.clone()))
1803 })
1804 }
1805
1806 pub fn open_local_buffer(
1807 &mut self,
1808 abs_path: impl AsRef<Path>,
1809 cx: &mut ModelContext<Self>,
1810 ) -> Task<Result<Model<Buffer>>> {
1811 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1812 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1813 } else {
1814 Task::ready(Err(anyhow!("no such path")))
1815 }
1816 }
1817
1818 pub fn open_buffer(
1819 &mut self,
1820 path: impl Into<ProjectPath>,
1821 cx: &mut ModelContext<Self>,
1822 ) -> Task<Result<Model<Buffer>>> {
1823 let project_path = path.into();
1824 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1825 worktree
1826 } else {
1827 return Task::ready(Err(anyhow!("no such worktree")));
1828 };
1829
1830 // If there is already a buffer for the given path, then return it.
1831 let existing_buffer = self.get_open_buffer(&project_path, cx);
1832 if let Some(existing_buffer) = existing_buffer {
1833 return Task::ready(Ok(existing_buffer));
1834 }
1835
1836 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1837 // If the given path is already being loaded, then wait for that existing
1838 // task to complete and return the same buffer.
1839 hash_map::Entry::Occupied(e) => e.get().clone(),
1840
1841 // Otherwise, record the fact that this path is now being loaded.
1842 hash_map::Entry::Vacant(entry) => {
1843 let (mut tx, rx) = postage::watch::channel();
1844 entry.insert(rx.clone());
1845
1846 let project_path = project_path.clone();
1847 let load_buffer = if worktree.read(cx).is_local() {
1848 self.open_local_buffer_internal(project_path.path.clone(), worktree, cx)
1849 } else {
1850 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1851 };
1852
1853 cx.spawn(move |this, mut cx| async move {
1854 let load_result = load_buffer.await;
1855 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1856 // Record the fact that the buffer is no longer loading.
1857 this.loading_buffers_by_path.remove(&project_path);
1858 let buffer = load_result.map_err(Arc::new)?;
1859 Ok(buffer)
1860 })?);
1861 anyhow::Ok(())
1862 })
1863 .detach();
1864 rx
1865 }
1866 };
1867
1868 cx.background_executor().spawn(async move {
1869 wait_for_loading_buffer(loading_watch)
1870 .await
1871 .map_err(|e| e.cloned())
1872 })
1873 }
1874
1875 fn open_local_buffer_internal(
1876 &mut self,
1877 path: Arc<Path>,
1878 worktree: Model<Worktree>,
1879 cx: &mut ModelContext<Self>,
1880 ) -> Task<Result<Model<Buffer>>> {
1881 let buffer_id = self.next_buffer_id.next();
1882 let load_buffer = worktree.update(cx, |worktree, cx| {
1883 let worktree = worktree.as_local_mut().unwrap();
1884 worktree.load_buffer(buffer_id, &path, cx)
1885 });
1886 fn is_not_found_error(error: &anyhow::Error) -> bool {
1887 error
1888 .root_cause()
1889 .downcast_ref::<io::Error>()
1890 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
1891 }
1892 cx.spawn(move |this, mut cx| async move {
1893 let buffer = match load_buffer.await {
1894 Ok(buffer) => Ok(buffer),
1895 Err(error) if is_not_found_error(&error) => {
1896 worktree.update(&mut cx, |worktree, cx| {
1897 let worktree = worktree.as_local_mut().unwrap();
1898 worktree.new_buffer(buffer_id, path, cx)
1899 })
1900 }
1901 Err(e) => Err(e),
1902 }?;
1903 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1904 Ok(buffer)
1905 })
1906 }
1907
1908 fn open_remote_buffer_internal(
1909 &mut self,
1910 path: &Arc<Path>,
1911 worktree: &Model<Worktree>,
1912 cx: &mut ModelContext<Self>,
1913 ) -> Task<Result<Model<Buffer>>> {
1914 let rpc = self.client.clone();
1915 let project_id = self.remote_id().unwrap();
1916 let remote_worktree_id = worktree.read(cx).id();
1917 let path = path.clone();
1918 let path_string = path.to_string_lossy().to_string();
1919 cx.spawn(move |this, mut cx| async move {
1920 let response = rpc
1921 .request(proto::OpenBufferByPath {
1922 project_id,
1923 worktree_id: remote_worktree_id.to_proto(),
1924 path: path_string,
1925 })
1926 .await?;
1927 let buffer_id = BufferId::new(response.buffer_id)?;
1928 this.update(&mut cx, |this, cx| {
1929 this.wait_for_remote_buffer(buffer_id, cx)
1930 })?
1931 .await
1932 })
1933 }
1934
    /// `LanguageServerName` is owned because it is inserted into a map.
1936 pub fn open_local_buffer_via_lsp(
1937 &mut self,
1938 abs_path: lsp::Url,
1939 language_server_id: LanguageServerId,
1940 language_server_name: LanguageServerName,
1941 cx: &mut ModelContext<Self>,
1942 ) -> Task<Result<Model<Buffer>>> {
1943 cx.spawn(move |this, mut cx| async move {
1944 let abs_path = abs_path
1945 .to_file_path()
1946 .map_err(|_| anyhow!("can't convert URI to path"))?;
1947 let (worktree, relative_path) = if let Some(result) =
1948 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1949 {
1950 result
1951 } else {
1952 let worktree = this
1953 .update(&mut cx, |this, cx| {
1954 this.create_local_worktree(&abs_path, false, cx)
1955 })?
1956 .await?;
1957 this.update(&mut cx, |this, cx| {
1958 this.language_server_ids.insert(
1959 (worktree.read(cx).id(), language_server_name),
1960 language_server_id,
1961 );
1962 })
1963 .ok();
1964 (worktree, PathBuf::new())
1965 };
1966
1967 let project_path = ProjectPath {
1968 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1969 path: relative_path.into(),
1970 };
1971 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1972 .await
1973 })
1974 }
1975
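    /// Opens a buffer by its remote id. Locally this only succeeds if the buffer
    /// is already open; remote projects request it from the host.
    ///
    /// A sketch, assuming `buffer_id: BufferId` was received from a collaborator:
    ///
    /// ```ignore
    /// let task = project.update(cx, |project, cx| {
    ///     project.open_buffer_by_id(buffer_id, cx)
    /// });
    /// let buffer = task.await?;
    /// ```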
1976 pub fn open_buffer_by_id(
1977 &mut self,
1978 id: BufferId,
1979 cx: &mut ModelContext<Self>,
1980 ) -> Task<Result<Model<Buffer>>> {
1981 if let Some(buffer) = self.buffer_for_id(id) {
1982 Task::ready(Ok(buffer))
1983 } else if self.is_local() {
1984 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1985 } else if let Some(project_id) = self.remote_id() {
1986 let request = self.client.request(proto::OpenBufferById {
1987 project_id,
1988 id: id.into(),
1989 });
1990 cx.spawn(move |this, mut cx| async move {
1991 let buffer_id = BufferId::new(request.await?.buffer_id)?;
1992 this.update(&mut cx, |this, cx| {
1993 this.wait_for_remote_buffer(buffer_id, cx)
1994 })?
1995 .await
1996 })
1997 } else {
1998 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1999 }
2000 }
2001
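    /// Saves a set of buffers concurrently, failing if any individual save fails.
    ///
    /// A minimal sketch; `dirty_buffers` is an assumed `HashSet<Model<Buffer>>`
    /// collected by the caller:
    ///
    /// ```ignore
    /// let save = project.update(cx, |project, cx| {
    ///     project.save_buffers(dirty_buffers, cx)
    /// });
    /// save.await?;
    /// ```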
2002 pub fn save_buffers(
2003 &self,
2004 buffers: HashSet<Model<Buffer>>,
2005 cx: &mut ModelContext<Self>,
2006 ) -> Task<Result<()>> {
2007 cx.spawn(move |this, mut cx| async move {
2008 let save_tasks = buffers.into_iter().filter_map(|buffer| {
2009 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
2010 .ok()
2011 });
2012 try_join_all(save_tasks).await?;
2013 Ok(())
2014 })
2015 }
2016
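    /// Saves a single buffer back to the worktree that owns its file.
    ///
    /// A sketch, assuming `buffer` is an open, file-backed `Model<Buffer>`:
    ///
    /// ```ignore
    /// let save = project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
    /// save.await?;
    /// ```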
2017 pub fn save_buffer(
2018 &self,
2019 buffer: Model<Buffer>,
2020 cx: &mut ModelContext<Self>,
2021 ) -> Task<Result<()>> {
2022 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2023 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
2024 };
2025 let worktree = file.worktree.clone();
2026 let path = file.path.clone();
2027 worktree.update(cx, |worktree, cx| match worktree {
2028 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
2029 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
2030 })
2031 }
2032
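    /// Saves a buffer to a new absolute path ("save as"), creating or reusing a
    /// local worktree for that path and re-registering the buffer with language
    /// servers afterwards.
    ///
    /// A hedged sketch; the destination path is illustrative:
    ///
    /// ```ignore
    /// let save = project.update(cx, |project, cx| {
    ///     project.save_buffer_as(buffer.clone(), PathBuf::from("/path/to/new_name.rs"), cx)
    /// });
    /// save.await?;
    /// ```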
2033 pub fn save_buffer_as(
2034 &mut self,
2035 buffer: Model<Buffer>,
2036 abs_path: PathBuf,
2037 cx: &mut ModelContext<Self>,
2038 ) -> Task<Result<()>> {
2039 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
2040 let old_file = File::from_dyn(buffer.read(cx).file())
2041 .filter(|f| f.is_local())
2042 .cloned();
2043 cx.spawn(move |this, mut cx| async move {
2044 if let Some(old_file) = &old_file {
2045 this.update(&mut cx, |this, cx| {
2046 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
2047 })?;
2048 }
2049 let (worktree, path) = worktree_task.await?;
2050 worktree
2051 .update(&mut cx, |worktree, cx| match worktree {
2052 Worktree::Local(worktree) => {
2053 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
2054 }
                    Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
2056 })?
2057 .await?;
2058
2059 this.update(&mut cx, |this, cx| {
2060 this.detect_language_for_buffer(&buffer, cx);
2061 this.register_buffer_with_language_servers(&buffer, cx);
2062 })?;
2063 Ok(())
2064 })
2065 }
2066
2067 pub fn get_open_buffer(
2068 &mut self,
2069 path: &ProjectPath,
2070 cx: &mut ModelContext<Self>,
2071 ) -> Option<Model<Buffer>> {
2072 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
2073 self.opened_buffers.values().find_map(|buffer| {
2074 let buffer = buffer.upgrade()?;
2075 let file = File::from_dyn(buffer.read(cx).file())?;
2076 if file.worktree == worktree && file.path() == &path.path {
2077 Some(buffer)
2078 } else {
2079 None
2080 }
2081 })
2082 }
2083
2084 fn register_buffer(
2085 &mut self,
2086 buffer: &Model<Buffer>,
2087 cx: &mut ModelContext<Self>,
2088 ) -> Result<()> {
2089 self.request_buffer_diff_recalculation(buffer, cx);
2090 buffer.update(cx, |buffer, _| {
2091 buffer.set_language_registry(self.languages.clone())
2092 });
2093
2094 let remote_id = buffer.read(cx).remote_id();
2095 let is_remote = self.is_remote();
2096 let open_buffer = if is_remote || self.is_shared() {
2097 OpenBuffer::Strong(buffer.clone())
2098 } else {
2099 OpenBuffer::Weak(buffer.downgrade())
2100 };
2101
2102 match self.opened_buffers.entry(remote_id) {
2103 hash_map::Entry::Vacant(entry) => {
2104 entry.insert(open_buffer);
2105 }
2106 hash_map::Entry::Occupied(mut entry) => {
2107 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2108 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2109 } else if entry.get().upgrade().is_some() {
2110 if is_remote {
2111 return Ok(());
2112 } else {
2113 debug_panic!("buffer {} was already registered", remote_id);
2114 Err(anyhow!("buffer {} was already registered", remote_id))?;
2115 }
2116 }
2117 entry.insert(open_buffer);
2118 }
2119 }
2120 cx.subscribe(buffer, |this, buffer, event, cx| {
2121 this.on_buffer_event(buffer, event, cx);
2122 })
2123 .detach();
2124
2125 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2126 if file.is_local {
2127 self.local_buffer_ids_by_path.insert(
2128 ProjectPath {
2129 worktree_id: file.worktree_id(cx),
2130 path: file.path.clone(),
2131 },
2132 remote_id,
2133 );
2134
2135 if let Some(entry_id) = file.entry_id {
2136 self.local_buffer_ids_by_entry_id
2137 .insert(entry_id, remote_id);
2138 }
2139 }
2140 }
2141
2142 self.detect_language_for_buffer(buffer, cx);
2143 self.register_buffer_with_language_servers(buffer, cx);
2144 self.register_buffer_with_copilot(buffer, cx);
2145 cx.observe_release(buffer, |this, buffer, cx| {
2146 if let Some(file) = File::from_dyn(buffer.file()) {
2147 if file.is_local() {
2148 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2149 for server in this.language_servers_for_buffer(buffer, cx) {
2150 server
2151 .1
2152 .notify::<lsp::notification::DidCloseTextDocument>(
2153 lsp::DidCloseTextDocumentParams {
2154 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2155 },
2156 )
2157 .log_err();
2158 }
2159 }
2160 }
2161 })
2162 .detach();
2163
2164 if let Some(senders) = self.loading_buffers.remove(&remote_id) {
2165 for sender in senders {
2166 sender.send(Ok(buffer.clone())).ok();
2167 }
2168 }
2169 Ok(())
2170 }
2171
2172 fn register_buffer_with_language_servers(
2173 &mut self,
2174 buffer_handle: &Model<Buffer>,
2175 cx: &mut ModelContext<Self>,
2176 ) {
2177 let buffer = buffer_handle.read(cx);
2178 let buffer_id = buffer.remote_id();
2179
2180 if let Some(file) = File::from_dyn(buffer.file()) {
2181 if !file.is_local() {
2182 return;
2183 }
2184
2185 let abs_path = file.abs_path(cx);
2186 let uri = lsp::Url::from_file_path(&abs_path)
2187 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2188 let initial_snapshot = buffer.text_snapshot();
2189 let language = buffer.language().cloned();
2190 let worktree_id = file.worktree_id(cx);
2191
2192 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2193 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2194 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2195 .log_err();
2196 }
2197 }
2198
2199 if let Some(language) = language {
2200 for adapter in self.languages.lsp_adapters(&language) {
2201 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2202 let server = self
2203 .language_server_ids
2204 .get(&(worktree_id, adapter.name.clone()))
2205 .and_then(|id| self.language_servers.get(id))
2206 .and_then(|server_state| {
2207 if let LanguageServerState::Running { server, .. } = server_state {
2208 Some(server.clone())
2209 } else {
2210 None
2211 }
2212 });
2213 let server = match server {
2214 Some(server) => server,
2215 None => continue,
2216 };
2217
2218 server
2219 .notify::<lsp::notification::DidOpenTextDocument>(
2220 lsp::DidOpenTextDocumentParams {
2221 text_document: lsp::TextDocumentItem::new(
2222 uri.clone(),
2223 language_id.unwrap_or_default(),
2224 0,
2225 initial_snapshot.text(),
2226 ),
2227 },
2228 )
2229 .log_err();
2230
2231 buffer_handle.update(cx, |buffer, cx| {
2232 buffer.set_completion_triggers(
2233 server
2234 .capabilities()
2235 .completion_provider
2236 .as_ref()
2237 .and_then(|provider| provider.trigger_characters.clone())
2238 .unwrap_or_default(),
2239 cx,
2240 );
2241 });
2242
2243 let snapshot = LspBufferSnapshot {
2244 version: 0,
2245 snapshot: initial_snapshot.clone(),
2246 };
2247 self.buffer_snapshots
2248 .entry(buffer_id)
2249 .or_default()
2250 .insert(server.server_id(), vec![snapshot]);
2251 }
2252 }
2253 }
2254 }
2255
2256 fn unregister_buffer_from_language_servers(
2257 &mut self,
2258 buffer: &Model<Buffer>,
2259 old_file: &File,
2260 cx: &mut ModelContext<Self>,
2261 ) {
2262 let old_path = match old_file.as_local() {
2263 Some(local) => local.abs_path(cx),
2264 None => return,
2265 };
2266
2267 buffer.update(cx, |buffer, cx| {
2268 let worktree_id = old_file.worktree_id(cx);
2269 let ids = &self.language_server_ids;
2270
2271 if let Some(language) = buffer.language().cloned() {
2272 for adapter in self.languages.lsp_adapters(&language) {
2273 if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) {
2274 buffer.update_diagnostics(*server_id, Default::default(), cx);
2275 }
2276 }
2277 }
2278
2279 self.buffer_snapshots.remove(&buffer.remote_id());
2280 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2281 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2282 language_server
2283 .notify::<lsp::notification::DidCloseTextDocument>(
2284 lsp::DidCloseTextDocumentParams {
2285 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2286 },
2287 )
2288 .log_err();
2289 }
2290 });
2291 }
2292
2293 fn register_buffer_with_copilot(
2294 &self,
2295 buffer_handle: &Model<Buffer>,
2296 cx: &mut ModelContext<Self>,
2297 ) {
2298 if let Some(copilot) = Copilot::global(cx) {
2299 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2300 }
2301 }
2302
2303 async fn send_buffer_ordered_messages(
2304 this: WeakModel<Self>,
2305 rx: UnboundedReceiver<BufferOrderedMessage>,
2306 mut cx: AsyncAppContext,
2307 ) -> Result<()> {
2308 const MAX_BATCH_SIZE: usize = 128;
2309
2310 let mut operations_by_buffer_id = HashMap::default();
2311 async fn flush_operations(
2312 this: &WeakModel<Project>,
2313 operations_by_buffer_id: &mut HashMap<BufferId, Vec<proto::Operation>>,
2314 needs_resync_with_host: &mut bool,
2315 is_local: bool,
2316 cx: &mut AsyncAppContext,
2317 ) -> Result<()> {
2318 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2319 let request = this.update(cx, |this, _| {
2320 let project_id = this.remote_id()?;
2321 Some(this.client.request(proto::UpdateBuffer {
2322 buffer_id: buffer_id.into(),
2323 project_id,
2324 operations,
2325 }))
2326 })?;
2327 if let Some(request) = request {
2328 if request.await.is_err() && !is_local {
2329 *needs_resync_with_host = true;
2330 break;
2331 }
2332 }
2333 }
2334 Ok(())
2335 }
2336
2337 let mut needs_resync_with_host = false;
2338 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2339
2340 while let Some(changes) = changes.next().await {
2341 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2342
2343 for change in changes {
2344 match change {
2345 BufferOrderedMessage::Operation {
2346 buffer_id,
2347 operation,
2348 } => {
2349 if needs_resync_with_host {
2350 continue;
2351 }
2352
2353 operations_by_buffer_id
2354 .entry(buffer_id)
2355 .or_insert(Vec::new())
2356 .push(operation);
2357 }
2358
2359 BufferOrderedMessage::Resync => {
2360 operations_by_buffer_id.clear();
2361 if this
2362 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2363 .await
2364 .is_ok()
2365 {
2366 needs_resync_with_host = false;
2367 }
2368 }
2369
2370 BufferOrderedMessage::LanguageServerUpdate {
2371 language_server_id,
2372 message,
2373 } => {
2374 flush_operations(
2375 &this,
2376 &mut operations_by_buffer_id,
2377 &mut needs_resync_with_host,
2378 is_local,
2379 &mut cx,
2380 )
2381 .await?;
2382
2383 this.update(&mut cx, |this, _| {
2384 if let Some(project_id) = this.remote_id() {
2385 this.client
2386 .send(proto::UpdateLanguageServer {
2387 project_id,
2388 language_server_id: language_server_id.0 as u64,
2389 variant: Some(message),
2390 })
2391 .log_err();
2392 }
2393 })?;
2394 }
2395 }
2396 }
2397
2398 flush_operations(
2399 &this,
2400 &mut operations_by_buffer_id,
2401 &mut needs_resync_with_host,
2402 is_local,
2403 &mut cx,
2404 )
2405 .await?;
2406 }
2407
2408 Ok(())
2409 }
2410
2411 fn on_buffer_event(
2412 &mut self,
2413 buffer: Model<Buffer>,
2414 event: &BufferEvent,
2415 cx: &mut ModelContext<Self>,
2416 ) -> Option<()> {
2417 if matches!(
2418 event,
2419 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2420 ) {
2421 self.request_buffer_diff_recalculation(&buffer, cx);
2422 }
2423
2424 match event {
2425 BufferEvent::Operation(operation) => {
2426 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation {
2427 buffer_id: buffer.read(cx).remote_id(),
2428 operation: language::proto::serialize_operation(operation),
2429 })
2430 .ok();
2431 }
2432
2433 BufferEvent::Edited { .. } => {
2434 let buffer = buffer.read(cx);
2435 let file = File::from_dyn(buffer.file())?;
2436 let abs_path = file.as_local()?.abs_path(cx);
2437 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2438 let next_snapshot = buffer.text_snapshot();
2439
2440 let language_servers: Vec<_> = self
2441 .language_servers_for_buffer(buffer, cx)
2442 .map(|i| i.1.clone())
2443 .collect();
2444
2445 for language_server in language_servers {
2446 let language_server = language_server.clone();
2447
2448 let buffer_snapshots = self
2449 .buffer_snapshots
2450 .get_mut(&buffer.remote_id())
2451 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2452 let previous_snapshot = buffer_snapshots.last()?;
2453
2454 let build_incremental_change = || {
2455 buffer
2456 .edits_since::<(PointUtf16, usize)>(
2457 previous_snapshot.snapshot.version(),
2458 )
2459 .map(|edit| {
2460 let edit_start = edit.new.start.0;
2461 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2462 let new_text = next_snapshot
2463 .text_for_range(edit.new.start.1..edit.new.end.1)
2464 .collect();
2465 lsp::TextDocumentContentChangeEvent {
2466 range: Some(lsp::Range::new(
2467 point_to_lsp(edit_start),
2468 point_to_lsp(edit_end),
2469 )),
2470 range_length: None,
2471 text: new_text,
2472 }
2473 })
2474 .collect()
2475 };
2476
2477 let document_sync_kind = language_server
2478 .capabilities()
2479 .text_document_sync
2480 .as_ref()
2481 .and_then(|sync| match sync {
2482 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2483 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2484 });
2485
2486 let content_changes: Vec<_> = match document_sync_kind {
2487 Some(lsp::TextDocumentSyncKind::FULL) => {
2488 vec![lsp::TextDocumentContentChangeEvent {
2489 range: None,
2490 range_length: None,
2491 text: next_snapshot.text(),
2492 }]
2493 }
2494 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2495 _ => {
2496 #[cfg(any(test, feature = "test-support"))]
2497 {
2498 build_incremental_change()
2499 }
2500
2501 #[cfg(not(any(test, feature = "test-support")))]
2502 {
2503 continue;
2504 }
2505 }
2506 };
2507
2508 let next_version = previous_snapshot.version + 1;
2509
2510 buffer_snapshots.push(LspBufferSnapshot {
2511 version: next_version,
2512 snapshot: next_snapshot.clone(),
2513 });
2514
2515 language_server
2516 .notify::<lsp::notification::DidChangeTextDocument>(
2517 lsp::DidChangeTextDocumentParams {
2518 text_document: lsp::VersionedTextDocumentIdentifier::new(
2519 uri.clone(),
2520 next_version,
2521 ),
2522 content_changes,
2523 },
2524 )
2525 .log_err();
2526 }
2527 }
2528
2529 BufferEvent::Saved => {
2530 let file = File::from_dyn(buffer.read(cx).file())?;
2531 let worktree_id = file.worktree_id(cx);
2532 let abs_path = file.as_local()?.abs_path(cx);
2533 let text_document = lsp::TextDocumentIdentifier {
2534 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2535 };
2536
2537 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2538 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2539
2540 server
2541 .notify::<lsp::notification::DidSaveTextDocument>(
2542 lsp::DidSaveTextDocumentParams {
2543 text_document: text_document.clone(),
2544 text,
2545 },
2546 )
2547 .log_err();
2548 }
2549
2550 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2551 for language_server_id in language_server_ids {
2552 if let Some(LanguageServerState::Running {
2553 adapter,
2554 simulate_disk_based_diagnostics_completion,
2555 ..
2556 }) = self.language_servers.get_mut(&language_server_id)
2557 {
2558 // After saving a buffer using a language server that doesn't provide
2559 // a disk-based progress token, kick off a timer that will reset every
2560 // time the buffer is saved. If the timer eventually fires, simulate
2561 // disk-based diagnostics being finished so that other pieces of UI
2562 // (e.g., project diagnostics view, diagnostic status bar) can update.
2563 // We don't emit an event right away because the language server might take
2564 // some time to publish diagnostics.
2565 if adapter.disk_based_diagnostics_progress_token.is_none() {
2566 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2567 Duration::from_secs(1);
2568
2569 let task = cx.spawn(move |this, mut cx| async move {
2570 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2571 if let Some(this) = this.upgrade() {
2572 this.update(&mut cx, |this, cx| {
2573 this.disk_based_diagnostics_finished(
2574 language_server_id,
2575 cx,
2576 );
2577 this.enqueue_buffer_ordered_message(
2578 BufferOrderedMessage::LanguageServerUpdate {
2579 language_server_id,
                                                message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default())
2581 },
2582 )
2583 .ok();
2584 }).ok();
2585 }
2586 });
2587 *simulate_disk_based_diagnostics_completion = Some(task);
2588 }
2589 }
2590 }
2591 }
2592 BufferEvent::FileHandleChanged => {
2593 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2594 return None;
2595 };
2596
2597 let remote_id = buffer.read(cx).remote_id();
2598 if let Some(entry_id) = file.entry_id {
2599 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2600 Some(_) => {
2601 return None;
2602 }
2603 None => {
2604 self.local_buffer_ids_by_entry_id
2605 .insert(entry_id, remote_id);
2606 }
2607 }
2608 };
2609 self.local_buffer_ids_by_path.insert(
2610 ProjectPath {
2611 worktree_id: file.worktree_id(cx),
2612 path: file.path.clone(),
2613 },
2614 remote_id,
2615 );
2616 }
2617 _ => {}
2618 }
2619
2620 None
2621 }
2622
2623 fn request_buffer_diff_recalculation(
2624 &mut self,
2625 buffer: &Model<Buffer>,
2626 cx: &mut ModelContext<Self>,
2627 ) {
2628 self.buffers_needing_diff.insert(buffer.downgrade());
2629 let first_insertion = self.buffers_needing_diff.len() == 1;
2630
2631 let settings = ProjectSettings::get_global(cx);
2632 let delay = if let Some(delay) = settings.git.gutter_debounce {
2633 delay
2634 } else {
2635 if first_insertion {
2636 let this = cx.weak_model();
2637 cx.defer(move |cx| {
2638 if let Some(this) = this.upgrade() {
2639 this.update(cx, |this, cx| {
2640 this.recalculate_buffer_diffs(cx).detach();
2641 });
2642 }
2643 });
2644 }
2645 return;
2646 };
2647
2648 const MIN_DELAY: u64 = 50;
2649 let delay = delay.max(MIN_DELAY);
2650 let duration = Duration::from_millis(delay);
2651
2652 self.git_diff_debouncer
2653 .fire_new(duration, cx, move |this, cx| {
2654 this.recalculate_buffer_diffs(cx)
2655 });
2656 }
2657
2658 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2659 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2660 cx.spawn(move |this, mut cx| async move {
2661 let tasks: Vec<_> = buffers
2662 .iter()
2663 .filter_map(|buffer| {
2664 let buffer = buffer.upgrade()?;
2665 buffer
2666 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2667 .ok()
2668 .flatten()
2669 })
2670 .collect();
2671
2672 futures::future::join_all(tasks).await;
2673
2674 this.update(&mut cx, |this, cx| {
2675 if !this.buffers_needing_diff.is_empty() {
2676 this.recalculate_buffer_diffs(cx).detach();
2677 } else {
2678 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2679 for buffer in buffers {
2680 if let Some(buffer) = buffer.upgrade() {
2681 buffer.update(cx, |_, cx| cx.notify());
2682 }
2683 }
2684 }
2685 })
2686 .ok();
2687 })
2688 }
2689
2690 fn language_servers_for_worktree(
2691 &self,
2692 worktree_id: WorktreeId,
2693 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2694 self.language_server_ids
2695 .iter()
2696 .filter_map(move |((language_server_worktree_id, _), id)| {
2697 if *language_server_worktree_id == worktree_id {
2698 if let Some(LanguageServerState::Running {
2699 adapter,
2700 language,
2701 server,
2702 ..
2703 }) = self.language_servers.get(id)
2704 {
2705 return Some((adapter, language, server));
2706 }
2707 }
2708 None
2709 })
2710 }
2711
2712 fn maintain_buffer_languages(
2713 languages: Arc<LanguageRegistry>,
2714 cx: &mut ModelContext<Project>,
2715 ) -> Task<()> {
2716 let mut subscription = languages.subscribe();
2717 let mut prev_reload_count = languages.reload_count();
2718 cx.spawn(move |project, mut cx| async move {
2719 while let Some(()) = subscription.next().await {
2720 if let Some(project) = project.upgrade() {
2721 // If the language registry has been reloaded, then remove and
2722 // re-assign the languages on all open buffers.
2723 let reload_count = languages.reload_count();
2724 if reload_count > prev_reload_count {
2725 prev_reload_count = reload_count;
2726 project
2727 .update(&mut cx, |this, cx| {
2728 let buffers = this
2729 .opened_buffers
2730 .values()
2731 .filter_map(|b| b.upgrade())
2732 .collect::<Vec<_>>();
2733 for buffer in buffers {
2734 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2735 {
2736 this.unregister_buffer_from_language_servers(
2737 &buffer, &f, cx,
2738 );
2739 buffer
2740 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2741 }
2742 }
2743 })
2744 .ok();
2745 }
2746
2747 project
2748 .update(&mut cx, |project, cx| {
2749 let mut plain_text_buffers = Vec::new();
2750 let mut buffers_with_unknown_injections = Vec::new();
2751 for buffer in project.opened_buffers.values() {
2752 if let Some(handle) = buffer.upgrade() {
2753 let buffer = &handle.read(cx);
2754 if buffer.language().is_none()
2755 || buffer.language() == Some(&*language::PLAIN_TEXT)
2756 {
2757 plain_text_buffers.push(handle);
2758 } else if buffer.contains_unknown_injections() {
2759 buffers_with_unknown_injections.push(handle);
2760 }
2761 }
2762 }
2763
2764 for buffer in plain_text_buffers {
2765 project.detect_language_for_buffer(&buffer, cx);
2766 project.register_buffer_with_language_servers(&buffer, cx);
2767 }
2768
2769 for buffer in buffers_with_unknown_injections {
2770 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2771 }
2772 })
2773 .ok();
2774 }
2775 }
2776 })
2777 }
2778
2779 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2780 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2781 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2782
2783 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2784 *settings_changed_tx.borrow_mut() = ();
2785 });
2786
2787 cx.spawn(move |this, mut cx| async move {
2788 while let Some(()) = settings_changed_rx.next().await {
2789 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2790 this.language_servers
2791 .values()
2792 .filter_map(|state| match state {
2793 LanguageServerState::Starting(_) => None,
2794 LanguageServerState::Running {
2795 adapter, server, ..
2796 } => Some((adapter.clone(), server.clone())),
2797 })
2798 .collect()
2799 })?;
2800
2801 for (adapter, server) in servers {
2802 let settings =
2803 cx.update(|cx| adapter.workspace_configuration(server.root_path(), cx))?;
2804
2805 server
2806 .notify::<lsp::notification::DidChangeConfiguration>(
2807 lsp::DidChangeConfigurationParams { settings },
2808 )
2809 .ok();
2810 }
2811 }
2812
2813 drop(settings_observation);
2814 anyhow::Ok(())
2815 })
2816 }
2817
2818 fn detect_language_for_buffer(
2819 &mut self,
2820 buffer_handle: &Model<Buffer>,
2821 cx: &mut ModelContext<Self>,
2822 ) {
        // Determine the buffer's language from its file and contents. If one is found,
        // set it and start the associated language servers if we haven't already.
2824 let buffer = buffer_handle.read(cx);
2825 let Some(file) = buffer.file() else {
2826 return;
2827 };
2828 let content = buffer.as_rope();
2829 let Some(new_language_result) = self
2830 .languages
2831 .language_for_file(file, Some(content), cx)
2832 .now_or_never()
2833 else {
2834 return;
2835 };
2836
2837 match new_language_result {
2838 Err(e) => {
2839 if e.is::<language::LanguageNotFound>() {
2840 cx.emit(Event::LanguageNotFound(buffer_handle.clone()))
2841 }
2842 }
2843 Ok(new_language) => {
2844 self.set_language_for_buffer(buffer_handle, new_language, cx);
2845 }
2846 };
2847 }
2848
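    /// Assigns a language to a buffer explicitly, applying language settings,
    /// installing default prettier plugins when relevant, and starting language
    /// servers for local worktrees.
    ///
    /// A sketch, assuming `rust_language: Arc<Language>` was looked up from the
    /// project's `LanguageRegistry` by the caller:
    ///
    /// ```ignore
    /// project.update(cx, |project, cx| {
    ///     project.set_language_for_buffer(&buffer, rust_language, cx);
    /// });
    /// ```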
2849 pub fn set_language_for_buffer(
2850 &mut self,
2851 buffer: &Model<Buffer>,
2852 new_language: Arc<Language>,
2853 cx: &mut ModelContext<Self>,
2854 ) {
2855 buffer.update(cx, |buffer, cx| {
2856 if buffer.language().map_or(true, |old_language| {
2857 !Arc::ptr_eq(old_language, &new_language)
2858 }) {
2859 buffer.set_language(Some(new_language.clone()), cx);
2860 }
2861 });
2862
2863 let buffer_file = buffer.read(cx).file().cloned();
2864 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2865 let buffer_file = File::from_dyn(buffer_file.as_ref());
2866 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2867 if let Some(prettier_plugins) =
2868 prettier_support::prettier_plugins_for_language(&new_language, &settings)
2869 {
2870 self.install_default_prettier(worktree, prettier_plugins.iter().cloned(), cx);
2871 };
2872 if let Some(file) = buffer_file {
2873 let worktree = file.worktree.clone();
2874 if worktree.read(cx).is_local() {
2875 self.start_language_servers(&worktree, new_language, cx);
2876 }
2877 }
2878 }
2879
2880 fn start_language_servers(
2881 &mut self,
2882 worktree: &Model<Worktree>,
2883 language: Arc<Language>,
2884 cx: &mut ModelContext<Self>,
2885 ) {
2886 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2887 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2888 if !settings.enable_language_server {
2889 return;
2890 }
2891
2892 for adapter in self.languages.clone().lsp_adapters(&language) {
2893 self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
2894 }
2895 }
2896
2897 fn start_language_server(
2898 &mut self,
2899 worktree_handle: &Model<Worktree>,
2900 adapter: Arc<CachedLspAdapter>,
2901 language: Arc<Language>,
2902 cx: &mut ModelContext<Self>,
2903 ) {
2904 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2905 return;
2906 }
2907
2908 let worktree = worktree_handle.read(cx);
2909 let worktree_id = worktree.id();
2910 let worktree_path = worktree.abs_path();
2911 let key = (worktree_id, adapter.name.clone());
2912 if self.language_server_ids.contains_key(&key) {
2913 return;
2914 }
2915
2916 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2917 let pending_server = match self.languages.create_pending_language_server(
2918 stderr_capture.clone(),
2919 language.clone(),
2920 adapter.clone(),
2921 Arc::clone(&worktree_path),
2922 ProjectLspAdapterDelegate::new(self, worktree_handle, cx),
2923 cx,
2924 ) {
2925 Some(pending_server) => pending_server,
2926 None => return,
2927 };
2928
2929 let project_settings = ProjectSettings::get(
2930 Some(SettingsLocation {
2931 worktree_id: worktree_id.to_proto() as usize,
2932 path: Path::new(""),
2933 }),
2934 cx,
2935 );
2936 let lsp = project_settings.lsp.get(&adapter.name.0);
2937 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2938
2939 let server_id = pending_server.server_id;
2940 let container_dir = pending_server.container_dir.clone();
2941 let state = LanguageServerState::Starting({
2942 let adapter = adapter.clone();
2943 let server_name = adapter.name.0.clone();
2944 let language = language.clone();
2945 let key = key.clone();
2946
2947 cx.spawn(move |this, mut cx| async move {
2948 let result = Self::setup_and_insert_language_server(
2949 this.clone(),
2950 &worktree_path,
2951 override_options,
2952 pending_server,
2953 adapter.clone(),
2954 language.clone(),
2955 server_id,
2956 key,
2957 &mut cx,
2958 )
2959 .await;
2960
2961 match result {
2962 Ok(server) => {
2963 stderr_capture.lock().take();
2964 server
2965 }
2966
2967 Err(err) => {
2968 log::error!("failed to start language server {server_name:?}: {err}");
2969 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2970
2971 let this = this.upgrade()?;
2972 let container_dir = container_dir?;
2973
2974 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2975 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2976 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2977 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2978 return None;
2979 }
2980
2981 log::info!(
2982 "retrying installation of language server {server_name:?} in {}s",
2983 SERVER_REINSTALL_DEBOUNCE_TIMEOUT.as_secs()
2984 );
2985 cx.background_executor()
2986 .timer(SERVER_REINSTALL_DEBOUNCE_TIMEOUT)
2987 .await;
2988
2989 let installation_test_binary = adapter
2990 .installation_test_binary(container_dir.to_path_buf())
2991 .await;
2992
2993 this.update(&mut cx, |_, cx| {
2994 Self::check_errored_server(
2995 language,
2996 adapter,
2997 server_id,
2998 installation_test_binary,
2999 cx,
3000 )
3001 })
3002 .ok();
3003
3004 None
3005 }
3006 }
3007 })
3008 });
3009
3010 self.language_servers.insert(server_id, state);
3011 self.language_server_ids.insert(key, server_id);
3012 }
3013
3014 fn reinstall_language_server(
3015 &mut self,
3016 language: Arc<Language>,
3017 adapter: Arc<CachedLspAdapter>,
3018 server_id: LanguageServerId,
3019 cx: &mut ModelContext<Self>,
3020 ) -> Option<Task<()>> {
3021 log::info!("beginning to reinstall server");
3022
3023 let existing_server = match self.language_servers.remove(&server_id) {
3024 Some(LanguageServerState::Running { server, .. }) => Some(server),
3025 _ => None,
3026 };
3027
3028 for worktree in &self.worktrees {
3029 if let Some(worktree) = worktree.upgrade() {
3030 let key = (worktree.read(cx).id(), adapter.name.clone());
3031 self.language_server_ids.remove(&key);
3032 }
3033 }
3034
3035 Some(cx.spawn(move |this, mut cx| async move {
3036 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
3037 log::info!("shutting down existing server");
3038 task.await;
3039 }
3040
            // TODO: This is race-safe with regard to preventing new instances from
            // starting while deleting, but existing instances in other projects will
            // be left in a confused, broken state.
3044 let Some(task) = this
3045 .update(&mut cx, |this, cx| {
3046 this.languages.delete_server_container(adapter.clone(), cx)
3047 })
3048 .log_err()
3049 else {
3050 return;
3051 };
3052 task.await;
3053
3054 this.update(&mut cx, |this, cx| {
3055 let worktrees = this.worktrees.clone();
3056 for worktree in worktrees {
3057 if let Some(worktree) = worktree.upgrade() {
3058 this.start_language_server(
3059 &worktree,
3060 adapter.clone(),
3061 language.clone(),
3062 cx,
3063 );
3064 }
3065 }
3066 })
3067 .ok();
3068 }))
3069 }
3070
3071 #[allow(clippy::too_many_arguments)]
3072 async fn setup_and_insert_language_server(
3073 this: WeakModel<Self>,
3074 worktree_path: &Path,
3075 override_initialization_options: Option<serde_json::Value>,
3076 pending_server: PendingLanguageServer,
3077 adapter: Arc<CachedLspAdapter>,
3078 language: Arc<Language>,
3079 server_id: LanguageServerId,
3080 key: (WorktreeId, LanguageServerName),
3081 cx: &mut AsyncAppContext,
3082 ) -> Result<Option<Arc<LanguageServer>>> {
3083 let language_server = Self::setup_pending_language_server(
3084 this.clone(),
3085 override_initialization_options,
3086 pending_server,
3087 worktree_path,
3088 adapter.clone(),
3089 server_id,
3090 cx,
3091 )
3092 .await?;
3093
3094 let this = match this.upgrade() {
3095 Some(this) => this,
3096 None => return Err(anyhow!("failed to upgrade project handle")),
3097 };
3098
3099 this.update(cx, |this, cx| {
3100 this.insert_newly_running_language_server(
3101 language,
3102 adapter,
3103 language_server.clone(),
3104 server_id,
3105 key,
3106 cx,
3107 )
3108 })??;
3109
3110 Ok(Some(language_server))
3111 }
3112
3113 async fn setup_pending_language_server(
3114 this: WeakModel<Self>,
3115 override_options: Option<serde_json::Value>,
3116 pending_server: PendingLanguageServer,
3117 worktree_path: &Path,
3118 adapter: Arc<CachedLspAdapter>,
3119 server_id: LanguageServerId,
3120 cx: &mut AsyncAppContext,
3121 ) -> Result<Arc<LanguageServer>> {
3122 let workspace_config =
3123 cx.update(|cx| adapter.workspace_configuration(worktree_path, cx))?;
3124 let (language_server, mut initialization_options) = pending_server.task.await?;
3125
3126 let name = language_server.name();
3127 language_server
3128 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3129 let adapter = adapter.clone();
3130 let this = this.clone();
3131 move |mut params, mut cx| {
3132 let adapter = adapter.clone();
3133 if let Some(this) = this.upgrade() {
3134 adapter.process_diagnostics(&mut params);
3135 this.update(&mut cx, |this, cx| {
3136 this.update_diagnostics(
3137 server_id,
3138 params,
3139 &adapter.disk_based_diagnostic_sources,
3140 cx,
3141 )
3142 .log_err();
3143 })
3144 .ok();
3145 }
3146 }
3147 })
3148 .detach();
3149
3150 language_server
3151 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3152 let adapter = adapter.clone();
3153 let worktree_path = worktree_path.to_path_buf();
3154 move |params, cx| {
3155 let adapter = adapter.clone();
3156 let worktree_path = worktree_path.clone();
3157 async move {
3158 let workspace_config =
3159 cx.update(|cx| adapter.workspace_configuration(&worktree_path, cx))?;
3160 Ok(params
3161 .items
3162 .into_iter()
3163 .map(|item| {
3164 if let Some(section) = &item.section {
3165 workspace_config
3166 .get(section)
3167 .cloned()
3168 .unwrap_or(serde_json::Value::Null)
3169 } else {
3170 workspace_config.clone()
3171 }
3172 })
3173 .collect())
3174 }
3175 }
3176 })
3177 .detach();
3178
3179 // Even though we don't have handling for these requests, respond to them to
3180 // avoid stalling any language server like `gopls` which waits for a response
3181 // to these requests when initializing.
3182 language_server
3183 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3184 let this = this.clone();
3185 move |params, mut cx| {
3186 let this = this.clone();
3187 async move {
3188 this.update(&mut cx, |this, _| {
3189 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3190 {
3191 if let lsp::NumberOrString::String(token) = params.token {
3192 status.progress_tokens.insert(token);
3193 }
3194 }
3195 })?;
3196
3197 Ok(())
3198 }
3199 }
3200 })
3201 .detach();
3202
3203 language_server
3204 .on_request::<lsp::request::RegisterCapability, _, _>({
3205 let this = this.clone();
3206 move |params, mut cx| {
3207 let this = this.clone();
3208 async move {
3209 for reg in params.registrations {
3210 if reg.method == "workspace/didChangeWatchedFiles" {
3211 if let Some(options) = reg.register_options {
3212 let options = serde_json::from_value(options)?;
3213 this.update(&mut cx, |this, cx| {
3214 this.on_lsp_did_change_watched_files(
3215 server_id, options, cx,
3216 );
3217 })?;
3218 }
3219 }
3220 }
3221 Ok(())
3222 }
3223 }
3224 })
3225 .detach();
3226
3227 language_server
3228 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3229 let adapter = adapter.clone();
3230 let this = this.clone();
3231 move |params, cx| {
3232 Self::on_lsp_workspace_edit(
3233 this.clone(),
3234 params,
3235 server_id,
3236 adapter.clone(),
3237 cx,
3238 )
3239 }
3240 })
3241 .detach();
3242
3243 language_server
3244 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3245 let this = this.clone();
3246 move |(), mut cx| {
3247 let this = this.clone();
3248 async move {
3249 this.update(&mut cx, |project, cx| {
3250 cx.emit(Event::RefreshInlayHints);
3251 project.remote_id().map(|project_id| {
3252 project.client.send(proto::RefreshInlayHints { project_id })
3253 })
3254 })?
3255 .transpose()?;
3256 Ok(())
3257 }
3258 }
3259 })
3260 .detach();
3261
3262 language_server
3263 .on_request::<lsp::request::ShowMessageRequest, _, _>({
3264 let this = this.clone();
3265 let name = name.to_string();
3266 move |params, mut cx| {
3267 let this = this.clone();
3268 let name = name.to_string();
3269 async move {
3270 if let Some(actions) = params.actions {
3271 let (tx, mut rx) = smol::channel::bounded(1);
3272 let request = LanguageServerPromptRequest {
3273 level: match params.typ {
3274 lsp::MessageType::ERROR => PromptLevel::Critical,
3275 lsp::MessageType::WARNING => PromptLevel::Warning,
3276 _ => PromptLevel::Info,
3277 },
3278 message: params.message,
3279 actions,
3280 response_channel: tx,
3281 lsp_name: name.clone(),
3282 };
3283
3284 if let Ok(_) = this.update(&mut cx, |_, cx| {
3285 cx.emit(Event::LanguageServerPrompt(request));
3286 }) {
3287 let response = rx.next().await;
3288
3289 Ok(response)
3290 } else {
3291 Ok(None)
3292 }
3293 } else {
3294 Ok(None)
3295 }
3296 }
3297 }
3298 })
3299 .detach();
3300
3301 let disk_based_diagnostics_progress_token =
3302 adapter.disk_based_diagnostics_progress_token.clone();
3303
3304 language_server
3305 .on_notification::<ServerStatus, _>({
3306 let this = this.clone();
3307 let name = name.to_string();
3308 move |params, mut cx| {
3309 let this = this.clone();
3310 let name = name.to_string();
3311 if let Some(ref message) = params.message {
3312 let message = message.trim();
3313 if !message.is_empty() {
3314 let formatted_message = format!(
3315 "Language server {name} (id {server_id}) status update: {message}"
3316 );
3317 match params.health {
3318 ServerHealthStatus::Ok => log::info!("{}", formatted_message),
3319 ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
3320 ServerHealthStatus::Error => {
3321 log::error!("{}", formatted_message);
3322 let (tx, _rx) = smol::channel::bounded(1);
3323 let request = LanguageServerPromptRequest {
3324 level: PromptLevel::Critical,
3325 message: params.message.unwrap_or_default(),
3326 actions: Vec::new(),
3327 response_channel: tx,
3328 lsp_name: name.clone(),
3329 };
3330 let _ = this
3331 .update(&mut cx, |_, cx| {
3332 cx.emit(Event::LanguageServerPrompt(request));
3333 })
3334 .ok();
3335 }
3336 ServerHealthStatus::Other(status) => {
3337 log::info!(
3338 "Unknown server health: {status}\n{formatted_message}"
3339 )
3340 }
3341 }
3342 }
3343 }
3344 }
3345 })
3346 .detach();
3347
3348 language_server
3349 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3350 if let Some(this) = this.upgrade() {
3351 this.update(&mut cx, |this, cx| {
3352 this.on_lsp_progress(
3353 params,
3354 server_id,
3355 disk_based_diagnostics_progress_token.clone(),
3356 cx,
3357 );
3358 })
3359 .ok();
3360 }
3361 })
3362 .detach();
3363
3364 match (&mut initialization_options, override_options) {
3365 (Some(initialization_options), Some(override_options)) => {
3366 merge_json_value_into(override_options, initialization_options);
3367 }
3368 (None, override_options) => initialization_options = override_options,
3369 _ => {}
3370 }
3371 let language_server = cx
3372 .update(|cx| language_server.initialize(initialization_options, cx))?
3373 .await?;
3374
3375 language_server
3376 .notify::<lsp::notification::DidChangeConfiguration>(
3377 lsp::DidChangeConfigurationParams {
3378 settings: workspace_config,
3379 },
3380 )
3381 .ok();
3382
3383 Ok(language_server)
3384 }
3385
3386 fn insert_newly_running_language_server(
3387 &mut self,
3388 language: Arc<Language>,
3389 adapter: Arc<CachedLspAdapter>,
3390 language_server: Arc<LanguageServer>,
3391 server_id: LanguageServerId,
3392 key: (WorktreeId, LanguageServerName),
3393 cx: &mut ModelContext<Self>,
3394 ) -> Result<()> {
        // If the language server for this key doesn't match the given server id, don't
        // store the server. Dropping it here will kill the process.
3397 if self
3398 .language_server_ids
3399 .get(&key)
3400 .map(|id| id != &server_id)
3401 .unwrap_or(false)
3402 {
3403 return Ok(());
3404 }
3405
        // Update the language_servers collection with the Running variant of
        // LanguageServerState, indicating that the server is up and ready.
3408 self.language_servers.insert(
3409 server_id,
3410 LanguageServerState::Running {
3411 adapter: adapter.clone(),
3412 language: language.clone(),
3413 server: language_server.clone(),
3414 simulate_disk_based_diagnostics_completion: None,
3415 },
3416 );
3417
3418 self.language_server_statuses.insert(
3419 server_id,
3420 LanguageServerStatus {
3421 name: language_server.name().to_string(),
3422 pending_work: Default::default(),
3423 has_pending_diagnostic_updates: false,
3424 progress_tokens: Default::default(),
3425 },
3426 );
3427
3428 cx.emit(Event::LanguageServerAdded(server_id));
3429
3430 if let Some(project_id) = self.remote_id() {
3431 self.client.send(proto::StartLanguageServer {
3432 project_id,
3433 server: Some(proto::LanguageServer {
3434 id: server_id.0 as u64,
3435 name: language_server.name().to_string(),
3436 }),
3437 })?;
3438 }
3439
3440 // Tell the language server about every open buffer in the worktree that matches the language.
3441 for buffer in self.opened_buffers.values() {
3442 if let Some(buffer_handle) = buffer.upgrade() {
3443 let buffer = buffer_handle.read(cx);
3444 let file = match File::from_dyn(buffer.file()) {
3445 Some(file) => file,
3446 None => continue,
3447 };
3448 let language = match buffer.language() {
3449 Some(language) => language,
3450 None => continue,
3451 };
3452
3453 if file.worktree.read(cx).id() != key.0
3454 || !self
3455 .languages
3456 .lsp_adapters(&language)
3457 .iter()
3458 .any(|a| a.name == key.1)
3459 {
3460 continue;
3461 }
3462
3463 let file = match file.as_local() {
3464 Some(file) => file,
3465 None => continue,
3466 };
3467
3468 let versions = self
3469 .buffer_snapshots
3470 .entry(buffer.remote_id())
3471 .or_default()
3472 .entry(server_id)
3473 .or_insert_with(|| {
3474 vec![LspBufferSnapshot {
3475 version: 0,
3476 snapshot: buffer.text_snapshot(),
3477 }]
3478 });
3479
3480 let snapshot = versions.last().unwrap();
3481 let version = snapshot.version;
3482 let initial_snapshot = &snapshot.snapshot;
3483 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3484 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3485 lsp::DidOpenTextDocumentParams {
3486 text_document: lsp::TextDocumentItem::new(
3487 uri,
3488 adapter
3489 .language_ids
3490 .get(language.name().as_ref())
3491 .cloned()
3492 .unwrap_or_default(),
3493 version,
3494 initial_snapshot.text(),
3495 ),
3496 },
3497 )?;
3498
3499 buffer_handle.update(cx, |buffer, cx| {
3500 buffer.set_completion_triggers(
3501 language_server
3502 .capabilities()
3503 .completion_provider
3504 .as_ref()
3505 .and_then(|provider| provider.trigger_characters.clone())
3506 .unwrap_or_default(),
3507 cx,
3508 )
3509 });
3510 }
3511 }
3512
3513 cx.notify();
3514 Ok(())
3515 }
3516
    // Stops the language server for the given worktree and adapter, returning the ids
    // of all worktrees that no longer have a language server as a result.
3519 fn stop_language_server(
3520 &mut self,
3521 worktree_id: WorktreeId,
3522 adapter_name: LanguageServerName,
3523 cx: &mut ModelContext<Self>,
3524 ) -> Task<Vec<WorktreeId>> {
3525 let key = (worktree_id, adapter_name);
3526 if let Some(server_id) = self.language_server_ids.remove(&key) {
3527 let name = key.1 .0;
3528 log::info!("stopping language server {name}");
3529
3530 // Remove other entries for this language server as well
3531 let mut orphaned_worktrees = vec![worktree_id];
3532 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3533 for other_key in other_keys {
3534 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3535 self.language_server_ids.remove(&other_key);
3536 orphaned_worktrees.push(other_key.0);
3537 }
3538 }
3539
3540 for buffer in self.opened_buffers.values() {
3541 if let Some(buffer) = buffer.upgrade() {
3542 buffer.update(cx, |buffer, cx| {
3543 buffer.update_diagnostics(server_id, Default::default(), cx);
3544 });
3545 }
3546 }
3547 for worktree in &self.worktrees {
3548 if let Some(worktree) = worktree.upgrade() {
3549 worktree.update(cx, |worktree, cx| {
3550 if let Some(worktree) = worktree.as_local_mut() {
3551 worktree.clear_diagnostics_for_language_server(server_id, cx);
3552 }
3553 });
3554 }
3555 }
3556
3557 self.language_server_watched_paths.remove(&server_id);
3558 self.language_server_statuses.remove(&server_id);
3559 cx.notify();
3560
3561 let server_state = self.language_servers.remove(&server_id);
3562 cx.emit(Event::LanguageServerRemoved(server_id));
3563 cx.spawn(move |_, cx| async move {
3564 Self::shutdown_language_server(server_state, name, cx).await;
3565 orphaned_worktrees
3566 })
3567 } else {
3568 Task::ready(Vec::new())
3569 }
3570 }
3571
3572 async fn shutdown_language_server(
3573 server_state: Option<LanguageServerState>,
3574 name: Arc<str>,
3575 cx: AsyncAppContext,
3576 ) {
3577 let server = match server_state {
3578 Some(LanguageServerState::Starting(task)) => {
3579 let mut timer = cx
3580 .background_executor()
3581 .timer(SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT)
3582 .fuse();
3583
3584 select! {
3585 server = task.fuse() => server,
3586 _ = timer => {
3587 log::info!(
3588 "timeout waiting for language server {} to finish launching before stopping",
3589 name
3590 );
3591 None
3592 },
3593 }
3594 }
3595
3596 Some(LanguageServerState::Running { server, .. }) => Some(server),
3597
3598 None => None,
3599 };
3600
3601 if let Some(server) = server {
3602 if let Some(shutdown) = server.shutdown() {
3603 shutdown.await;
3604 }
3605 }
3606 }
3607
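    /// Restarts the language servers associated with the given buffers, grouped
    /// by worktree and detected language.
    ///
    /// A minimal sketch; `buffer` is an assumed open `Model<Buffer>`:
    ///
    /// ```ignore
    /// project.update(cx, |project, cx| {
    ///     project.restart_language_servers_for_buffers([buffer.clone()], cx);
    /// });
    /// ```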
3608 pub fn restart_language_servers_for_buffers(
3609 &mut self,
3610 buffers: impl IntoIterator<Item = Model<Buffer>>,
3611 cx: &mut ModelContext<Self>,
3612 ) -> Option<()> {
3613 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3614 .into_iter()
3615 .filter_map(|buffer| {
3616 let buffer = buffer.read(cx);
3617 let file = buffer.file()?;
3618 let worktree = File::from_dyn(Some(file))?.worktree.clone();
3619 let language = self
3620 .languages
3621 .language_for_file(file, Some(buffer.as_rope()), cx)
3622 .now_or_never()?
3623 .ok()?;
3624 Some((worktree, language))
3625 })
3626 .collect();
3627 for (worktree, language) in language_server_lookup_info {
3628 self.restart_language_servers(worktree, language, cx);
3629 }
3630
3631 None
3632 }
3633
3634 fn restart_language_servers(
3635 &mut self,
3636 worktree: Model<Worktree>,
3637 language: Arc<Language>,
3638 cx: &mut ModelContext<Self>,
3639 ) {
3640 let worktree_id = worktree.read(cx).id();
3641
3642 let stop_tasks = self
3643 .languages
3644 .clone()
3645 .lsp_adapters(&language)
3646 .iter()
3647 .map(|adapter| {
3648 let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
3649 (stop_task, adapter.name.clone())
3650 })
3651 .collect::<Vec<_>>();
3652 if stop_tasks.is_empty() {
3653 return;
3654 }
3655
3656 cx.spawn(move |this, mut cx| async move {
3657 // For each stopped language server, record all of the worktrees with which
3658 // it was associated.
3659 let mut affected_worktrees = Vec::new();
3660 for (stop_task, language_server_name) in stop_tasks {
3661 for affected_worktree_id in stop_task.await {
3662 affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
3663 }
3664 }
3665
3666 this.update(&mut cx, |this, cx| {
3667 // Restart the language server for the given worktree.
3668 this.start_language_servers(&worktree, language.clone(), cx);
3669
3670 // Lookup new server ids and set them for each of the orphaned worktrees
3671 for (affected_worktree_id, language_server_name) in affected_worktrees {
3672 if let Some(new_server_id) = this
3673 .language_server_ids
3674 .get(&(worktree_id, language_server_name.clone()))
3675 .cloned()
3676 {
3677 this.language_server_ids
3678 .insert((affected_worktree_id, language_server_name), new_server_id);
3679 }
3680 }
3681 })
3682 .ok();
3683 })
3684 .detach();
3685 }
3686
3687 fn check_errored_server(
3688 language: Arc<Language>,
3689 adapter: Arc<CachedLspAdapter>,
3690 server_id: LanguageServerId,
3691 installation_test_binary: Option<LanguageServerBinary>,
3692 cx: &mut ModelContext<Self>,
3693 ) {
3694 if !adapter.can_be_reinstalled() {
3695 log::info!(
3696 "Validation check requested for {:?} but it cannot be reinstalled",
3697 adapter.name.0
3698 );
3699 return;
3700 }
3701
3702 cx.spawn(move |this, mut cx| async move {
3703 log::info!("About to spawn test binary");
3704
3705 // A lack of test binary counts as a failure
3706 let process = installation_test_binary.and_then(|binary| {
3707 smol::process::Command::new(&binary.path)
3708 .current_dir(&binary.path)
3709 .args(binary.arguments)
3710 .stdin(Stdio::piped())
3711 .stdout(Stdio::piped())
3712 .stderr(Stdio::inherit())
3713 .kill_on_drop(true)
3714 .spawn()
3715 .ok()
3716 });
3717
3718 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3719 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3720
3721 let mut errored = false;
3722 if let Some(mut process) = process {
3723 futures::select! {
3724 status = process.status().fuse() => match status {
3725 Ok(status) => errored = !status.success(),
3726 Err(_) => errored = true,
3727 },
3728
3729 _ = timeout => {
3730 log::info!("test binary time-ed out, this counts as a success");
3731 _ = process.kill();
3732 }
3733 }
3734 } else {
3735 log::warn!("test binary failed to launch");
3736 errored = true;
3737 }
3738
3739 if errored {
3740 log::warn!("test binary check failed");
3741 let task = this
3742 .update(&mut cx, move |this, cx| {
3743 this.reinstall_language_server(language, adapter, server_id, cx)
3744 })
3745 .ok()
3746 .flatten();
3747
3748 if let Some(task) = task {
3749 task.await;
3750 }
3751 }
3752 })
3753 .detach();
3754 }
3755
3756 fn enqueue_language_server_progress(
3757 &mut self,
3758 message: BufferOrderedMessage,
3759 cx: &mut ModelContext<Self>,
3760 ) {
3761 self.pending_language_server_update.replace(message);
3762 self.flush_language_server_update.get_or_insert_with(|| {
3763 cx.spawn(|this, mut cx| async move {
3764 cx.background_executor()
3765 .timer(SERVER_PROGRESS_DEBOUNCE_TIMEOUT)
3766 .await;
3767 this.update(&mut cx, |this, _| {
3768 this.flush_language_server_update.take();
3769 if let Some(update) = this.pending_language_server_update.take() {
3770 this.enqueue_buffer_ordered_message(update).ok();
3771 }
3772 })
3773 .ok();
3774 })
3775 });
3776 }
3777
3778 fn enqueue_buffer_ordered_message(&mut self, message: BufferOrderedMessage) -> Result<()> {
3779 if let Some(pending_message) = self.pending_language_server_update.take() {
3780 self.flush_language_server_update.take();
3781 self.buffer_ordered_messages_tx
3782 .unbounded_send(pending_message)
3783 .map_err(|e| anyhow!(e))?;
3784 }
3785 self.buffer_ordered_messages_tx
3786 .unbounded_send(message)
3787 .map_err(|e| anyhow!(e))
3788 }
3789
3790 fn on_lsp_progress(
3791 &mut self,
3792 progress: lsp::ProgressParams,
3793 language_server_id: LanguageServerId,
3794 disk_based_diagnostics_progress_token: Option<String>,
3795 cx: &mut ModelContext<Self>,
3796 ) {
3797 let token = match progress.token {
3798 lsp::NumberOrString::String(token) => token,
3799 lsp::NumberOrString::Number(token) => {
3800 log::info!("skipping numeric progress token {}", token);
3801 return;
3802 }
3803 };
3804 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3805 let language_server_status =
3806 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3807 status
3808 } else {
3809 return;
3810 };
3811
3812 if !language_server_status.progress_tokens.contains(&token) {
3813 return;
3814 }
3815
3816 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3817 .as_ref()
3818 .map_or(false, |disk_based_token| {
3819 token.starts_with(disk_based_token)
3820 });
3821
3822 match progress {
3823 lsp::WorkDoneProgress::Begin(report) => {
3824 if is_disk_based_diagnostics_progress {
3825 language_server_status.has_pending_diagnostic_updates = true;
3826 self.disk_based_diagnostics_started(language_server_id, cx);
3827 self.enqueue_buffer_ordered_message(BufferOrderedMessage::LanguageServerUpdate {
3828 language_server_id,
3829 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3830 })
3831 .ok();
3832 } else {
3833 self.on_lsp_work_start(
3834 language_server_id,
3835 token.clone(),
3836 LanguageServerProgress {
3837 message: report.message.clone(),
3838 percentage: report.percentage.map(|p| p as usize),
3839 last_update_at: Instant::now(),
3840 },
3841 cx,
3842 );
3843 self.enqueue_buffer_ordered_message(
3844 BufferOrderedMessage::LanguageServerUpdate {
3845 language_server_id,
3846 message: proto::update_language_server::Variant::WorkStart(
3847 proto::LspWorkStart {
3848 token,
3849 message: report.message,
3850 percentage: report.percentage,
3851 },
3852 ),
3853 },
3854 )
3855 .ok();
3856 }
3857 }
3858 lsp::WorkDoneProgress::Report(report) => {
3859 if !is_disk_based_diagnostics_progress {
3860 self.on_lsp_work_progress(
3861 language_server_id,
3862 token.clone(),
3863 LanguageServerProgress {
3864 message: report.message.clone(),
3865 percentage: report.percentage.map(|p| p as usize),
3866 last_update_at: Instant::now(),
3867 },
3868 cx,
3869 );
3870 self.enqueue_language_server_progress(
3871 BufferOrderedMessage::LanguageServerUpdate {
3872 language_server_id,
3873 message: proto::update_language_server::Variant::WorkProgress(
3874 proto::LspWorkProgress {
3875 token,
3876 message: report.message,
3877 percentage: report.percentage,
3878 },
3879 ),
3880 },
3881 cx,
3882 );
3883 }
3884 }
3885 lsp::WorkDoneProgress::End(_) => {
3886 language_server_status.progress_tokens.remove(&token);
3887
3888 if is_disk_based_diagnostics_progress {
3889 language_server_status.has_pending_diagnostic_updates = false;
3890 self.disk_based_diagnostics_finished(language_server_id, cx);
3891 self.enqueue_buffer_ordered_message(
3892 BufferOrderedMessage::LanguageServerUpdate {
3893 language_server_id,
3894 message:
3895 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3896 Default::default(),
3897 ),
3898 },
3899 )
3900 .ok();
3901 } else {
3902 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3903 self.enqueue_buffer_ordered_message(
3904 BufferOrderedMessage::LanguageServerUpdate {
3905 language_server_id,
3906 message: proto::update_language_server::Variant::WorkEnd(
3907 proto::LspWorkEnd { token },
3908 ),
3909 },
3910 )
3911 .ok();
3912 }
3913 }
3914 }
3915 }
3916
3917 fn on_lsp_work_start(
3918 &mut self,
3919 language_server_id: LanguageServerId,
3920 token: String,
3921 progress: LanguageServerProgress,
3922 cx: &mut ModelContext<Self>,
3923 ) {
3924 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3925 status.pending_work.insert(token, progress);
3926 cx.notify();
3927 }
3928 }
3929
3930 fn on_lsp_work_progress(
3931 &mut self,
3932 language_server_id: LanguageServerId,
3933 token: String,
3934 progress: LanguageServerProgress,
3935 cx: &mut ModelContext<Self>,
3936 ) {
3937 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3938 let entry = status
3939 .pending_work
3940 .entry(token)
3941 .or_insert(LanguageServerProgress {
3942 message: Default::default(),
3943 percentage: Default::default(),
3944 last_update_at: progress.last_update_at,
3945 });
3946 if progress.message.is_some() {
3947 entry.message = progress.message;
3948 }
3949 if progress.percentage.is_some() {
3950 entry.percentage = progress.percentage;
3951 }
3952 entry.last_update_at = progress.last_update_at;
3953 cx.notify();
3954 }
3955 }
3956
3957 fn on_lsp_work_end(
3958 &mut self,
3959 language_server_id: LanguageServerId,
3960 token: String,
3961 cx: &mut ModelContext<Self>,
3962 ) {
3963 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3964 cx.emit(Event::RefreshInlayHints);
3965 status.pending_work.remove(&token);
3966 cx.notify();
3967 }
3968 }
3969
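    // Registers file watchers requested by a language server via
    // `workspace/didChangeWatchedFiles`, building one glob set per worktree and
    // expanding the worktree scan to cover each glob's literal path prefix.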
3970 fn on_lsp_did_change_watched_files(
3971 &mut self,
3972 language_server_id: LanguageServerId,
3973 params: DidChangeWatchedFilesRegistrationOptions,
3974 cx: &mut ModelContext<Self>,
3975 ) {
3976 let watched_paths = self
3977 .language_server_watched_paths
3978 .entry(language_server_id)
3979 .or_default();
3980
3981 let mut builders = HashMap::default();
3982 for watcher in params.watchers {
3983 for worktree in &self.worktrees {
3984 if let Some(worktree) = worktree.upgrade() {
3985 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3986 if let Some(abs_path) = tree.abs_path().to_str() {
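                            // Convert the watcher's glob into a pattern relative to this
                            // worktree's root so it can be matched against worktree paths.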
3987 let relative_glob_pattern = match &watcher.glob_pattern {
3988 lsp::GlobPattern::String(s) => Some(
3989 s.strip_prefix(abs_path)
3990 .unwrap_or(s)
3991 .strip_prefix(std::path::MAIN_SEPARATOR)
3992 .unwrap_or(s),
3993 ),
3994 lsp::GlobPattern::Relative(rp) => {
3995 let base_uri = match &rp.base_uri {
3996 lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
3997 lsp::OneOf::Right(base_uri) => base_uri,
3998 };
3999 base_uri.to_file_path().ok().and_then(|file_path| {
4000 (file_path.to_str() == Some(abs_path))
4001 .then_some(rp.pattern.as_str())
4002 })
4003 }
4004 };
4005 if let Some(relative_glob_pattern) = relative_glob_pattern {
4006 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
4007 tree.as_local_mut()
4008 .unwrap()
4009 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
4010 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
4011 builders
4012 .entry(tree.id())
                                        .or_insert_with(GlobSetBuilder::new)
4014 .add(glob);
4015 }
4016 return true;
4017 }
4018 }
4019 false
4020 });
4021 if glob_is_inside_worktree {
4022 break;
4023 }
4024 }
4025 }
4026 }
4027
4028 watched_paths.clear();
4029 for (worktree_id, builder) in builders {
4030 if let Ok(globset) = builder.build() {
4031 watched_paths.insert(worktree_id, globset);
4032 }
4033 }
4034
4035 cx.notify();
4036 }
4037
4038 async fn on_lsp_workspace_edit(
4039 this: WeakModel<Self>,
4040 params: lsp::ApplyWorkspaceEditParams,
4041 server_id: LanguageServerId,
4042 adapter: Arc<CachedLspAdapter>,
4043 mut cx: AsyncAppContext,
4044 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
4045 let this = this
4046 .upgrade()
            .ok_or_else(|| anyhow!("project closed"))?;
4048 let language_server = this
4049 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
4050 .ok_or_else(|| anyhow!("language server not found"))?;
4051 let transaction = Self::deserialize_workspace_edit(
4052 this.clone(),
4053 params.edit,
4054 true,
4055 adapter.clone(),
4056 language_server.clone(),
4057 &mut cx,
4058 )
4059 .await
4060 .log_err();
4061 this.update(&mut cx, |this, _| {
4062 if let Some(transaction) = transaction {
4063 this.last_workspace_edits_by_language_server
4064 .insert(server_id, transaction);
4065 }
4066 })?;
4067 Ok(lsp::ApplyWorkspaceEditResponse {
4068 applied: true,
4069 failed_change: None,
4070 failure_reason: None,
4071 })
4072 }
4073
4074 pub fn language_server_statuses(
4075 &self,
4076 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
4077 self.language_server_statuses.values()
4078 }
4079
4080 pub fn last_formatting_failure(&self) -> Option<&str> {
4081 self.last_formatting_failure.as_deref()
4082 }
4083
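    // Converts an LSP `textDocument/publishDiagnostics` notification into grouped
    // diagnostic entries: each primary diagnostic starts a new group, its related
    // information becomes secondary entries, and diagnostics that refer back to an
    // existing primary are recorded as supporting information and merged below.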
4084 pub fn update_diagnostics(
4085 &mut self,
4086 language_server_id: LanguageServerId,
4087 mut params: lsp::PublishDiagnosticsParams,
4088 disk_based_sources: &[String],
4089 cx: &mut ModelContext<Self>,
4090 ) -> Result<()> {
4091 let abs_path = params
4092 .uri
4093 .to_file_path()
4094 .map_err(|_| anyhow!("URI is not a file"))?;
4095 let mut diagnostics = Vec::default();
4096 let mut primary_diagnostic_group_ids = HashMap::default();
4097 let mut sources_by_group_id = HashMap::default();
4098 let mut supporting_diagnostics = HashMap::default();
4099
4100 // Ensure that primary diagnostics are always the most severe
4101 params.diagnostics.sort_by_key(|item| item.severity);
4102
        for diagnostic in &params.diagnostics {
4104 let source = diagnostic.source.as_ref();
4105 let code = diagnostic.code.as_ref().map(|code| match code {
4106 lsp::NumberOrString::Number(code) => code.to_string(),
4107 lsp::NumberOrString::String(code) => code.clone(),
4108 });
4109 let range = range_from_lsp(diagnostic.range);
4110 let is_supporting = diagnostic
4111 .related_information
4112 .as_ref()
4113 .map_or(false, |infos| {
4114 infos.iter().any(|info| {
4115 primary_diagnostic_group_ids.contains_key(&(
4116 source,
4117 code.clone(),
4118 range_from_lsp(info.location.range),
4119 ))
4120 })
4121 });
4122
4123 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
4124 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
4125 });
4126
4127 if is_supporting {
4128 supporting_diagnostics.insert(
4129 (source, code.clone(), range),
4130 (diagnostic.severity, is_unnecessary),
4131 );
4132 } else {
4133 let group_id = post_inc(&mut self.next_diagnostic_group_id);
4134 let is_disk_based =
4135 source.map_or(false, |source| disk_based_sources.contains(source));
4136
4137 sources_by_group_id.insert(group_id, source);
4138 primary_diagnostic_group_ids
4139 .insert((source, code.clone(), range.clone()), group_id);
4140
4141 diagnostics.push(DiagnosticEntry {
4142 range,
4143 diagnostic: Diagnostic {
4144 source: diagnostic.source.clone(),
4145 code: code.clone(),
4146 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
4147 message: diagnostic.message.trim().to_string(),
4148 group_id,
4149 is_primary: true,
4150 is_disk_based,
4151 is_unnecessary,
4152 },
4153 });
4154 if let Some(infos) = &diagnostic.related_information {
4155 for info in infos {
4156 if info.location.uri == params.uri && !info.message.is_empty() {
4157 let range = range_from_lsp(info.location.range);
4158 diagnostics.push(DiagnosticEntry {
4159 range,
4160 diagnostic: Diagnostic {
4161 source: diagnostic.source.clone(),
4162 code: code.clone(),
4163 severity: DiagnosticSeverity::INFORMATION,
4164 message: info.message.trim().to_string(),
4165 group_id,
4166 is_primary: false,
4167 is_disk_based,
4168 is_unnecessary: false,
4169 },
4170 });
4171 }
4172 }
4173 }
4174 }
4175 }
4176
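        // Second pass: copy severity and "unnecessary" flags from supporting
        // diagnostics onto the non-primary entries in their groups.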
4177 for entry in &mut diagnostics {
4178 let diagnostic = &mut entry.diagnostic;
4179 if !diagnostic.is_primary {
4180 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
4181 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
4182 source,
4183 diagnostic.code.clone(),
4184 entry.range.clone(),
4185 )) {
4186 if let Some(severity) = severity {
4187 diagnostic.severity = severity;
4188 }
4189 diagnostic.is_unnecessary = is_unnecessary;
4190 }
4191 }
4192 }
4193
4194 self.update_diagnostic_entries(
4195 language_server_id,
4196 abs_path,
4197 params.version,
4198 diagnostics,
4199 cx,
4200 )?;
4201 Ok(())
4202 }
4203
4204 pub fn update_diagnostic_entries(
4205 &mut self,
4206 server_id: LanguageServerId,
4207 abs_path: PathBuf,
4208 version: Option<i32>,
4209 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4210 cx: &mut ModelContext<Project>,
4211 ) -> Result<(), anyhow::Error> {
4212 let (worktree, relative_path) = self
4213 .find_local_worktree(&abs_path, cx)
4214 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
4215
4216 let project_path = ProjectPath {
4217 worktree_id: worktree.read(cx).id(),
4218 path: relative_path.into(),
4219 };
4220
4221 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
4222 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
4223 }
4224
4225 let updated = worktree.update(cx, |worktree, cx| {
4226 worktree
4227 .as_local_mut()
4228 .ok_or_else(|| anyhow!("not a local worktree"))?
4229 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
4230 })?;
4231 if updated {
4232 cx.emit(Event::DiagnosticsUpdated {
4233 language_server_id: server_id,
4234 path: project_path,
4235 });
4236 }
4237 Ok(())
4238 }
4239
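    // Applies a new set of diagnostics to an open buffer, clipping ranges against
    // the buffer snapshot that corresponds to the reported LSP document version
    // and adjusting disk-based diagnostics for unsaved edits.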
4240 fn update_buffer_diagnostics(
4241 &mut self,
4242 buffer: &Model<Buffer>,
4243 server_id: LanguageServerId,
4244 version: Option<i32>,
4245 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4246 cx: &mut ModelContext<Self>,
4247 ) -> Result<()> {
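        // Sort diagnostics at the same position so that primary entries come
        // before supporting ones, then by disk-based status, severity, and message.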
4248 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4249 Ordering::Equal
4250 .then_with(|| b.is_primary.cmp(&a.is_primary))
4251 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4252 .then_with(|| a.severity.cmp(&b.severity))
4253 .then_with(|| a.message.cmp(&b.message))
4254 }
4255
4256 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4257
4258 diagnostics.sort_unstable_by(|a, b| {
4259 Ordering::Equal
4260 .then_with(|| a.range.start.cmp(&b.range.start))
4261 .then_with(|| b.range.end.cmp(&a.range.end))
4262 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4263 });
4264
4265 let mut sanitized_diagnostics = Vec::new();
4266 let edits_since_save = Patch::new(
4267 snapshot
4268 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4269 .collect(),
4270 );
4271 for entry in diagnostics {
4272 let start;
4273 let end;
4274 if entry.diagnostic.is_disk_based {
4275 // Some diagnostics are based on files on disk instead of buffers'
4276 // current contents. Adjust these diagnostics' ranges to reflect
4277 // any unsaved edits.
4278 start = edits_since_save.old_to_new(entry.range.start);
4279 end = edits_since_save.old_to_new(entry.range.end);
4280 } else {
4281 start = entry.range.start;
4282 end = entry.range.end;
4283 }
4284
4285 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4286 ..snapshot.clip_point_utf16(end, Bias::Right);
4287
4288 // Expand empty ranges by one codepoint
4289 if range.start == range.end {
                // This will move the end to the next boundary when it is clipped below
4291 range.end.column += 1;
4292 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4293 if range.start == range.end && range.end.column > 0 {
4294 range.start.column -= 1;
4295 range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left);
4296 }
4297 }
4298
4299 sanitized_diagnostics.push(DiagnosticEntry {
4300 range,
4301 diagnostic: entry.diagnostic,
4302 });
4303 }
4304 drop(edits_since_save);
4305
4306 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4307 buffer.update(cx, |buffer, cx| {
4308 buffer.update_diagnostics(server_id, set, cx)
4309 });
4310 Ok(())
4311 }
4312
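    // Reloads the given buffers from disk, skipping buffers that are not dirty.
    // Local buffers are reloaded directly; remote buffers are reloaded via an RPC
    // request to the host, and the resulting transactions are collected.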
4313 pub fn reload_buffers(
4314 &self,
4315 buffers: HashSet<Model<Buffer>>,
4316 push_to_history: bool,
4317 cx: &mut ModelContext<Self>,
4318 ) -> Task<Result<ProjectTransaction>> {
4319 let mut local_buffers = Vec::new();
4320 let mut remote_buffers = None;
4321 for buffer_handle in buffers {
4322 let buffer = buffer_handle.read(cx);
4323 if buffer.is_dirty() {
4324 if let Some(file) = File::from_dyn(buffer.file()) {
4325 if file.is_local() {
4326 local_buffers.push(buffer_handle);
4327 } else {
4328 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4329 }
4330 }
4331 }
4332 }
4333
4334 let remote_buffers = self.remote_id().zip(remote_buffers);
4335 let client = self.client.clone();
4336
4337 cx.spawn(move |this, mut cx| async move {
4338 let mut project_transaction = ProjectTransaction::default();
4339
4340 if let Some((project_id, remote_buffers)) = remote_buffers {
4341 let response = client
4342 .request(proto::ReloadBuffers {
4343 project_id,
4344 buffer_ids: remote_buffers
4345 .iter()
4346 .filter_map(|buffer| {
4347 buffer
4348 .update(&mut cx, |buffer, _| buffer.remote_id().into())
4349 .ok()
4350 })
4351 .collect(),
4352 })
4353 .await?
4354 .transaction
4355 .ok_or_else(|| anyhow!("missing transaction"))?;
4356 project_transaction = this
4357 .update(&mut cx, |this, cx| {
4358 this.deserialize_project_transaction(response, push_to_history, cx)
4359 })?
4360 .await?;
4361 }
4362
4363 for buffer in local_buffers {
4364 let transaction = buffer
4365 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4366 .await?;
4367 buffer.update(&mut cx, |buffer, cx| {
4368 if let Some(transaction) = transaction {
4369 if !push_to_history {
4370 buffer.forget_transaction(transaction.id);
4371 }
4372 project_transaction.0.insert(cx.handle(), transaction);
4373 }
4374 })?;
4375 }
4376
4377 Ok(project_transaction)
4378 })
4379 }
4380
4381 pub fn format(
4382 &mut self,
4383 buffers: HashSet<Model<Buffer>>,
4384 push_to_history: bool,
4385 trigger: FormatTrigger,
4386 cx: &mut ModelContext<Project>,
4387 ) -> Task<anyhow::Result<ProjectTransaction>> {
4388 if self.is_local() {
4389 let buffers_with_paths = buffers
4390 .into_iter()
4391 .filter_map(|buffer_handle| {
4392 let buffer = buffer_handle.read(cx);
4393 let file = File::from_dyn(buffer.file())?;
4394 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4395 Some((buffer_handle, buffer_abs_path))
4396 })
4397 .collect::<Vec<_>>();
4398
4399 cx.spawn(move |project, mut cx| async move {
4400 let result = Self::format_locally(
4401 project.clone(),
4402 buffers_with_paths,
4403 push_to_history,
4404 trigger,
4405 cx.clone(),
4406 )
4407 .await;
4408
4409 project.update(&mut cx, |project, _| match &result {
4410 Ok(_) => project.last_formatting_failure = None,
4411 Err(error) => {
4412 project.last_formatting_failure.replace(error.to_string());
4413 }
4414 })?;
4415
4416 result
4417 })
4418 } else {
4419 let remote_id = self.remote_id();
4420 let client = self.client.clone();
4421 cx.spawn(move |this, mut cx| async move {
4422 let mut project_transaction = ProjectTransaction::default();
4423 if let Some(project_id) = remote_id {
4424 let response = client
4425 .request(proto::FormatBuffers {
4426 project_id,
4427 trigger: trigger as i32,
4428 buffer_ids: buffers
4429 .iter()
4430 .map(|buffer| {
4431 buffer.update(&mut cx, |buffer, _| buffer.remote_id().into())
4432 })
4433 .collect::<Result<_>>()?,
4434 })
4435 .await?
4436 .transaction
4437 .ok_or_else(|| anyhow!("missing transaction"))?;
4438 project_transaction = this
4439 .update(&mut cx, |this, cx| {
4440 this.deserialize_project_transaction(response, push_to_history, cx)
4441 })?
4442 .await?;
4443 }
4444 Ok(project_transaction)
4445 })
4446 }
4447 }
4448
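    // Formats buffers in a local project. Each buffer goes through up to three
    // phases: whitespace fixups (trailing whitespace, final newline), any code
    // actions configured to run on format, and language-specific formatting via
    // the primary language server, an external command, or prettier, depending on
    // the user's formatter and format-on-save settings.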
4449 async fn format_locally(
4450 project: WeakModel<Project>,
4451 mut buffers_with_paths: Vec<(Model<Buffer>, Option<PathBuf>)>,
4452 push_to_history: bool,
4453 trigger: FormatTrigger,
4454 mut cx: AsyncAppContext,
4455 ) -> anyhow::Result<ProjectTransaction> {
4456 // Do not allow multiple concurrent formatting requests for the
4457 // same buffer.
4458 project.update(&mut cx, |this, cx| {
4459 buffers_with_paths.retain(|(buffer, _)| {
4460 this.buffers_being_formatted
4461 .insert(buffer.read(cx).remote_id())
4462 });
4463 })?;
4464
4465 let _cleanup = defer({
4466 let this = project.clone();
4467 let mut cx = cx.clone();
4468 let buffers = &buffers_with_paths;
4469 move || {
4470 this.update(&mut cx, |this, cx| {
4471 for (buffer, _) in buffers {
4472 this.buffers_being_formatted
4473 .remove(&buffer.read(cx).remote_id());
4474 }
4475 })
4476 .ok();
4477 }
4478 });
4479
4480 let mut project_transaction = ProjectTransaction::default();
4481 for (buffer, buffer_abs_path) in &buffers_with_paths {
4482 let adapters_and_servers: Vec<_> = project.update(&mut cx, |project, cx| {
4483 project
4484 .language_servers_for_buffer(&buffer.read(cx), cx)
4485 .map(|(adapter, lsp)| (adapter.clone(), lsp.clone()))
4486 .collect()
4487 })?;
4488
4489 let settings = buffer.update(&mut cx, |buffer, cx| {
4490 language_settings(buffer.language(), buffer.file(), cx).clone()
4491 })?;
4492
4493 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4494 let ensure_final_newline = settings.ensure_final_newline_on_save;
4495 let tab_size = settings.tab_size;
4496
            // First, format the buffer's whitespace according to the settings.
4498 let trailing_whitespace_diff = if remove_trailing_whitespace {
4499 Some(
4500 buffer
4501 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4502 .await,
4503 )
4504 } else {
4505 None
4506 };
4507 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4508 buffer.finalize_last_transaction();
4509 buffer.start_transaction();
4510 if let Some(diff) = trailing_whitespace_diff {
4511 buffer.apply_diff(diff, cx);
4512 }
4513 if ensure_final_newline {
4514 buffer.ensure_final_newline(cx);
4515 }
4516 buffer.end_transaction(cx)
4517 })?;
4518
4519 for (lsp_adapter, language_server) in adapters_and_servers.iter() {
                // Apply any code actions that are configured to run on format.
4521 let code_actions: Vec<lsp::CodeActionKind> = settings
4522 .code_actions_on_format
4523 .iter()
4524 .flat_map(|(kind, enabled)| {
4525 if *enabled {
4526 Some(kind.clone().into())
4527 } else {
4528 None
4529 }
4530 })
4531 .collect();
4532
4533 #[allow(clippy::nonminimal_bool)]
4534 if !code_actions.is_empty()
4535 && !(trigger == FormatTrigger::Save
4536 && settings.format_on_save == FormatOnSave::Off)
4537 {
4538 let actions = project
4539 .update(&mut cx, |this, cx| {
4540 this.request_lsp(
4541 buffer.clone(),
4542 LanguageServerToQuery::Other(language_server.server_id()),
4543 GetCodeActions {
4544 range: text::Anchor::MIN..text::Anchor::MAX,
4545 kinds: Some(code_actions),
4546 },
4547 cx,
4548 )
4549 })?
4550 .await?;
4551
4552 for mut action in actions {
4553 Self::try_resolve_code_action(&language_server, &mut action)
4554 .await
4555 .context("resolving a formatting code action")?;
4556 if let Some(edit) = action.lsp_action.edit {
4557 if edit.changes.is_none() && edit.document_changes.is_none() {
4558 continue;
4559 }
4560
4561 let new = Self::deserialize_workspace_edit(
4562 project
4563 .upgrade()
4564 .ok_or_else(|| anyhow!("project dropped"))?,
4565 edit,
4566 push_to_history,
4567 lsp_adapter.clone(),
4568 language_server.clone(),
4569 &mut cx,
4570 )
4571 .await?;
4572 project_transaction.0.extend(new.0);
4573 }
4574
4575 if let Some(command) = action.lsp_action.command {
4576 project.update(&mut cx, |this, _| {
4577 this.last_workspace_edits_by_language_server
4578 .remove(&language_server.server_id());
4579 })?;
4580
4581 language_server
4582 .request::<lsp::request::ExecuteCommand>(
4583 lsp::ExecuteCommandParams {
4584 command: command.command,
4585 arguments: command.arguments.unwrap_or_default(),
4586 ..Default::default()
4587 },
4588 )
4589 .await?;
4590
4591 project.update(&mut cx, |this, _| {
4592 project_transaction.0.extend(
4593 this.last_workspace_edits_by_language_server
4594 .remove(&language_server.server_id())
4595 .unwrap_or_default()
4596 .0,
4597 )
4598 })?;
4599 }
4600 }
4601 }
4602 }
4603
4604 // Apply language-specific formatting using either the primary language server
4605 // or external command.
            let primary_language_server = adapters_and_servers
                .first()
                .map(|(_, server)| server.clone());
4610 let server_and_buffer = primary_language_server
4611 .as_ref()
4612 .zip(buffer_abs_path.as_ref());
4613
4614 let mut format_operation = None;
4615 match (&settings.formatter, &settings.format_on_save) {
4616 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4617
4618 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4619 | (_, FormatOnSave::LanguageServer) => {
4620 if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4621 format_operation = Some(FormatOperation::Lsp(
4622 Self::format_via_lsp(
4623 &project,
4624 buffer,
4625 buffer_abs_path,
4626 language_server,
4627 tab_size,
4628 &mut cx,
4629 )
4630 .await
4631 .context("failed to format via language server")?,
4632 ));
4633 }
4634 }
4635
4636 (
4637 Formatter::External { command, arguments },
4638 FormatOnSave::On | FormatOnSave::Off,
4639 )
4640 | (_, FormatOnSave::External { command, arguments }) => {
4641 if let Some(buffer_abs_path) = buffer_abs_path {
4642 format_operation = Self::format_via_external_command(
4643 buffer,
4644 buffer_abs_path,
4645 command,
4646 arguments,
4647 &mut cx,
4648 )
4649 .await
4650 .context(format!(
4651 "failed to format via external command {:?}",
4652 command
4653 ))?
4654 .map(FormatOperation::External);
4655 }
4656 }
4657 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4658 if let Some(new_operation) =
4659 prettier_support::format_with_prettier(&project, buffer, &mut cx).await
4660 {
4661 format_operation = Some(new_operation);
4662 } else if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4663 format_operation = Some(FormatOperation::Lsp(
4664 Self::format_via_lsp(
4665 &project,
4666 buffer,
4667 buffer_abs_path,
4668 language_server,
4669 tab_size,
4670 &mut cx,
4671 )
4672 .await
4673 .context("failed to format via language server")?,
4674 ));
4675 }
4676 }
4677 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4678 if let Some(new_operation) =
4679 prettier_support::format_with_prettier(&project, buffer, &mut cx).await
4680 {
4681 format_operation = Some(new_operation);
4682 }
4683 }
4684 };
4685
4686 buffer.update(&mut cx, |b, cx| {
4687 // If the buffer had its whitespace formatted and was edited while the language-specific
4688 // formatting was being computed, avoid applying the language-specific formatting, because
4689 // it can't be grouped with the whitespace formatting in the undo history.
4690 if let Some(transaction_id) = whitespace_transaction_id {
4691 if b.peek_undo_stack()
4692 .map_or(true, |e| e.transaction_id() != transaction_id)
4693 {
4694 format_operation.take();
4695 }
4696 }
4697
4698 // Apply any language-specific formatting, and group the two formatting operations
4699 // in the buffer's undo history.
4700 if let Some(operation) = format_operation {
4701 match operation {
4702 FormatOperation::Lsp(edits) => {
4703 b.edit(edits, None, cx);
4704 }
4705 FormatOperation::External(diff) => {
4706 b.apply_diff(diff, cx);
4707 }
4708 FormatOperation::Prettier(diff) => {
4709 b.apply_diff(diff, cx);
4710 }
4711 }
4712
4713 if let Some(transaction_id) = whitespace_transaction_id {
4714 b.group_until_transaction(transaction_id);
4715 } else if let Some(transaction) = project_transaction.0.get(buffer) {
4716 b.group_until_transaction(transaction.id)
4717 }
4718 }
4719
4720 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4721 if !push_to_history {
4722 b.forget_transaction(transaction.id);
4723 }
4724 project_transaction.0.insert(buffer.clone(), transaction);
4725 }
4726 })?;
4727 }
4728
4729 Ok(project_transaction)
4730 }
4731
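    // Formats a buffer through its language server, preferring whole-document
    // formatting and falling back to range formatting over the entire buffer when
    // only `documentRangeFormattingProvider` is available.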
4732 async fn format_via_lsp(
4733 this: &WeakModel<Self>,
4734 buffer: &Model<Buffer>,
4735 abs_path: &Path,
4736 language_server: &Arc<LanguageServer>,
4737 tab_size: NonZeroU32,
4738 cx: &mut AsyncAppContext,
4739 ) -> Result<Vec<(Range<Anchor>, String)>> {
4740 let uri = lsp::Url::from_file_path(abs_path)
4741 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4742 let text_document = lsp::TextDocumentIdentifier::new(uri);
4743 let capabilities = &language_server.capabilities();
4744
4745 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4746 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4747
4748 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4749 language_server
4750 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4751 text_document,
4752 options: lsp_command::lsp_formatting_options(tab_size.get()),
4753 work_done_progress_params: Default::default(),
4754 })
4755 .await?
4756 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4757 let buffer_start = lsp::Position::new(0, 0);
4758 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4759
4760 language_server
4761 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4762 text_document,
4763 range: lsp::Range::new(buffer_start, buffer_end),
4764 options: lsp_command::lsp_formatting_options(tab_size.get()),
4765 work_done_progress_params: Default::default(),
4766 })
4767 .await?
4768 } else {
4769 None
4770 };
4771
4772 if let Some(lsp_edits) = lsp_edits {
4773 this.update(cx, |this, cx| {
4774 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4775 })?
4776 .await
4777 } else {
4778 Ok(Vec::new())
4779 }
4780 }
4781
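    // Formats a buffer by piping its contents to an external command's stdin,
    // substituting `{buffer_path}` in the arguments, and diffing the command's
    // stdout against the current buffer contents.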
4782 async fn format_via_external_command(
4783 buffer: &Model<Buffer>,
4784 buffer_abs_path: &Path,
4785 command: &str,
4786 arguments: &[String],
4787 cx: &mut AsyncAppContext,
4788 ) -> Result<Option<Diff>> {
4789 let working_dir_path = buffer.update(cx, |buffer, cx| {
4790 let file = File::from_dyn(buffer.file())?;
4791 let worktree = file.worktree.read(cx).as_local()?;
4792 let mut worktree_path = worktree.abs_path().to_path_buf();
4793 if worktree.root_entry()?.is_file() {
4794 worktree_path.pop();
4795 }
4796 Some(worktree_path)
4797 })?;
4798
4799 if let Some(working_dir_path) = working_dir_path {
4800 let mut child =
4801 smol::process::Command::new(command)
4802 .args(arguments.iter().map(|arg| {
4803 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4804 }))
4805 .current_dir(&working_dir_path)
4806 .stdin(smol::process::Stdio::piped())
4807 .stdout(smol::process::Stdio::piped())
4808 .stderr(smol::process::Stdio::piped())
4809 .spawn()?;
4810 let stdin = child
4811 .stdin
4812 .as_mut()
4813 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4814 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4815 for chunk in text.chunks() {
4816 stdin.write_all(chunk.as_bytes()).await?;
4817 }
4818 stdin.flush().await?;
4819
4820 let output = child.output().await?;
4821 if !output.status.success() {
4822 return Err(anyhow!(
4823 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4824 output.status.code(),
4825 String::from_utf8_lossy(&output.stdout),
4826 String::from_utf8_lossy(&output.stderr),
4827 ));
4828 }
4829
4830 let stdout = String::from_utf8(output.stdout)?;
4831 Ok(Some(
4832 buffer
4833 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4834 .await,
4835 ))
4836 } else {
4837 Ok(None)
4838 }
4839 }
4840
4841 #[inline(never)]
4842 fn definition_impl(
4843 &self,
4844 buffer: &Model<Buffer>,
4845 position: PointUtf16,
4846 cx: &mut ModelContext<Self>,
4847 ) -> Task<Result<Vec<LocationLink>>> {
4848 self.request_lsp(
4849 buffer.clone(),
4850 LanguageServerToQuery::Primary,
4851 GetDefinition { position },
4852 cx,
4853 )
4854 }
4855 pub fn definition<T: ToPointUtf16>(
4856 &self,
4857 buffer: &Model<Buffer>,
4858 position: T,
4859 cx: &mut ModelContext<Self>,
4860 ) -> Task<Result<Vec<LocationLink>>> {
4861 let position = position.to_point_utf16(buffer.read(cx));
4862 self.definition_impl(buffer, position, cx)
4863 }
4864
4865 fn type_definition_impl(
4866 &self,
4867 buffer: &Model<Buffer>,
4868 position: PointUtf16,
4869 cx: &mut ModelContext<Self>,
4870 ) -> Task<Result<Vec<LocationLink>>> {
4871 self.request_lsp(
4872 buffer.clone(),
4873 LanguageServerToQuery::Primary,
4874 GetTypeDefinition { position },
4875 cx,
4876 )
4877 }
4878
4879 pub fn type_definition<T: ToPointUtf16>(
4880 &self,
4881 buffer: &Model<Buffer>,
4882 position: T,
4883 cx: &mut ModelContext<Self>,
4884 ) -> Task<Result<Vec<LocationLink>>> {
4885 let position = position.to_point_utf16(buffer.read(cx));
4886 self.type_definition_impl(buffer, position, cx)
4887 }
4888
4889 fn implementation_impl(
4890 &self,
4891 buffer: &Model<Buffer>,
4892 position: PointUtf16,
4893 cx: &mut ModelContext<Self>,
4894 ) -> Task<Result<Vec<LocationLink>>> {
4895 self.request_lsp(
4896 buffer.clone(),
4897 LanguageServerToQuery::Primary,
4898 GetImplementation { position },
4899 cx,
4900 )
4901 }
4902
4903 pub fn implementation<T: ToPointUtf16>(
4904 &self,
4905 buffer: &Model<Buffer>,
4906 position: T,
4907 cx: &mut ModelContext<Self>,
4908 ) -> Task<Result<Vec<LocationLink>>> {
4909 let position = position.to_point_utf16(buffer.read(cx));
4910 self.implementation_impl(buffer, position, cx)
4911 }
4912
4913 fn references_impl(
4914 &self,
4915 buffer: &Model<Buffer>,
4916 position: PointUtf16,
4917 cx: &mut ModelContext<Self>,
4918 ) -> Task<Result<Vec<Location>>> {
4919 self.request_lsp(
4920 buffer.clone(),
4921 LanguageServerToQuery::Primary,
4922 GetReferences { position },
4923 cx,
4924 )
4925 }
4926 pub fn references<T: ToPointUtf16>(
4927 &self,
4928 buffer: &Model<Buffer>,
4929 position: T,
4930 cx: &mut ModelContext<Self>,
4931 ) -> Task<Result<Vec<Location>>> {
4932 let position = position.to_point_utf16(buffer.read(cx));
4933 self.references_impl(buffer, position, cx)
4934 }
4935
4936 fn document_highlights_impl(
4937 &self,
4938 buffer: &Model<Buffer>,
4939 position: PointUtf16,
4940 cx: &mut ModelContext<Self>,
4941 ) -> Task<Result<Vec<DocumentHighlight>>> {
4942 self.request_lsp(
4943 buffer.clone(),
4944 LanguageServerToQuery::Primary,
4945 GetDocumentHighlights { position },
4946 cx,
4947 )
4948 }
4949
4950 pub fn document_highlights<T: ToPointUtf16>(
4951 &self,
4952 buffer: &Model<Buffer>,
4953 position: T,
4954 cx: &mut ModelContext<Self>,
4955 ) -> Task<Result<Vec<DocumentHighlight>>> {
4956 let position = position.to_point_utf16(buffer.read(cx));
4957 self.document_highlights_impl(buffer, position, cx)
4958 }
4959
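    // Searches for workspace symbols matching `query` across every running
    // language server in visible worktrees (or via the host when remote), and
    // resolves each result to a project path and display label.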
4960 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4961 if self.is_local() {
4962 let mut requests = Vec::new();
4963 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4964 let Some(worktree_handle) = self.worktree_for_id(*worktree_id, cx) else {
4965 continue;
4966 };
4967 let worktree = worktree_handle.read(cx);
4968 if !worktree.is_visible() {
4969 continue;
4970 }
4971 let Some(worktree) = worktree.as_local() else {
4972 continue;
4973 };
4974 let worktree_abs_path = worktree.abs_path().clone();
4975
4976 let (adapter, language, server) = match self.language_servers.get(server_id) {
4977 Some(LanguageServerState::Running {
4978 adapter,
4979 language,
4980 server,
4981 ..
4982 }) => (adapter.clone(), language.clone(), server),
4983
4984 _ => continue,
4985 };
4986
4987 requests.push(
4988 server
4989 .request::<lsp::request::WorkspaceSymbolRequest>(
4990 lsp::WorkspaceSymbolParams {
4991 query: query.to_string(),
4992 ..Default::default()
4993 },
4994 )
4995 .log_err()
4996 .map(move |response| {
4997 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4998 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4999 flat_responses.into_iter().map(|lsp_symbol| {
5000 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
5001 }).collect::<Vec<_>>()
5002 }
5003 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
5004 nested_responses.into_iter().filter_map(|lsp_symbol| {
5005 let location = match lsp_symbol.location {
5006 OneOf::Left(location) => location,
5007 OneOf::Right(_) => {
5008 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
5009 return None
5010 }
5011 };
5012 Some((lsp_symbol.name, lsp_symbol.kind, location))
5013 }).collect::<Vec<_>>()
5014 }
5015 }).unwrap_or_default();
5016
5017 (
5018 adapter,
5019 language,
5020 worktree_handle.downgrade(),
5021 worktree_abs_path,
5022 lsp_symbols,
5023 )
5024 }),
5025 );
5026 }
5027
5028 cx.spawn(move |this, mut cx| async move {
5029 let responses = futures::future::join_all(requests).await;
5030 let this = match this.upgrade() {
5031 Some(this) => this,
5032 None => return Ok(Vec::new()),
5033 };
5034
5035 let symbols = this.update(&mut cx, |this, cx| {
5036 let mut symbols = Vec::new();
5037 for (
5038 adapter,
5039 adapter_language,
5040 source_worktree,
5041 worktree_abs_path,
5042 lsp_symbols,
5043 ) in responses
5044 {
5045 symbols.extend(lsp_symbols.into_iter().filter_map(
5046 |(symbol_name, symbol_kind, symbol_location)| {
5047 let abs_path = symbol_location.uri.to_file_path().ok()?;
5048 let source_worktree = source_worktree.upgrade()?;
5049 let source_worktree_id = source_worktree.read(cx).id();
5050
5051 let path;
5052 let worktree;
5053 if let Some((tree, rel_path)) =
5054 this.find_local_worktree(&abs_path, cx)
5055 {
5056 worktree = tree;
5057 path = rel_path;
5058 } else {
5059 worktree = source_worktree.clone();
5060 path = relativize_path(&worktree_abs_path, &abs_path);
5061 }
5062
5063 let worktree_id = worktree.read(cx).id();
5064 let project_path = ProjectPath {
5065 worktree_id,
5066 path: path.into(),
5067 };
5068 let signature = this.symbol_signature(&project_path);
5069 let adapter_language = adapter_language.clone();
5070 let language = this
5071 .languages
5072 .language_for_file_path(&project_path.path)
5073 .unwrap_or_else(move |_| adapter_language);
5074 let adapter = adapter.clone();
5075 Some(async move {
5076 let language = language.await;
5077 let label = adapter
5078 .label_for_symbol(&symbol_name, symbol_kind, &language)
5079 .await;
5080
5081 Symbol {
5082 language_server_name: adapter.name.clone(),
5083 source_worktree_id,
5084 path: project_path,
5085 label: label.unwrap_or_else(|| {
5086 CodeLabel::plain(symbol_name.clone(), None)
5087 }),
5088 kind: symbol_kind,
5089 name: symbol_name,
5090 range: range_from_lsp(symbol_location.range),
5091 signature,
5092 }
5093 })
5094 },
5095 ));
5096 }
5097
5098 symbols
5099 })?;
5100
5101 Ok(futures::future::join_all(symbols).await)
5102 })
5103 } else if let Some(project_id) = self.remote_id() {
5104 let request = self.client.request(proto::GetProjectSymbols {
5105 project_id,
5106 query: query.to_string(),
5107 });
5108 cx.spawn(move |this, mut cx| async move {
5109 let response = request.await?;
5110 let mut symbols = Vec::new();
5111 if let Some(this) = this.upgrade() {
5112 let new_symbols = this.update(&mut cx, |this, _| {
5113 response
5114 .symbols
5115 .into_iter()
5116 .map(|symbol| this.deserialize_symbol(symbol))
5117 .collect::<Vec<_>>()
5118 })?;
5119 symbols = futures::future::join_all(new_symbols)
5120 .await
5121 .into_iter()
5122 .filter_map(|symbol| symbol.log_err())
5123 .collect::<Vec<_>>();
5124 }
5125 Ok(symbols)
5126 })
5127 } else {
5128 Task::ready(Ok(Default::default()))
5129 }
5130 }
5131
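    // Opens the buffer containing the given symbol, resolving its path against
    // the symbol's worktree and loading it via the originating language server
    // when local, or via the host when remote.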
5132 pub fn open_buffer_for_symbol(
5133 &mut self,
5134 symbol: &Symbol,
5135 cx: &mut ModelContext<Self>,
5136 ) -> Task<Result<Model<Buffer>>> {
5137 if self.is_local() {
5138 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
5139 symbol.source_worktree_id,
5140 symbol.language_server_name.clone(),
5141 )) {
5142 *id
5143 } else {
5144 return Task::ready(Err(anyhow!(
5145 "language server for worktree and language not found"
5146 )));
5147 };
5148
5149 let worktree_abs_path = if let Some(worktree_abs_path) = self
5150 .worktree_for_id(symbol.path.worktree_id, cx)
5151 .and_then(|worktree| worktree.read(cx).as_local())
5152 .map(|local_worktree| local_worktree.abs_path())
5153 {
5154 worktree_abs_path
5155 } else {
5156 return Task::ready(Err(anyhow!("worktree not found for symbol")));
5157 };
5158
5159 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
5160 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
5161 uri
5162 } else {
5163 return Task::ready(Err(anyhow!("invalid symbol path")));
5164 };
5165
5166 self.open_local_buffer_via_lsp(
5167 symbol_uri,
5168 language_server_id,
5169 symbol.language_server_name.clone(),
5170 cx,
5171 )
5172 } else if let Some(project_id) = self.remote_id() {
5173 let request = self.client.request(proto::OpenBufferForSymbol {
5174 project_id,
5175 symbol: Some(serialize_symbol(symbol)),
5176 });
5177 cx.spawn(move |this, mut cx| async move {
5178 let response = request.await?;
5179 let buffer_id = BufferId::new(response.buffer_id)?;
5180 this.update(&mut cx, |this, cx| {
5181 this.wait_for_remote_buffer(buffer_id, cx)
5182 })?
5183 .await
5184 })
5185 } else {
5186 Task::ready(Err(anyhow!("project does not have a remote id")))
5187 }
5188 }
5189
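    // Requests hover information from every language server attached to the
    // buffer that advertises hover support, dropping responses whose contents are
    // empty after trimming.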
5190 fn hover_impl(
5191 &self,
5192 buffer: &Model<Buffer>,
5193 position: PointUtf16,
5194 cx: &mut ModelContext<Self>,
5195 ) -> Task<Vec<Hover>> {
5196 fn remove_empty_hover_blocks(mut hover: Hover) -> Option<Hover> {
5197 hover
5198 .contents
5199 .retain(|hover_block| !hover_block.text.trim().is_empty());
5200 if hover.contents.is_empty() {
5201 None
5202 } else {
5203 Some(hover)
5204 }
5205 }
5206
5207 if self.is_local() {
5208 let snapshot = buffer.read(cx).snapshot();
5209 let offset = position.to_offset(&snapshot);
5210 let scope = snapshot.language_scope_at(offset);
5211
5212 let mut hover_responses = self
5213 .language_servers_for_buffer(buffer.read(cx), cx)
5214 .filter(|(_, server)| match server.capabilities().hover_provider {
5215 Some(lsp::HoverProviderCapability::Simple(enabled)) => enabled,
5216 Some(lsp::HoverProviderCapability::Options(_)) => true,
5217 None => false,
5218 })
5219 .filter(|(adapter, _)| {
5220 scope
5221 .as_ref()
5222 .map(|scope| scope.language_allowed(&adapter.name))
5223 .unwrap_or(true)
5224 })
5225 .map(|(_, server)| server.server_id())
5226 .map(|server_id| {
5227 self.request_lsp(
5228 buffer.clone(),
5229 LanguageServerToQuery::Other(server_id),
5230 GetHover { position },
5231 cx,
5232 )
5233 })
5234 .collect::<FuturesUnordered<_>>();
5235
5236 cx.spawn(|_, _| async move {
5237 let mut hovers = Vec::with_capacity(hover_responses.len());
5238 while let Some(hover_response) = hover_responses.next().await {
5239 if let Some(hover) = hover_response
5240 .log_err()
5241 .flatten()
5242 .and_then(remove_empty_hover_blocks)
5243 {
5244 hovers.push(hover);
5245 }
5246 }
5247 hovers
5248 })
5249 } else if self.is_remote() {
5250 let request_task = self.request_lsp(
5251 buffer.clone(),
5252 LanguageServerToQuery::Primary,
5253 GetHover { position },
5254 cx,
5255 );
5256 cx.spawn(|_, _| async move {
5257 request_task
5258 .await
5259 .log_err()
5260 .flatten()
5261 .and_then(remove_empty_hover_blocks)
5262 .map(|hover| vec![hover])
5263 .unwrap_or_default()
5264 })
5265 } else {
5266 log::error!("cannot show hovers: project does not have a remote id");
5267 Task::ready(Vec::new())
5268 }
5269 }
5270
5271 pub fn hover<T: ToPointUtf16>(
5272 &self,
5273 buffer: &Model<Buffer>,
5274 position: T,
5275 cx: &mut ModelContext<Self>,
5276 ) -> Task<Vec<Hover>> {
5277 let position = position.to_point_utf16(buffer.read(cx));
5278 self.hover_impl(buffer, position, cx)
5279 }
5280
5281 #[inline(never)]
5282 fn completions_impl(
5283 &self,
5284 buffer: &Model<Buffer>,
5285 position: PointUtf16,
5286 cx: &mut ModelContext<Self>,
5287 ) -> Task<Result<Vec<Completion>>> {
5288 if self.is_local() {
5289 let snapshot = buffer.read(cx).snapshot();
5290 let offset = position.to_offset(&snapshot);
5291 let scope = snapshot.language_scope_at(offset);
5292
5293 let server_ids: Vec<_> = self
5294 .language_servers_for_buffer(buffer.read(cx), cx)
5295 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
5296 .filter(|(adapter, _)| {
5297 scope
5298 .as_ref()
5299 .map(|scope| scope.language_allowed(&adapter.name))
5300 .unwrap_or(true)
5301 })
5302 .map(|(_, server)| server.server_id())
5303 .collect();
5304
5305 let buffer = buffer.clone();
5306 cx.spawn(move |this, mut cx| async move {
5307 let mut tasks = Vec::with_capacity(server_ids.len());
5308 this.update(&mut cx, |this, cx| {
5309 for server_id in server_ids {
5310 tasks.push(this.request_lsp(
5311 buffer.clone(),
5312 LanguageServerToQuery::Other(server_id),
5313 GetCompletions { position },
5314 cx,
5315 ));
5316 }
5317 })?;
5318
5319 let mut completions = Vec::new();
5320 for task in tasks {
5321 if let Ok(new_completions) = task.await {
5322 completions.extend_from_slice(&new_completions);
5323 }
5324 }
5325
5326 Ok(completions)
5327 })
5328 } else if let Some(project_id) = self.remote_id() {
5329 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
5330 } else {
5331 Task::ready(Ok(Default::default()))
5332 }
5333 }
5334 pub fn completions<T: ToOffset + ToPointUtf16>(
5335 &self,
5336 buffer: &Model<Buffer>,
5337 position: T,
5338 cx: &mut ModelContext<Self>,
5339 ) -> Task<Result<Vec<Completion>>> {
5340 let position = position.to_point_utf16(buffer.read(cx));
5341 self.completions_impl(buffer, position, cx)
5342 }
5343
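    // Lazily resolves documentation for the given completion indices, either via
    // `completionItem/resolve` against the owning language server (local) or via
    // a proto request to the host (remote). Returns whether anything was resolved.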
5344 pub fn resolve_completions(
5345 &self,
5346 completion_indices: Vec<usize>,
5347 completions: Arc<RwLock<Box<[Completion]>>>,
5348 cx: &mut ModelContext<Self>,
5349 ) -> Task<Result<bool>> {
5350 let client = self.client();
5351 let language_registry = self.languages().clone();
5352
5353 let is_remote = self.is_remote();
5354 let project_id = self.remote_id();
5355
5356 cx.spawn(move |this, mut cx| async move {
5357 let mut did_resolve = false;
5358 if is_remote {
5359 let project_id =
5360 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
5361
5362 for completion_index in completion_indices {
5363 let (server_id, completion) = {
5364 let completions_guard = completions.read();
5365 let completion = &completions_guard[completion_index];
5366 if completion.documentation.is_some() {
5367 continue;
5368 }
5369
5370 did_resolve = true;
5371 let server_id = completion.server_id;
5372 let completion = completion.lsp_completion.clone();
5373
5374 (server_id, completion)
5375 };
5376
5377 Self::resolve_completion_documentation_remote(
5378 project_id,
5379 server_id,
5380 completions.clone(),
5381 completion_index,
5382 completion,
5383 client.clone(),
5384 language_registry.clone(),
5385 )
5386 .await;
5387 }
5388 } else {
5389 for completion_index in completion_indices {
5390 let (server_id, completion) = {
5391 let completions_guard = completions.read();
5392 let completion = &completions_guard[completion_index];
5393 if completion.documentation.is_some() {
5394 continue;
5395 }
5396
5397 let server_id = completion.server_id;
5398 let completion = completion.lsp_completion.clone();
5399
5400 (server_id, completion)
5401 };
5402
5403 let server = this
5404 .read_with(&mut cx, |project, _| {
5405 project.language_server_for_id(server_id)
5406 })
5407 .ok()
5408 .flatten();
5409 let Some(server) = server else {
5410 continue;
5411 };
5412
5413 did_resolve = true;
5414 Self::resolve_completion_documentation_local(
5415 server,
5416 completions.clone(),
5417 completion_index,
5418 completion,
5419 language_registry.clone(),
5420 )
5421 .await;
5422 }
5423 }
5424
5425 Ok(did_resolve)
5426 })
5427 }
5428
5429 async fn resolve_completion_documentation_local(
5430 server: Arc<lsp::LanguageServer>,
5431 completions: Arc<RwLock<Box<[Completion]>>>,
5432 completion_index: usize,
5433 completion: lsp::CompletionItem,
5434 language_registry: Arc<LanguageRegistry>,
5435 ) {
5436 let can_resolve = server
5437 .capabilities()
5438 .completion_provider
5439 .as_ref()
5440 .and_then(|options| options.resolve_provider)
5441 .unwrap_or(false);
5442 if !can_resolve {
5443 return;
5444 }
5445
5446 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
5447 let Some(completion_item) = request.await.log_err() else {
5448 return;
5449 };
5450
5451 if let Some(lsp_documentation) = completion_item.documentation {
5452 let documentation = language::prepare_completion_documentation(
5453 &lsp_documentation,
5454 &language_registry,
5455 None, // TODO: Try to reasonably work out which language the completion is for
5456 )
5457 .await;
5458
5459 let mut completions = completions.write();
5460 let completion = &mut completions[completion_index];
5461 completion.documentation = Some(documentation);
5462 } else {
5463 let mut completions = completions.write();
5464 let completion = &mut completions[completion_index];
5465 completion.documentation = Some(Documentation::Undocumented);
5466 }
5467 }
5468
5469 async fn resolve_completion_documentation_remote(
5470 project_id: u64,
5471 server_id: LanguageServerId,
5472 completions: Arc<RwLock<Box<[Completion]>>>,
5473 completion_index: usize,
5474 completion: lsp::CompletionItem,
5475 client: Arc<Client>,
5476 language_registry: Arc<LanguageRegistry>,
5477 ) {
5478 let request = proto::ResolveCompletionDocumentation {
5479 project_id,
5480 language_server_id: server_id.0 as u64,
5481 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
5482 };
5483
5484 let Some(response) = client
5485 .request(request)
5486 .await
5487 .context("completion documentation resolve proto request")
5488 .log_err()
5489 else {
5490 return;
5491 };
5492
        if response.text.is_empty() {
            let mut completions = completions.write();
            let completion = &mut completions[completion_index];
            completion.documentation = Some(Documentation::Undocumented);
            return;
        }
5498
5499 let documentation = if response.is_markdown {
5500 Documentation::MultiLineMarkdown(
5501 markdown::parse_markdown(&response.text, &language_registry, None).await,
5502 )
5503 } else if response.text.lines().count() <= 1 {
5504 Documentation::SingleLine(response.text)
5505 } else {
5506 Documentation::MultiLinePlainText(response.text)
5507 };
5508
5509 let mut completions = completions.write();
5510 let completion = &mut completions[completion_index];
5511 completion.documentation = Some(documentation);
5512 }
5513
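    // Applies a confirmed completion's additional text edits, resolving the
    // completion item first when the server supports resolution and skipping any
    // edit that overlaps the primary completion range.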
5514 pub fn apply_additional_edits_for_completion(
5515 &self,
5516 buffer_handle: Model<Buffer>,
5517 completion: Completion,
5518 push_to_history: bool,
5519 cx: &mut ModelContext<Self>,
5520 ) -> Task<Result<Option<Transaction>>> {
5521 let buffer = buffer_handle.read(cx);
5522 let buffer_id = buffer.remote_id();
5523
5524 if self.is_local() {
5525 let server_id = completion.server_id;
5526 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
5527 Some((_, server)) => server.clone(),
5528 _ => return Task::ready(Ok(Default::default())),
5529 };
5530
5531 cx.spawn(move |this, mut cx| async move {
5532 let can_resolve = lang_server
5533 .capabilities()
5534 .completion_provider
5535 .as_ref()
5536 .and_then(|options| options.resolve_provider)
5537 .unwrap_or(false);
5538 let additional_text_edits = if can_resolve {
5539 lang_server
5540 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
5541 .await?
5542 .additional_text_edits
5543 } else {
5544 completion.lsp_completion.additional_text_edits
5545 };
5546 if let Some(edits) = additional_text_edits {
5547 let edits = this
5548 .update(&mut cx, |this, cx| {
5549 this.edits_from_lsp(
5550 &buffer_handle,
5551 edits,
5552 lang_server.server_id(),
5553 None,
5554 cx,
5555 )
5556 })?
5557 .await?;
5558
5559 buffer_handle.update(&mut cx, |buffer, cx| {
5560 buffer.finalize_last_transaction();
5561 buffer.start_transaction();
5562
5563 for (range, text) in edits {
5564 let primary = &completion.old_range;
5565 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5566 && primary.end.cmp(&range.start, buffer).is_ge();
5567 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5568 && range.end.cmp(&primary.end, buffer).is_ge();
5569
                            // Skip additional edits which overlap with the primary completion edit.
                            // https://github.com/zed-industries/zed/pull/1871
5572 if !start_within && !end_within {
5573 buffer.edit([(range, text)], None, cx);
5574 }
5575 }
5576
5577 let transaction = if buffer.end_transaction(cx).is_some() {
5578 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5579 if !push_to_history {
5580 buffer.forget_transaction(transaction.id);
5581 }
5582 Some(transaction)
5583 } else {
5584 None
5585 };
5586 Ok(transaction)
5587 })?
5588 } else {
5589 Ok(None)
5590 }
5591 })
5592 } else if let Some(project_id) = self.remote_id() {
5593 let client = self.client.clone();
5594 cx.spawn(move |_, mut cx| async move {
5595 let response = client
5596 .request(proto::ApplyCompletionAdditionalEdits {
5597 project_id,
5598 buffer_id: buffer_id.into(),
5599 completion: Some(language::proto::serialize_completion(&completion)),
5600 })
5601 .await?;
5602
5603 if let Some(transaction) = response.transaction {
5604 let transaction = language::proto::deserialize_transaction(transaction)?;
5605 buffer_handle
5606 .update(&mut cx, |buffer, _| {
5607 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5608 })?
5609 .await?;
5610 if push_to_history {
5611 buffer_handle.update(&mut cx, |buffer, _| {
5612 buffer.push_transaction(transaction.clone(), Instant::now());
5613 })?;
5614 }
5615 Ok(Some(transaction))
5616 } else {
5617 Ok(None)
5618 }
5619 })
5620 } else {
5621 Task::ready(Err(anyhow!("project does not have a remote id")))
5622 }
5623 }
5624
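    // Requests code actions for the given range from every language server
    // attached to the buffer that supports them, or from the host when remote.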
5625 fn code_actions_impl(
5626 &self,
5627 buffer_handle: &Model<Buffer>,
5628 range: Range<Anchor>,
5629 cx: &mut ModelContext<Self>,
5630 ) -> Task<Vec<CodeAction>> {
5631 if self.is_local() {
5632 let snapshot = buffer_handle.read(cx).snapshot();
5633 let offset = range.start.to_offset(&snapshot);
5634 let scope = snapshot.language_scope_at(offset);
5635
            let mut action_tasks = self
5637 .language_servers_for_buffer(buffer_handle.read(cx), cx)
5638 .filter(|(_, server)| GetCodeActions::supports_code_actions(server.capabilities()))
5639 .filter(|(adapter, _)| {
5640 scope
5641 .as_ref()
5642 .map(|scope| scope.language_allowed(&adapter.name))
5643 .unwrap_or(true)
5644 })
5645 .map(|(_, server)| server.server_id())
5646 .map(|server_id| {
5647 self.request_lsp(
5648 buffer_handle.clone(),
5649 LanguageServerToQuery::Other(server_id),
5650 GetCodeActions {
5651 range: range.clone(),
5652 kinds: None,
5653 },
5654 cx,
5655 )
5656 })
5657 .collect::<FuturesUnordered<_>>();
5658
5659 cx.spawn(|_, _| async move {
                let mut actions = Vec::with_capacity(action_tasks.len());
                while let Some(response) = action_tasks.next().await {
                    actions.extend(response.log_err().unwrap_or_default());
                }
                actions
5665 })
5666 } else if self.is_remote() {
5667 let request_task = self.request_lsp(
5668 buffer_handle.clone(),
5669 LanguageServerToQuery::Primary,
5670 GetCodeActions { range, kinds: None },
5671 cx,
5672 );
5673 cx.spawn(|_, _| async move { request_task.await.log_err().unwrap_or_default() })
5674 } else {
5675 log::error!("cannot fetch actions: project does not have a remote id");
5676 Task::ready(Vec::new())
5677 }
5678 }
5679
5680 pub fn code_actions<T: Clone + ToOffset>(
5681 &self,
5682 buffer_handle: &Model<Buffer>,
5683 range: Range<T>,
5684 cx: &mut ModelContext<Self>,
5685 ) -> Task<Vec<CodeAction>> {
5686 let buffer = buffer_handle.read(cx);
5687 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5688 self.code_actions_impl(buffer_handle, range, cx)
5689 }
5690
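    // Applies a code action: resolves it if needed, applies any workspace edit it
    // carries, and otherwise executes its LSP command and returns the workspace
    // edits the server sent back while the command ran.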
5691 pub fn apply_code_action(
5692 &self,
5693 buffer_handle: Model<Buffer>,
5694 mut action: CodeAction,
5695 push_to_history: bool,
5696 cx: &mut ModelContext<Self>,
5697 ) -> Task<Result<ProjectTransaction>> {
5698 if self.is_local() {
5699 let buffer = buffer_handle.read(cx);
5700 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5701 self.language_server_for_buffer(buffer, action.server_id, cx)
5702 {
5703 (adapter.clone(), server.clone())
5704 } else {
5705 return Task::ready(Ok(Default::default()));
5706 };
5707 cx.spawn(move |this, mut cx| async move {
5708 Self::try_resolve_code_action(&lang_server, &mut action)
5709 .await
5710 .context("resolving a code action")?;
5711 if let Some(edit) = action.lsp_action.edit {
5712 if edit.changes.is_some() || edit.document_changes.is_some() {
5713 return Self::deserialize_workspace_edit(
5714 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5715 edit,
5716 push_to_history,
5717 lsp_adapter.clone(),
5718 lang_server.clone(),
5719 &mut cx,
5720 )
5721 .await;
5722 }
5723 }
5724
5725 if let Some(command) = action.lsp_action.command {
5726 this.update(&mut cx, |this, _| {
5727 this.last_workspace_edits_by_language_server
5728 .remove(&lang_server.server_id());
5729 })?;
5730
5731 let result = lang_server
5732 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5733 command: command.command,
5734 arguments: command.arguments.unwrap_or_default(),
5735 ..Default::default()
5736 })
5737 .await;
5738
5739 if let Err(err) = result {
5740 // TODO: LSP ERROR
5741 return Err(err);
5742 }
5743
5744 return this.update(&mut cx, |this, _| {
5745 this.last_workspace_edits_by_language_server
5746 .remove(&lang_server.server_id())
5747 .unwrap_or_default()
5748 });
5749 }
5750
5751 Ok(ProjectTransaction::default())
5752 })
5753 } else if let Some(project_id) = self.remote_id() {
5754 let client = self.client.clone();
5755 let request = proto::ApplyCodeAction {
5756 project_id,
5757 buffer_id: buffer_handle.read(cx).remote_id().into(),
5758 action: Some(language::proto::serialize_code_action(&action)),
5759 };
5760 cx.spawn(move |this, mut cx| async move {
5761 let response = client
5762 .request(request)
5763 .await?
5764 .transaction
5765 .ok_or_else(|| anyhow!("missing transaction"))?;
5766 this.update(&mut cx, |this, cx| {
5767 this.deserialize_project_transaction(response, push_to_history, cx)
5768 })?
5769 .await
5770 })
5771 } else {
5772 Task::ready(Err(anyhow!("project does not have a remote id")))
5773 }
5774 }
5775
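    /// Requests on-type formatting for the given position and trigger character,
    /// allowing only one concurrent formatting request per buffer.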
5776 fn apply_on_type_formatting(
5777 &self,
5778 buffer: Model<Buffer>,
5779 position: Anchor,
5780 trigger: String,
5781 cx: &mut ModelContext<Self>,
5782 ) -> Task<Result<Option<Transaction>>> {
5783 if self.is_local() {
5784 cx.spawn(move |this, mut cx| async move {
5785 // Do not allow multiple concurrent formatting requests for the
5786 // same buffer.
5787 this.update(&mut cx, |this, cx| {
5788 this.buffers_being_formatted
5789 .insert(buffer.read(cx).remote_id())
5790 })?;
5791
5792 let _cleanup = defer({
5793 let this = this.clone();
5794 let mut cx = cx.clone();
5795 let closure_buffer = buffer.clone();
5796 move || {
5797 this.update(&mut cx, |this, cx| {
5798 this.buffers_being_formatted
5799 .remove(&closure_buffer.read(cx).remote_id());
5800 })
5801 .ok();
5802 }
5803 });
5804
5805 buffer
5806 .update(&mut cx, |buffer, _| {
5807 buffer.wait_for_edits(Some(position.timestamp))
5808 })?
5809 .await?;
5810 this.update(&mut cx, |this, cx| {
5811 let position = position.to_point_utf16(buffer.read(cx));
5812 this.on_type_format(buffer, position, trigger, false, cx)
5813 })?
5814 .await
5815 })
5816 } else if let Some(project_id) = self.remote_id() {
5817 let client = self.client.clone();
5818 let request = proto::OnTypeFormatting {
5819 project_id,
5820 buffer_id: buffer.read(cx).remote_id().into(),
5821 position: Some(serialize_anchor(&position)),
5822 trigger,
5823 version: serialize_version(&buffer.read(cx).version()),
5824 };
5825 cx.spawn(move |_, _| async move {
5826 client
5827 .request(request)
5828 .await?
5829 .transaction
5830 .map(language::proto::deserialize_transaction)
5831 .transpose()
5832 })
5833 } else {
5834 Task::ready(Err(anyhow!("project does not have a remote id")))
5835 }
5836 }
5837
5838 async fn deserialize_edits(
5839 this: Model<Self>,
5840 buffer_to_edit: Model<Buffer>,
5841 edits: Vec<lsp::TextEdit>,
5842 push_to_history: bool,
5843 _: Arc<CachedLspAdapter>,
5844 language_server: Arc<LanguageServer>,
5845 cx: &mut AsyncAppContext,
5846 ) -> Result<Option<Transaction>> {
5847 let edits = this
5848 .update(cx, |this, cx| {
5849 this.edits_from_lsp(
5850 &buffer_to_edit,
5851 edits,
5852 language_server.server_id(),
5853 None,
5854 cx,
5855 )
5856 })?
5857 .await?;
5858
5859 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5860 buffer.finalize_last_transaction();
5861 buffer.start_transaction();
5862 for (range, text) in edits {
5863 buffer.edit([(range, text)], None, cx);
5864 }
5865
5866 if buffer.end_transaction(cx).is_some() {
5867 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5868 if !push_to_history {
5869 buffer.forget_transaction(transaction.id);
5870 }
5871 Some(transaction)
5872 } else {
5873 None
5874 }
5875 })?;
5876
5877 Ok(transaction)
5878 }
5879
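    /// Applies an LSP workspace edit to the project: resource operations (create, rename,
    /// delete) are performed through the filesystem, and text edits are applied to the
    /// corresponding buffers. Returns the transactions for every edited buffer.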
5880 async fn deserialize_workspace_edit(
5881 this: Model<Self>,
5882 edit: lsp::WorkspaceEdit,
5883 push_to_history: bool,
5884 lsp_adapter: Arc<CachedLspAdapter>,
5885 language_server: Arc<LanguageServer>,
5886 cx: &mut AsyncAppContext,
5887 ) -> Result<ProjectTransaction> {
5888 let fs = this.update(cx, |this, _| this.fs.clone())?;
5889 let mut operations = Vec::new();
5890 if let Some(document_changes) = edit.document_changes {
5891 match document_changes {
5892 lsp::DocumentChanges::Edits(edits) => {
5893 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5894 }
5895 lsp::DocumentChanges::Operations(ops) => operations = ops,
5896 }
5897 } else if let Some(changes) = edit.changes {
5898 operations.extend(changes.into_iter().map(|(uri, edits)| {
5899 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5900 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5901 uri,
5902 version: None,
5903 },
5904 edits: edits.into_iter().map(OneOf::Left).collect(),
5905 })
5906 }));
5907 }
5908
5909 let mut project_transaction = ProjectTransaction::default();
5910 for operation in operations {
5911 match operation {
5912 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
                    // The URI may denote a directory via a trailing slash; `Path::ends_with`
                    // only matches whole components, so inspect the URI before converting it.
                    let is_dir = op.uri.path().ends_with('/');
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;

                    if let Some(parent_path) = abs_path.parent() {
                        fs.create_dir(parent_path).await?;
                    }
                    if is_dir {
5922 fs.create_dir(&abs_path).await?;
5923 } else {
5924 fs.create_file(
5925 &abs_path,
5926 op.options
5927 .map(|options| fs::CreateOptions {
5928 overwrite: options.overwrite.unwrap_or(false),
5929 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5930 })
5931 .unwrap_or_default(),
5932 )
5933 .await?;
5934 }
5935 }
5936
5937 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5938 let source_abs_path = op
5939 .old_uri
5940 .to_file_path()
5941 .map_err(|_| anyhow!("can't convert URI to path"))?;
5942 let target_abs_path = op
5943 .new_uri
5944 .to_file_path()
5945 .map_err(|_| anyhow!("can't convert URI to path"))?;
5946 fs.rename(
5947 &source_abs_path,
5948 &target_abs_path,
5949 op.options
5950 .map(|options| fs::RenameOptions {
5951 overwrite: options.overwrite.unwrap_or(false),
5952 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5953 })
5954 .unwrap_or_default(),
5955 )
5956 .await?;
5957 }
5958
5959 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
                    // As above, detect directories via the URI's trailing slash rather than
                    // `Path::ends_with`, which only matches whole path components.
                    let is_dir = op.uri.path().ends_with('/');
                    let abs_path = op
                        .uri
                        .to_file_path()
                        .map_err(|_| anyhow!("can't convert URI to path"))?;
                    let options = op
                        .options
                        .map(|options| fs::RemoveOptions {
                            recursive: options.recursive.unwrap_or(false),
                            ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
                        })
                        .unwrap_or_default();
                    if is_dir {
5972 fs.remove_dir(&abs_path, options).await?;
5973 } else {
5974 fs.remove_file(&abs_path, options).await?;
5975 }
5976 }
5977
5978 lsp::DocumentChangeOperation::Edit(op) => {
5979 let buffer_to_edit = this
5980 .update(cx, |this, cx| {
5981 this.open_local_buffer_via_lsp(
5982 op.text_document.uri,
5983 language_server.server_id(),
5984 lsp_adapter.name.clone(),
5985 cx,
5986 )
5987 })?
5988 .await?;
5989
5990 let edits = this
5991 .update(cx, |this, cx| {
5992 let edits = op.edits.into_iter().map(|edit| match edit {
5993 OneOf::Left(edit) => edit,
5994 OneOf::Right(edit) => edit.text_edit,
5995 });
5996 this.edits_from_lsp(
5997 &buffer_to_edit,
5998 edits,
5999 language_server.server_id(),
6000 op.text_document.version,
6001 cx,
6002 )
6003 })?
6004 .await?;
6005
6006 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
6007 buffer.finalize_last_transaction();
6008 buffer.start_transaction();
6009 for (range, text) in edits {
6010 buffer.edit([(range, text)], None, cx);
6011 }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        }
6023 })?;
6024 if let Some(transaction) = transaction {
6025 project_transaction.0.insert(buffer_to_edit, transaction);
6026 }
6027 }
6028 }
6029 }
6030
6031 Ok(project_transaction)
6032 }
6033
6034 fn prepare_rename_impl(
6035 &self,
6036 buffer: Model<Buffer>,
6037 position: PointUtf16,
6038 cx: &mut ModelContext<Self>,
6039 ) -> Task<Result<Option<Range<Anchor>>>> {
6040 self.request_lsp(
6041 buffer,
6042 LanguageServerToQuery::Primary,
6043 PrepareRename { position },
6044 cx,
6045 )
6046 }
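
    /// Asks the primary language server for the range of the symbol at the given position
    /// that can be renamed, if any.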
6047 pub fn prepare_rename<T: ToPointUtf16>(
6048 &self,
6049 buffer: Model<Buffer>,
6050 position: T,
6051 cx: &mut ModelContext<Self>,
6052 ) -> Task<Result<Option<Range<Anchor>>>> {
6053 let position = position.to_point_utf16(buffer.read(cx));
6054 self.prepare_rename_impl(buffer, position, cx)
6055 }
6056
6057 fn perform_rename_impl(
6058 &self,
6059 buffer: Model<Buffer>,
6060 position: PointUtf16,
6061 new_name: String,
6062 push_to_history: bool,
6063 cx: &mut ModelContext<Self>,
6064 ) -> Task<Result<ProjectTransaction>> {
6066 self.request_lsp(
6067 buffer,
6068 LanguageServerToQuery::Primary,
6069 PerformRename {
6070 position,
6071 new_name,
6072 push_to_history,
6073 },
6074 cx,
6075 )
6076 }
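
    /// Renames the symbol at the given position via the primary language server and applies
    /// the resulting workspace edit across the project.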
6077 pub fn perform_rename<T: ToPointUtf16>(
6078 &self,
6079 buffer: Model<Buffer>,
6080 position: T,
6081 new_name: String,
6082 push_to_history: bool,
6083 cx: &mut ModelContext<Self>,
6084 ) -> Task<Result<ProjectTransaction>> {
6085 let position = position.to_point_utf16(buffer.read(cx));
6086 self.perform_rename_impl(buffer, position, new_name, push_to_history, cx)
6087 }
6088
6089 pub fn on_type_format_impl(
6090 &self,
6091 buffer: Model<Buffer>,
6092 position: PointUtf16,
6093 trigger: String,
6094 push_to_history: bool,
6095 cx: &mut ModelContext<Self>,
6096 ) -> Task<Result<Option<Transaction>>> {
6097 let tab_size = buffer.update(cx, |buffer, cx| {
6098 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx).tab_size
6099 });
6100 self.request_lsp(
6101 buffer.clone(),
6102 LanguageServerToQuery::Primary,
6103 OnTypeFormatting {
6104 position,
6105 trigger,
6106 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
6107 push_to_history,
6108 },
6109 cx,
6110 )
6111 }
6112
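    /// Sends an on-type formatting request to the primary language server for the character
    /// typed at the given position, using the buffer's configured tab size.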
6113 pub fn on_type_format<T: ToPointUtf16>(
6114 &self,
6115 buffer: Model<Buffer>,
6116 position: T,
6117 trigger: String,
6118 push_to_history: bool,
6119 cx: &mut ModelContext<Self>,
6120 ) -> Task<Result<Option<Transaction>>> {
6121 let position = position.to_point_utf16(buffer.read(cx));
6122 self.on_type_format_impl(buffer, position, trigger, push_to_history, cx)
6123 }
6124
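    /// Requests inlay hints for the given buffer range from the primary language server.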
6125 pub fn inlay_hints<T: ToOffset>(
6126 &self,
6127 buffer_handle: Model<Buffer>,
6128 range: Range<T>,
6129 cx: &mut ModelContext<Self>,
6130 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
6131 let buffer = buffer_handle.read(cx);
6132 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
6133 self.inlay_hints_impl(buffer_handle, range, cx)
6134 }
6135 fn inlay_hints_impl(
6136 &self,
6137 buffer_handle: Model<Buffer>,
6138 range: Range<Anchor>,
6139 cx: &mut ModelContext<Self>,
6140 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
6141 let buffer = buffer_handle.read(cx);
6142 let range_start = range.start;
6143 let range_end = range.end;
6144 let buffer_id = buffer.remote_id().into();
6145 let lsp_request = InlayHints { range };
6146
6147 if self.is_local() {
6148 let lsp_request_task = self.request_lsp(
6149 buffer_handle.clone(),
6150 LanguageServerToQuery::Primary,
6151 lsp_request,
6152 cx,
6153 );
6154 cx.spawn(move |_, mut cx| async move {
6155 buffer_handle
6156 .update(&mut cx, |buffer, _| {
6157 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
6158 })?
6159 .await
6160 .context("waiting for inlay hint request range edits")?;
6161 lsp_request_task.await.context("inlay hints LSP request")
6162 })
6163 } else if let Some(project_id) = self.remote_id() {
6164 let client = self.client.clone();
6165 let request = proto::InlayHints {
6166 project_id,
6167 buffer_id,
6168 start: Some(serialize_anchor(&range_start)),
6169 end: Some(serialize_anchor(&range_end)),
6170 version: serialize_version(&buffer_handle.read(cx).version()),
6171 };
6172 cx.spawn(move |project, cx| async move {
6173 let response = client
6174 .request(request)
6175 .await
6176 .context("inlay hints proto request")?;
6177 LspCommand::response_from_proto(
6178 lsp_request,
6179 response,
6180 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
6181 buffer_handle.clone(),
6182 cx.clone(),
6183 )
6184 .await
6185 .context("inlay hints proto response conversion")
6186 })
6187 } else {
6188 Task::ready(Err(anyhow!("project does not have a remote id")))
6189 }
6190 }
6191
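    /// Resolves additional details for the given inlay hint via the language server that
    /// produced it, returning the hint unchanged if the server cannot resolve hints.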
6192 pub fn resolve_inlay_hint(
6193 &self,
6194 hint: InlayHint,
6195 buffer_handle: Model<Buffer>,
6196 server_id: LanguageServerId,
6197 cx: &mut ModelContext<Self>,
6198 ) -> Task<anyhow::Result<InlayHint>> {
6199 if self.is_local() {
6200 let buffer = buffer_handle.read(cx);
6201 let (_, lang_server) = if let Some((adapter, server)) =
6202 self.language_server_for_buffer(buffer, server_id, cx)
6203 {
6204 (adapter.clone(), server.clone())
6205 } else {
6206 return Task::ready(Ok(hint));
6207 };
6208 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
6209 return Task::ready(Ok(hint));
6210 }
6211
6212 let buffer_snapshot = buffer.snapshot();
6213 cx.spawn(move |_, mut cx| async move {
6214 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
6215 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
6216 );
6217 let resolved_hint = resolve_task
6218 .await
6219 .context("inlay hint resolve LSP request")?;
6220 let resolved_hint = InlayHints::lsp_to_project_hint(
6221 resolved_hint,
6222 &buffer_handle,
6223 server_id,
6224 ResolveState::Resolved,
6225 false,
6226 &mut cx,
6227 )
6228 .await?;
6229 Ok(resolved_hint)
6230 })
6231 } else if let Some(project_id) = self.remote_id() {
6232 let client = self.client.clone();
6233 let request = proto::ResolveInlayHint {
6234 project_id,
6235 buffer_id: buffer_handle.read(cx).remote_id().into(),
6236 language_server_id: server_id.0 as u64,
6237 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
6238 };
6239 cx.spawn(move |_, _| async move {
6240 let response = client
6241 .request(request)
6242 .await
6243 .context("inlay hints proto request")?;
6244 match response.hint {
6245 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
6246 .context("inlay hints proto resolve response conversion"),
6247 None => Ok(hint),
6248 }
6249 })
6250 } else {
6251 Task::ready(Err(anyhow!("project does not have a remote id")))
6252 }
6253 }
6254
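    /// Searches the project for the given query, streaming results over the returned
    /// channel. Local projects search their worktrees directly; remote projects forward
    /// the query to the host.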
6255 #[allow(clippy::type_complexity)]
6256 pub fn search(
6257 &self,
6258 query: SearchQuery,
6259 cx: &mut ModelContext<Self>,
6260 ) -> Receiver<SearchResult> {
6261 if self.is_local() {
6262 self.search_local(query, cx)
6263 } else if let Some(project_id) = self.remote_id() {
6264 let (tx, rx) = smol::channel::unbounded();
6265 let request = self.client.request(query.to_proto(project_id));
6266 cx.spawn(move |this, mut cx| async move {
6267 let response = request.await?;
6268 let mut result = HashMap::default();
6269 for location in response.locations {
6270 let buffer_id = BufferId::new(location.buffer_id)?;
6271 let target_buffer = this
6272 .update(&mut cx, |this, cx| {
6273 this.wait_for_remote_buffer(buffer_id, cx)
6274 })?
6275 .await?;
6276 let start = location
6277 .start
6278 .and_then(deserialize_anchor)
6279 .ok_or_else(|| anyhow!("missing target start"))?;
6280 let end = location
6281 .end
6282 .and_then(deserialize_anchor)
6283 .ok_or_else(|| anyhow!("missing target end"))?;
6284 result
6285 .entry(target_buffer)
6286 .or_insert(Vec::new())
6287 .push(start..end)
6288 }
6289 for (buffer, ranges) in result {
6290 let _ = tx.send(SearchResult::Buffer { buffer, ranges }).await;
6291 }
6292
6293 if response.limit_reached {
6294 let _ = tx.send(SearchResult::LimitReached).await;
6295 }
6296
6297 Result::<(), anyhow::Error>::Ok(())
6298 })
6299 .detach_and_log_err(cx);
6300 rx
6301 } else {
6302 unimplemented!();
6303 }
6304 }
6305
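    /// Searches the worktrees and open buffers of a local project for the given query.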
6306 pub fn search_local(
6307 &self,
6308 query: SearchQuery,
6309 cx: &mut ModelContext<Self>,
6310 ) -> Receiver<SearchResult> {
        // Local search is split into several phases.
        // TL;DR: we do two passes. The first pass picks the files that contain at least one
        // match; the second pass finds the positions of every match within those candidates.
        // The Receiver returned by this function yields matches sorted by buffer path; buffers
        // without a path are reported first.
        //
        // It gets a bit hairy though, because we must also account for files that have no
        // persistent representation on the filesystem. Namely, untitled buffers and buffers
        // with unsaved changes need to be scanned too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it
        //    (i.e. unsaved files and untitled buffers). Then we walk each worktree and check
        //    which files match the query's predicate. If a file already has an opened buffer,
        //    we skip scanning its filesystem version altogether - after all, what we have in
        //    memory is more up-to-date than what's on disk.
        // 2. At this point, we have a list of all potentially matching buffers/files.
        //    We sort that list by buffer path - this list is retained for later use.
        //    We ensure that all buffers are now opened and available in the project.
        // 3. We run a scan over all the candidate buffers on multiple background threads.
        //    A match is not guaranteed for every candidate: files obtained from the filesystem
        //    contain at least one match by construction, but buffers taken from memory (unsaved
        //    files / untitled buffers) might not contain any.
        //    An auxiliary background thread is responsible for gathering the results. This is
        //    where the sorted list of buffers comes into play: the thread accepts per-buffer
        //    results in any order, records them, and reports matches in sorted order. As soon
        //    as the result for the next path in sorted order becomes available, it reports it
        //    (if it is a match) or skips to the following entry - which may already be available
        //    thanks to out-of-order processing.
        //
        // We could also report matches fully out of order, without maintaining a sorted list of
        // matching paths. However, that would push the sorting onto project search (the main
        // user of this function), which would have to sort incrementally as results arrive.
        // That isn't as straightforward as an insertion sort, sadly, and it would also have to
        // keep match indices stable while the sorted list of matches keeps changing.
        // Meanwhile, this implementation offers index stability, since matches are already
        // reported in sorted order.
6339 let snapshots = self
6340 .visible_worktrees(cx)
6341 .filter_map(|tree| {
6342 let tree = tree.read(cx).as_local()?;
6343 Some(tree.snapshot())
6344 })
6345 .collect::<Vec<_>>();
6346 let include_root = snapshots.len() > 1;
6347
6348 let background = cx.background_executor().clone();
6349 let path_count: usize = snapshots
6350 .iter()
6351 .map(|s| {
6352 if query.include_ignored() {
6353 s.file_count()
6354 } else {
6355 s.visible_file_count()
6356 }
6357 })
6358 .sum();
6359 if path_count == 0 {
6360 let (_, rx) = smol::channel::bounded(1024);
6361 return rx;
6362 }
6363 let workers = background.num_cpus().min(path_count);
6364 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
6365 let mut unnamed_files = vec![];
6366 let opened_buffers = self
6367 .opened_buffers
6368 .iter()
6369 .filter_map(|(_, b)| {
6370 let buffer = b.upgrade()?;
6371 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
6372 let is_ignored = buffer
6373 .project_path(cx)
6374 .and_then(|path| self.entry_for_path(&path, cx))
6375 .map_or(false, |entry| entry.is_ignored);
6376 (is_ignored, buffer.snapshot())
6377 });
6378 if is_ignored && !query.include_ignored() {
6379 return None;
6380 } else if let Some(file) = snapshot.file() {
6381 let matched_path = if include_root {
6382 query.file_matches(Some(&file.full_path(cx)))
6383 } else {
6384 query.file_matches(Some(file.path()))
6385 };
6386
6387 if matched_path {
6388 Some((file.path().clone(), (buffer, snapshot)))
6389 } else {
6390 None
6391 }
6392 } else {
6393 unnamed_files.push(buffer);
6394 None
6395 }
6396 })
6397 .collect();
6398 cx.background_executor()
6399 .spawn(Self::background_search(
6400 unnamed_files,
6401 opened_buffers,
6402 cx.background_executor().clone(),
6403 self.fs.clone(),
6404 workers,
6405 query.clone(),
6406 include_root,
6407 path_count,
6408 snapshots,
6409 matching_paths_tx,
6410 ))
6411 .detach();
6412
6413 let (result_tx, result_rx) = smol::channel::bounded(1024);
6414
6415 cx.spawn(|this, mut cx| async move {
6416 const MAX_SEARCH_RESULT_FILES: usize = 5_000;
6417 const MAX_SEARCH_RESULT_RANGES: usize = 10_000;
6418
6419 let mut matching_paths = matching_paths_rx
6420 .take(MAX_SEARCH_RESULT_FILES + 1)
6421 .collect::<Vec<_>>()
6422 .await;
6423 let mut limit_reached = if matching_paths.len() > MAX_SEARCH_RESULT_FILES {
6424 matching_paths.pop();
6425 true
6426 } else {
6427 false
6428 };
6429 matching_paths.sort_by_key(|candidate| (candidate.is_ignored(), candidate.path()));
6430
6431 let mut range_count = 0;
6432 let query = Arc::new(query);
6433
6434 // Now that we know what paths match the query, we will load at most
6435 // 64 buffers at a time to avoid overwhelming the main thread. For each
6436 // opened buffer, we will spawn a background task that retrieves all the
6437 // ranges in the buffer matched by the query.
6438 'outer: for matching_paths_chunk in matching_paths.chunks(64) {
6439 let mut chunk_results = Vec::new();
6440 for matching_path in matching_paths_chunk {
6441 let query = query.clone();
6442 let buffer = match matching_path {
6443 SearchMatchCandidate::OpenBuffer { buffer, .. } => {
6444 Task::ready(Ok(buffer.clone()))
6445 }
6446 SearchMatchCandidate::Path {
6447 worktree_id, path, ..
6448 } => this.update(&mut cx, |this, cx| {
6449 this.open_buffer((*worktree_id, path.clone()), cx)
6450 })?,
6451 };
6452
6453 chunk_results.push(cx.spawn(|cx| async move {
6454 let buffer = buffer.await?;
6455 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
6456 let ranges = cx
6457 .background_executor()
6458 .spawn(async move {
6459 query
6460 .search(&snapshot, None)
6461 .await
6462 .iter()
6463 .map(|range| {
6464 snapshot.anchor_before(range.start)
6465 ..snapshot.anchor_after(range.end)
6466 })
6467 .collect::<Vec<_>>()
6468 })
6469 .await;
6470 anyhow::Ok((buffer, ranges))
6471 }));
6472 }
6473
6474 let chunk_results = futures::future::join_all(chunk_results).await;
6475 for result in chunk_results {
6476 if let Some((buffer, ranges)) = result.log_err() {
6477 range_count += ranges.len();
6478 result_tx
6479 .send(SearchResult::Buffer { buffer, ranges })
6480 .await?;
6481 if range_count > MAX_SEARCH_RESULT_RANGES {
6482 limit_reached = true;
6483 break 'outer;
6484 }
6485 }
6486 }
6487 }
6488
6489 if limit_reached {
6490 result_tx.send(SearchResult::LimitReached).await?;
6491 }
6492
6493 anyhow::Ok(())
6494 })
6495 .detach();
6496
6497 result_rx
6498 }
6499
6500 /// Pick paths that might potentially contain a match of a given search query.
6501 #[allow(clippy::too_many_arguments)]
6502 async fn background_search(
6503 unnamed_buffers: Vec<Model<Buffer>>,
6504 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
6505 executor: BackgroundExecutor,
6506 fs: Arc<dyn Fs>,
6507 workers: usize,
6508 query: SearchQuery,
6509 include_root: bool,
6510 path_count: usize,
6511 snapshots: Vec<LocalSnapshot>,
6512 matching_paths_tx: Sender<SearchMatchCandidate>,
6513 ) {
6514 let fs = &fs;
6515 let query = &query;
6516 let matching_paths_tx = &matching_paths_tx;
6517 let snapshots = &snapshots;
6518 for buffer in unnamed_buffers {
6519 matching_paths_tx
6520 .send(SearchMatchCandidate::OpenBuffer {
6521 buffer: buffer.clone(),
6522 path: None,
6523 })
6524 .await
6525 .log_err();
6526 }
6527 for (path, (buffer, _)) in opened_buffers.iter() {
6528 matching_paths_tx
6529 .send(SearchMatchCandidate::OpenBuffer {
6530 buffer: buffer.clone(),
6531 path: Some(path.clone()),
6532 })
6533 .await
6534 .log_err();
6535 }
6536
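        // Split the candidate paths evenly across the workers, rounding up so that every
        // path is assigned to a worker.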
6537 let paths_per_worker = (path_count + workers - 1) / workers;
6538
6539 executor
6540 .scoped(|scope| {
6541 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
6542
6543 for worker_ix in 0..workers {
6544 let worker_start_ix = worker_ix * paths_per_worker;
6545 let worker_end_ix = worker_start_ix + paths_per_worker;
6546 let opened_buffers = opened_buffers.clone();
6547 let limiter = Arc::clone(&max_concurrent_workers);
6548 scope.spawn({
6549 async move {
6550 let _guard = limiter.acquire().await;
6551 search_snapshots(
6552 snapshots,
6553 worker_start_ix,
6554 worker_end_ix,
6555 query,
6556 matching_paths_tx,
6557 &opened_buffers,
6558 include_root,
6559 fs,
6560 )
6561 .await;
6562 }
6563 });
6564 }
6565
6566 if query.include_ignored() {
6567 for snapshot in snapshots {
6568 for ignored_entry in snapshot.entries(true).filter(|e| e.is_ignored) {
6569 let limiter = Arc::clone(&max_concurrent_workers);
6570 scope.spawn(async move {
6571 let _guard = limiter.acquire().await;
6572 search_ignored_entry(
6573 snapshot,
6574 ignored_entry,
6575 fs,
6576 query,
6577 matching_paths_tx,
6578 )
6579 .await;
6580 });
6581 }
6582 }
6583 }
6584 })
6585 .await;
6586 }
6587
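    /// Sends a request to the given language server on behalf of the buffer. For local
    /// projects the request goes directly to the server; for remote projects it is
    /// forwarded to the host over RPC.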
6588 pub fn request_lsp<R: LspCommand>(
6589 &self,
6590 buffer_handle: Model<Buffer>,
6591 server: LanguageServerToQuery,
6592 request: R,
6593 cx: &mut ModelContext<Self>,
6594 ) -> Task<Result<R::Response>>
6595 where
6596 <R::LspRequest as lsp::request::Request>::Result: Send,
6597 <R::LspRequest as lsp::request::Request>::Params: Send,
6598 {
6599 let buffer = buffer_handle.read(cx);
6600 if self.is_local() {
6601 let language_server = match server {
6602 LanguageServerToQuery::Primary => {
6603 match self.primary_language_server_for_buffer(buffer, cx) {
6604 Some((_, server)) => Some(Arc::clone(server)),
6605 None => return Task::ready(Ok(Default::default())),
6606 }
6607 }
6608 LanguageServerToQuery::Other(id) => self
6609 .language_server_for_buffer(buffer, id, cx)
6610 .map(|(_, server)| Arc::clone(server)),
6611 };
6612 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6613 if let (Some(file), Some(language_server)) = (file, language_server) {
6614 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6615 return cx.spawn(move |this, cx| async move {
6616 if !request.check_capabilities(language_server.capabilities()) {
6617 return Ok(Default::default());
6618 }
6619
6620 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6621 let response = match result {
6622 Ok(response) => response,
6623
6624 Err(err) => {
6625 log::warn!(
6626 "Generic lsp request to {} failed: {}",
6627 language_server.name(),
6628 err
6629 );
6630 return Err(err);
6631 }
6632 };
6633
6634 request
6635 .response_from_lsp(
6636 response,
6637 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6638 buffer_handle,
6639 language_server.server_id(),
6640 cx,
6641 )
6642 .await
6643 });
6644 }
6645 } else if let Some(project_id) = self.remote_id() {
6646 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6647 }
6648
6649 Task::ready(Ok(Default::default()))
6650 }
6651
6652 fn send_lsp_proto_request<R: LspCommand>(
6653 &self,
6654 buffer: Model<Buffer>,
6655 project_id: u64,
6656 request: R,
6657 cx: &mut ModelContext<'_, Project>,
6658 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6659 let rpc = self.client.clone();
6660 let message = request.to_proto(project_id, buffer.read(cx));
6661 cx.spawn(move |this, mut cx| async move {
6662 // Ensure the project is still alive by the time the task
6663 // is scheduled.
6664 this.upgrade().context("project dropped")?;
6665 let response = rpc.request(message).await?;
6666 let this = this.upgrade().context("project dropped")?;
6667 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6668 Err(anyhow!("disconnected before completing request"))
6669 } else {
6670 request
6671 .response_from_proto(response, this, buffer, cx)
6672 .await
6673 }
6674 })
6675 }
6676
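    /// Returns the worktree containing the given absolute path and the path relative to
    /// that worktree, creating a new worktree if no existing one contains the path.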
6677 pub fn find_or_create_local_worktree(
6678 &mut self,
6679 abs_path: impl AsRef<Path>,
6680 visible: bool,
6681 cx: &mut ModelContext<Self>,
6682 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6683 let abs_path = abs_path.as_ref();
6684 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6685 Task::ready(Ok((tree, relative_path)))
6686 } else {
6687 let worktree = self.create_local_worktree(abs_path, visible, cx);
6688 cx.background_executor()
6689 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6690 }
6691 }
6692
6693 pub fn find_local_worktree(
6694 &self,
6695 abs_path: &Path,
6696 cx: &AppContext,
6697 ) -> Option<(Model<Worktree>, PathBuf)> {
6698 for tree in &self.worktrees {
6699 if let Some(tree) = tree.upgrade() {
6700 if let Some(relative_path) = tree
6701 .read(cx)
6702 .as_local()
6703 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6704 {
6705 return Some((tree.clone(), relative_path.into()));
6706 }
6707 }
6708 }
6709 None
6710 }
6711
6712 pub fn is_shared(&self) -> bool {
6713 match &self.client_state {
6714 ProjectClientState::Shared { .. } => true,
6715 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6716 }
6717 }
6718
6719 fn create_local_worktree(
6720 &mut self,
6721 abs_path: impl AsRef<Path>,
6722 visible: bool,
6723 cx: &mut ModelContext<Self>,
6724 ) -> Task<Result<Model<Worktree>>> {
6725 let fs = self.fs.clone();
6726 let client = self.client.clone();
6727 let next_entry_id = self.next_entry_id.clone();
6728 let path: Arc<Path> = abs_path.as_ref().into();
6729 let task = self
6730 .loading_local_worktrees
6731 .entry(path.clone())
6732 .or_insert_with(|| {
6733 cx.spawn(move |project, mut cx| {
6734 async move {
6735 let worktree = Worktree::local(
6736 client.clone(),
6737 path.clone(),
6738 visible,
6739 fs,
6740 next_entry_id,
6741 &mut cx,
6742 )
6743 .await;
6744
6745 project.update(&mut cx, |project, _| {
6746 project.loading_local_worktrees.remove(&path);
6747 })?;
6748
6749 let worktree = worktree?;
6750 project
6751 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6752 Ok(worktree)
6753 }
6754 .map_err(Arc::new)
6755 })
6756 .shared()
6757 })
6758 .clone();
6759 cx.background_executor().spawn(async move {
6760 match task.await {
6761 Ok(worktree) => Ok(worktree),
6762 Err(err) => Err(anyhow!("{}", err)),
6763 }
6764 })
6765 }
6766
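    /// Removes the worktree with the given id from the project, dropping language servers,
    /// prettier instances, and task sources that no remaining worktree uses.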
6767 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6768 let mut servers_to_remove = HashMap::default();
6769 let mut servers_to_preserve = HashSet::default();
6770 for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
6771 if worktree_id == &id_to_remove {
6772 servers_to_remove.insert(server_id, server_name.clone());
6773 } else {
6774 servers_to_preserve.insert(server_id);
6775 }
6776 }
6777 servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id));
6778 for (server_id_to_remove, server_name) in servers_to_remove {
6779 self.language_server_ids
6780 .remove(&(id_to_remove, server_name));
6781 self.language_server_statuses.remove(&server_id_to_remove);
6782 self.language_server_watched_paths
6783 .remove(&server_id_to_remove);
6784 self.last_workspace_edits_by_language_server
6785 .remove(&server_id_to_remove);
6786 self.language_servers.remove(&server_id_to_remove);
6787 cx.emit(Event::LanguageServerRemoved(server_id_to_remove));
6788 }
6789
6790 let mut prettier_instances_to_clean = FuturesUnordered::new();
6791 if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) {
6792 for path in prettier_paths.iter().flatten() {
6793 if let Some(prettier_instance) = self.prettier_instances.remove(path) {
6794 prettier_instances_to_clean.push(async move {
6795 prettier_instance
6796 .server()
6797 .await
6798 .map(|server| server.server_id())
6799 });
6800 }
6801 }
6802 }
6803 cx.spawn(|project, mut cx| async move {
6804 while let Some(prettier_server_id) = prettier_instances_to_clean.next().await {
6805 if let Some(prettier_server_id) = prettier_server_id {
6806 project
6807 .update(&mut cx, |project, cx| {
6808 project
6809 .supplementary_language_servers
6810 .remove(&prettier_server_id);
6811 cx.emit(Event::LanguageServerRemoved(prettier_server_id));
6812 })
6813 .ok();
6814 }
6815 }
6816 })
6817 .detach();
6818
6819 self.task_inventory().update(cx, |inventory, _| {
6820 inventory.remove_worktree_sources(id_to_remove);
6821 });
6822
6823 self.worktrees.retain(|worktree| {
6824 if let Some(worktree) = worktree.upgrade() {
6825 let id = worktree.read(cx).id();
6826 if id == id_to_remove {
6827 cx.emit(Event::WorktreeRemoved(id));
6828 false
6829 } else {
6830 true
6831 }
6832 } else {
6833 false
6834 }
6835 });
6836 self.metadata_changed(cx);
6837 }
6838
6839 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6840 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6841 cx.subscribe(worktree, |this, worktree, event, cx| {
6842 let is_local = worktree.read(cx).is_local();
6843 match event {
6844 worktree::Event::UpdatedEntries(changes) => {
6845 if is_local {
6846 this.update_local_worktree_buffers(&worktree, changes, cx);
6847 this.update_local_worktree_language_servers(&worktree, changes, cx);
6848 this.update_local_worktree_settings(&worktree, changes, cx);
6849 this.update_prettier_settings(&worktree, changes, cx);
6850 }
6851
6852 cx.emit(Event::WorktreeUpdatedEntries(
6853 worktree.read(cx).id(),
6854 changes.clone(),
6855 ));
6856 }
6857 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6858 if is_local {
6859 this.update_local_worktree_buffers_git_repos(
6860 worktree.clone(),
6861 updated_repos,
6862 cx,
6863 )
6864 }
6865 cx.emit(Event::WorktreeUpdatedGitRepositories);
6866 }
6867 }
6868 })
6869 .detach();
6870
6871 let push_strong_handle = {
6872 let worktree = worktree.read(cx);
6873 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6874 };
6875 if push_strong_handle {
6876 self.worktrees
6877 .push(WorktreeHandle::Strong(worktree.clone()));
6878 } else {
6879 self.worktrees
6880 .push(WorktreeHandle::Weak(worktree.downgrade()));
6881 }
6882
6883 let handle_id = worktree.entity_id();
6884 cx.observe_release(worktree, move |this, worktree, cx| {
6885 let _ = this.remove_worktree(worktree.id(), cx);
6886 cx.update_global::<SettingsStore, _>(|store, cx| {
6887 store
6888 .clear_local_settings(handle_id.as_u64() as usize, cx)
6889 .log_err()
6890 });
6891 })
6892 .detach();
6893
6894 cx.emit(Event::WorktreeAdded);
6895 self.metadata_changed(cx);
6896 }
6897
6898 fn update_local_worktree_buffers(
6899 &mut self,
6900 worktree_handle: &Model<Worktree>,
6901 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6902 cx: &mut ModelContext<Self>,
6903 ) {
6904 let snapshot = worktree_handle.read(cx).snapshot();
6905
6906 let mut renamed_buffers = Vec::new();
6907 for (path, entry_id, _) in changes {
6908 let worktree_id = worktree_handle.read(cx).id();
6909 let project_path = ProjectPath {
6910 worktree_id,
6911 path: path.clone(),
6912 };
6913
6914 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6915 Some(&buffer_id) => buffer_id,
6916 None => match self.local_buffer_ids_by_path.get(&project_path) {
6917 Some(&buffer_id) => buffer_id,
6918 None => {
6919 continue;
6920 }
6921 },
6922 };
6923
6924 let open_buffer = self.opened_buffers.get(&buffer_id);
6925 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6926 buffer
6927 } else {
6928 self.opened_buffers.remove(&buffer_id);
6929 self.local_buffer_ids_by_path.remove(&project_path);
6930 self.local_buffer_ids_by_entry_id.remove(entry_id);
6931 continue;
6932 };
6933
6934 buffer.update(cx, |buffer, cx| {
6935 if let Some(old_file) = File::from_dyn(buffer.file()) {
6936 if old_file.worktree != *worktree_handle {
6937 return;
6938 }
6939
6940 let new_file = if let Some(entry) = old_file
6941 .entry_id
6942 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6943 {
6944 File {
6945 is_local: true,
6946 entry_id: Some(entry.id),
6947 mtime: entry.mtime,
6948 path: entry.path.clone(),
6949 worktree: worktree_handle.clone(),
6950 is_deleted: false,
6951 is_private: entry.is_private,
6952 }
6953 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6954 File {
6955 is_local: true,
6956 entry_id: Some(entry.id),
6957 mtime: entry.mtime,
6958 path: entry.path.clone(),
6959 worktree: worktree_handle.clone(),
6960 is_deleted: false,
6961 is_private: entry.is_private,
6962 }
6963 } else {
6964 File {
6965 is_local: true,
6966 entry_id: old_file.entry_id,
6967 path: old_file.path().clone(),
6968 mtime: old_file.mtime(),
6969 worktree: worktree_handle.clone(),
6970 is_deleted: true,
6971 is_private: old_file.is_private,
6972 }
6973 };
6974
6975 let old_path = old_file.abs_path(cx);
6976 if new_file.abs_path(cx) != old_path {
6977 renamed_buffers.push((cx.handle(), old_file.clone()));
6978 self.local_buffer_ids_by_path.remove(&project_path);
6979 self.local_buffer_ids_by_path.insert(
6980 ProjectPath {
6981 worktree_id,
6982 path: path.clone(),
6983 },
6984 buffer_id,
6985 );
6986 }
6987
6988 if new_file.entry_id != Some(*entry_id) {
6989 self.local_buffer_ids_by_entry_id.remove(entry_id);
6990 if let Some(entry_id) = new_file.entry_id {
6991 self.local_buffer_ids_by_entry_id
6992 .insert(entry_id, buffer_id);
6993 }
6994 }
6995
6996 if new_file != *old_file {
6997 if let Some(project_id) = self.remote_id() {
6998 self.client
6999 .send(proto::UpdateBufferFile {
7000 project_id,
7001 buffer_id: buffer_id.into(),
7002 file: Some(new_file.to_proto()),
7003 })
7004 .log_err();
7005 }
7006
7007 buffer.file_updated(Arc::new(new_file), cx);
7008 }
7009 }
7010 });
7011 }
7012
7013 for (buffer, old_file) in renamed_buffers {
7014 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
7015 self.detect_language_for_buffer(&buffer, cx);
7016 self.register_buffer_with_language_servers(&buffer, cx);
7017 }
7018 }
7019
7020 fn update_local_worktree_language_servers(
7021 &mut self,
7022 worktree_handle: &Model<Worktree>,
7023 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
7024 cx: &mut ModelContext<Self>,
7025 ) {
7026 if changes.is_empty() {
7027 return;
7028 }
7029
7030 let worktree_id = worktree_handle.read(cx).id();
7031 let mut language_server_ids = self
7032 .language_server_ids
7033 .iter()
7034 .filter_map(|((server_worktree_id, _), server_id)| {
7035 (*server_worktree_id == worktree_id).then_some(*server_id)
7036 })
7037 .collect::<Vec<_>>();
7038 language_server_ids.sort();
7039 language_server_ids.dedup();
7040
7041 let abs_path = worktree_handle.read(cx).abs_path();
7042 for server_id in &language_server_ids {
7043 if let Some(LanguageServerState::Running { server, .. }) =
7044 self.language_servers.get(server_id)
7045 {
7046 if let Some(watched_paths) = self
7047 .language_server_watched_paths
7048 .get(&server_id)
7049 .and_then(|paths| paths.get(&worktree_id))
7050 {
7051 let params = lsp::DidChangeWatchedFilesParams {
7052 changes: changes
7053 .iter()
7054 .filter_map(|(path, _, change)| {
7055 if !watched_paths.is_match(&path) {
7056 return None;
7057 }
7058 let typ = match change {
7059 PathChange::Loaded => return None,
7060 PathChange::Added => lsp::FileChangeType::CREATED,
7061 PathChange::Removed => lsp::FileChangeType::DELETED,
7062 PathChange::Updated => lsp::FileChangeType::CHANGED,
7063 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
7064 };
7065 Some(lsp::FileEvent {
7066 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
7067 typ,
7068 })
7069 })
7070 .collect(),
7071 };
7072 if !params.changes.is_empty() {
7073 server
7074 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
7075 .log_err();
7076 }
7077 }
7078 }
7079 }
7080 }
7081
7082 fn update_local_worktree_buffers_git_repos(
7083 &mut self,
7084 worktree_handle: Model<Worktree>,
7085 changed_repos: &UpdatedGitRepositoriesSet,
7086 cx: &mut ModelContext<Self>,
7087 ) {
7088 debug_assert!(worktree_handle.read(cx).is_local());
7089
        // Identify the loading buffers whose containing repository has changed.
7091 let future_buffers = self
7092 .loading_buffers_by_path
7093 .iter()
7094 .filter_map(|(project_path, receiver)| {
7095 if project_path.worktree_id != worktree_handle.read(cx).id() {
7096 return None;
7097 }
7098 let path = &project_path.path;
7099 changed_repos
7100 .iter()
7101 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7102 let receiver = receiver.clone();
7103 let path = path.clone();
7104 Some(async move {
7105 wait_for_loading_buffer(receiver)
7106 .await
7107 .ok()
7108 .map(|buffer| (buffer, path))
7109 })
7110 })
7111 .collect::<FuturesUnordered<_>>();
7112
7113 // Identify the current buffers whose containing repository has changed.
7114 let current_buffers = self
7115 .opened_buffers
7116 .values()
7117 .filter_map(|buffer| {
7118 let buffer = buffer.upgrade()?;
7119 let file = File::from_dyn(buffer.read(cx).file())?;
7120 if file.worktree != worktree_handle {
7121 return None;
7122 }
7123 let path = file.path();
7124 changed_repos
7125 .iter()
7126 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7127 Some((buffer, path.clone()))
7128 })
7129 .collect::<Vec<_>>();
7130
7131 if future_buffers.len() + current_buffers.len() == 0 {
7132 return;
7133 }
7134
7135 let remote_id = self.remote_id();
7136 let client = self.client.clone();
7137 cx.spawn(move |_, mut cx| async move {
7138 // Wait for all of the buffers to load.
7139 let future_buffers = future_buffers.collect::<Vec<_>>().await;
7140
7141 // Reload the diff base for every buffer whose containing git repository has changed.
7142 let snapshot =
7143 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
7144 let diff_bases_by_buffer = cx
7145 .background_executor()
7146 .spawn(async move {
7147 future_buffers
7148 .into_iter()
7149 .flatten()
7150 .chain(current_buffers)
7151 .filter_map(|(buffer, path)| {
7152 let (work_directory, repo) =
7153 snapshot.repository_and_work_directory_for_path(&path)?;
7154 let repo_entry = snapshot.get_local_repo(&repo)?;
7155 let relative_path = path.strip_prefix(&work_directory).ok()?;
7156 let base_text = repo_entry.repo().lock().load_index_text(relative_path);
7157
7158 Some((buffer, base_text))
7159 })
7160 .collect::<Vec<_>>()
7161 })
7162 .await;
7163
7164 // Assign the new diff bases on all of the buffers.
7165 for (buffer, diff_base) in diff_bases_by_buffer {
7166 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
7167 buffer.set_diff_base(diff_base.clone(), cx);
7168 buffer.remote_id().into()
7169 })?;
7170 if let Some(project_id) = remote_id {
7171 client
7172 .send(proto::UpdateDiffBase {
7173 project_id,
7174 buffer_id,
7175 diff_base,
7176 })
7177 .log_err();
7178 }
7179 }
7180
7181 anyhow::Ok(())
7182 })
7183 .detach();
7184 }
7185
7186 fn update_local_worktree_settings(
7187 &mut self,
7188 worktree: &Model<Worktree>,
7189 changes: &UpdatedEntriesSet,
7190 cx: &mut ModelContext<Self>,
7191 ) {
7192 if worktree.read(cx).as_local().is_none() {
7193 return;
7194 }
7195 let project_id = self.remote_id();
7196 let worktree_id = worktree.entity_id();
7197 let remote_worktree_id = worktree.read(cx).id();
7198
7199 let mut settings_contents = Vec::new();
7200 for (path, _, change) in changes.iter() {
7201 let removed = change == &PathChange::Removed;
7202 let abs_path = match worktree.read(cx).absolutize(path) {
7203 Ok(abs_path) => abs_path,
7204 Err(e) => {
7205 log::warn!("Cannot absolutize {path:?} received as {change:?} FS change: {e}");
7206 continue;
7207 }
7208 };
7209
7210 if abs_path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
7211 let settings_dir = Arc::from(
7212 path.ancestors()
7213 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
7214 .unwrap(),
7215 );
7216 let fs = self.fs.clone();
7217 settings_contents.push(async move {
7218 (
7219 settings_dir,
7220 if removed {
7221 None
7222 } else {
7223 Some(async move { fs.load(&abs_path).await }.await)
7224 },
7225 )
7226 });
7227 } else if abs_path.ends_with(&*LOCAL_TASKS_RELATIVE_PATH) {
7228 self.task_inventory().update(cx, |task_inventory, cx| {
7229 if removed {
7230 task_inventory.remove_local_static_source(&abs_path);
7231 } else {
7232 let fs = self.fs.clone();
7233 let task_abs_path = abs_path.clone();
7234 task_inventory.add_source(
7235 TaskSourceKind::Worktree {
7236 id: remote_worktree_id,
7237 abs_path,
7238 },
7239 |cx| {
7240 let tasks_file_rx =
7241 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7242 StaticSource::new(
7243 format!("local_tasks_for_workspace_{remote_worktree_id}"),
7244 TrackedFile::new(tasks_file_rx, cx),
7245 cx,
7246 )
7247 },
7248 cx,
7249 );
7250 }
7251 })
7252 } else if abs_path.ends_with(&*LOCAL_VSCODE_TASKS_RELATIVE_PATH) {
7253 self.task_inventory().update(cx, |task_inventory, cx| {
7254 if removed {
7255 task_inventory.remove_local_static_source(&abs_path);
7256 } else {
7257 let fs = self.fs.clone();
7258 let task_abs_path = abs_path.clone();
7259 task_inventory.add_source(
7260 TaskSourceKind::Worktree {
7261 id: remote_worktree_id,
7262 abs_path,
7263 },
7264 |cx| {
7265 let tasks_file_rx =
7266 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7267 StaticSource::new(
7268 format!(
7269 "local_vscode_tasks_for_workspace_{remote_worktree_id}"
7270 ),
7271 TrackedFile::new_convertible::<task::VsCodeTaskFile>(
7272 tasks_file_rx,
7273 cx,
7274 ),
7275 cx,
7276 )
7277 },
7278 cx,
7279 );
7280 }
7281 })
7282 }
7283 }
7284
7285 if settings_contents.is_empty() {
7286 return;
7287 }
7288
7289 let client = self.client.clone();
7290 cx.spawn(move |_, cx| async move {
7291 let settings_contents: Vec<(Arc<Path>, _)> =
7292 futures::future::join_all(settings_contents).await;
7293 cx.update(|cx| {
7294 cx.update_global::<SettingsStore, _>(|store, cx| {
7295 for (directory, file_content) in settings_contents {
7296 let file_content = file_content.and_then(|content| content.log_err());
7297 store
7298 .set_local_settings(
7299 worktree_id.as_u64() as usize,
7300 directory.clone(),
7301 file_content.as_deref(),
7302 cx,
7303 )
7304 .log_err();
7305 if let Some(remote_id) = project_id {
7306 client
7307 .send(proto::UpdateWorktreeSettings {
7308 project_id: remote_id,
7309 worktree_id: remote_worktree_id.to_proto(),
7310 path: directory.to_string_lossy().into_owned(),
7311 content: file_content,
7312 })
7313 .log_err();
7314 }
7315 }
7316 });
7317 })
7318 .ok();
7319 })
7320 .detach();
7321 }
7322
7323 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
7324 let new_active_entry = entry.and_then(|project_path| {
7325 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
7326 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
7327 Some(entry.id)
7328 });
7329 if new_active_entry != self.active_entry {
7330 self.active_entry = new_active_entry;
7331 cx.emit(Event::ActiveEntryChanged(new_active_entry));
7332 }
7333 }
7334
7335 pub fn language_servers_running_disk_based_diagnostics(
7336 &self,
7337 ) -> impl Iterator<Item = LanguageServerId> + '_ {
7338 self.language_server_statuses
7339 .iter()
7340 .filter_map(|(id, status)| {
7341 if status.has_pending_diagnostic_updates {
7342 Some(*id)
7343 } else {
7344 None
7345 }
7346 })
7347 }
7348
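    /// Returns the total number of diagnostic errors and warnings across all visible
    /// worktrees, optionally including ignored paths.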
7349 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
7350 let mut summary = DiagnosticSummary::default();
7351 for (_, _, path_summary) in
7352 self.diagnostic_summaries(include_ignored, cx)
7353 .filter(|(path, _, _)| {
                    let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                    include_ignored || is_ignored == Some(false)
7356 })
7357 {
7358 summary.error_count += path_summary.error_count;
7359 summary.warning_count += path_summary.warning_count;
7360 }
7361 summary
7362 }
7363
7364 pub fn diagnostic_summaries<'a>(
7365 &'a self,
7366 include_ignored: bool,
7367 cx: &'a AppContext,
7368 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
7369 self.visible_worktrees(cx)
7370 .flat_map(move |worktree| {
7371 let worktree = worktree.read(cx);
7372 let worktree_id = worktree.id();
7373 worktree
7374 .diagnostic_summaries()
7375 .map(move |(path, server_id, summary)| {
7376 (ProjectPath { worktree_id, path }, server_id, summary)
7377 })
7378 })
7379 .filter(move |(path, _, _)| {
                let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                include_ignored || is_ignored == Some(false)
7382 })
7383 }
7384
7385 pub fn disk_based_diagnostics_started(
7386 &mut self,
7387 language_server_id: LanguageServerId,
7388 cx: &mut ModelContext<Self>,
7389 ) {
7390 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
7391 }
7392
7393 pub fn disk_based_diagnostics_finished(
7394 &mut self,
7395 language_server_id: LanguageServerId,
7396 cx: &mut ModelContext<Self>,
7397 ) {
7398 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
7399 }
7400
7401 pub fn active_entry(&self) -> Option<ProjectEntryId> {
7402 self.active_entry
7403 }
7404
7405 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
7406 self.worktree_for_id(path.worktree_id, cx)?
7407 .read(cx)
7408 .entry_for_path(&path.path)
7409 .cloned()
7410 }
7411
7412 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
7413 let worktree = self.worktree_for_entry(entry_id, cx)?;
7414 let worktree = worktree.read(cx);
7415 let worktree_id = worktree.id();
7416 let path = worktree.entry_for_id(entry_id)?.path.clone();
7417 Some(ProjectPath { worktree_id, path })
7418 }
7419
7420 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
7421 let workspace_root = self
7422 .worktree_for_id(project_path.worktree_id, cx)?
7423 .read(cx)
7424 .abs_path();
7425 let project_path = project_path.path.as_ref();
7426
7427 Some(if project_path == Path::new("") {
7428 workspace_root.to_path_buf()
7429 } else {
7430 workspace_root.join(project_path)
7431 })
7432 }
7433
7434 pub fn get_workspace_root(
7435 &self,
7436 project_path: &ProjectPath,
7437 cx: &AppContext,
7438 ) -> Option<PathBuf> {
7439 Some(
7440 self.worktree_for_id(project_path.worktree_id, cx)?
7441 .read(cx)
7442 .abs_path()
7443 .to_path_buf(),
7444 )
7445 }
7446
7447 pub fn get_repo(
7448 &self,
7449 project_path: &ProjectPath,
7450 cx: &AppContext,
7451 ) -> Option<Arc<Mutex<dyn GitRepository>>> {
7452 self.worktree_for_id(project_path.worktree_id, cx)?
7453 .read(cx)
7454 .as_local()?
7455 .snapshot()
7456 .local_git_repo(&project_path.path)
7457 }
7458
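    /// Computes a git blame for the buffer's contents (optionally at a specific version),
    /// either locally or by asking the host when the project is remote.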
7459 pub fn blame_buffer(
7460 &self,
7461 buffer: &Model<Buffer>,
7462 version: Option<clock::Global>,
7463 cx: &AppContext,
7464 ) -> Task<Result<Blame>> {
7465 if self.is_local() {
7466 let blame_params = maybe!({
7467 let buffer = buffer.read(cx);
7468 let buffer_project_path = buffer
7469 .project_path(cx)
7470 .context("failed to get buffer project path")?;
7471
7472 let worktree = self
7473 .worktree_for_id(buffer_project_path.worktree_id, cx)
7474 .context("failed to get worktree")?
7475 .read(cx)
7476 .as_local()
7477 .context("worktree was not local")?
7478 .snapshot();
7479 let (work_directory, repo) = worktree
7480 .repository_and_work_directory_for_path(&buffer_project_path.path)
7481 .context("failed to get repo for blamed buffer")?;
7482
7483 let repo_entry = worktree
7484 .get_local_repo(&repo)
7485 .context("failed to get repo for blamed buffer")?;
7486
7487 let relative_path = buffer_project_path
7488 .path
7489 .strip_prefix(&work_directory)?
7490 .to_path_buf();
7491
7492 let content = match version {
7493 Some(version) => buffer.rope_for_version(&version).clone(),
7494 None => buffer.as_rope().clone(),
7495 };
7496 let repo = repo_entry.repo().clone();
7497
7498 anyhow::Ok((repo, relative_path, content))
7499 });
7500
7501 cx.background_executor().spawn(async move {
7502 let (repo, relative_path, content) = blame_params?;
7503 let lock = repo.lock();
7504 lock.blame(&relative_path, content)
7505 })
7506 } else {
7507 let project_id = self.remote_id();
7508 let buffer_id = buffer.read(cx).remote_id();
7509 let client = self.client.clone();
7510 let version = buffer.read(cx).version();
7511
7512 cx.spawn(|_| async move {
7513 let project_id = project_id.context("unable to get project id for buffer")?;
7514 let response = client
7515 .request(proto::BlameBuffer {
7516 project_id,
7517 buffer_id: buffer_id.into(),
7518 version: serialize_version(&version),
7519 })
7520 .await?;
7521
7522 Ok(deserialize_blame_buffer_response(response))
7523 })
7524 }
7525 }
7526
7527 // RPC message handlers
7528
7529 async fn handle_blame_buffer(
7530 this: Model<Self>,
7531 envelope: TypedEnvelope<proto::BlameBuffer>,
7532 _: Arc<Client>,
7533 mut cx: AsyncAppContext,
7534 ) -> Result<proto::BlameBufferResponse> {
7535 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
7536 let version = deserialize_version(&envelope.payload.version);
7537
7538 let buffer = this.update(&mut cx, |this, _cx| {
7539 this.opened_buffers
7540 .get(&buffer_id)
7541 .and_then(|buffer| buffer.upgrade())
7542 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
7543 })??;
7544
7545 buffer
7546 .update(&mut cx, |buffer, _| {
7547 buffer.wait_for_version(version.clone())
7548 })?
7549 .await?;
7550
7551 let blame = this
7552 .update(&mut cx, |this, cx| {
7553 this.blame_buffer(&buffer, Some(version), cx)
7554 })?
7555 .await?;
7556
7557 Ok(serialize_blame_buffer_response(blame))
7558 }
7559
7560 async fn handle_unshare_project(
7561 this: Model<Self>,
7562 _: TypedEnvelope<proto::UnshareProject>,
7563 _: Arc<Client>,
7564 mut cx: AsyncAppContext,
7565 ) -> Result<()> {
7566 this.update(&mut cx, |this, cx| {
7567 if this.is_local() {
7568 this.unshare(cx)?;
7569 } else {
7570 this.disconnected_from_host(cx);
7571 }
7572 Ok(())
7573 })?
7574 }
7575
7576 async fn handle_add_collaborator(
7577 this: Model<Self>,
7578 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
7579 _: Arc<Client>,
7580 mut cx: AsyncAppContext,
7581 ) -> Result<()> {
7582 let collaborator = envelope
7583 .payload
7584 .collaborator
7585 .take()
7586 .ok_or_else(|| anyhow!("empty collaborator"))?;
7587
7588 let collaborator = Collaborator::from_proto(collaborator)?;
7589 this.update(&mut cx, |this, cx| {
7590 this.shared_buffers.remove(&collaborator.peer_id);
7591 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
7592 this.collaborators
7593 .insert(collaborator.peer_id, collaborator);
7594 cx.notify();
7595 })?;
7596
7597 Ok(())
7598 }
7599
7600 async fn handle_update_project_collaborator(
7601 this: Model<Self>,
7602 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
7603 _: Arc<Client>,
7604 mut cx: AsyncAppContext,
7605 ) -> Result<()> {
7606 let old_peer_id = envelope
7607 .payload
7608 .old_peer_id
7609 .ok_or_else(|| anyhow!("missing old peer id"))?;
7610 let new_peer_id = envelope
7611 .payload
7612 .new_peer_id
7613 .ok_or_else(|| anyhow!("missing new peer id"))?;
7614 this.update(&mut cx, |this, cx| {
7615 let collaborator = this
7616 .collaborators
7617 .remove(&old_peer_id)
7618 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
7619 let is_host = collaborator.replica_id == 0;
7620 this.collaborators.insert(new_peer_id, collaborator);
7621
7622 let buffers = this.shared_buffers.remove(&old_peer_id);
7623 log::info!(
7624 "peer {} became {}. moving buffers {:?}",
7625 old_peer_id,
7626 new_peer_id,
7627 &buffers
7628 );
7629 if let Some(buffers) = buffers {
7630 this.shared_buffers.insert(new_peer_id, buffers);
7631 }
7632
7633 if is_host {
7634 this.opened_buffers
7635 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
7636 this.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
7637 .unwrap();
7638 }
7639
7640 cx.emit(Event::CollaboratorUpdated {
7641 old_peer_id,
7642 new_peer_id,
7643 });
7644 cx.notify();
7645 Ok(())
7646 })?
7647 }
7648
7649 async fn handle_remove_collaborator(
7650 this: Model<Self>,
7651 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7652 _: Arc<Client>,
7653 mut cx: AsyncAppContext,
7654 ) -> Result<()> {
7655 this.update(&mut cx, |this, cx| {
7656 let peer_id = envelope
7657 .payload
7658 .peer_id
7659 .ok_or_else(|| anyhow!("invalid peer id"))?;
7660 let replica_id = this
7661 .collaborators
7662 .remove(&peer_id)
7663 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7664 .replica_id;
7665 for buffer in this.opened_buffers.values() {
7666 if let Some(buffer) = buffer.upgrade() {
7667 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7668 }
7669 }
7670 this.shared_buffers.remove(&peer_id);
7671
7672 cx.emit(Event::CollaboratorLeft(peer_id));
7673 cx.notify();
7674 Ok(())
7675 })?
7676 }
7677
7678 async fn handle_update_project(
7679 this: Model<Self>,
7680 envelope: TypedEnvelope<proto::UpdateProject>,
7681 _: Arc<Client>,
7682 mut cx: AsyncAppContext,
7683 ) -> Result<()> {
7684 this.update(&mut cx, |this, cx| {
            // Don't handle messages that were sent before the response to our join-project request.
7686 if envelope.message_id > this.join_project_response_message_id {
7687 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7688 }
7689 Ok(())
7690 })?
7691 }
7692
7693 async fn handle_update_worktree(
7694 this: Model<Self>,
7695 envelope: TypedEnvelope<proto::UpdateWorktree>,
7696 _: Arc<Client>,
7697 mut cx: AsyncAppContext,
7698 ) -> Result<()> {
7699 this.update(&mut cx, |this, cx| {
7700 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7701 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7702 worktree.update(cx, |worktree, _| {
7703 let worktree = worktree.as_remote_mut().unwrap();
7704 worktree.update_from_remote(envelope.payload);
7705 });
7706 }
7707 Ok(())
7708 })?
7709 }
7710
7711 async fn handle_update_worktree_settings(
7712 this: Model<Self>,
7713 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7714 _: Arc<Client>,
7715 mut cx: AsyncAppContext,
7716 ) -> Result<()> {
7717 this.update(&mut cx, |this, cx| {
7718 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7719 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7720 cx.update_global::<SettingsStore, _>(|store, cx| {
7721 store
7722 .set_local_settings(
7723 worktree.entity_id().as_u64() as usize,
7724 PathBuf::from(&envelope.payload.path).into(),
7725 envelope.payload.content.as_deref(),
7726 cx,
7727 )
7728 .log_err();
7729 });
7730 }
7731 Ok(())
7732 })?
7733 }
7734
7735 async fn handle_create_project_entry(
7736 this: Model<Self>,
7737 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7738 _: Arc<Client>,
7739 mut cx: AsyncAppContext,
7740 ) -> Result<proto::ProjectEntryResponse> {
7741 let worktree = this.update(&mut cx, |this, cx| {
7742 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7743 this.worktree_for_id(worktree_id, cx)
7744 .ok_or_else(|| anyhow!("worktree not found"))
7745 })??;
7746 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7747 let entry = worktree
7748 .update(&mut cx, |worktree, cx| {
7749 let worktree = worktree.as_local_mut().unwrap();
7750 let path = PathBuf::from(envelope.payload.path);
7751 worktree.create_entry(path, envelope.payload.is_directory, cx)
7752 })?
7753 .await?;
7754 Ok(proto::ProjectEntryResponse {
7755 entry: entry.as_ref().map(|e| e.into()),
7756 worktree_scan_id: worktree_scan_id as u64,
7757 })
7758 }
7759
7760 async fn handle_rename_project_entry(
7761 this: Model<Self>,
7762 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7763 _: Arc<Client>,
7764 mut cx: AsyncAppContext,
7765 ) -> Result<proto::ProjectEntryResponse> {
7766 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7767 let worktree = this.update(&mut cx, |this, cx| {
7768 this.worktree_for_entry(entry_id, cx)
7769 .ok_or_else(|| anyhow!("worktree not found"))
7770 })??;
7771 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7772 let entry = worktree
7773 .update(&mut cx, |worktree, cx| {
7774 let new_path = PathBuf::from(envelope.payload.new_path);
7775 worktree
7776 .as_local_mut()
7777 .unwrap()
7778 .rename_entry(entry_id, new_path, cx)
7779 })?
7780 .await?;
7781 Ok(proto::ProjectEntryResponse {
7782 entry: entry.as_ref().map(|e| e.into()),
7783 worktree_scan_id: worktree_scan_id as u64,
7784 })
7785 }
7786
7787 async fn handle_copy_project_entry(
7788 this: Model<Self>,
7789 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7790 _: Arc<Client>,
7791 mut cx: AsyncAppContext,
7792 ) -> Result<proto::ProjectEntryResponse> {
7793 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7794 let worktree = this.update(&mut cx, |this, cx| {
7795 this.worktree_for_entry(entry_id, cx)
7796 .ok_or_else(|| anyhow!("worktree not found"))
7797 })??;
7798 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7799 let entry = worktree
7800 .update(&mut cx, |worktree, cx| {
7801 let new_path = PathBuf::from(envelope.payload.new_path);
7802 worktree
7803 .as_local_mut()
7804 .unwrap()
7805 .copy_entry(entry_id, new_path, cx)
7806 })?
7807 .await?;
7808 Ok(proto::ProjectEntryResponse {
7809 entry: entry.as_ref().map(|e| e.into()),
7810 worktree_scan_id: worktree_scan_id as u64,
7811 })
7812 }
7813
7814 async fn handle_delete_project_entry(
7815 this: Model<Self>,
7816 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7817 _: Arc<Client>,
7818 mut cx: AsyncAppContext,
7819 ) -> Result<proto::ProjectEntryResponse> {
7820 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7821
7822 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7823
7824 let worktree = this.update(&mut cx, |this, cx| {
7825 this.worktree_for_entry(entry_id, cx)
7826 .ok_or_else(|| anyhow!("worktree not found"))
7827 })??;
7828 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7829 worktree
7830 .update(&mut cx, |worktree, cx| {
7831 worktree
7832 .as_local_mut()
7833 .unwrap()
7834 .delete_entry(entry_id, cx)
7835 .ok_or_else(|| anyhow!("invalid entry"))
7836 })??
7837 .await?;
7838 Ok(proto::ProjectEntryResponse {
7839 entry: None,
7840 worktree_scan_id: worktree_scan_id as u64,
7841 })
7842 }
7843
7844 async fn handle_expand_project_entry(
7845 this: Model<Self>,
7846 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7847 _: Arc<Client>,
7848 mut cx: AsyncAppContext,
7849 ) -> Result<proto::ExpandProjectEntryResponse> {
7850 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7851 let worktree = this
7852 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7853 .ok_or_else(|| anyhow!("invalid request"))?;
7854 worktree
7855 .update(&mut cx, |worktree, cx| {
7856 worktree
7857 .as_local_mut()
7858 .unwrap()
7859 .expand_entry(entry_id, cx)
7860 .ok_or_else(|| anyhow!("invalid entry"))
7861 })??
7862 .await?;
7863 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7864 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7865 }
7866
7867 async fn handle_update_diagnostic_summary(
7868 this: Model<Self>,
7869 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7870 _: Arc<Client>,
7871 mut cx: AsyncAppContext,
7872 ) -> Result<()> {
7873 this.update(&mut cx, |this, cx| {
7874 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7875 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7876 if let Some(summary) = envelope.payload.summary {
7877 let project_path = ProjectPath {
7878 worktree_id,
7879 path: Path::new(&summary.path).into(),
7880 };
7881 worktree.update(cx, |worktree, _| {
7882 worktree
7883 .as_remote_mut()
7884 .unwrap()
7885 .update_diagnostic_summary(project_path.path.clone(), &summary);
7886 });
7887 cx.emit(Event::DiagnosticsUpdated {
7888 language_server_id: LanguageServerId(summary.language_server_id as usize),
7889 path: project_path,
7890 });
7891 }
7892 }
7893 Ok(())
7894 })?
7895 }
7896
7897 async fn handle_start_language_server(
7898 this: Model<Self>,
7899 envelope: TypedEnvelope<proto::StartLanguageServer>,
7900 _: Arc<Client>,
7901 mut cx: AsyncAppContext,
7902 ) -> Result<()> {
7903 let server = envelope
7904 .payload
7905 .server
7906 .ok_or_else(|| anyhow!("invalid server"))?;
7907 this.update(&mut cx, |this, cx| {
7908 this.language_server_statuses.insert(
7909 LanguageServerId(server.id as usize),
7910 LanguageServerStatus {
7911 name: server.name,
7912 pending_work: Default::default(),
7913 has_pending_diagnostic_updates: false,
7914 progress_tokens: Default::default(),
7915 },
7916 );
7917 cx.notify();
7918 })?;
7919 Ok(())
7920 }
7921
7922 async fn handle_update_language_server(
7923 this: Model<Self>,
7924 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7925 _: Arc<Client>,
7926 mut cx: AsyncAppContext,
7927 ) -> Result<()> {
7928 this.update(&mut cx, |this, cx| {
7929 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7930
7931 match envelope
7932 .payload
7933 .variant
7934 .ok_or_else(|| anyhow!("invalid variant"))?
7935 {
7936 proto::update_language_server::Variant::WorkStart(payload) => {
7937 this.on_lsp_work_start(
7938 language_server_id,
7939 payload.token,
7940 LanguageServerProgress {
7941 message: payload.message,
7942 percentage: payload.percentage.map(|p| p as usize),
7943 last_update_at: Instant::now(),
7944 },
7945 cx,
7946 );
7947 }
7948
7949 proto::update_language_server::Variant::WorkProgress(payload) => {
7950 this.on_lsp_work_progress(
7951 language_server_id,
7952 payload.token,
7953 LanguageServerProgress {
7954 message: payload.message,
7955 percentage: payload.percentage.map(|p| p as usize),
7956 last_update_at: Instant::now(),
7957 },
7958 cx,
7959 );
7960 }
7961
7962 proto::update_language_server::Variant::WorkEnd(payload) => {
7963 this.on_lsp_work_end(language_server_id, payload.token, cx);
7964 }
7965
7966 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7967 this.disk_based_diagnostics_started(language_server_id, cx);
7968 }
7969
7970 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7971 this.disk_based_diagnostics_finished(language_server_id, cx)
7972 }
7973 }
7974
7975 Ok(())
7976 })?
7977 }
7978
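    /// Applies incoming buffer operations to the matching open buffer, or queues
    /// them until the buffer is opened when acting as a guest.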
7979 async fn handle_update_buffer(
7980 this: Model<Self>,
7981 envelope: TypedEnvelope<proto::UpdateBuffer>,
7982 _: Arc<Client>,
7983 mut cx: AsyncAppContext,
7984 ) -> Result<proto::Ack> {
7985 this.update(&mut cx, |this, cx| {
7986 let payload = envelope.payload.clone();
7987 let buffer_id = BufferId::new(payload.buffer_id)?;
7988 let ops = payload
7989 .operations
7990 .into_iter()
7991 .map(language::proto::deserialize_operation)
7992 .collect::<Result<Vec<_>, _>>()?;
7993 let is_remote = this.is_remote();
7994 match this.opened_buffers.entry(buffer_id) {
7995 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7996 OpenBuffer::Strong(buffer) => {
7997 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7998 }
7999 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
8000 OpenBuffer::Weak(_) => {}
8001 },
8002 hash_map::Entry::Vacant(e) => {
8003 assert!(
8004 is_remote,
8005 "received buffer update from {:?}",
8006 envelope.original_sender_id
8007 );
8008 e.insert(OpenBuffer::Operations(ops));
8009 }
8010 }
8011 Ok(proto::Ack {})
8012 })?
8013 }
8014
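    /// Builds a remote buffer from the host's `CreateBufferForPeer` messages: the
    /// initial `State` variant creates an incomplete buffer, and subsequent `Chunk`
    /// variants apply operations until the final chunk registers the buffer.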
8015 async fn handle_create_buffer_for_peer(
8016 this: Model<Self>,
8017 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
8018 _: Arc<Client>,
8019 mut cx: AsyncAppContext,
8020 ) -> Result<()> {
8021 this.update(&mut cx, |this, cx| {
8022 match envelope
8023 .payload
8024 .variant
8025 .ok_or_else(|| anyhow!("missing variant"))?
8026 {
8027 proto::create_buffer_for_peer::Variant::State(mut state) => {
8028 let buffer_id = BufferId::new(state.id)?;
8029
8030 let buffer_result = maybe!({
8031 let mut buffer_file = None;
8032 if let Some(file) = state.file.take() {
8033 let worktree_id = WorktreeId::from_proto(file.worktree_id);
8034 let worktree =
8035 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
8036 anyhow!("no worktree found for id {}", file.worktree_id)
8037 })?;
8038 buffer_file =
8039 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
8040 as Arc<dyn language::File>);
8041 }
8042 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
8043 });
8044
8045 match buffer_result {
8046 Ok(buffer) => {
8047 let buffer = cx.new_model(|_| buffer);
8048 this.incomplete_remote_buffers.insert(buffer_id, buffer);
8049 }
8050 Err(error) => {
8051 if let Some(listeners) = this.loading_buffers.remove(&buffer_id) {
8052 for listener in listeners {
8053 listener.send(Err(anyhow!(error.cloned()))).ok();
8054 }
8055 }
8056 }
8057 };
8058 }
8059 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
8060 let buffer_id = BufferId::new(chunk.buffer_id)?;
8061 let buffer = this
8062 .incomplete_remote_buffers
8063 .get(&buffer_id)
8064 .cloned()
8065 .ok_or_else(|| {
8066 anyhow!(
8067 "received chunk for buffer {} without initial state",
8068 chunk.buffer_id
8069 )
8070 })?;
8071
8072 let result = maybe!({
8073 let operations = chunk
8074 .operations
8075 .into_iter()
8076 .map(language::proto::deserialize_operation)
8077 .collect::<Result<Vec<_>>>()?;
8078 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))
8079 });
8080
8081 if let Err(error) = result {
8082 this.incomplete_remote_buffers.remove(&buffer_id);
8083 if let Some(listeners) = this.loading_buffers.remove(&buffer_id) {
8084 for listener in listeners {
8085 listener.send(Err(error.cloned())).ok();
8086 }
8087 }
                    } else if chunk.is_last {
                        this.incomplete_remote_buffers.remove(&buffer_id);
                        this.register_buffer(&buffer, cx)?;
                    }
8094 }
8095 }
8096
8097 Ok(())
8098 })?
8099 }
8100
8101 async fn handle_update_diff_base(
8102 this: Model<Self>,
8103 envelope: TypedEnvelope<proto::UpdateDiffBase>,
8104 _: Arc<Client>,
8105 mut cx: AsyncAppContext,
8106 ) -> Result<()> {
8107 this.update(&mut cx, |this, cx| {
8108 let buffer_id = envelope.payload.buffer_id;
8109 let buffer_id = BufferId::new(buffer_id)?;
8110 let diff_base = envelope.payload.diff_base;
8111 if let Some(buffer) = this
8112 .opened_buffers
8113 .get_mut(&buffer_id)
8114 .and_then(|b| b.upgrade())
8115 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned())
8116 {
8117 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
8118 }
8119 Ok(())
8120 })?
8121 }
8122
8123 async fn handle_update_buffer_file(
8124 this: Model<Self>,
8125 envelope: TypedEnvelope<proto::UpdateBufferFile>,
8126 _: Arc<Client>,
8127 mut cx: AsyncAppContext,
8128 ) -> Result<()> {
8129 let buffer_id = envelope.payload.buffer_id;
8130 let buffer_id = BufferId::new(buffer_id)?;
8131
8132 this.update(&mut cx, |this, cx| {
8133 let payload = envelope.payload.clone();
8134 if let Some(buffer) = this
8135 .opened_buffers
8136 .get(&buffer_id)
8137 .and_then(|b| b.upgrade())
8138 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned())
8139 {
8140 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
8141 let worktree = this
8142 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
8143 .ok_or_else(|| anyhow!("no such worktree"))?;
8144 let file = File::from_proto(file, worktree, cx)?;
8145 buffer.update(cx, |buffer, cx| {
8146 buffer.file_updated(Arc::new(file), cx);
8147 });
8148 this.detect_language_for_buffer(&buffer, cx);
8149 }
8150 Ok(())
8151 })?
8152 }
8153
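    /// Saves a buffer on behalf of a guest after waiting for the guest's version,
    /// then reports the saved version, mtime, and fingerprint back.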
8154 async fn handle_save_buffer(
8155 this: Model<Self>,
8156 envelope: TypedEnvelope<proto::SaveBuffer>,
8157 _: Arc<Client>,
8158 mut cx: AsyncAppContext,
8159 ) -> Result<proto::BufferSaved> {
8160 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8161 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
8162 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
8163 let buffer = this
8164 .opened_buffers
8165 .get(&buffer_id)
8166 .and_then(|buffer| buffer.upgrade())
8167 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8168 anyhow::Ok((project_id, buffer))
8169 })??;
8170 buffer
8171 .update(&mut cx, |buffer, _| {
8172 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8173 })?
8174 .await?;
8175 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
8176
8177 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
8178 .await?;
8179 buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
8180 project_id,
8181 buffer_id: buffer_id.into(),
8182 version: serialize_version(buffer.saved_version()),
8183 mtime: buffer.saved_mtime().map(|time| time.into()),
8184 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
8185 })
8186 }
8187
8188 async fn handle_reload_buffers(
8189 this: Model<Self>,
8190 envelope: TypedEnvelope<proto::ReloadBuffers>,
8191 _: Arc<Client>,
8192 mut cx: AsyncAppContext,
8193 ) -> Result<proto::ReloadBuffersResponse> {
8194 let sender_id = envelope.original_sender_id()?;
8195 let reload = this.update(&mut cx, |this, cx| {
8196 let mut buffers = HashSet::default();
8197 for buffer_id in &envelope.payload.buffer_ids {
8198 let buffer_id = BufferId::new(*buffer_id)?;
8199 buffers.insert(
8200 this.opened_buffers
8201 .get(&buffer_id)
8202 .and_then(|buffer| buffer.upgrade())
8203 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8204 );
8205 }
8206 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
8207 })??;
8208
8209 let project_transaction = reload.await?;
8210 let project_transaction = this.update(&mut cx, |this, cx| {
8211 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8212 })?;
8213 Ok(proto::ReloadBuffersResponse {
8214 transaction: Some(project_transaction),
8215 })
8216 }
8217
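    /// Responds to a guest's buffer-synchronization request by recording which
    /// buffers are shared with them and sending any operations, file metadata,
    /// diff bases, and reload state they are missing.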
8218 async fn handle_synchronize_buffers(
8219 this: Model<Self>,
8220 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
8221 _: Arc<Client>,
8222 mut cx: AsyncAppContext,
8223 ) -> Result<proto::SynchronizeBuffersResponse> {
8224 let project_id = envelope.payload.project_id;
8225 let mut response = proto::SynchronizeBuffersResponse {
8226 buffers: Default::default(),
8227 };
8228
8229 this.update(&mut cx, |this, cx| {
8230 let Some(guest_id) = envelope.original_sender_id else {
8231 error!("missing original_sender_id on SynchronizeBuffers request");
8232 bail!("missing original_sender_id on SynchronizeBuffers request");
8233 };
8234
8235 this.shared_buffers.entry(guest_id).or_default().clear();
8236 for buffer in envelope.payload.buffers {
8237 let buffer_id = BufferId::new(buffer.id)?;
8238 let remote_version = language::proto::deserialize_version(&buffer.version);
8239 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8240 this.shared_buffers
8241 .entry(guest_id)
8242 .or_default()
8243 .insert(buffer_id);
8244
8245 let buffer = buffer.read(cx);
8246 response.buffers.push(proto::BufferVersion {
8247 id: buffer_id.into(),
8248 version: language::proto::serialize_version(&buffer.version),
8249 });
8250
8251 let operations = buffer.serialize_ops(Some(remote_version), cx);
8252 let client = this.client.clone();
8253 if let Some(file) = buffer.file() {
8254 client
8255 .send(proto::UpdateBufferFile {
8256 project_id,
8257 buffer_id: buffer_id.into(),
8258 file: Some(file.to_proto()),
8259 })
8260 .log_err();
8261 }
8262
8263 client
8264 .send(proto::UpdateDiffBase {
8265 project_id,
8266 buffer_id: buffer_id.into(),
8267 diff_base: buffer.diff_base().map(Into::into),
8268 })
8269 .log_err();
8270
8271 client
8272 .send(proto::BufferReloaded {
8273 project_id,
8274 buffer_id: buffer_id.into(),
8275 version: language::proto::serialize_version(buffer.saved_version()),
8276 mtime: buffer.saved_mtime().map(|time| time.into()),
8277 fingerprint: language::proto::serialize_fingerprint(
8278 buffer.saved_version_fingerprint(),
8279 ),
8280 line_ending: language::proto::serialize_line_ending(
8281 buffer.line_ending(),
8282 ) as i32,
8283 })
8284 .log_err();
8285
8286 cx.background_executor()
8287 .spawn(
8288 async move {
8289 let operations = operations.await;
8290 for chunk in split_operations(operations) {
8291 client
8292 .request(proto::UpdateBuffer {
8293 project_id,
8294 buffer_id: buffer_id.into(),
8295 operations: chunk,
8296 })
8297 .await?;
8298 }
8299 anyhow::Ok(())
8300 }
8301 .log_err(),
8302 )
8303 .detach();
8304 }
8305 }
8306 Ok(())
8307 })??;
8308
8309 Ok(response)
8310 }
8311
8312 async fn handle_format_buffers(
8313 this: Model<Self>,
8314 envelope: TypedEnvelope<proto::FormatBuffers>,
8315 _: Arc<Client>,
8316 mut cx: AsyncAppContext,
8317 ) -> Result<proto::FormatBuffersResponse> {
8318 let sender_id = envelope.original_sender_id()?;
8319 let format = this.update(&mut cx, |this, cx| {
8320 let mut buffers = HashSet::default();
8321 for buffer_id in &envelope.payload.buffer_ids {
8322 let buffer_id = BufferId::new(*buffer_id)?;
8323 buffers.insert(
8324 this.opened_buffers
8325 .get(&buffer_id)
8326 .and_then(|buffer| buffer.upgrade())
8327 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8328 );
8329 }
8330 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
8331 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
8332 })??;
8333
8334 let project_transaction = format.await?;
8335 let project_transaction = this.update(&mut cx, |this, cx| {
8336 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8337 })?;
8338 Ok(proto::FormatBuffersResponse {
8339 transaction: Some(project_transaction),
8340 })
8341 }
8342
8343 async fn handle_apply_additional_edits_for_completion(
8344 this: Model<Self>,
8345 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
8346 _: Arc<Client>,
8347 mut cx: AsyncAppContext,
8348 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
8349 let languages = this.update(&mut cx, |this, _| this.languages.clone())?;
8350 let (buffer, completion) = this.update(&mut cx, |this, cx| {
8351 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8352 let buffer = this
8353 .opened_buffers
8354 .get(&buffer_id)
8355 .and_then(|buffer| buffer.upgrade())
8356 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8357 let language = buffer.read(cx).language();
8358 let completion = language::proto::deserialize_completion(
8359 envelope
8360 .payload
8361 .completion
8362 .ok_or_else(|| anyhow!("invalid completion"))?,
8363 language.cloned(),
8364 &languages,
8365 );
8366 Ok::<_, anyhow::Error>((buffer, completion))
8367 })??;
8368
8369 let completion = completion.await?;
8370
8371 let apply_additional_edits = this.update(&mut cx, |this, cx| {
8372 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
8373 })?;
8374
8375 Ok(proto::ApplyCompletionAdditionalEditsResponse {
8376 transaction: apply_additional_edits
8377 .await?
8378 .as_ref()
8379 .map(language::proto::serialize_transaction),
8380 })
8381 }
8382
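    /// Resolves completion documentation on the host by forwarding
    /// `completionItem/resolve` to the owning language server and returning the
    /// documentation text, flagging whether it is Markdown.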
8383 async fn handle_resolve_completion_documentation(
8384 this: Model<Self>,
8385 envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
8386 _: Arc<Client>,
8387 mut cx: AsyncAppContext,
8388 ) -> Result<proto::ResolveCompletionDocumentationResponse> {
8389 let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
8390
8391 let completion = this
8392 .read_with(&mut cx, |this, _| {
8393 let id = LanguageServerId(envelope.payload.language_server_id as usize);
8394 let Some(server) = this.language_server_for_id(id) else {
8395 return Err(anyhow!("No language server {id}"));
8396 };
8397
8398 Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
8399 })??
8400 .await?;
8401
8402 let mut is_markdown = false;
8403 let text = match completion.documentation {
8404 Some(lsp::Documentation::String(text)) => text,
8405
8406 Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
8407 is_markdown = kind == lsp::MarkupKind::Markdown;
8408 value
8409 }
8410
8411 _ => String::new(),
8412 };
8413
8414 Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
8415 }
8416
8417 async fn handle_apply_code_action(
8418 this: Model<Self>,
8419 envelope: TypedEnvelope<proto::ApplyCodeAction>,
8420 _: Arc<Client>,
8421 mut cx: AsyncAppContext,
8422 ) -> Result<proto::ApplyCodeActionResponse> {
8423 let sender_id = envelope.original_sender_id()?;
8424 let action = language::proto::deserialize_code_action(
8425 envelope
8426 .payload
8427 .action
8428 .ok_or_else(|| anyhow!("invalid action"))?,
8429 )?;
8430 let apply_code_action = this.update(&mut cx, |this, cx| {
8431 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8432 let buffer = this
8433 .opened_buffers
8434 .get(&buffer_id)
8435 .and_then(|buffer| buffer.upgrade())
8436 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
8437 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
8438 })??;
8439
8440 let project_transaction = apply_code_action.await?;
8441 let project_transaction = this.update(&mut cx, |this, cx| {
8442 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8443 })?;
8444 Ok(proto::ApplyCodeActionResponse {
8445 transaction: Some(project_transaction),
8446 })
8447 }
8448
8449 async fn handle_on_type_formatting(
8450 this: Model<Self>,
8451 envelope: TypedEnvelope<proto::OnTypeFormatting>,
8452 _: Arc<Client>,
8453 mut cx: AsyncAppContext,
8454 ) -> Result<proto::OnTypeFormattingResponse> {
8455 let on_type_formatting = this.update(&mut cx, |this, cx| {
8456 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8457 let buffer = this
8458 .opened_buffers
8459 .get(&buffer_id)
8460 .and_then(|buffer| buffer.upgrade())
8461 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8462 let position = envelope
8463 .payload
8464 .position
8465 .and_then(deserialize_anchor)
8466 .ok_or_else(|| anyhow!("invalid position"))?;
8467 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
8468 buffer,
8469 position,
8470 envelope.payload.trigger.clone(),
8471 cx,
8472 ))
8473 })??;
8474
8475 let transaction = on_type_formatting
8476 .await?
8477 .as_ref()
8478 .map(language::proto::serialize_transaction);
8479 Ok(proto::OnTypeFormattingResponse { transaction })
8480 }
8481
8482 async fn handle_inlay_hints(
8483 this: Model<Self>,
8484 envelope: TypedEnvelope<proto::InlayHints>,
8485 _: Arc<Client>,
8486 mut cx: AsyncAppContext,
8487 ) -> Result<proto::InlayHintsResponse> {
8488 let sender_id = envelope.original_sender_id()?;
8489 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8490 let buffer = this.update(&mut cx, |this, _| {
8491 this.opened_buffers
8492 .get(&buffer_id)
8493 .and_then(|buffer| buffer.upgrade())
8494 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
8495 })??;
8496 buffer
8497 .update(&mut cx, |buffer, _| {
8498 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8499 })?
8500 .await
8501 .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?;
8502
8503 let start = envelope
8504 .payload
8505 .start
8506 .and_then(deserialize_anchor)
8507 .context("missing range start")?;
8508 let end = envelope
8509 .payload
8510 .end
8511 .and_then(deserialize_anchor)
8512 .context("missing range end")?;
8513 let buffer_hints = this
8514 .update(&mut cx, |project, cx| {
8515 project.inlay_hints(buffer.clone(), start..end, cx)
8516 })?
8517 .await
8518 .context("inlay hints fetch")?;
8519
8520 this.update(&mut cx, |project, cx| {
8521 InlayHints::response_to_proto(
8522 buffer_hints,
8523 project,
8524 sender_id,
8525 &buffer.read(cx).version(),
8526 cx,
8527 )
8528 })
8529 }
8530
8531 async fn handle_resolve_inlay_hint(
8532 this: Model<Self>,
8533 envelope: TypedEnvelope<proto::ResolveInlayHint>,
8534 _: Arc<Client>,
8535 mut cx: AsyncAppContext,
8536 ) -> Result<proto::ResolveInlayHintResponse> {
        let proto_hint = envelope
            .payload
            .hint
            .ok_or_else(|| {
                anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint")
            })?;
8541 let hint = InlayHints::proto_to_project_hint(proto_hint)
8542 .context("resolved proto inlay hint conversion")?;
8543 let buffer = this.update(&mut cx, |this, _cx| {
8544 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8545 this.opened_buffers
8546 .get(&buffer_id)
8547 .and_then(|buffer| buffer.upgrade())
8548 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8549 })??;
8550 let response_hint = this
8551 .update(&mut cx, |project, cx| {
8552 project.resolve_inlay_hint(
8553 hint,
8554 buffer,
8555 LanguageServerId(envelope.payload.language_server_id as usize),
8556 cx,
8557 )
8558 })?
8559 .await
8560 .context("inlay hints fetch")?;
8561 Ok(proto::ResolveInlayHintResponse {
8562 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
8563 })
8564 }
8565
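    /// Resolves a code action via `codeAction/resolve` when the server supports it
    /// and the action carries resolve data but is missing its command or edit.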
8566 async fn try_resolve_code_action(
8567 lang_server: &LanguageServer,
8568 action: &mut CodeAction,
8569 ) -> anyhow::Result<()> {
8570 if GetCodeActions::can_resolve_actions(&lang_server.capabilities()) {
8571 if action.lsp_action.data.is_some()
8572 && (action.lsp_action.command.is_none() || action.lsp_action.edit.is_none())
8573 {
8574 action.lsp_action = lang_server
8575 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action.clone())
8576 .await?;
8577 }
8578 }
8579
8580 anyhow::Ok(())
8581 }
8582
8583 async fn handle_refresh_inlay_hints(
8584 this: Model<Self>,
8585 _: TypedEnvelope<proto::RefreshInlayHints>,
8586 _: Arc<Client>,
8587 mut cx: AsyncAppContext,
8588 ) -> Result<proto::Ack> {
8589 this.update(&mut cx, |_, cx| {
8590 cx.emit(Event::RefreshInlayHints);
8591 })?;
8592 Ok(proto::Ack {})
8593 }
8594
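    /// Generic handler for proxied LSP requests: deserializes the request for the
    /// target buffer, runs it against the primary language server, and serializes
    /// the response back for the requesting peer.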
8595 async fn handle_lsp_command<T: LspCommand>(
8596 this: Model<Self>,
8597 envelope: TypedEnvelope<T::ProtoRequest>,
8598 _: Arc<Client>,
8599 mut cx: AsyncAppContext,
8600 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
8601 where
8602 <T::LspRequest as lsp::request::Request>::Params: Send,
8603 <T::LspRequest as lsp::request::Request>::Result: Send,
8604 {
8605 let sender_id = envelope.original_sender_id()?;
8606 let buffer_id = T::buffer_id_from_proto(&envelope.payload)?;
8607 let buffer_handle = this.update(&mut cx, |this, _cx| {
8608 this.opened_buffers
8609 .get(&buffer_id)
8610 .and_then(|buffer| buffer.upgrade())
8611 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8612 })??;
8613 let request = T::from_proto(
8614 envelope.payload,
8615 this.clone(),
8616 buffer_handle.clone(),
8617 cx.clone(),
8618 )
8619 .await?;
8620 let response = this
8621 .update(&mut cx, |this, cx| {
8622 this.request_lsp(
8623 buffer_handle.clone(),
8624 LanguageServerToQuery::Primary,
8625 request,
8626 cx,
8627 )
8628 })?
8629 .await?;
8630 this.update(&mut cx, |this, cx| {
8631 Ok(T::response_to_proto(
8632 response,
8633 this,
8634 sender_id,
8635 &buffer_handle.read(cx).version(),
8636 cx,
8637 ))
8638 })?
8639 }
8640
8641 async fn handle_get_project_symbols(
8642 this: Model<Self>,
8643 envelope: TypedEnvelope<proto::GetProjectSymbols>,
8644 _: Arc<Client>,
8645 mut cx: AsyncAppContext,
8646 ) -> Result<proto::GetProjectSymbolsResponse> {
8647 let symbols = this
8648 .update(&mut cx, |this, cx| {
8649 this.symbols(&envelope.payload.query, cx)
8650 })?
8651 .await?;
8652
8653 Ok(proto::GetProjectSymbolsResponse {
8654 symbols: symbols.iter().map(serialize_symbol).collect(),
8655 })
8656 }
8657
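    /// Runs a project-wide search for a peer, streaming results and converting each
    /// matching range into a `proto::Location` in a buffer shared with that peer.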
8658 async fn handle_search_project(
8659 this: Model<Self>,
8660 envelope: TypedEnvelope<proto::SearchProject>,
8661 _: Arc<Client>,
8662 mut cx: AsyncAppContext,
8663 ) -> Result<proto::SearchProjectResponse> {
8664 let peer_id = envelope.original_sender_id()?;
8665 let query = SearchQuery::from_proto(envelope.payload)?;
8666 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
8667
8668 cx.spawn(move |mut cx| async move {
8669 let mut locations = Vec::new();
8670 let mut limit_reached = false;
8671 while let Some(result) = result.next().await {
8672 match result {
8673 SearchResult::Buffer { buffer, ranges } => {
8674 for range in ranges {
8675 let start = serialize_anchor(&range.start);
8676 let end = serialize_anchor(&range.end);
8677 let buffer_id = this.update(&mut cx, |this, cx| {
8678 this.create_buffer_for_peer(&buffer, peer_id, cx).into()
8679 })?;
8680 locations.push(proto::Location {
8681 buffer_id,
8682 start: Some(start),
8683 end: Some(end),
8684 });
8685 }
8686 }
8687 SearchResult::LimitReached => limit_reached = true,
8688 }
8689 }
8690 Ok(proto::SearchProjectResponse {
8691 locations,
8692 limit_reached,
8693 })
8694 })
8695 .await
8696 }
8697
8698 async fn handle_open_buffer_for_symbol(
8699 this: Model<Self>,
8700 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
8701 _: Arc<Client>,
8702 mut cx: AsyncAppContext,
8703 ) -> Result<proto::OpenBufferForSymbolResponse> {
8704 let peer_id = envelope.original_sender_id()?;
8705 let symbol = envelope
8706 .payload
8707 .symbol
8708 .ok_or_else(|| anyhow!("invalid symbol"))?;
8709 let symbol = this
8710 .update(&mut cx, |this, _cx| this.deserialize_symbol(symbol))?
8711 .await?;
8712 let symbol = this.update(&mut cx, |this, _| {
8713 let signature = this.symbol_signature(&symbol.path);
8714 if signature == symbol.signature {
8715 Ok(symbol)
8716 } else {
8717 Err(anyhow!("invalid symbol signature"))
8718 }
8719 })??;
8720 let buffer = this
8721 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
8722 .await?;
8723
8724 this.update(&mut cx, |this, cx| {
8725 let is_private = buffer
8726 .read(cx)
8727 .file()
8728 .map(|f| f.is_private())
8729 .unwrap_or_default();
8730 if is_private {
8731 Err(anyhow!(ErrorCode::UnsharedItem))
8732 } else {
8733 Ok(proto::OpenBufferForSymbolResponse {
8734 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
8735 })
8736 }
8737 })?
8738 }
8739
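    /// Produces a signature for a project path by hashing the worktree id, the path,
    /// and this project's nonce; used when validating symbols received from peers.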
8740 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
8741 let mut hasher = Sha256::new();
8742 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
8743 hasher.update(project_path.path.to_string_lossy().as_bytes());
8744 hasher.update(self.nonce.to_be_bytes());
8745 hasher.finalize().as_slice().try_into().unwrap()
8746 }
8747
8748 async fn handle_open_buffer_by_id(
8749 this: Model<Self>,
8750 envelope: TypedEnvelope<proto::OpenBufferById>,
8751 _: Arc<Client>,
8752 mut cx: AsyncAppContext,
8753 ) -> Result<proto::OpenBufferResponse> {
8754 let peer_id = envelope.original_sender_id()?;
8755 let buffer_id = BufferId::new(envelope.payload.id)?;
8756 let buffer = this
8757 .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))?
8758 .await?;
8759 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
8760 }
8761
8762 async fn handle_open_buffer_by_path(
8763 this: Model<Self>,
8764 envelope: TypedEnvelope<proto::OpenBufferByPath>,
8765 _: Arc<Client>,
8766 mut cx: AsyncAppContext,
8767 ) -> Result<proto::OpenBufferResponse> {
8768 let peer_id = envelope.original_sender_id()?;
8769 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8770 let open_buffer = this.update(&mut cx, |this, cx| {
8771 this.open_buffer(
8772 ProjectPath {
8773 worktree_id,
8774 path: PathBuf::from(envelope.payload.path).into(),
8775 },
8776 cx,
8777 )
8778 })?;
8779
8780 let buffer = open_buffer.await?;
8781 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
8782 }
8783
8784 fn respond_to_open_buffer_request(
8785 this: Model<Self>,
8786 buffer: Model<Buffer>,
8787 peer_id: proto::PeerId,
8788 cx: &mut AsyncAppContext,
8789 ) -> Result<proto::OpenBufferResponse> {
8790 this.update(cx, |this, cx| {
8791 let is_private = buffer
8792 .read(cx)
8793 .file()
8794 .map(|f| f.is_private())
8795 .unwrap_or_default();
8796 if is_private {
8797 Err(anyhow!(ErrorCode::UnsharedItem))
8798 } else {
8799 Ok(proto::OpenBufferResponse {
8800 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
8801 })
8802 }
8803 })?
8804 }
8805
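    /// Converts a `ProjectTransaction` into its protobuf form, ensuring each
    /// affected buffer has been (or will be) sent to the given peer.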
8806 fn serialize_project_transaction_for_peer(
8807 &mut self,
8808 project_transaction: ProjectTransaction,
8809 peer_id: proto::PeerId,
8810 cx: &mut AppContext,
8811 ) -> proto::ProjectTransaction {
8812 let mut serialized_transaction = proto::ProjectTransaction {
8813 buffer_ids: Default::default(),
8814 transactions: Default::default(),
8815 };
8816 for (buffer, transaction) in project_transaction.0 {
8817 serialized_transaction
8818 .buffer_ids
8819 .push(self.create_buffer_for_peer(&buffer, peer_id, cx).into());
8820 serialized_transaction
8821 .transactions
8822 .push(language::proto::serialize_transaction(&transaction));
8823 }
8824 serialized_transaction
8825 }
8826
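    /// Rebuilds a `ProjectTransaction` from protobuf, waiting for each referenced
    /// remote buffer and its edits to arrive before optionally pushing the
    /// transactions to the undo history.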
8827 fn deserialize_project_transaction(
8828 &mut self,
8829 message: proto::ProjectTransaction,
8830 push_to_history: bool,
8831 cx: &mut ModelContext<Self>,
8832 ) -> Task<Result<ProjectTransaction>> {
8833 cx.spawn(move |this, mut cx| async move {
8834 let mut project_transaction = ProjectTransaction::default();
8835 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
8836 {
8837 let buffer_id = BufferId::new(buffer_id)?;
8838 let buffer = this
8839 .update(&mut cx, |this, cx| {
8840 this.wait_for_remote_buffer(buffer_id, cx)
8841 })?
8842 .await?;
8843 let transaction = language::proto::deserialize_transaction(transaction)?;
8844 project_transaction.0.insert(buffer, transaction);
8845 }
8846
8847 for (buffer, transaction) in &project_transaction.0 {
8848 buffer
8849 .update(&mut cx, |buffer, _| {
8850 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
8851 })?
8852 .await?;
8853
8854 if push_to_history {
8855 buffer.update(&mut cx, |buffer, _| {
8856 buffer.push_transaction(transaction.clone(), Instant::now());
8857 })?;
8858 }
8859 }
8860
8861 Ok(project_transaction)
8862 })
8863 }
8864
8865 fn create_buffer_for_peer(
8866 &mut self,
8867 buffer: &Model<Buffer>,
8868 peer_id: proto::PeerId,
8869 cx: &mut AppContext,
8870 ) -> BufferId {
8871 let buffer_id = buffer.read(cx).remote_id();
8872 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
8873 updates_tx
8874 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
8875 .ok();
8876 }
8877 buffer_id
8878 }
8879
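    /// Returns the buffer with the given id, or a task that resolves once the host
    /// has sent it to us.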
8880 fn wait_for_remote_buffer(
8881 &mut self,
8882 id: BufferId,
8883 cx: &mut ModelContext<Self>,
8884 ) -> Task<Result<Model<Buffer>>> {
8885 let buffer = self
8886 .opened_buffers
8887 .get(&id)
8888 .and_then(|buffer| buffer.upgrade());
8889
8890 if let Some(buffer) = buffer {
8891 return Task::ready(Ok(buffer));
8892 }
8893
8894 let (tx, rx) = oneshot::channel();
8895 self.loading_buffers.entry(id).or_default().push(tx);
8896
8897 cx.background_executor().spawn(async move { rx.await? })
8898 }
8899
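    /// After reconnecting to a host, reconciles local buffers with the host's by
    /// exchanging version vectors, re-sending any operations the host is missing,
    /// and re-requesting any buffers whose initial state never fully arrived.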
8900 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8901 let project_id = match self.client_state {
8902 ProjectClientState::Remote {
8903 sharing_has_stopped,
8904 remote_id,
8905 ..
8906 } => {
8907 if sharing_has_stopped {
8908 return Task::ready(Err(anyhow!(
8909 "can't synchronize remote buffers on a readonly project"
8910 )));
8911 } else {
8912 remote_id
8913 }
8914 }
8915 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8916 return Task::ready(Err(anyhow!(
8917 "can't synchronize remote buffers on a local project"
8918 )))
8919 }
8920 };
8921
8922 let client = self.client.clone();
8923 cx.spawn(move |this, mut cx| async move {
8924 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8925 let buffers = this
8926 .opened_buffers
8927 .iter()
8928 .filter_map(|(id, buffer)| {
8929 let buffer = buffer.upgrade()?;
8930 Some(proto::BufferVersion {
8931 id: (*id).into(),
8932 version: language::proto::serialize_version(&buffer.read(cx).version),
8933 })
8934 })
8935 .collect();
8936 let incomplete_buffer_ids = this
8937 .incomplete_remote_buffers
8938 .keys()
8939 .copied()
8940 .collect::<Vec<_>>();
8941
8942 (buffers, incomplete_buffer_ids)
8943 })?;
8944 let response = client
8945 .request(proto::SynchronizeBuffers {
8946 project_id,
8947 buffers,
8948 })
8949 .await?;
8950
8951 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8952 response
8953 .buffers
8954 .into_iter()
8955 .map(|buffer| {
8956 let client = client.clone();
8957 let buffer_id = match BufferId::new(buffer.id) {
8958 Ok(id) => id,
8959 Err(e) => {
8960 return Task::ready(Err(e));
8961 }
8962 };
8963 let remote_version = language::proto::deserialize_version(&buffer.version);
8964 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8965 let operations =
8966 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8967 cx.background_executor().spawn(async move {
8968 let operations = operations.await;
8969 for chunk in split_operations(operations) {
8970 client
8971 .request(proto::UpdateBuffer {
8972 project_id,
8973 buffer_id: buffer_id.into(),
8974 operations: chunk,
8975 })
8976 .await?;
8977 }
8978 anyhow::Ok(())
8979 })
8980 } else {
8981 Task::ready(Ok(()))
8982 }
8983 })
8984 .collect::<Vec<_>>()
8985 })?;
8986
            // Any incomplete buffers have open requests waiting. Ask the host to re-create
            // these buffers for us so that any waiting futures are unblocked.
8989 for id in incomplete_buffer_ids {
8990 cx.background_executor()
8991 .spawn(client.request(proto::OpenBufferById {
8992 project_id,
8993 id: id.into(),
8994 }))
8995 .detach();
8996 }
8997
8998 futures::future::join_all(send_updates_for_buffers)
8999 .await
9000 .into_iter()
9001 .collect()
9002 })
9003 }
9004
9005 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
9006 self.worktrees()
9007 .map(|worktree| {
9008 let worktree = worktree.read(cx);
9009 proto::WorktreeMetadata {
9010 id: worktree.id().to_proto(),
9011 root_name: worktree.root_name().into(),
9012 visible: worktree.is_visible(),
9013 abs_path: worktree.abs_path().to_string_lossy().into(),
9014 }
9015 })
9016 .collect()
9017 }
9018
9019 fn set_worktrees_from_proto(
9020 &mut self,
9021 worktrees: Vec<proto::WorktreeMetadata>,
9022 cx: &mut ModelContext<Project>,
9023 ) -> Result<()> {
9024 let replica_id = self.replica_id();
9025 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
9026
9027 let mut old_worktrees_by_id = self
9028 .worktrees
9029 .drain(..)
9030 .filter_map(|worktree| {
9031 let worktree = worktree.upgrade()?;
9032 Some((worktree.read(cx).id(), worktree))
9033 })
9034 .collect::<HashMap<_, _>>();
9035
9036 for worktree in worktrees {
9037 if let Some(old_worktree) =
9038 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
9039 {
9040 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
9041 } else {
9042 let worktree =
9043 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
9044 let _ = self.add_worktree(&worktree, cx);
9045 }
9046 }
9047
9048 self.metadata_changed(cx);
9049 for id in old_worktrees_by_id.keys() {
9050 cx.emit(Event::WorktreeRemoved(*id));
9051 }
9052
9053 Ok(())
9054 }
9055
9056 fn set_collaborators_from_proto(
9057 &mut self,
9058 messages: Vec<proto::Collaborator>,
9059 cx: &mut ModelContext<Self>,
9060 ) -> Result<()> {
9061 let mut collaborators = HashMap::default();
9062 for message in messages {
9063 let collaborator = Collaborator::from_proto(message)?;
9064 collaborators.insert(collaborator.peer_id, collaborator);
9065 }
9066 for old_peer_id in self.collaborators.keys() {
9067 if !collaborators.contains_key(old_peer_id) {
9068 cx.emit(Event::CollaboratorLeft(*old_peer_id));
9069 }
9070 }
9071 self.collaborators = collaborators;
9072 Ok(())
9073 }
9074
9075 fn deserialize_symbol(
9076 &self,
9077 serialized_symbol: proto::Symbol,
9078 ) -> impl Future<Output = Result<Symbol>> {
9079 let languages = self.languages.clone();
9080 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
9081 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
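        // Note: this trusts the peer to have sent a valid symbol kind; the raw
        // serialized value is transmuted back without validation.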
9082 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
9083 let path = ProjectPath {
9084 worktree_id,
9085 path: PathBuf::from(serialized_symbol.path).into(),
9086 };
9087 let language = languages.language_for_file_path(&path.path);
9088
9089 async move {
9090 let language = language.await.log_err();
9091 let adapter = language
9092 .as_ref()
9093 .and_then(|language| languages.lsp_adapters(language).first().cloned());
9094 let start = serialized_symbol
9095 .start
9096 .ok_or_else(|| anyhow!("invalid start"))?;
9097 let end = serialized_symbol
9098 .end
9099 .ok_or_else(|| anyhow!("invalid end"))?;
9100 Ok(Symbol {
9101 language_server_name: LanguageServerName(
9102 serialized_symbol.language_server_name.into(),
9103 ),
9104 source_worktree_id,
9105 path,
9106 label: {
9107 match language.as_ref().zip(adapter.as_ref()) {
9108 Some((language, adapter)) => {
9109 adapter
9110 .label_for_symbol(&serialized_symbol.name, kind, language)
9111 .await
9112 }
9113 None => None,
9114 }
9115 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
9116 },
9117
9118 name: serialized_symbol.name,
9119 range: Unclipped(PointUtf16::new(start.row, start.column))
9120 ..Unclipped(PointUtf16::new(end.row, end.column)),
9121 kind,
9122 signature: serialized_symbol
9123 .signature
9124 .try_into()
9125 .map_err(|_| anyhow!("invalid signature"))?,
9126 })
9127 }
9128 }
9129
9130 async fn handle_buffer_saved(
9131 this: Model<Self>,
9132 envelope: TypedEnvelope<proto::BufferSaved>,
9133 _: Arc<Client>,
9134 mut cx: AsyncAppContext,
9135 ) -> Result<()> {
9136 let fingerprint = Default::default();
9137 let version = deserialize_version(&envelope.payload.version);
9138 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
9139 let mtime = envelope.payload.mtime.map(|time| time.into());
9140
9141 this.update(&mut cx, |this, cx| {
9142 let buffer = this
9143 .opened_buffers
9144 .get(&buffer_id)
9145 .and_then(|buffer| buffer.upgrade())
9146 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned());
9147 if let Some(buffer) = buffer {
9148 buffer.update(cx, |buffer, cx| {
9149 buffer.did_save(version, fingerprint, mtime, cx);
9150 });
9151 }
9152 Ok(())
9153 })?
9154 }
9155
9156 async fn handle_buffer_reloaded(
9157 this: Model<Self>,
9158 envelope: TypedEnvelope<proto::BufferReloaded>,
9159 _: Arc<Client>,
9160 mut cx: AsyncAppContext,
9161 ) -> Result<()> {
9162 let payload = envelope.payload;
9163 let version = deserialize_version(&payload.version);
9164 let fingerprint = RopeFingerprint::default();
9165 let line_ending = deserialize_line_ending(
9166 proto::LineEnding::from_i32(payload.line_ending)
9167 .ok_or_else(|| anyhow!("missing line ending"))?,
9168 );
9169 let mtime = payload.mtime.map(|time| time.into());
9170 let buffer_id = BufferId::new(payload.buffer_id)?;
9171 this.update(&mut cx, |this, cx| {
9172 let buffer = this
9173 .opened_buffers
9174 .get(&buffer_id)
9175 .and_then(|buffer| buffer.upgrade())
9176 .or_else(|| this.incomplete_remote_buffers.get(&buffer_id).cloned());
9177 if let Some(buffer) = buffer {
9178 buffer.update(cx, |buffer, cx| {
9179 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
9180 });
9181 }
9182 Ok(())
9183 })?
9184 }
9185
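    /// Converts LSP text edits into anchor-ranged buffer edits against the snapshot
    /// the server was operating on, merging adjacent edits and diffing multi-line
    /// replacements so that anchors in unchanged regions are preserved.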
9186 #[allow(clippy::type_complexity)]
9187 fn edits_from_lsp(
9188 &mut self,
9189 buffer: &Model<Buffer>,
9190 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
9191 server_id: LanguageServerId,
9192 version: Option<i32>,
9193 cx: &mut ModelContext<Self>,
9194 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
9195 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
9196 cx.background_executor().spawn(async move {
9197 let snapshot = snapshot?;
9198 let mut lsp_edits = lsp_edits
9199 .into_iter()
9200 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
9201 .collect::<Vec<_>>();
9202 lsp_edits.sort_by_key(|(range, _)| range.start);
9203
9204 let mut lsp_edits = lsp_edits.into_iter().peekable();
9205 let mut edits = Vec::new();
9206 while let Some((range, mut new_text)) = lsp_edits.next() {
9207 // Clip invalid ranges provided by the language server.
9208 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
9209 ..snapshot.clip_point_utf16(range.end, Bias::Left);
9210
9211 // Combine any LSP edits that are adjacent.
9212 //
9213 // Also, combine LSP edits that are separated from each other by only
9214 // a newline. This is important because for some code actions,
9215 // Rust-analyzer rewrites the entire buffer via a series of edits that
9216 // are separated by unchanged newline characters.
9217 //
9218 // In order for the diffing logic below to work properly, any edits that
9219 // cancel each other out must be combined into one.
9220 while let Some((next_range, next_text)) = lsp_edits.peek() {
9221 if next_range.start.0 > range.end {
9222 if next_range.start.0.row > range.end.row + 1
9223 || next_range.start.0.column > 0
9224 || snapshot.clip_point_utf16(
9225 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
9226 Bias::Left,
9227 ) > range.end
9228 {
9229 break;
9230 }
9231 new_text.push('\n');
9232 }
9233 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
9234 new_text.push_str(next_text);
9235 lsp_edits.next();
9236 }
9237
9238 // For multiline edits, perform a diff of the old and new text so that
9239 // we can identify the changes more precisely, preserving the locations
9240 // of any anchors positioned in the unchanged regions.
9241 if range.end.row > range.start.row {
9242 let mut offset = range.start.to_offset(&snapshot);
9243 let old_text = snapshot.text_for_range(range).collect::<String>();
9244
9245 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
9246 let mut moved_since_edit = true;
9247 for change in diff.iter_all_changes() {
9248 let tag = change.tag();
9249 let value = change.value();
9250 match tag {
9251 ChangeTag::Equal => {
9252 offset += value.len();
9253 moved_since_edit = true;
9254 }
9255 ChangeTag::Delete => {
9256 let start = snapshot.anchor_after(offset);
9257 let end = snapshot.anchor_before(offset + value.len());
9258 if moved_since_edit {
9259 edits.push((start..end, String::new()));
9260 } else {
9261 edits.last_mut().unwrap().0.end = end;
9262 }
9263 offset += value.len();
9264 moved_since_edit = false;
9265 }
9266 ChangeTag::Insert => {
9267 if moved_since_edit {
9268 let anchor = snapshot.anchor_after(offset);
9269 edits.push((anchor..anchor, value.to_string()));
9270 } else {
9271 edits.last_mut().unwrap().1.push_str(value);
9272 }
9273 moved_since_edit = false;
9274 }
9275 }
9276 }
9277 } else if range.end == range.start {
9278 let anchor = snapshot.anchor_after(range.start);
9279 edits.push((anchor..anchor, new_text));
9280 } else {
9281 let edit_start = snapshot.anchor_after(range.start);
9282 let edit_end = snapshot.anchor_before(range.end);
9283 edits.push((edit_start..edit_end, new_text));
9284 }
9285 }
9286
9287 Ok(edits)
9288 })
9289 }
9290
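    /// Looks up the buffer snapshot matching the document version the language
    /// server reported (pruning snapshots older than the last few versions), or
    /// returns the current text snapshot when no version was given.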
9291 fn buffer_snapshot_for_lsp_version(
9292 &mut self,
9293 buffer: &Model<Buffer>,
9294 server_id: LanguageServerId,
9295 version: Option<i32>,
9296 cx: &AppContext,
9297 ) -> Result<TextBufferSnapshot> {
9298 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
9299
9300 if let Some(version) = version {
9301 let buffer_id = buffer.read(cx).remote_id();
9302 let snapshots = self
9303 .buffer_snapshots
9304 .get_mut(&buffer_id)
9305 .and_then(|m| m.get_mut(&server_id))
9306 .ok_or_else(|| {
9307 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
9308 })?;
9309
9310 let found_snapshot = snapshots
9311 .binary_search_by_key(&version, |e| e.version)
9312 .map(|ix| snapshots[ix].snapshot.clone())
9313 .map_err(|_| {
9314 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
9315 })?;
9316
9317 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
9318 Ok(found_snapshot)
9319 } else {
9320 Ok((buffer.read(cx)).text_snapshot())
9321 }
9322 }
9323
9324 pub fn language_servers(
9325 &self,
9326 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
9327 self.language_server_ids
9328 .iter()
9329 .map(|((worktree_id, server_name), server_id)| {
9330 (*server_id, server_name.clone(), *worktree_id)
9331 })
9332 }
9333
9334 pub fn supplementary_language_servers(
9335 &self,
9336 ) -> impl '_
9337 + Iterator<
9338 Item = (
9339 &LanguageServerId,
9340 &(LanguageServerName, Arc<LanguageServer>),
9341 ),
9342 > {
9343 self.supplementary_language_servers.iter()
9344 }
9345
9346 pub fn language_server_adapter_for_id(
9347 &self,
9348 id: LanguageServerId,
9349 ) -> Option<Arc<CachedLspAdapter>> {
9350 if let Some(LanguageServerState::Running { adapter, .. }) = self.language_servers.get(&id) {
9351 Some(adapter.clone())
9352 } else {
9353 None
9354 }
9355 }
9356
9357 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
9358 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
9359 Some(server.clone())
9360 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
9361 Some(Arc::clone(server))
9362 } else {
9363 None
9364 }
9365 }
9366
9367 pub fn language_servers_for_buffer(
9368 &self,
9369 buffer: &Buffer,
9370 cx: &AppContext,
9371 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9372 self.language_server_ids_for_buffer(buffer, cx)
9373 .into_iter()
9374 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
9375 LanguageServerState::Running {
9376 adapter, server, ..
9377 } => Some((adapter, server)),
9378 _ => None,
9379 })
9380 }
9381
9382 fn primary_language_server_for_buffer(
9383 &self,
9384 buffer: &Buffer,
9385 cx: &AppContext,
9386 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9387 self.language_servers_for_buffer(buffer, cx)
9388 .find(|s| s.0.is_primary)
9389 }
9390
9391 pub fn language_server_for_buffer(
9392 &self,
9393 buffer: &Buffer,
9394 server_id: LanguageServerId,
9395 cx: &AppContext,
9396 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9397 self.language_servers_for_buffer(buffer, cx)
9398 .find(|(_, s)| s.server_id() == server_id)
9399 }
9400
9401 fn language_server_ids_for_buffer(
9402 &self,
9403 buffer: &Buffer,
9404 cx: &AppContext,
9405 ) -> Vec<LanguageServerId> {
9406 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
9407 let worktree_id = file.worktree_id(cx);
9408 self.languages
9409 .lsp_adapters(&language)
9410 .iter()
9411 .flat_map(|adapter| {
9412 let key = (worktree_id, adapter.name.clone());
9413 self.language_server_ids.get(&key).copied()
9414 })
9415 .collect()
9416 } else {
9417 Vec::new()
9418 }
9419 }
9420}
9421
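/// Scans this worker's slice of the worktree snapshots for files whose paths and
/// contents match the search query, skipping already-open buffers and sending
/// candidate paths down `results_tx`.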
9422#[allow(clippy::too_many_arguments)]
9423async fn search_snapshots(
    snapshots: &[LocalSnapshot],
9425 worker_start_ix: usize,
9426 worker_end_ix: usize,
9427 query: &SearchQuery,
9428 results_tx: &Sender<SearchMatchCandidate>,
9429 opened_buffers: &HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
9430 include_root: bool,
9431 fs: &Arc<dyn Fs>,
9432) {
9433 let mut snapshot_start_ix = 0;
9434 let mut abs_path = PathBuf::new();
9435
9436 for snapshot in snapshots {
9437 let snapshot_end_ix = snapshot_start_ix
9438 + if query.include_ignored() {
9439 snapshot.file_count()
9440 } else {
9441 snapshot.visible_file_count()
9442 };
9443 if worker_end_ix <= snapshot_start_ix {
9444 break;
9445 } else if worker_start_ix > snapshot_end_ix {
9446 snapshot_start_ix = snapshot_end_ix;
9447 continue;
9448 } else {
9449 let start_in_snapshot = worker_start_ix.saturating_sub(snapshot_start_ix);
9450 let end_in_snapshot = cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
9451
9452 for entry in snapshot
9453 .files(false, start_in_snapshot)
9454 .take(end_in_snapshot - start_in_snapshot)
9455 {
9456 if results_tx.is_closed() {
9457 break;
9458 }
9459 if opened_buffers.contains_key(&entry.path) {
9460 continue;
9461 }
9462
9463 let matched_path = if include_root {
9464 let mut full_path = PathBuf::from(snapshot.root_name());
9465 full_path.push(&entry.path);
9466 query.file_matches(Some(&full_path))
9467 } else {
9468 query.file_matches(Some(&entry.path))
9469 };
9470
9471 let matches = if matched_path {
9472 abs_path.clear();
9473 abs_path.push(&snapshot.abs_path());
9474 abs_path.push(&entry.path);
9475 if let Some(file) = fs.open_sync(&abs_path).await.log_err() {
9476 query.detect(file).unwrap_or(false)
9477 } else {
9478 false
9479 }
9480 } else {
9481 false
9482 };
9483
9484 if matches {
9485 let project_path = SearchMatchCandidate::Path {
9486 worktree_id: snapshot.id(),
9487 path: entry.path.clone(),
9488 is_ignored: entry.is_ignored,
9489 };
9490 if results_tx.send(project_path).await.is_err() {
9491 return;
9492 }
9493 }
9494 }
9495
9496 snapshot_start_ix = snapshot_end_ix;
9497 }
9498 }
9499}
9500
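/// Breadth-first walk of an ignored directory entry, testing every regular file found
/// beneath it against `query` and reporting matching paths on `counter_tx`. Symlinked
/// files and paths excluded by the worktree settings are skipped.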
9501async fn search_ignored_entry(
9502 snapshot: &LocalSnapshot,
9503 ignored_entry: &Entry,
9504 fs: &Arc<dyn Fs>,
9505 query: &SearchQuery,
9506 counter_tx: &Sender<SearchMatchCandidate>,
9507) {
9508 let mut ignored_paths_to_process =
9509 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
9510
9511 while let Some(ignored_abs_path) = ignored_paths_to_process.pop_front() {
9512 let metadata = fs
9513 .metadata(&ignored_abs_path)
9514 .await
9515 .with_context(|| format!("fetching fs metadata for {ignored_abs_path:?}"))
9516 .log_err()
9517 .flatten();
9518
9519 if let Some(fs_metadata) = metadata {
9520 if fs_metadata.is_dir {
9521 let files = fs
9522 .read_dir(&ignored_abs_path)
9523 .await
9524 .with_context(|| format!("listing ignored path {ignored_abs_path:?}"))
9525 .log_err();
9526
9527 if let Some(mut subfiles) = files {
9528 while let Some(subfile) = subfiles.next().await {
9529 if let Some(subfile) = subfile.log_err() {
9530 ignored_paths_to_process.push_back(subfile);
9531 }
9532 }
9533 }
9534 } else if !fs_metadata.is_symlink {
9535 if !query.file_matches(Some(&ignored_abs_path))
9536 || snapshot.is_path_excluded(ignored_entry.path.to_path_buf())
9537 {
9538 continue;
9539 }
9540 let matches = if let Some(file) = fs
9541 .open_sync(&ignored_abs_path)
9542 .await
9543 .with_context(|| format!("Opening ignored path {ignored_abs_path:?}"))
9544 .log_err()
9545 {
9546 query.detect(file).unwrap_or(false)
9547 } else {
9548 false
9549 };
9550
9551 if matches {
9552 let project_path = SearchMatchCandidate::Path {
9553 worktree_id: snapshot.id(),
9554 path: Arc::from(
9555 ignored_abs_path
9556 .strip_prefix(snapshot.abs_path())
                                .expect("ignored absolute path should be under the worktree root"),
9558 ),
9559 is_ignored: true,
9560 };
9561 if counter_tx.send(project_path).await.is_err() {
9562 return;
9563 }
9564 }
9565 }
9566 }
9567 }
9568}
9569
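/// Subscribes the project to Copilot events so that, once Copilot's language server has
/// started, it is registered as a supplementary language server and its log messages are
/// re-emitted as `Event::LanguageServerLog`.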
9570fn subscribe_for_copilot_events(
9571 copilot: &Model<Copilot>,
9572 cx: &mut ModelContext<'_, Project>,
9573) -> gpui::Subscription {
9574 cx.subscribe(
9575 copilot,
9576 |project, copilot, copilot_event, cx| match copilot_event {
9577 copilot::Event::CopilotLanguageServerStarted => {
9578 match copilot.read(cx).language_server() {
9579 Some((name, copilot_server)) => {
                        // A duplicate event may try to re-add a server that has already been
                        // added and subscribed to; avoid doing that again.
9581 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
9582 let new_server_id = copilot_server.server_id();
9583 let weak_project = cx.weak_model();
9584 let copilot_log_subscription = copilot_server
9585 .on_notification::<copilot::request::LogMessage, _>(
9586 move |params, mut cx| {
9587 weak_project.update(&mut cx, |_, cx| {
9588 cx.emit(Event::LanguageServerLog(
9589 new_server_id,
9590 params.message,
9591 ));
9592 }).ok();
9593 },
9594 );
9595 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
9596 project.copilot_log_subscription = Some(copilot_log_subscription);
9597 cx.emit(Event::LanguageServerAdded(new_server_id));
9598 }
9599 }
9600 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
9601 }
9602 }
9603 },
9604 )
9605}
9606
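/// Returns the longest leading run of path components in `glob` that contains no glob
/// metacharacters (`*`, `?`, `{`, `}`). For example, with `/` as the path separator,
/// `glob_literal_prefix("node_modules/**/*.js")` is `"node_modules"`.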
9607fn glob_literal_prefix(glob: &str) -> &str {
9608 let mut literal_end = 0;
9609 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
9610 if part.contains(&['*', '?', '{', '}']) {
9611 break;
9612 } else {
9613 if i > 0 {
9614 // Account for separator prior to this part
9615 literal_end += path::MAIN_SEPARATOR.len_utf8();
9616 }
9617 literal_end += part.len();
9618 }
9619 }
9620 &glob[..literal_end]
9621}
9622
9623impl WorktreeHandle {
9624 pub fn upgrade(&self) -> Option<Model<Worktree>> {
9625 match self {
9626 WorktreeHandle::Strong(handle) => Some(handle.clone()),
9627 WorktreeHandle::Weak(handle) => handle.upgrade(),
9628 }
9629 }
9630
9631 pub fn handle_id(&self) -> usize {
9632 match self {
9633 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
9634 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
9635 }
9636 }
9637}
9638
9639impl OpenBuffer {
9640 pub fn upgrade(&self) -> Option<Model<Buffer>> {
9641 match self {
9642 OpenBuffer::Strong(handle) => Some(handle.clone()),
9643 OpenBuffer::Weak(handle) => handle.upgrade(),
9644 OpenBuffer::Operations(_) => None,
9645 }
9646 }
9647}
9648
9649pub struct PathMatchCandidateSet {
9650 pub snapshot: Snapshot,
9651 pub include_ignored: bool,
9652 pub include_root_name: bool,
9653}
9654
9655impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
9656 type Candidates = PathMatchCandidateSetIter<'a>;
9657
9658 fn id(&self) -> usize {
9659 self.snapshot.id().to_usize()
9660 }
9661
9662 fn len(&self) -> usize {
9663 if self.include_ignored {
9664 self.snapshot.file_count()
9665 } else {
9666 self.snapshot.visible_file_count()
9667 }
9668 }
9669
9670 fn prefix(&self) -> Arc<str> {
9671 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
9672 self.snapshot.root_name().into()
9673 } else if self.include_root_name {
9674 format!("{}/", self.snapshot.root_name()).into()
9675 } else {
9676 "".into()
9677 }
9678 }
9679
9680 fn candidates(&'a self, start: usize) -> Self::Candidates {
9681 PathMatchCandidateSetIter {
9682 traversal: self.snapshot.files(self.include_ignored, start),
9683 }
9684 }
9685}
9686
9687pub struct PathMatchCandidateSetIter<'a> {
9688 traversal: Traversal<'a>,
9689}
9690
9691impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
9692 type Item = fuzzy::PathMatchCandidate<'a>;
9693
9694 fn next(&mut self) -> Option<Self::Item> {
9695 self.traversal.next().map(|entry| {
9696 if let EntryKind::File(char_bag) = entry.kind {
9697 fuzzy::PathMatchCandidate {
9698 path: &entry.path,
9699 char_bag,
9700 }
9701 } else {
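                // The traversal produced by `Snapshot::files` yields file entries only,
                // so any other entry kind here would indicate a bug in the traversal.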
9702 unreachable!()
9703 }
9704 })
9705 }
9706}
9707
9708impl EventEmitter<Event> for Project {}
9709
impl<'a> From<&'a ProjectPath> for SettingsLocation<'a> {
    fn from(project_path: &'a ProjectPath) -> Self {
        SettingsLocation {
            worktree_id: project_path.worktree_id.to_usize(),
            path: project_path.path.as_ref(),
        }
    }
}
9718
9719impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
9720 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
9721 Self {
9722 worktree_id,
9723 path: path.as_ref().into(),
9724 }
9725 }
9726}
9727
9728struct ProjectLspAdapterDelegate {
9729 project: WeakModel<Project>,
9730 worktree: worktree::Snapshot,
9731 fs: Arc<dyn Fs>,
9732 http_client: Arc<dyn HttpClient>,
9733 language_registry: Arc<LanguageRegistry>,
9734 shell_env: Mutex<Option<HashMap<String, String>>>,
9735}
9736
9737impl ProjectLspAdapterDelegate {
9738 fn new(project: &Project, worktree: &Model<Worktree>, cx: &ModelContext<Project>) -> Arc<Self> {
9739 Arc::new(Self {
9740 project: cx.weak_model(),
9741 worktree: worktree.read(cx).snapshot(),
9742 fs: project.fs.clone(),
9743 http_client: project.client.http_client(),
9744 language_registry: project.languages.clone(),
9745 shell_env: Default::default(),
9746 })
9747 }
9748
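    /// Runs the user's shell in the worktree's root directory and stores the captured
    /// environment in `self.shell_env`.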
9749 async fn load_shell_env(&self) {
9750 let worktree_abs_path = self.worktree.abs_path();
9751 let shell_env = load_shell_environment(&worktree_abs_path)
9752 .await
9753 .with_context(|| {
                format!("failed to load login shell environment in {worktree_abs_path:?}")
9755 })
9756 .log_err()
9757 .unwrap_or_default();
9758 *self.shell_env.lock() = Some(shell_env);
9759 }
9760}
9761
9762#[async_trait]
9763impl LspAdapterDelegate for ProjectLspAdapterDelegate {
9764 fn show_notification(&self, message: &str, cx: &mut AppContext) {
9765 self.project
9766 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())))
9767 .ok();
9768 }
9769
9770 fn http_client(&self) -> Arc<dyn HttpClient> {
9771 self.http_client.clone()
9772 }
9773
9774 async fn shell_env(&self) -> HashMap<String, String> {
9775 self.load_shell_env().await;
9776 self.shell_env.lock().as_ref().cloned().unwrap_or_default()
9777 }
9778
9779 #[cfg(not(target_os = "windows"))]
9780 async fn which(&self, command: &OsStr) -> Option<PathBuf> {
9781 let worktree_abs_path = self.worktree.abs_path();
9782 self.load_shell_env().await;
9783 let shell_path = self
9784 .shell_env
9785 .lock()
9786 .as_ref()
9787 .and_then(|shell_env| shell_env.get("PATH").cloned());
9788 which::which_in(command, shell_path.as_ref(), &worktree_abs_path).ok()
9789 }
9790
9791 #[cfg(target_os = "windows")]
9792 async fn which(&self, command: &OsStr) -> Option<PathBuf> {
        // todo(windows): Getting the shell environment for the current directory is more involved on
        // Windows than on other platforms, since there isn't necessarily a single "default shell".
        // The closest equivalent is the default profile in Windows Terminal.
        // SEE: https://learn.microsoft.com/en-us/windows/terminal/customize-settings/startup
9796 which::which(command).ok()
9797 }
9798
9799 fn update_status(
9800 &self,
9801 server_name: LanguageServerName,
9802 status: language::LanguageServerBinaryStatus,
9803 ) {
9804 self.language_registry
9805 .update_lsp_status(server_name, status);
9806 }
9807
9808 async fn read_text_file(&self, path: PathBuf) -> Result<String> {
9809 if self.worktree.entry_for_path(&path).is_none() {
9810 return Err(anyhow!("no such path {path:?}"));
9811 }
9812 let path = self.worktree.absolutize(path.as_ref())?;
9813 let content = self.fs.load(&path).await?;
9814 Ok(content)
9815 }
9816}
9817
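/// Converts a `Symbol` into its protobuf representation for transmission over RPC.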
9818fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
9819 proto::Symbol {
9820 language_server_name: symbol.language_server_name.0.to_string(),
9821 source_worktree_id: symbol.source_worktree_id.to_proto(),
9822 worktree_id: symbol.path.worktree_id.to_proto(),
9823 path: symbol.path.path.to_string_lossy().to_string(),
9824 name: symbol.name.clone(),
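        // NOTE: assumes `lsp::SymbolKind` is a thin wrapper around the same integer
        // representation that the proto field expects.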
9825 kind: unsafe { mem::transmute(symbol.kind) },
9826 start: Some(proto::PointUtf16 {
9827 row: symbol.range.start.0.row,
9828 column: symbol.range.start.0.column,
9829 }),
9830 end: Some(proto::PointUtf16 {
9831 row: symbol.range.end.0.row,
9832 column: symbol.range.end.0.column,
9833 }),
9834 signature: symbol.signature.to_vec(),
9835 }
9836}
9837
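/// Computes `path` relative to `base`, inserting `..` components wherever `path` is not
/// underneath `base`. The comparison is purely lexical; the filesystem is never touched.
/// For example, relativizing `/a/c/d` against `/a/b` yields `../c/d`.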
9838fn relativize_path(base: &Path, path: &Path) -> PathBuf {
9839 let mut path_components = path.components();
9840 let mut base_components = base.components();
9841 let mut components: Vec<Component> = Vec::new();
9842 loop {
9843 match (path_components.next(), base_components.next()) {
9844 (None, None) => break,
9845 (Some(a), None) => {
9846 components.push(a);
9847 components.extend(path_components.by_ref());
9848 break;
9849 }
9850 (None, _) => components.push(Component::ParentDir),
9851 (Some(a), Some(b)) if components.is_empty() && a == b => (),
9852 (Some(a), Some(Component::CurDir)) => components.push(a),
9853 (Some(a), Some(_)) => {
9854 components.push(Component::ParentDir);
9855 for _ in base_components {
9856 components.push(Component::ParentDir);
9857 }
9858 components.push(a);
9859 components.extend(path_components.by_ref());
9860 break;
9861 }
9862 }
9863 }
9864 components.iter().map(|c| c.as_os_str()).collect()
9865}
9866
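/// Lexically applies `path` on top of `base`, resolving `.` and `..` components without
/// touching the filesystem. For example, resolving `../c` against `/a/b` yields `/a/c`.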
9867fn resolve_path(base: &Path, path: &Path) -> PathBuf {
9868 let mut result = base.to_path_buf();
9869 for component in path.components() {
9870 match component {
9871 Component::ParentDir => {
9872 result.pop();
9873 }
9874 Component::CurDir => (),
9875 _ => result.push(component),
9876 }
9877 }
9878 result
9879}
9880
9881impl Item for Buffer {
9882 fn try_open(
9883 project: &Model<Project>,
9884 path: &ProjectPath,
9885 cx: &mut AppContext,
9886 ) -> Option<Task<Result<Model<Self>>>> {
9887 Some(project.update(cx, |project, cx| project.open_buffer(path.clone(), cx)))
9888 }
9889
9890 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
9891 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
9892 }
9893
9894 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
9895 File::from_dyn(self.file()).map(|file| ProjectPath {
9896 worktree_id: file.worktree_id(cx),
9897 path: file.path().clone(),
9898 })
9899 }
9900}
9901
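/// Polls the watch channel that tracks an in-flight buffer open, resolving to the buffer
/// (or the load error) once a result has been published.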
9902async fn wait_for_loading_buffer(
9903 mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
9904) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
9905 loop {
9906 if let Some(result) = receiver.borrow().as_ref() {
9907 match result {
9908 Ok(buffer) => return Ok(buffer.to_owned()),
9909 Err(e) => return Err(e.to_owned()),
9910 }
9911 }
9912 receiver.next().await;
9913 }
9914}
9915
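/// Returns true if the language server's advertised save options ask for the full
/// document text to be included in `textDocument/didSave` notifications.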
9916fn include_text(server: &lsp::LanguageServer) -> bool {
9917 server
9918 .capabilities()
9919 .text_document_sync
9920 .as_ref()
9921 .and_then(|sync| match sync {
9922 lsp::TextDocumentSyncCapability::Kind(_) => None,
9923 lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
9924 })
9925 .and_then(|save_options| match save_options {
9926 lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
9927 lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
9928 })
9929 .unwrap_or(false)
9930}
9931
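/// Spawns the user's shell interactively in `dir` and parses the output of
/// `/usr/bin/env -0` to capture the environment a user would see after `cd`-ing into
/// that directory. The comments below explain the shape of the command.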
9932async fn load_shell_environment(dir: &Path) -> Result<HashMap<String, String>> {
9933 let marker = "ZED_SHELL_START";
9934 let shell = env::var("SHELL").context(
        "the SHELL environment variable is not set, so we can't source the login shell's environment variables",
9936 )?;
9937
9938 // What we're doing here is to spawn a shell and then `cd` into
9939 // the project directory to get the env in there as if the user
9940 // `cd`'d into it. We do that because tools like direnv, asdf, ...
9941 // hook into `cd` and only set up the env after that.
9942 //
9943 // In certain shells we need to execute additional_command in order to
9944 // trigger the behavior of direnv, etc.
    //
    // The `exit 0` is the result of hours of debugging, trying to find out
    // why running this command here, without `exit 0`, would mess up
    // signal handling for our process so that `ctrl-c` doesn't work
    // anymore.
9951 //
9952 // We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would
9953 // do that, but it does, and `exit 0` helps.
9954 let additional_command = PathBuf::from(&shell)
9955 .file_name()
9956 .and_then(|f| f.to_str())
9957 .and_then(|shell| match shell {
9958 "fish" => Some("emit fish_prompt;"),
9959 _ => None,
9960 });
9961
9962 let command = format!(
9963 "cd '{}';{} echo {marker}; /usr/bin/env -0; exit 0;",
9964 dir.display(),
9965 additional_command.unwrap_or("")
9966 );
9967
9968 let output = smol::process::Command::new(&shell)
9969 .args(["-i", "-c", &command])
9970 .output()
9971 .await
9972 .context("failed to spawn login shell to source login environment variables")?;
9973
9974 anyhow::ensure!(
9975 output.status.success(),
9976 "login shell exited with error {:?}",
9977 output.status
9978 );
9979
9980 let stdout = String::from_utf8_lossy(&output.stdout);
9981 let env_output_start = stdout.find(marker).ok_or_else(|| {
9982 anyhow!(
9983 "failed to parse output of `env` command in login shell: {}",
9984 stdout
9985 )
9986 })?;
9987
9988 let mut parsed_env = HashMap::default();
9989 let env_output = &stdout[env_output_start + marker.len()..];
9990 for line in env_output.split_terminator('\0') {
9991 if let Some(separator_index) = line.find('=') {
9992 let key = line[..separator_index].to_string();
9993 let value = line[separator_index + 1..].to_string();
9994 parsed_env.insert(key, value);
9995 }
9996 }
9997 Ok(parsed_env)
9998}
9999
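/// Converts a `git::blame::Blame` into its protobuf response representation.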
10000fn serialize_blame_buffer_response(blame: git::blame::Blame) -> proto::BlameBufferResponse {
10001 let entries = blame
10002 .entries
10003 .into_iter()
10004 .map(|entry| proto::BlameEntry {
10005 sha: entry.sha.as_bytes().into(),
10006 start_line: entry.range.start,
10007 end_line: entry.range.end,
10008 original_line_number: entry.original_line_number,
10009 author: entry.author.clone(),
10010 author_mail: entry.author_mail.clone(),
10011 author_time: entry.author_time,
10012 author_tz: entry.author_tz.clone(),
10013 committer: entry.committer.clone(),
10014 committer_mail: entry.committer_mail.clone(),
10015 committer_time: entry.committer_time,
10016 committer_tz: entry.committer_tz.clone(),
10017 summary: entry.summary.clone(),
10018 previous: entry.previous.clone(),
10019 filename: entry.filename.clone(),
10020 })
10021 .collect::<Vec<_>>();
10022
10023 let messages = blame
10024 .messages
10025 .into_iter()
10026 .map(|(oid, message)| proto::CommitMessage {
10027 oid: oid.as_bytes().into(),
10028 message,
10029 })
10030 .collect::<Vec<_>>();
10031
10032 let permalinks = blame
10033 .permalinks
10034 .into_iter()
10035 .map(|(oid, url)| proto::CommitPermalink {
10036 oid: oid.as_bytes().into(),
10037 permalink: url.to_string(),
10038 })
10039 .collect::<Vec<_>>();
10040
10041 proto::BlameBufferResponse {
10042 entries,
10043 messages,
10044 permalinks,
10045 }
10046}
10047
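/// Rebuilds a `git::blame::Blame` from its protobuf response, dropping entries whose
/// object ids or permalink URLs fail to parse.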
10048fn deserialize_blame_buffer_response(response: proto::BlameBufferResponse) -> git::blame::Blame {
10049 let entries = response
10050 .entries
10051 .into_iter()
10052 .filter_map(|entry| {
10053 Some(git::blame::BlameEntry {
10054 sha: git::Oid::from_bytes(&entry.sha).ok()?,
10055 range: entry.start_line..entry.end_line,
10056 original_line_number: entry.original_line_number,
10057 committer: entry.committer,
10058 committer_time: entry.committer_time,
10059 committer_tz: entry.committer_tz,
10060 committer_mail: entry.committer_mail,
10061 author: entry.author,
10062 author_mail: entry.author_mail,
10063 author_time: entry.author_time,
10064 author_tz: entry.author_tz,
10065 summary: entry.summary,
10066 previous: entry.previous,
10067 filename: entry.filename,
10068 })
10069 })
10070 .collect::<Vec<_>>();
10071
10072 let messages = response
10073 .messages
10074 .into_iter()
10075 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
10076 .collect::<HashMap<_, _>>();
10077
10078 let permalinks = response
10079 .permalinks
10080 .into_iter()
10081 .filter_map(|permalink| {
10082 Some((
10083 git::Oid::from_bytes(&permalink.oid).ok()?,
10084 Url::from_str(&permalink.permalink).ok()?,
10085 ))
10086 })
10087 .collect::<HashMap<_, _>>();
10088
10089 Blame {
10090 entries,
10091 permalinks,
10092 messages,
10093 }
10094}