1pub mod debounced_delay;
2pub mod lsp_command;
3pub mod lsp_ext_command;
4mod prettier_support;
5pub mod project_settings;
6pub mod search;
7mod task_inventory;
8pub mod terminals;
9
10#[cfg(test)]
11mod project_tests;
12
13use anyhow::{anyhow, bail, Context as _, Result};
14use async_trait::async_trait;
15use client::{
16 proto, Client, Collaborator, PendingEntitySubscription, ProjectId, TypedEnvelope, UserStore,
17};
18use clock::ReplicaId;
19use collections::{hash_map, BTreeMap, HashMap, HashSet, VecDeque};
20use copilot::Copilot;
21use debounced_delay::DebouncedDelay;
22use fs::repository::GitRepository;
23use futures::{
24 channel::mpsc::{self, UnboundedReceiver},
25 future::{try_join_all, Shared},
26 select,
27 stream::FuturesUnordered,
28 AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt,
29};
30use globset::{Glob, GlobSet, GlobSetBuilder};
31use gpui::{
32 AnyModel, AppContext, AsyncAppContext, BackgroundExecutor, Context, Entity, EventEmitter,
33 Model, ModelContext, PromptLevel, Task, WeakModel,
34};
35use itertools::Itertools;
36use language::{
37 language_settings::{language_settings, FormatOnSave, Formatter, InlayHintKind},
38 markdown, point_to_lsp,
39 proto::{
40 deserialize_anchor, deserialize_fingerprint, deserialize_line_ending, deserialize_version,
41 serialize_anchor, serialize_version, split_operations,
42 },
43 range_from_lsp, Bias, Buffer, BufferSnapshot, CachedLspAdapter, Capability, CodeAction,
44 CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Documentation,
45 Event as BufferEvent, File as _, Language, LanguageRegistry, LanguageServerName, LocalFile,
46 LspAdapterDelegate, Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot,
47 ToOffset, ToPointUtf16, Transaction, Unclipped,
48};
49use log::error;
50use lsp::{
51 DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
52 DocumentHighlightKind, LanguageServer, LanguageServerBinary, LanguageServerId,
53 MessageActionItem, OneOf, ServerHealthStatus, ServerStatus,
54};
55use lsp_command::*;
56use node_runtime::NodeRuntime;
57use parking_lot::{Mutex, RwLock};
58use postage::watch;
59use prettier_support::{DefaultPrettier, PrettierInstance};
60use project_settings::{LspSettings, ProjectSettings};
61use rand::prelude::*;
62use worktree::LocalSnapshot;
63
64use rpc::{ErrorCode, ErrorExt as _};
65use search::SearchQuery;
66use serde::Serialize;
67use settings::{watch_config_file, Settings, SettingsStore};
68use sha2::{Digest, Sha256};
69use similar::{ChangeTag, TextDiff};
70use smol::channel::{Receiver, Sender};
71use smol::lock::Semaphore;
72use std::{
73 cmp::{self, Ordering},
74 convert::TryInto,
75 env,
76 ffi::OsStr,
77 hash::Hash,
78 mem,
79 num::NonZeroU32,
80 ops::Range,
81 path::{self, Component, Path, PathBuf},
82 process::Stdio,
83 str,
84 sync::{
85 atomic::{AtomicUsize, Ordering::SeqCst},
86 Arc,
87 },
88 time::{Duration, Instant},
89};
90use task::static_source::StaticSource;
91use terminals::Terminals;
92use text::{Anchor, BufferId};
93use util::{
94 debug_panic, defer,
95 http::HttpClient,
96 merge_json_value_into,
97 paths::{LOCAL_SETTINGS_RELATIVE_PATH, LOCAL_TASKS_RELATIVE_PATH},
98 post_inc, ResultExt, TryFutureExt as _,
99};
100use worktree::{Snapshot, Traversal};
101
102pub use fs::*;
103pub use language::Location;
104#[cfg(any(test, feature = "test-support"))]
105pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
106#[cfg(feature = "test-support")]
107pub use task_inventory::test_inventory::*;
108pub use task_inventory::{Inventory, TaskSourceKind};
109pub use worktree::{
110 DiagnosticSummary, Entry, EntryKind, File, LocalWorktree, PathChange, ProjectEntryId,
111 RepositoryEntry, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId,
112 WorktreeSettings, FS_WATCH_LATENCY,
113};
114
115const MAX_SERVER_REINSTALL_ATTEMPT_COUNT: u64 = 4;
116const SERVER_REINSTALL_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
117const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
118pub const SERVER_PROGRESS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(100);
119
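/// An item that can report the project entry and project path it corresponds to, if any.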
120pub trait Item {
121 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
122 fn project_path(&self, cx: &AppContext) -> Option<ProjectPath>;
123}
124
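/// The state of a single open project: its worktrees, open buffers, language servers,
/// and collaboration state. A project is either local (and optionally shared with
/// collaborators) or remote (joined from another host).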
125pub struct Project {
126 worktrees: Vec<WorktreeHandle>,
127 active_entry: Option<ProjectEntryId>,
128 buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
129 pending_language_server_update: Option<BufferOrderedMessage>,
130 flush_language_server_update: Option<Task<()>>,
131
132 languages: Arc<LanguageRegistry>,
133 supplementary_language_servers:
134 HashMap<LanguageServerId, (LanguageServerName, Arc<LanguageServer>)>,
135 language_servers: HashMap<LanguageServerId, LanguageServerState>,
136 language_server_ids: HashMap<(WorktreeId, LanguageServerName), LanguageServerId>,
137 language_server_statuses: BTreeMap<LanguageServerId, LanguageServerStatus>,
138 last_formatting_failure: Option<String>,
139 last_workspace_edits_by_language_server: HashMap<LanguageServerId, ProjectTransaction>,
140 language_server_watched_paths: HashMap<LanguageServerId, HashMap<WorktreeId, GlobSet>>,
141 client: Arc<client::Client>,
142 next_entry_id: Arc<AtomicUsize>,
143 join_project_response_message_id: u32,
144 next_diagnostic_group_id: usize,
145 user_store: Model<UserStore>,
146 fs: Arc<dyn Fs>,
147 client_state: ProjectClientState,
148 collaborators: HashMap<proto::PeerId, Collaborator>,
149 client_subscriptions: Vec<client::Subscription>,
150 _subscriptions: Vec<gpui::Subscription>,
151 next_buffer_id: BufferId,
152 opened_buffer: (watch::Sender<()>, watch::Receiver<()>),
153 shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
154 #[allow(clippy::type_complexity)]
155 loading_buffers_by_path: HashMap<
156 ProjectPath,
157 postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
158 >,
159 #[allow(clippy::type_complexity)]
160 loading_local_worktrees:
161 HashMap<Arc<Path>, Shared<Task<Result<Model<Worktree>, Arc<anyhow::Error>>>>>,
162 opened_buffers: HashMap<BufferId, OpenBuffer>,
163 local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
164 local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
    /// An entry mapping a buffer ID to `None` means that we've started waiting for that buffer but haven't finished loading it yet.
    /// Used for re-issuing buffer requests when peers temporarily disconnect.
167 incomplete_remote_buffers: HashMap<BufferId, Option<Model<Buffer>>>,
168 buffer_snapshots: HashMap<BufferId, HashMap<LanguageServerId, Vec<LspBufferSnapshot>>>, // buffer_id -> server_id -> vec of snapshots
169 buffers_being_formatted: HashSet<BufferId>,
170 buffers_needing_diff: HashSet<WeakModel<Buffer>>,
171 git_diff_debouncer: DebouncedDelay,
172 nonce: u128,
173 _maintain_buffer_languages: Task<()>,
174 _maintain_workspace_config: Task<Result<()>>,
175 terminals: Terminals,
176 copilot_lsp_subscription: Option<gpui::Subscription>,
177 copilot_log_subscription: Option<lsp::Subscription>,
178 current_lsp_settings: HashMap<Arc<str>, LspSettings>,
179 node: Option<Arc<dyn NodeRuntime>>,
180 default_prettier: DefaultPrettier,
181 prettiers_per_worktree: HashMap<WorktreeId, HashSet<Option<PathBuf>>>,
182 prettier_instances: HashMap<PathBuf, PrettierInstance>,
183 tasks: Model<Inventory>,
184 hosted_project_id: Option<ProjectId>,
185}
186
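/// Selects which language server a request should be routed to: the primary server
/// for a buffer, or a specific server identified by id.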
187pub enum LanguageServerToQuery {
188 Primary,
189 Other(LanguageServerId),
190}
191
192struct LspBufferSnapshot {
193 version: i32,
194 snapshot: TextBufferSnapshot,
195}
196
/// A message ordered with respect to buffer operations.
198#[derive(Debug)]
199enum BufferOrderedMessage {
200 Operation {
201 buffer_id: BufferId,
202 operation: proto::Operation,
203 },
204 LanguageServerUpdate {
205 language_server_id: LanguageServerId,
206 message: proto::update_language_server::Variant,
207 },
208 Resync,
209}
210
211enum LocalProjectUpdate {
212 WorktreesChanged,
213 CreateBufferForPeer {
214 peer_id: proto::PeerId,
215 buffer_id: BufferId,
216 },
217}
218
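/// A handle to an open buffer: held strongly while the project is shared, weakly
/// otherwise, or a backlog of operations for a buffer that isn't currently open.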
219enum OpenBuffer {
220 Strong(Model<Buffer>),
221 Weak(WeakModel<Buffer>),
222 Operations(Vec<Operation>),
223}
224
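/// A handle to a worktree: held strongly while the project is shared or the worktree
/// is visible, and weakly otherwise.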
225#[derive(Clone)]
226enum WorktreeHandle {
227 Strong(Model<Worktree>),
228 Weak(WeakModel<Worktree>),
229}
230
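/// Whether this project is purely local, local and shared with collaborators, or a
/// remote project joined from another host.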
231#[derive(Debug)]
232enum ProjectClientState {
233 Local,
234 Shared {
235 remote_id: u64,
236 updates_tx: mpsc::UnboundedSender<LocalProjectUpdate>,
237 _send_updates: Task<Result<()>>,
238 },
239 Remote {
240 sharing_has_stopped: bool,
241 capability: Capability,
242 remote_id: u64,
243 replica_id: ReplicaId,
244 },
245}
246
/// A prompt requested by an LSP server.
248#[derive(Clone, Debug)]
249pub struct LanguageServerPromptRequest {
250 pub level: PromptLevel,
251 pub message: String,
252 pub actions: Vec<MessageActionItem>,
253 pub lsp_name: String,
254 response_channel: Sender<MessageActionItem>,
255}
256
257impl LanguageServerPromptRequest {
258 pub async fn respond(self, index: usize) -> Option<()> {
259 if let Some(response) = self.actions.into_iter().nth(index) {
260 self.response_channel.send(response).await.ok()
261 } else {
262 None
263 }
264 }
265}
266impl PartialEq for LanguageServerPromptRequest {
267 fn eq(&self, other: &Self) -> bool {
268 self.message == other.message && self.actions == other.actions
269 }
270}
271
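/// Events emitted by a [`Project`] for its observers.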
272#[derive(Clone, Debug, PartialEq)]
273pub enum Event {
274 LanguageServerAdded(LanguageServerId),
275 LanguageServerRemoved(LanguageServerId),
276 LanguageServerLog(LanguageServerId, String),
277 Notification(String),
278 LanguageServerPrompt(LanguageServerPromptRequest),
279 ActiveEntryChanged(Option<ProjectEntryId>),
280 ActivateProjectPanel,
281 WorktreeAdded,
282 WorktreeRemoved(WorktreeId),
283 WorktreeUpdatedEntries(WorktreeId, UpdatedEntriesSet),
284 DiskBasedDiagnosticsStarted {
285 language_server_id: LanguageServerId,
286 },
287 DiskBasedDiagnosticsFinished {
288 language_server_id: LanguageServerId,
289 },
290 DiagnosticsUpdated {
291 path: ProjectPath,
292 language_server_id: LanguageServerId,
293 },
294 RemoteIdChanged(Option<u64>),
295 DisconnectedFromHost,
296 Closed,
297 DeletedEntry(ProjectEntryId),
298 CollaboratorUpdated {
299 old_peer_id: proto::PeerId,
300 new_peer_id: proto::PeerId,
301 },
302 CollaboratorJoined(proto::PeerId),
303 CollaboratorLeft(proto::PeerId),
304 RefreshInlayHints,
305 RevealInProjectPanel(ProjectEntryId),
306}
307
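/// The lifecycle state of a language server managed by the project: still starting
/// up, or running and ready to serve requests.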
308pub enum LanguageServerState {
309 Starting(Task<Option<Arc<LanguageServer>>>),
310
311 Running {
312 language: Arc<Language>,
313 adapter: Arc<CachedLspAdapter>,
314 server: Arc<LanguageServer>,
315 simulate_disk_based_diagnostics_completion: Option<Task<()>>,
316 },
317}
318
319#[derive(Serialize)]
320pub struct LanguageServerStatus {
321 pub name: String,
322 pub pending_work: BTreeMap<String, LanguageServerProgress>,
323 pub has_pending_diagnostic_updates: bool,
324 progress_tokens: HashSet<String>,
325}
326
327#[derive(Clone, Debug, Serialize)]
328pub struct LanguageServerProgress {
329 pub message: Option<String>,
330 pub percentage: Option<usize>,
331 #[serde(skip_serializing)]
332 pub last_update_at: Instant,
333}
334
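/// A path to a file or directory, relative to the root of a specific worktree within
/// the project.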
335#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
336pub struct ProjectPath {
337 pub worktree_id: WorktreeId,
338 pub path: Arc<Path>,
339}
340
341#[derive(Debug, Clone, PartialEq, Eq)]
342pub struct InlayHint {
343 pub position: language::Anchor,
344 pub label: InlayHintLabel,
345 pub kind: Option<InlayHintKind>,
346 pub padding_left: bool,
347 pub padding_right: bool,
348 pub tooltip: Option<InlayHintTooltip>,
349 pub resolve_state: ResolveState,
350}
351
352#[derive(Debug, Clone, PartialEq, Eq)]
353pub enum ResolveState {
354 Resolved,
355 CanResolve(LanguageServerId, Option<lsp::LSPAny>),
356 Resolving,
357}
358
359impl InlayHint {
360 pub fn text(&self) -> String {
361 match &self.label {
362 InlayHintLabel::String(s) => s.to_owned(),
363 InlayHintLabel::LabelParts(parts) => parts.iter().map(|part| &part.value).join(""),
364 }
365 }
366}
367
368#[derive(Debug, Clone, PartialEq, Eq)]
369pub enum InlayHintLabel {
370 String(String),
371 LabelParts(Vec<InlayHintLabelPart>),
372}
373
374#[derive(Debug, Clone, PartialEq, Eq)]
375pub struct InlayHintLabelPart {
376 pub value: String,
377 pub tooltip: Option<InlayHintLabelPartTooltip>,
378 pub location: Option<(LanguageServerId, lsp::Location)>,
379}
380
381#[derive(Debug, Clone, PartialEq, Eq)]
382pub enum InlayHintTooltip {
383 String(String),
384 MarkupContent(MarkupContent),
385}
386
387#[derive(Debug, Clone, PartialEq, Eq)]
388pub enum InlayHintLabelPartTooltip {
389 String(String),
390 MarkupContent(MarkupContent),
391}
392
393#[derive(Debug, Clone, PartialEq, Eq)]
394pub struct MarkupContent {
395 pub kind: HoverBlockKind,
396 pub value: String,
397}
398
399#[derive(Debug, Clone)]
400pub struct LocationLink {
401 pub origin: Option<Location>,
402 pub target: Location,
403}
404
405#[derive(Debug)]
406pub struct DocumentHighlight {
407 pub range: Range<language::Anchor>,
408 pub kind: DocumentHighlightKind,
409}
410
411#[derive(Clone, Debug)]
412pub struct Symbol {
413 pub language_server_name: LanguageServerName,
414 pub source_worktree_id: WorktreeId,
415 pub path: ProjectPath,
416 pub label: CodeLabel,
417 pub name: String,
418 pub kind: lsp::SymbolKind,
419 pub range: Range<Unclipped<PointUtf16>>,
420 pub signature: [u8; 32],
421}
422
423#[derive(Clone, Debug, PartialEq)]
424pub struct HoverBlock {
425 pub text: String,
426 pub kind: HoverBlockKind,
427}
428
429#[derive(Clone, Debug, PartialEq, Eq)]
430pub enum HoverBlockKind {
431 PlainText,
432 Markdown,
433 Code { language: String },
434}
435
436#[derive(Debug)]
437pub struct Hover {
438 pub contents: Vec<HoverBlock>,
439 pub range: Option<Range<language::Anchor>>,
440 pub language: Option<Arc<Language>>,
441}
442
443impl Hover {
444 pub fn is_empty(&self) -> bool {
445 self.contents.iter().all(|block| block.text.is_empty())
446 }
447}
448
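/// The set of buffer transactions produced by a single project-wide operation
/// (for example, applying a workspace edit), keyed by the buffer they apply to.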
449#[derive(Default)]
450pub struct ProjectTransaction(pub HashMap<Model<Buffer>, language::Transaction>);
451
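/// What initiated a formatting operation: saving the buffer, or an explicit manual
/// format request.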
452#[derive(Debug, Clone, Copy, PartialEq, Eq)]
453pub enum FormatTrigger {
454 Save,
455 Manual,
456}
457
458// Currently, formatting operations are represented differently depending on
459// whether they come from a language server or an external command.
460enum FormatOperation {
461 Lsp(Vec<(Range<Anchor>, String)>),
462 External(Diff),
463 Prettier(Diff),
464}
465
466impl FormatTrigger {
467 fn from_proto(value: i32) -> FormatTrigger {
468 match value {
469 0 => FormatTrigger::Save,
470 1 => FormatTrigger::Manual,
471 _ => FormatTrigger::Save,
472 }
473 }
474}
475#[derive(Clone, Debug, PartialEq)]
476enum SearchMatchCandidate {
477 OpenBuffer {
478 buffer: Model<Buffer>,
        // This might be an unnamed file with no representation on the filesystem
480 path: Option<Arc<Path>>,
481 },
482 Path {
483 worktree_id: WorktreeId,
484 is_ignored: bool,
485 path: Arc<Path>,
486 },
487}
488
489type SearchMatchCandidateIndex = usize;
490impl SearchMatchCandidate {
491 fn path(&self) -> Option<Arc<Path>> {
492 match self {
493 SearchMatchCandidate::OpenBuffer { path, .. } => path.clone(),
494 SearchMatchCandidate::Path { path, .. } => Some(path.clone()),
495 }
496 }
497}
498
499impl Project {
500 pub fn init_settings(cx: &mut AppContext) {
501 WorktreeSettings::register(cx);
502 ProjectSettings::register(cx);
503 }
504
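    /// Registers project settings and all project-related RPC message handlers on the
    /// given client.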
505 pub fn init(client: &Arc<Client>, cx: &mut AppContext) {
506 Self::init_settings(cx);
507
508 client.add_model_message_handler(Self::handle_add_collaborator);
509 client.add_model_message_handler(Self::handle_update_project_collaborator);
510 client.add_model_message_handler(Self::handle_remove_collaborator);
511 client.add_model_message_handler(Self::handle_buffer_reloaded);
512 client.add_model_message_handler(Self::handle_buffer_saved);
513 client.add_model_message_handler(Self::handle_start_language_server);
514 client.add_model_message_handler(Self::handle_update_language_server);
515 client.add_model_message_handler(Self::handle_update_project);
516 client.add_model_message_handler(Self::handle_unshare_project);
517 client.add_model_message_handler(Self::handle_create_buffer_for_peer);
518 client.add_model_message_handler(Self::handle_update_buffer_file);
519 client.add_model_request_handler(Self::handle_update_buffer);
520 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
521 client.add_model_message_handler(Self::handle_update_worktree);
522 client.add_model_message_handler(Self::handle_update_worktree_settings);
523 client.add_model_request_handler(Self::handle_create_project_entry);
524 client.add_model_request_handler(Self::handle_rename_project_entry);
525 client.add_model_request_handler(Self::handle_copy_project_entry);
526 client.add_model_request_handler(Self::handle_delete_project_entry);
527 client.add_model_request_handler(Self::handle_expand_project_entry);
528 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
529 client.add_model_request_handler(Self::handle_resolve_completion_documentation);
530 client.add_model_request_handler(Self::handle_apply_code_action);
531 client.add_model_request_handler(Self::handle_on_type_formatting);
532 client.add_model_request_handler(Self::handle_inlay_hints);
533 client.add_model_request_handler(Self::handle_resolve_inlay_hint);
534 client.add_model_request_handler(Self::handle_refresh_inlay_hints);
535 client.add_model_request_handler(Self::handle_reload_buffers);
536 client.add_model_request_handler(Self::handle_synchronize_buffers);
537 client.add_model_request_handler(Self::handle_format_buffers);
538 client.add_model_request_handler(Self::handle_lsp_command::<GetCodeActions>);
539 client.add_model_request_handler(Self::handle_lsp_command::<GetCompletions>);
540 client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
541 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
542 client.add_model_request_handler(Self::handle_lsp_command::<GetTypeDefinition>);
543 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
544 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
545 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
546 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
547 client.add_model_request_handler(Self::handle_search_project);
548 client.add_model_request_handler(Self::handle_get_project_symbols);
549 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
550 client.add_model_request_handler(Self::handle_open_buffer_by_id);
551 client.add_model_request_handler(Self::handle_open_buffer_by_path);
552 client.add_model_request_handler(Self::handle_save_buffer);
553 client.add_model_message_handler(Self::handle_update_diff_base);
554 client.add_model_request_handler(Self::handle_lsp_command::<lsp_ext_command::ExpandMacro>);
555 }
556
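    /// Creates a new, purely local project backed by the given client, node runtime,
    /// user store, language registry, and filesystem.
    ///
    /// A minimal usage sketch (variable names are illustrative, not prescribed):
    ///
    /// ```ignore
    /// let project = Project::local(client, node, user_store, languages, fs, cx);
    /// ```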
557 pub fn local(
558 client: Arc<Client>,
559 node: Arc<dyn NodeRuntime>,
560 user_store: Model<UserStore>,
561 languages: Arc<LanguageRegistry>,
562 fs: Arc<dyn Fs>,
563 cx: &mut AppContext,
564 ) -> Model<Self> {
565 cx.new_model(|cx: &mut ModelContext<Self>| {
566 let (tx, rx) = mpsc::unbounded();
567 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
568 .detach();
569 let copilot_lsp_subscription =
570 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
571 let tasks = Inventory::new(cx);
572
573 Self {
574 worktrees: Vec::new(),
575 buffer_ordered_messages_tx: tx,
576 flush_language_server_update: None,
577 pending_language_server_update: None,
578 collaborators: Default::default(),
579 next_buffer_id: BufferId::new(1).unwrap(),
580 opened_buffers: Default::default(),
581 shared_buffers: Default::default(),
582 incomplete_remote_buffers: Default::default(),
583 loading_buffers_by_path: Default::default(),
584 loading_local_worktrees: Default::default(),
585 local_buffer_ids_by_path: Default::default(),
586 local_buffer_ids_by_entry_id: Default::default(),
587 buffer_snapshots: Default::default(),
588 join_project_response_message_id: 0,
589 client_state: ProjectClientState::Local,
590 opened_buffer: watch::channel(),
591 client_subscriptions: Vec::new(),
592 _subscriptions: vec![
593 cx.observe_global::<SettingsStore>(Self::on_settings_changed),
594 cx.on_release(Self::release),
595 cx.on_app_quit(Self::shutdown_language_servers),
596 ],
597 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
598 _maintain_workspace_config: Self::maintain_workspace_config(cx),
599 active_entry: None,
600 languages,
601 client,
602 user_store,
603 fs,
604 next_entry_id: Default::default(),
605 next_diagnostic_group_id: Default::default(),
606 supplementary_language_servers: HashMap::default(),
607 language_servers: Default::default(),
608 language_server_ids: HashMap::default(),
609 language_server_statuses: Default::default(),
610 last_formatting_failure: None,
611 last_workspace_edits_by_language_server: Default::default(),
612 language_server_watched_paths: HashMap::default(),
613 buffers_being_formatted: Default::default(),
614 buffers_needing_diff: Default::default(),
615 git_diff_debouncer: DebouncedDelay::new(),
616 nonce: StdRng::from_entropy().gen(),
617 terminals: Terminals {
618 local_handles: Vec::new(),
619 },
620 copilot_lsp_subscription,
621 copilot_log_subscription: None,
622 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
623 node: Some(node),
624 default_prettier: DefaultPrettier::default(),
625 prettiers_per_worktree: HashMap::default(),
626 prettier_instances: HashMap::default(),
627 tasks,
628 hosted_project_id: None,
629 }
630 })
631 }
632
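    /// Joins an existing remote project by its id, authenticating and connecting the
    /// client if needed and subscribing to updates for the project entity.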
633 pub async fn remote(
634 remote_id: u64,
635 client: Arc<Client>,
636 user_store: Model<UserStore>,
637 languages: Arc<LanguageRegistry>,
638 fs: Arc<dyn Fs>,
639 cx: AsyncAppContext,
640 ) -> Result<Model<Self>> {
641 client.authenticate_and_connect(true, &cx).await?;
642
643 let subscription = client.subscribe_to_entity(remote_id)?;
644 let response = client
645 .request_envelope(proto::JoinProject {
646 project_id: remote_id,
647 })
648 .await?;
649 Self::from_join_project_response(
650 response,
651 subscription,
652 client,
653 user_store,
654 languages,
655 fs,
656 cx,
657 )
658 .await
659 }
660 async fn from_join_project_response(
661 response: TypedEnvelope<proto::JoinProjectResponse>,
662 subscription: PendingEntitySubscription<Project>,
663 client: Arc<Client>,
664 user_store: Model<UserStore>,
665 languages: Arc<LanguageRegistry>,
666 fs: Arc<dyn Fs>,
667 mut cx: AsyncAppContext,
668 ) -> Result<Model<Self>> {
669 let remote_id = response.payload.project_id;
670 let role = response.payload.role();
671 let this = cx.new_model(|cx| {
672 let replica_id = response.payload.replica_id as ReplicaId;
673 let tasks = Inventory::new(cx);
            // BIG CAUTION NOTE: The order in which we initialize fields here matters, and it should match what's done in Self::local.
            // Otherwise, the worktree ids seen by the remote client can differ from those on the local host.
            // That's because a worktree's identifier is its entity id, which should probably be changed.
677 let mut worktrees = Vec::new();
678 for worktree in response.payload.worktrees {
679 let worktree =
680 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
681 worktrees.push(worktree);
682 }
683
684 let (tx, rx) = mpsc::unbounded();
685 cx.spawn(move |this, cx| Self::send_buffer_ordered_messages(this, rx, cx))
686 .detach();
687 let copilot_lsp_subscription =
688 Copilot::global(cx).map(|copilot| subscribe_for_copilot_events(&copilot, cx));
689 let mut this = Self {
690 worktrees: Vec::new(),
691 buffer_ordered_messages_tx: tx,
692 pending_language_server_update: None,
693 flush_language_server_update: None,
694 loading_buffers_by_path: Default::default(),
695 next_buffer_id: BufferId::new(1).unwrap(),
696 opened_buffer: watch::channel(),
697 shared_buffers: Default::default(),
698 incomplete_remote_buffers: Default::default(),
699 loading_local_worktrees: Default::default(),
700 local_buffer_ids_by_path: Default::default(),
701 local_buffer_ids_by_entry_id: Default::default(),
702 active_entry: None,
703 collaborators: Default::default(),
704 join_project_response_message_id: response.message_id,
705 _maintain_buffer_languages: Self::maintain_buffer_languages(languages.clone(), cx),
706 _maintain_workspace_config: Self::maintain_workspace_config(cx),
707 languages,
708 user_store: user_store.clone(),
709 fs,
710 next_entry_id: Default::default(),
711 next_diagnostic_group_id: Default::default(),
712 client_subscriptions: Default::default(),
713 _subscriptions: vec![
714 cx.on_release(Self::release),
715 cx.on_app_quit(Self::shutdown_language_servers),
716 ],
717 client: client.clone(),
718 client_state: ProjectClientState::Remote {
719 sharing_has_stopped: false,
720 capability: Capability::ReadWrite,
721 remote_id,
722 replica_id,
723 },
724 supplementary_language_servers: HashMap::default(),
725 language_servers: Default::default(),
726 language_server_ids: HashMap::default(),
727 language_server_statuses: response
728 .payload
729 .language_servers
730 .into_iter()
731 .map(|server| {
732 (
733 LanguageServerId(server.id as usize),
734 LanguageServerStatus {
735 name: server.name,
736 pending_work: Default::default(),
737 has_pending_diagnostic_updates: false,
738 progress_tokens: Default::default(),
739 },
740 )
741 })
742 .collect(),
743 last_formatting_failure: None,
744 last_workspace_edits_by_language_server: Default::default(),
745 language_server_watched_paths: HashMap::default(),
746 opened_buffers: Default::default(),
747 buffers_being_formatted: Default::default(),
748 buffers_needing_diff: Default::default(),
749 git_diff_debouncer: DebouncedDelay::new(),
750 buffer_snapshots: Default::default(),
751 nonce: StdRng::from_entropy().gen(),
752 terminals: Terminals {
753 local_handles: Vec::new(),
754 },
755 copilot_lsp_subscription,
756 copilot_log_subscription: None,
757 current_lsp_settings: ProjectSettings::get_global(cx).lsp.clone(),
758 node: None,
759 default_prettier: DefaultPrettier::default(),
760 prettiers_per_worktree: HashMap::default(),
761 prettier_instances: HashMap::default(),
762 tasks,
763 hosted_project_id: None,
764 };
765 this.set_role(role, cx);
766 for worktree in worktrees {
767 let _ = this.add_worktree(&worktree, cx);
768 }
769 this
770 })?;
771 let subscription = subscription.set_model(&this, &mut cx);
772
773 let user_ids = response
774 .payload
775 .collaborators
776 .iter()
777 .map(|peer| peer.user_id)
778 .collect();
779 user_store
780 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))?
781 .await?;
782
783 this.update(&mut cx, |this, cx| {
784 this.set_collaborators_from_proto(response.payload.collaborators, cx)?;
785 this.client_subscriptions.push(subscription);
786 anyhow::Ok(())
787 })??;
788
789 Ok(this)
790 }
791
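    /// Joins a hosted project by its [`ProjectId`], following the same join flow as
    /// [`Project::remote`].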
792 pub async fn hosted(
793 remote_id: ProjectId,
794 user_store: Model<UserStore>,
795 client: Arc<Client>,
796 languages: Arc<LanguageRegistry>,
797 fs: Arc<dyn Fs>,
798 cx: AsyncAppContext,
799 ) -> Result<Model<Self>> {
800 client.authenticate_and_connect(true, &cx).await?;
801
802 let subscription = client.subscribe_to_entity(remote_id.0)?;
803 let response = client
804 .request_envelope(proto::JoinHostedProject {
805 project_id: remote_id.0,
806 })
807 .await?;
808 Self::from_join_project_response(
809 response,
810 subscription,
811 client,
812 user_store,
813 languages,
814 fs,
815 cx,
816 )
817 .await
818 }
819
820 fn release(&mut self, cx: &mut AppContext) {
821 match &self.client_state {
822 ProjectClientState::Local => {}
823 ProjectClientState::Shared { .. } => {
824 let _ = self.unshare_internal(cx);
825 }
826 ProjectClientState::Remote { remote_id, .. } => {
827 let _ = self.client.send(proto::LeaveProject {
828 project_id: *remote_id,
829 });
830 self.disconnected_from_host_internal(cx);
831 }
832 }
833 }
834
835 fn shutdown_language_servers(
836 &mut self,
837 _cx: &mut ModelContext<Self>,
838 ) -> impl Future<Output = ()> {
839 let shutdown_futures = self
840 .language_servers
841 .drain()
842 .map(|(_, server_state)| async {
843 use LanguageServerState::*;
844 match server_state {
845 Running { server, .. } => server.shutdown()?.await,
846 Starting(task) => task.await?.shutdown()?.await,
847 }
848 })
849 .collect::<Vec<_>>();
850
851 async move {
852 futures::future::join_all(shutdown_futures).await;
853 }
854 }
855
856 #[cfg(any(test, feature = "test-support"))]
857 pub async fn test(
858 fs: Arc<dyn Fs>,
859 root_paths: impl IntoIterator<Item = &Path>,
860 cx: &mut gpui::TestAppContext,
861 ) -> Model<Project> {
862 use clock::FakeSystemClock;
863
864 let mut languages = LanguageRegistry::test();
865 languages.set_executor(cx.executor());
866 let clock = Arc::new(FakeSystemClock::default());
867 let http_client = util::http::FakeHttpClient::with_404_response();
868 let client = cx.update(|cx| client::Client::new(clock, http_client.clone(), cx));
869 let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
870 let project = cx.update(|cx| {
871 Project::local(
872 client,
873 node_runtime::FakeNodeRuntime::new(),
874 user_store,
875 Arc::new(languages),
876 fs,
877 cx,
878 )
879 });
880 for path in root_paths {
881 let (tree, _) = project
882 .update(cx, |project, cx| {
883 project.find_or_create_local_worktree(path, true, cx)
884 })
885 .await
886 .unwrap();
887 tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
888 .await;
889 }
890 project
891 }
892
893 fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
894 let mut language_servers_to_start = Vec::new();
895 let mut language_formatters_to_check = Vec::new();
896 for buffer in self.opened_buffers.values() {
897 if let Some(buffer) = buffer.upgrade() {
898 let buffer = buffer.read(cx);
899 let buffer_file = File::from_dyn(buffer.file());
900 let buffer_language = buffer.language();
901 let settings = language_settings(buffer_language, buffer.file(), cx);
902 if let Some(language) = buffer_language {
903 if settings.enable_language_server {
904 if let Some(file) = buffer_file {
905 language_servers_to_start
906 .push((file.worktree.clone(), Arc::clone(language)));
907 }
908 }
909 language_formatters_to_check.push((
910 buffer_file.map(|f| f.worktree_id(cx)),
911 Arc::clone(language),
912 settings.clone(),
913 ));
914 }
915 }
916 }
917
918 let mut language_servers_to_stop = Vec::new();
919 let mut language_servers_to_restart = Vec::new();
920 let languages = self.languages.to_vec();
921
922 let new_lsp_settings = ProjectSettings::get_global(cx).lsp.clone();
923 let current_lsp_settings = &self.current_lsp_settings;
924 for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
925 let language = languages.iter().find_map(|l| {
926 let adapter = self
927 .languages
928 .lsp_adapters(l)
929 .iter()
930 .find(|adapter| &adapter.name == started_lsp_name)?
931 .clone();
932 Some((l, adapter))
933 });
934 if let Some((language, adapter)) = language {
935 let worktree = self.worktree_for_id(*worktree_id, cx);
936 let file = worktree.as_ref().and_then(|tree| {
937 tree.update(cx, |tree, cx| tree.root_file(cx).map(|f| f as _))
938 });
939 if !language_settings(Some(language), file.as_ref(), cx).enable_language_server {
940 language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
941 } else if let Some(worktree) = worktree {
942 let server_name = &adapter.name.0;
943 match (
944 current_lsp_settings.get(server_name),
945 new_lsp_settings.get(server_name),
946 ) {
947 (None, None) => {}
948 (Some(_), None) | (None, Some(_)) => {
949 language_servers_to_restart.push((worktree, Arc::clone(language)));
950 }
951 (Some(current_lsp_settings), Some(new_lsp_settings)) => {
952 if current_lsp_settings != new_lsp_settings {
953 language_servers_to_restart.push((worktree, Arc::clone(language)));
954 }
955 }
956 }
957 }
958 }
959 }
960 self.current_lsp_settings = new_lsp_settings;
961
962 // Stop all newly-disabled language servers.
963 for (worktree_id, adapter_name) in language_servers_to_stop {
964 self.stop_language_server(worktree_id, adapter_name, cx)
965 .detach();
966 }
967
968 let mut prettier_plugins_by_worktree = HashMap::default();
969 for (worktree, language, settings) in language_formatters_to_check {
970 if let Some(plugins) = prettier_support::prettier_plugins_for_language(
971 &self.languages,
972 &language,
973 &settings,
974 ) {
975 prettier_plugins_by_worktree
976 .entry(worktree)
                    .or_default()
978 .extend(plugins);
979 }
980 }
981 for (worktree, prettier_plugins) in prettier_plugins_by_worktree {
982 self.install_default_prettier(worktree, prettier_plugins, cx);
983 }
984
985 // Start all the newly-enabled language servers.
986 for (worktree, language) in language_servers_to_start {
987 self.start_language_servers(&worktree, language, cx);
988 }
989
990 // Restart all language servers with changed initialization options.
991 for (worktree, language) in language_servers_to_restart {
992 self.restart_language_servers(worktree, language, cx);
993 }
994
995 if self.copilot_lsp_subscription.is_none() {
996 if let Some(copilot) = Copilot::global(cx) {
997 for buffer in self.opened_buffers.values() {
998 if let Some(buffer) = buffer.upgrade() {
999 self.register_buffer_with_copilot(&buffer, cx);
1000 }
1001 }
1002 self.copilot_lsp_subscription = Some(subscribe_for_copilot_events(&copilot, cx));
1003 }
1004 }
1005
1006 cx.notify();
1007 }
1008
1009 pub fn buffer_for_id(&self, remote_id: BufferId) -> Option<Model<Buffer>> {
1010 self.opened_buffers
1011 .get(&remote_id)
1012 .and_then(|buffer| buffer.upgrade())
1013 }
1014
1015 pub fn languages(&self) -> &Arc<LanguageRegistry> {
1016 &self.languages
1017 }
1018
1019 pub fn client(&self) -> Arc<Client> {
1020 self.client.clone()
1021 }
1022
1023 pub fn user_store(&self) -> Model<UserStore> {
1024 self.user_store.clone()
1025 }
1026
1027 pub fn opened_buffers(&self) -> Vec<Model<Buffer>> {
1028 self.opened_buffers
1029 .values()
1030 .filter_map(|b| b.upgrade())
1031 .collect()
1032 }
1033
1034 #[cfg(any(test, feature = "test-support"))]
1035 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
1036 let path = path.into();
1037 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
1038 self.opened_buffers.iter().any(|(_, buffer)| {
1039 if let Some(buffer) = buffer.upgrade() {
1040 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1041 if file.worktree == worktree && file.path() == &path.path {
1042 return true;
1043 }
1044 }
1045 }
1046 false
1047 })
1048 } else {
1049 false
1050 }
1051 }
1052
1053 pub fn fs(&self) -> &Arc<dyn Fs> {
1054 &self.fs
1055 }
1056
1057 pub fn remote_id(&self) -> Option<u64> {
1058 match self.client_state {
1059 ProjectClientState::Local => None,
1060 ProjectClientState::Shared { remote_id, .. }
1061 | ProjectClientState::Remote { remote_id, .. } => Some(remote_id),
1062 }
1063 }
1064
1065 pub fn hosted_project_id(&self) -> Option<ProjectId> {
1066 self.hosted_project_id
1067 }
1068
1069 pub fn replica_id(&self) -> ReplicaId {
1070 match self.client_state {
1071 ProjectClientState::Remote { replica_id, .. } => replica_id,
1072 _ => 0,
1073 }
1074 }
1075
1076 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
1077 if let ProjectClientState::Shared { updates_tx, .. } = &mut self.client_state {
1078 updates_tx
1079 .unbounded_send(LocalProjectUpdate::WorktreesChanged)
1080 .ok();
1081 }
1082 cx.notify();
1083 }
1084
1085 pub fn task_inventory(&self) -> &Model<Inventory> {
1086 &self.tasks
1087 }
1088
1089 pub fn collaborators(&self) -> &HashMap<proto::PeerId, Collaborator> {
1090 &self.collaborators
1091 }
1092
1093 pub fn host(&self) -> Option<&Collaborator> {
1094 self.collaborators.values().find(|c| c.replica_id == 0)
1095 }
1096
1097 /// Collect all worktrees, including ones that don't appear in the project panel
1098 pub fn worktrees(&self) -> impl '_ + DoubleEndedIterator<Item = Model<Worktree>> {
1099 self.worktrees
1100 .iter()
1101 .filter_map(move |worktree| worktree.upgrade())
1102 }
1103
1104 /// Collect all user-visible worktrees, the ones that appear in the project panel
1105 pub fn visible_worktrees<'a>(
1106 &'a self,
1107 cx: &'a AppContext,
1108 ) -> impl 'a + DoubleEndedIterator<Item = Model<Worktree>> {
1109 self.worktrees.iter().filter_map(|worktree| {
1110 worktree.upgrade().and_then(|worktree| {
1111 if worktree.read(cx).is_visible() {
1112 Some(worktree)
1113 } else {
1114 None
1115 }
1116 })
1117 })
1118 }
1119
1120 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
1121 self.visible_worktrees(cx)
1122 .map(|tree| tree.read(cx).root_name())
1123 }
1124
1125 pub fn worktree_for_id(&self, id: WorktreeId, cx: &AppContext) -> Option<Model<Worktree>> {
1126 self.worktrees()
1127 .find(|worktree| worktree.read(cx).id() == id)
1128 }
1129
1130 pub fn worktree_for_entry(
1131 &self,
1132 entry_id: ProjectEntryId,
1133 cx: &AppContext,
1134 ) -> Option<Model<Worktree>> {
1135 self.worktrees()
1136 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
1137 }
1138
1139 pub fn worktree_id_for_entry(
1140 &self,
1141 entry_id: ProjectEntryId,
1142 cx: &AppContext,
1143 ) -> Option<WorktreeId> {
1144 self.worktree_for_entry(entry_id, cx)
1145 .map(|worktree| worktree.read(cx).id())
1146 }
1147
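    /// Returns `Some(true)` if any of the paths is contained in a visible worktree,
    /// `Some(false)` if the paths are only contained in invisible worktrees, and
    /// `None` if none of them belong to this project.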
1148 pub fn visibility_for_paths(&self, paths: &[PathBuf], cx: &AppContext) -> Option<bool> {
1149 paths
1150 .iter()
1151 .map(|path| self.visibility_for_path(path, cx))
1152 .max()
1153 .flatten()
1154 }
1155
1156 pub fn visibility_for_path(&self, path: &Path, cx: &AppContext) -> Option<bool> {
1157 self.worktrees()
1158 .filter_map(|worktree| {
1159 let worktree = worktree.read(cx);
1160 worktree
1161 .as_local()?
1162 .contains_abs_path(path)
1163 .then(|| worktree.is_visible())
1164 })
1165 .max()
1166 }
1167
1168 pub fn create_entry(
1169 &mut self,
1170 project_path: impl Into<ProjectPath>,
1171 is_directory: bool,
1172 cx: &mut ModelContext<Self>,
1173 ) -> Task<Result<Option<Entry>>> {
1174 let project_path = project_path.into();
1175 let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
1176 return Task::ready(Ok(None));
1177 };
1178 if self.is_local() {
1179 worktree.update(cx, |worktree, cx| {
1180 worktree
1181 .as_local_mut()
1182 .unwrap()
1183 .create_entry(project_path.path, is_directory, cx)
1184 })
1185 } else {
1186 let client = self.client.clone();
1187 let project_id = self.remote_id().unwrap();
1188 cx.spawn(move |_, mut cx| async move {
1189 let response = client
1190 .request(proto::CreateProjectEntry {
1191 worktree_id: project_path.worktree_id.to_proto(),
1192 project_id,
1193 path: project_path.path.to_string_lossy().into(),
1194 is_directory,
1195 })
1196 .await?;
1197 match response.entry {
1198 Some(entry) => worktree
1199 .update(&mut cx, |worktree, cx| {
1200 worktree.as_remote_mut().unwrap().insert_entry(
1201 entry,
1202 response.worktree_scan_id as usize,
1203 cx,
1204 )
1205 })?
1206 .await
1207 .map(Some),
1208 None => Ok(None),
1209 }
1210 })
1211 }
1212 }
1213
1214 pub fn copy_entry(
1215 &mut self,
1216 entry_id: ProjectEntryId,
1217 new_path: impl Into<Arc<Path>>,
1218 cx: &mut ModelContext<Self>,
1219 ) -> Task<Result<Option<Entry>>> {
1220 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1221 return Task::ready(Ok(None));
1222 };
1223 let new_path = new_path.into();
1224 if self.is_local() {
1225 worktree.update(cx, |worktree, cx| {
1226 worktree
1227 .as_local_mut()
1228 .unwrap()
1229 .copy_entry(entry_id, new_path, cx)
1230 })
1231 } else {
1232 let client = self.client.clone();
1233 let project_id = self.remote_id().unwrap();
1234
1235 cx.spawn(move |_, mut cx| async move {
1236 let response = client
1237 .request(proto::CopyProjectEntry {
1238 project_id,
1239 entry_id: entry_id.to_proto(),
1240 new_path: new_path.to_string_lossy().into(),
1241 })
1242 .await?;
1243 match response.entry {
1244 Some(entry) => worktree
1245 .update(&mut cx, |worktree, cx| {
1246 worktree.as_remote_mut().unwrap().insert_entry(
1247 entry,
1248 response.worktree_scan_id as usize,
1249 cx,
1250 )
1251 })?
1252 .await
1253 .map(Some),
1254 None => Ok(None),
1255 }
1256 })
1257 }
1258 }
1259
1260 pub fn rename_entry(
1261 &mut self,
1262 entry_id: ProjectEntryId,
1263 new_path: impl Into<Arc<Path>>,
1264 cx: &mut ModelContext<Self>,
1265 ) -> Task<Result<Option<Entry>>> {
1266 let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
1267 return Task::ready(Ok(None));
1268 };
1269 let new_path = new_path.into();
1270 if self.is_local() {
1271 worktree.update(cx, |worktree, cx| {
1272 worktree
1273 .as_local_mut()
1274 .unwrap()
1275 .rename_entry(entry_id, new_path, cx)
1276 })
1277 } else {
1278 let client = self.client.clone();
1279 let project_id = self.remote_id().unwrap();
1280
1281 cx.spawn(move |_, mut cx| async move {
1282 let response = client
1283 .request(proto::RenameProjectEntry {
1284 project_id,
1285 entry_id: entry_id.to_proto(),
1286 new_path: new_path.to_string_lossy().into(),
1287 })
1288 .await?;
1289 match response.entry {
1290 Some(entry) => worktree
1291 .update(&mut cx, |worktree, cx| {
1292 worktree.as_remote_mut().unwrap().insert_entry(
1293 entry,
1294 response.worktree_scan_id as usize,
1295 cx,
1296 )
1297 })?
1298 .await
1299 .map(Some),
1300 None => Ok(None),
1301 }
1302 })
1303 }
1304 }
1305
1306 pub fn delete_entry(
1307 &mut self,
1308 entry_id: ProjectEntryId,
1309 cx: &mut ModelContext<Self>,
1310 ) -> Option<Task<Result<()>>> {
1311 let worktree = self.worktree_for_entry(entry_id, cx)?;
1312
1313 cx.emit(Event::DeletedEntry(entry_id));
1314
1315 if self.is_local() {
1316 worktree.update(cx, |worktree, cx| {
1317 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1318 })
1319 } else {
1320 let client = self.client.clone();
1321 let project_id = self.remote_id().unwrap();
1322 Some(cx.spawn(move |_, mut cx| async move {
1323 let response = client
1324 .request(proto::DeleteProjectEntry {
1325 project_id,
1326 entry_id: entry_id.to_proto(),
1327 })
1328 .await?;
1329 worktree
1330 .update(&mut cx, move |worktree, cx| {
1331 worktree.as_remote_mut().unwrap().delete_entry(
1332 entry_id,
1333 response.worktree_scan_id as usize,
1334 cx,
1335 )
1336 })?
1337 .await
1338 }))
1339 }
1340 }
1341
1342 pub fn expand_entry(
1343 &mut self,
1344 worktree_id: WorktreeId,
1345 entry_id: ProjectEntryId,
1346 cx: &mut ModelContext<Self>,
1347 ) -> Option<Task<Result<()>>> {
1348 let worktree = self.worktree_for_id(worktree_id, cx)?;
1349 if self.is_local() {
1350 worktree.update(cx, |worktree, cx| {
1351 worktree.as_local_mut().unwrap().expand_entry(entry_id, cx)
1352 })
1353 } else {
1354 let worktree = worktree.downgrade();
1355 let request = self.client.request(proto::ExpandProjectEntry {
1356 project_id: self.remote_id().unwrap(),
1357 entry_id: entry_id.to_proto(),
1358 });
1359 Some(cx.spawn(move |_, mut cx| async move {
1360 let response = request.await?;
1361 if let Some(worktree) = worktree.upgrade() {
1362 worktree
1363 .update(&mut cx, |worktree, _| {
1364 worktree
1365 .as_remote_mut()
1366 .unwrap()
1367 .wait_for_snapshot(response.worktree_scan_id as usize)
1368 })?
1369 .await?;
1370 }
1371 Ok(())
1372 }))
1373 }
1374 }
1375
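    /// Shares this local project under the given remote project id: upgrades worktree
    /// and buffer handles to strong references, replays language server statuses and
    /// local settings to collaborators, and spawns the task that forwards subsequent
    /// local updates to them.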
1376 pub fn shared(&mut self, project_id: u64, cx: &mut ModelContext<Self>) -> Result<()> {
1377 if !matches!(self.client_state, ProjectClientState::Local) {
1378 return Err(anyhow!("project was already shared"));
1379 }
1380 self.client_subscriptions.push(
1381 self.client
1382 .subscribe_to_entity(project_id)?
1383 .set_model(&cx.handle(), &mut cx.to_async()),
1384 );
1385
1386 for open_buffer in self.opened_buffers.values_mut() {
1387 match open_buffer {
1388 OpenBuffer::Strong(_) => {}
1389 OpenBuffer::Weak(buffer) => {
1390 if let Some(buffer) = buffer.upgrade() {
1391 *open_buffer = OpenBuffer::Strong(buffer);
1392 }
1393 }
1394 OpenBuffer::Operations(_) => unreachable!(),
1395 }
1396 }
1397
1398 for worktree_handle in self.worktrees.iter_mut() {
1399 match worktree_handle {
1400 WorktreeHandle::Strong(_) => {}
1401 WorktreeHandle::Weak(worktree) => {
1402 if let Some(worktree) = worktree.upgrade() {
1403 *worktree_handle = WorktreeHandle::Strong(worktree);
1404 }
1405 }
1406 }
1407 }
1408
1409 for (server_id, status) in &self.language_server_statuses {
1410 self.client
1411 .send(proto::StartLanguageServer {
1412 project_id,
1413 server: Some(proto::LanguageServer {
1414 id: server_id.0 as u64,
1415 name: status.name.clone(),
1416 }),
1417 })
1418 .log_err();
1419 }
1420
1421 let store = cx.global::<SettingsStore>();
1422 for worktree in self.worktrees() {
1423 let worktree_id = worktree.read(cx).id().to_proto();
1424 for (path, content) in store.local_settings(worktree.entity_id().as_u64() as usize) {
1425 self.client
1426 .send(proto::UpdateWorktreeSettings {
1427 project_id,
1428 worktree_id,
1429 path: path.to_string_lossy().into(),
1430 content: Some(content),
1431 })
1432 .log_err();
1433 }
1434 }
1435
1436 let (updates_tx, mut updates_rx) = mpsc::unbounded();
1437 let client = self.client.clone();
1438 self.client_state = ProjectClientState::Shared {
1439 remote_id: project_id,
1440 updates_tx,
1441 _send_updates: cx.spawn(move |this, mut cx| async move {
1442 while let Some(update) = updates_rx.next().await {
1443 match update {
1444 LocalProjectUpdate::WorktreesChanged => {
1445 let worktrees = this.update(&mut cx, |this, _cx| {
1446 this.worktrees().collect::<Vec<_>>()
1447 })?;
1448 let update_project = this
1449 .update(&mut cx, |this, cx| {
1450 this.client.request(proto::UpdateProject {
1451 project_id,
1452 worktrees: this.worktree_metadata_protos(cx),
1453 })
1454 })?
1455 .await;
1456 if update_project.is_ok() {
1457 for worktree in worktrees {
1458 worktree.update(&mut cx, |worktree, cx| {
1459 let worktree = worktree.as_local_mut().unwrap();
1460 worktree.share(project_id, cx).detach_and_log_err(cx)
1461 })?;
1462 }
1463 }
1464 }
1465 LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id } => {
1466 let buffer = this.update(&mut cx, |this, _| {
1467 let buffer = this.opened_buffers.get(&buffer_id).unwrap();
1468 let shared_buffers =
1469 this.shared_buffers.entry(peer_id).or_default();
1470 if shared_buffers.insert(buffer_id) {
1471 if let OpenBuffer::Strong(buffer) = buffer {
1472 Some(buffer.clone())
1473 } else {
1474 None
1475 }
1476 } else {
1477 None
1478 }
1479 })?;
1480
1481 let Some(buffer) = buffer else { continue };
1482 let operations =
1483 buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
1484 let operations = operations.await;
1485 let state = buffer.update(&mut cx, |buffer, _| buffer.to_proto())?;
1486
1487 let initial_state = proto::CreateBufferForPeer {
1488 project_id,
1489 peer_id: Some(peer_id),
1490 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1491 };
1492 if client.send(initial_state).log_err().is_some() {
1493 let client = client.clone();
1494 cx.background_executor()
1495 .spawn(async move {
1496 let mut chunks = split_operations(operations).peekable();
1497 while let Some(chunk) = chunks.next() {
1498 let is_last = chunks.peek().is_none();
1499 client.send(proto::CreateBufferForPeer {
1500 project_id,
1501 peer_id: Some(peer_id),
1502 variant: Some(
1503 proto::create_buffer_for_peer::Variant::Chunk(
1504 proto::BufferChunk {
1505 buffer_id: buffer_id.into(),
1506 operations: chunk,
1507 is_last,
1508 },
1509 ),
1510 ),
1511 })?;
1512 }
1513 anyhow::Ok(())
1514 })
1515 .await
1516 .log_err();
1517 }
1518 }
1519 }
1520 }
1521 Ok(())
1522 }),
1523 };
1524
1525 self.metadata_changed(cx);
1526 cx.emit(Event::RemoteIdChanged(Some(project_id)));
1527 cx.notify();
1528 Ok(())
1529 }
1530
1531 pub fn reshared(
1532 &mut self,
1533 message: proto::ResharedProject,
1534 cx: &mut ModelContext<Self>,
1535 ) -> Result<()> {
1536 self.shared_buffers.clear();
1537 self.set_collaborators_from_proto(message.collaborators, cx)?;
1538 self.metadata_changed(cx);
1539 Ok(())
1540 }
1541
1542 pub fn rejoined(
1543 &mut self,
1544 message: proto::RejoinedProject,
1545 message_id: u32,
1546 cx: &mut ModelContext<Self>,
1547 ) -> Result<()> {
1548 cx.update_global::<SettingsStore, _>(|store, cx| {
1549 for worktree in &self.worktrees {
1550 store
1551 .clear_local_settings(worktree.handle_id(), cx)
1552 .log_err();
1553 }
1554 });
1555
1556 self.join_project_response_message_id = message_id;
1557 self.set_worktrees_from_proto(message.worktrees, cx)?;
1558 self.set_collaborators_from_proto(message.collaborators, cx)?;
1559 self.language_server_statuses = message
1560 .language_servers
1561 .into_iter()
1562 .map(|server| {
1563 (
1564 LanguageServerId(server.id as usize),
1565 LanguageServerStatus {
1566 name: server.name,
1567 pending_work: Default::default(),
1568 has_pending_diagnostic_updates: false,
1569 progress_tokens: Default::default(),
1570 },
1571 )
1572 })
1573 .collect();
1574 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
1575 .unwrap();
1576 cx.notify();
1577 Ok(())
1578 }
1579
1580 pub fn unshare(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
1581 self.unshare_internal(cx)?;
1582 self.metadata_changed(cx);
1583 cx.notify();
1584 Ok(())
1585 }
1586
1587 fn unshare_internal(&mut self, cx: &mut AppContext) -> Result<()> {
1588 if self.is_remote() {
1589 return Err(anyhow!("attempted to unshare a remote project"));
1590 }
1591
1592 if let ProjectClientState::Shared { remote_id, .. } = self.client_state {
1593 self.client_state = ProjectClientState::Local;
1594 self.collaborators.clear();
1595 self.shared_buffers.clear();
1596 self.client_subscriptions.clear();
1597
1598 for worktree_handle in self.worktrees.iter_mut() {
1599 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1600 let is_visible = worktree.update(cx, |worktree, _| {
1601 worktree.as_local_mut().unwrap().unshare();
1602 worktree.is_visible()
1603 });
1604 if !is_visible {
1605 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1606 }
1607 }
1608 }
1609
1610 for open_buffer in self.opened_buffers.values_mut() {
1611 // Wake up any tasks waiting for peers' edits to this buffer.
1612 if let Some(buffer) = open_buffer.upgrade() {
1613 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1614 }
1615
1616 if let OpenBuffer::Strong(buffer) = open_buffer {
1617 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1618 }
1619 }
1620
1621 self.client.send(proto::UnshareProject {
1622 project_id: remote_id,
1623 })?;
1624
1625 Ok(())
1626 } else {
1627 Err(anyhow!("attempted to unshare an unshared project"))
1628 }
1629 }
1630
1631 pub fn disconnected_from_host(&mut self, cx: &mut ModelContext<Self>) {
1632 self.disconnected_from_host_internal(cx);
1633 cx.emit(Event::DisconnectedFromHost);
1634 cx.notify();
1635 }
1636
1637 pub fn set_role(&mut self, role: proto::ChannelRole, cx: &mut ModelContext<Self>) {
1638 let new_capability =
1639 if role == proto::ChannelRole::Member || role == proto::ChannelRole::Admin {
1640 Capability::ReadWrite
1641 } else {
1642 Capability::ReadOnly
1643 };
1644 if let ProjectClientState::Remote { capability, .. } = &mut self.client_state {
1645 if *capability == new_capability {
1646 return;
1647 }
1648
1649 *capability = new_capability;
1650 for buffer in self.opened_buffers() {
1651 buffer.update(cx, |buffer, cx| buffer.set_capability(new_capability, cx));
1652 }
1653 }
1654 }
1655
1656 fn disconnected_from_host_internal(&mut self, cx: &mut AppContext) {
1657 if let ProjectClientState::Remote {
1658 sharing_has_stopped,
1659 ..
1660 } = &mut self.client_state
1661 {
1662 *sharing_has_stopped = true;
1663
1664 self.collaborators.clear();
1665
1666 for worktree in &self.worktrees {
1667 if let Some(worktree) = worktree.upgrade() {
1668 worktree.update(cx, |worktree, _| {
1669 if let Some(worktree) = worktree.as_remote_mut() {
1670 worktree.disconnected_from_host();
1671 }
1672 });
1673 }
1674 }
1675
1676 for open_buffer in self.opened_buffers.values_mut() {
1677 // Wake up any tasks waiting for peers' edits to this buffer.
1678 if let Some(buffer) = open_buffer.upgrade() {
1679 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1680 }
1681
1682 if let OpenBuffer::Strong(buffer) = open_buffer {
1683 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1684 }
1685 }
1686
1687 // Wake up all futures currently waiting on a buffer to get opened,
1688 // to give them a chance to fail now that we've disconnected.
1689 *self.opened_buffer.0.borrow_mut() = ();
1690 }
1691 }
1692
1693 pub fn close(&mut self, cx: &mut ModelContext<Self>) {
1694 cx.emit(Event::Closed);
1695 }
1696
1697 pub fn is_disconnected(&self) -> bool {
1698 match &self.client_state {
1699 ProjectClientState::Remote {
1700 sharing_has_stopped,
1701 ..
1702 } => *sharing_has_stopped,
1703 _ => false,
1704 }
1705 }
1706
1707 pub fn capability(&self) -> Capability {
1708 match &self.client_state {
1709 ProjectClientState::Remote { capability, .. } => *capability,
1710 ProjectClientState::Shared { .. } | ProjectClientState::Local => Capability::ReadWrite,
1711 }
1712 }
1713
1714 pub fn is_read_only(&self) -> bool {
1715 self.is_disconnected() || self.capability() == Capability::ReadOnly
1716 }
1717
1718 pub fn is_local(&self) -> bool {
1719 match &self.client_state {
1720 ProjectClientState::Local | ProjectClientState::Shared { .. } => true,
1721 ProjectClientState::Remote { .. } => false,
1722 }
1723 }
1724
1725 pub fn is_remote(&self) -> bool {
1726 !self.is_local()
1727 }
1728
1729 pub fn create_buffer(
1730 &mut self,
1731 text: &str,
1732 language: Option<Arc<Language>>,
1733 cx: &mut ModelContext<Self>,
1734 ) -> Result<Model<Buffer>> {
1735 if self.is_remote() {
1736 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1737 }
1738 let id = self.next_buffer_id.next();
1739 let buffer = cx.new_model(|cx| {
1740 Buffer::new(self.replica_id(), id, text)
1741 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1742 });
1743 self.register_buffer(&buffer, cx)?;
1744 Ok(buffer)
1745 }
1746
1747 pub fn open_path(
1748 &mut self,
1749 path: ProjectPath,
1750 cx: &mut ModelContext<Self>,
1751 ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
1752 let task = self.open_buffer(path.clone(), cx);
1753 cx.spawn(move |_, cx| async move {
1754 let buffer = task.await?;
1755 let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
1756 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1757 })?;
1758
1759 let buffer: &AnyModel = &buffer;
1760 Ok((project_entry_id, buffer.clone()))
1761 })
1762 }
1763
1764 pub fn open_local_buffer(
1765 &mut self,
1766 abs_path: impl AsRef<Path>,
1767 cx: &mut ModelContext<Self>,
1768 ) -> Task<Result<Model<Buffer>>> {
1769 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1770 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1771 } else {
1772 Task::ready(Err(anyhow!("no such path")))
1773 }
1774 }
1775
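    /// Opens the buffer at the given project path, returning the already-open buffer
    /// for that path if there is one and deduplicating concurrent loads of the same
    /// path. A rough usage sketch (identifiers are illustrative):
    ///
    /// ```ignore
    /// let path = ProjectPath { worktree_id, path: Path::new("src/main.rs").into() };
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer(path, cx))
    ///     .await?;
    /// ```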
1776 pub fn open_buffer(
1777 &mut self,
1778 path: impl Into<ProjectPath>,
1779 cx: &mut ModelContext<Self>,
1780 ) -> Task<Result<Model<Buffer>>> {
1781 let project_path = path.into();
1782 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1783 worktree
1784 } else {
1785 return Task::ready(Err(anyhow!("no such worktree")));
1786 };
1787
1788 // If there is already a buffer for the given path, then return it.
1789 let existing_buffer = self.get_open_buffer(&project_path, cx);
1790 if let Some(existing_buffer) = existing_buffer {
1791 return Task::ready(Ok(existing_buffer));
1792 }
1793
1794 let loading_watch = match self.loading_buffers_by_path.entry(project_path.clone()) {
1795 // If the given path is already being loaded, then wait for that existing
1796 // task to complete and return the same buffer.
1797 hash_map::Entry::Occupied(e) => e.get().clone(),
1798
1799 // Otherwise, record the fact that this path is now being loaded.
1800 hash_map::Entry::Vacant(entry) => {
1801 let (mut tx, rx) = postage::watch::channel();
1802 entry.insert(rx.clone());
1803
1804 let load_buffer = if worktree.read(cx).is_local() {
1805 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1806 } else {
1807 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1808 };
1809
1810 let project_path = project_path.clone();
1811 cx.spawn(move |this, mut cx| async move {
1812 let load_result = load_buffer.await;
1813 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1814 // Record the fact that the buffer is no longer loading.
1815 this.loading_buffers_by_path.remove(&project_path);
1816 let buffer = load_result.map_err(Arc::new)?;
1817 Ok(buffer)
1818 })?);
1819 anyhow::Ok(())
1820 })
1821 .detach();
1822 rx
1823 }
1824 };
1825
1826 cx.background_executor().spawn(async move {
1827 wait_for_loading_buffer(loading_watch)
1828 .await
1829 .map_err(|e| e.cloned())
1830 })
1831 }
1832
1833 fn open_local_buffer_internal(
1834 &mut self,
1835 path: &Arc<Path>,
1836 worktree: &Model<Worktree>,
1837 cx: &mut ModelContext<Self>,
1838 ) -> Task<Result<Model<Buffer>>> {
1839 let buffer_id = self.next_buffer_id.next();
1840 let load_buffer = worktree.update(cx, |worktree, cx| {
1841 let worktree = worktree.as_local_mut().unwrap();
1842 worktree.load_buffer(buffer_id, path, cx)
1843 });
1844 cx.spawn(move |this, mut cx| async move {
1845 let buffer = load_buffer.await?;
1846 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))??;
1847 Ok(buffer)
1848 })
1849 }
1850
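    // Asks the host to open the buffer via an `OpenBufferByPath` request, then waits
    // for the corresponding remote buffer state to arrive before returning it.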
1851 fn open_remote_buffer_internal(
1852 &mut self,
1853 path: &Arc<Path>,
1854 worktree: &Model<Worktree>,
1855 cx: &mut ModelContext<Self>,
1856 ) -> Task<Result<Model<Buffer>>> {
1857 let rpc = self.client.clone();
1858 let project_id = self.remote_id().unwrap();
1859 let remote_worktree_id = worktree.read(cx).id();
1860 let path = path.clone();
1861 let path_string = path.to_string_lossy().to_string();
1862 cx.spawn(move |this, mut cx| async move {
1863 let response = rpc
1864 .request(proto::OpenBufferByPath {
1865 project_id,
1866 worktree_id: remote_worktree_id.to_proto(),
1867 path: path_string,
1868 })
1869 .await?;
1870 let buffer_id = BufferId::new(response.buffer_id)?;
1871 this.update(&mut cx, |this, cx| {
1872 this.wait_for_remote_buffer(buffer_id, cx)
1873 })?
1874 .await
1875 })
1876 }
1877
1878     /// `LanguageServerName` is owned because it is inserted into a map.
1879 pub fn open_local_buffer_via_lsp(
1880 &mut self,
1881 abs_path: lsp::Url,
1882 language_server_id: LanguageServerId,
1883 language_server_name: LanguageServerName,
1884 cx: &mut ModelContext<Self>,
1885 ) -> Task<Result<Model<Buffer>>> {
1886 cx.spawn(move |this, mut cx| async move {
1887 let abs_path = abs_path
1888 .to_file_path()
1889 .map_err(|_| anyhow!("can't convert URI to path"))?;
1890 let (worktree, relative_path) = if let Some(result) =
1891 this.update(&mut cx, |this, cx| this.find_local_worktree(&abs_path, cx))?
1892 {
1893 result
1894 } else {
1895 let worktree = this
1896 .update(&mut cx, |this, cx| {
1897 this.create_local_worktree(&abs_path, false, cx)
1898 })?
1899 .await?;
1900 this.update(&mut cx, |this, cx| {
1901 this.language_server_ids.insert(
1902 (worktree.read(cx).id(), language_server_name),
1903 language_server_id,
1904 );
1905 })
1906 .ok();
1907 (worktree, PathBuf::new())
1908 };
1909
1910 let project_path = ProjectPath {
1911 worktree_id: worktree.update(&mut cx, |worktree, _| worktree.id())?,
1912 path: relative_path.into(),
1913 };
1914 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))?
1915 .await
1916 })
1917 }
1918
1919 pub fn open_buffer_by_id(
1920 &mut self,
1921 id: BufferId,
1922 cx: &mut ModelContext<Self>,
1923 ) -> Task<Result<Model<Buffer>>> {
1924 if let Some(buffer) = self.buffer_for_id(id) {
1925 Task::ready(Ok(buffer))
1926 } else if self.is_local() {
1927 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1928 } else if let Some(project_id) = self.remote_id() {
1929 let request = self.client.request(proto::OpenBufferById {
1930 project_id,
1931 id: id.into(),
1932 });
1933 cx.spawn(move |this, mut cx| async move {
1934 let buffer_id = BufferId::new(request.await?.buffer_id)?;
1935 this.update(&mut cx, |this, cx| {
1936 this.wait_for_remote_buffer(buffer_id, cx)
1937 })?
1938 .await
1939 })
1940 } else {
1941 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1942 }
1943 }
1944
1945 pub fn save_buffers(
1946 &self,
1947 buffers: HashSet<Model<Buffer>>,
1948 cx: &mut ModelContext<Self>,
1949 ) -> Task<Result<()>> {
1950 cx.spawn(move |this, mut cx| async move {
1951 let save_tasks = buffers.into_iter().filter_map(|buffer| {
1952 this.update(&mut cx, |this, cx| this.save_buffer(buffer, cx))
1953 .ok()
1954 });
1955 try_join_all(save_tasks).await?;
1956 Ok(())
1957 })
1958 }
1959
1960 pub fn save_buffer(
1961 &self,
1962 buffer: Model<Buffer>,
1963 cx: &mut ModelContext<Self>,
1964 ) -> Task<Result<()>> {
1965 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1966 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1967 };
1968 let worktree = file.worktree.clone();
1969 let path = file.path.clone();
1970 worktree.update(cx, |worktree, cx| match worktree {
1971 Worktree::Local(worktree) => worktree.save_buffer(buffer, path, false, cx),
1972 Worktree::Remote(worktree) => worktree.save_buffer(buffer, cx),
1973 })
1974 }
1975
1976 pub fn save_buffer_as(
1977 &mut self,
1978 buffer: Model<Buffer>,
1979 abs_path: PathBuf,
1980 cx: &mut ModelContext<Self>,
1981 ) -> Task<Result<()>> {
1982 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1983 let old_file = File::from_dyn(buffer.read(cx).file())
1984 .filter(|f| f.is_local())
1985 .cloned();
1986 cx.spawn(move |this, mut cx| async move {
1987 if let Some(old_file) = &old_file {
1988 this.update(&mut cx, |this, cx| {
1989 this.unregister_buffer_from_language_servers(&buffer, old_file, cx);
1990 })?;
1991 }
1992 let (worktree, path) = worktree_task.await?;
1993 worktree
1994 .update(&mut cx, |worktree, cx| match worktree {
1995 Worktree::Local(worktree) => {
1996 worktree.save_buffer(buffer.clone(), path.into(), true, cx)
1997 }
1998                 Worktree::Remote(_) => panic!("cannot save remote buffers as new files"),
1999 })?
2000 .await?;
2001
2002 this.update(&mut cx, |this, cx| {
2003 this.detect_language_for_buffer(&buffer, cx);
2004 this.register_buffer_with_language_servers(&buffer, cx);
2005 })?;
2006 Ok(())
2007 })
2008 }
2009
2010 pub fn get_open_buffer(
2011 &mut self,
2012 path: &ProjectPath,
2013 cx: &mut ModelContext<Self>,
2014 ) -> Option<Model<Buffer>> {
2015 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
2016 self.opened_buffers.values().find_map(|buffer| {
2017 let buffer = buffer.upgrade()?;
2018 let file = File::from_dyn(buffer.read(cx).file())?;
2019 if file.worktree == worktree && file.path() == &path.path {
2020 Some(buffer)
2021 } else {
2022 None
2023 }
2024 })
2025 }
2026
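    // Adds a buffer to `opened_buffers`, holding it strongly while the project is shared
    // or remote and weakly otherwise. Applies any operations that were queued for the
    // buffer, subscribes to its events, records path and entry-id lookups for local files,
    // and registers the buffer with language servers and Copilot. When the buffer is
    // released, open language servers are notified with `DidCloseTextDocument`.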
2027 fn register_buffer(
2028 &mut self,
2029 buffer: &Model<Buffer>,
2030 cx: &mut ModelContext<Self>,
2031 ) -> Result<()> {
2032 self.request_buffer_diff_recalculation(buffer, cx);
2033 buffer.update(cx, |buffer, _| {
2034 buffer.set_language_registry(self.languages.clone())
2035 });
2036
2037 let remote_id = buffer.read(cx).remote_id();
2038 let is_remote = self.is_remote();
2039 let open_buffer = if is_remote || self.is_shared() {
2040 OpenBuffer::Strong(buffer.clone())
2041 } else {
2042 OpenBuffer::Weak(buffer.downgrade())
2043 };
2044
2045 match self.opened_buffers.entry(remote_id) {
2046 hash_map::Entry::Vacant(entry) => {
2047 entry.insert(open_buffer);
2048 }
2049 hash_map::Entry::Occupied(mut entry) => {
2050 if let OpenBuffer::Operations(operations) = entry.get_mut() {
2051 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx))?;
2052 } else if entry.get().upgrade().is_some() {
2053 if is_remote {
2054 return Ok(());
2055 } else {
2056 debug_panic!("buffer {} was already registered", remote_id);
2057 Err(anyhow!("buffer {} was already registered", remote_id))?;
2058 }
2059 }
2060 entry.insert(open_buffer);
2061 }
2062 }
2063 cx.subscribe(buffer, |this, buffer, event, cx| {
2064 this.on_buffer_event(buffer, event, cx);
2065 })
2066 .detach();
2067
2068 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2069 if file.is_local {
2070 self.local_buffer_ids_by_path.insert(
2071 ProjectPath {
2072 worktree_id: file.worktree_id(cx),
2073 path: file.path.clone(),
2074 },
2075 remote_id,
2076 );
2077
2078 if let Some(entry_id) = file.entry_id {
2079 self.local_buffer_ids_by_entry_id
2080 .insert(entry_id, remote_id);
2081 }
2082 }
2083 }
2084
2085 self.detect_language_for_buffer(buffer, cx);
2086 self.register_buffer_with_language_servers(buffer, cx);
2087 self.register_buffer_with_copilot(buffer, cx);
2088 cx.observe_release(buffer, |this, buffer, cx| {
2089 if let Some(file) = File::from_dyn(buffer.file()) {
2090 if file.is_local() {
2091 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2092 for server in this.language_servers_for_buffer(buffer, cx) {
2093 server
2094 .1
2095 .notify::<lsp::notification::DidCloseTextDocument>(
2096 lsp::DidCloseTextDocumentParams {
2097 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
2098 },
2099 )
2100 .log_err();
2101 }
2102 }
2103 }
2104 })
2105 .detach();
2106
2107 *self.opened_buffer.0.borrow_mut() = ();
2108 Ok(())
2109 }
2110
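    // For a local buffer, seeds diagnostics previously stored on the worktree, then sends
    // `DidOpenTextDocument` to every running language server that applies to the buffer's
    // language, records an initial LSP snapshot for incremental syncing, and sets the
    // buffer's completion triggers from the server's capabilities.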
2111 fn register_buffer_with_language_servers(
2112 &mut self,
2113 buffer_handle: &Model<Buffer>,
2114 cx: &mut ModelContext<Self>,
2115 ) {
2116 let buffer = buffer_handle.read(cx);
2117 let buffer_id = buffer.remote_id();
2118
2119 if let Some(file) = File::from_dyn(buffer.file()) {
2120 if !file.is_local() {
2121 return;
2122 }
2123
2124 let abs_path = file.abs_path(cx);
2125 let uri = lsp::Url::from_file_path(&abs_path)
2126 .unwrap_or_else(|()| panic!("Failed to register file {abs_path:?}"));
2127 let initial_snapshot = buffer.text_snapshot();
2128 let language = buffer.language().cloned();
2129 let worktree_id = file.worktree_id(cx);
2130
2131 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
2132 for (server_id, diagnostics) in local_worktree.diagnostics_for_path(file.path()) {
2133 self.update_buffer_diagnostics(buffer_handle, server_id, None, diagnostics, cx)
2134 .log_err();
2135 }
2136 }
2137
2138 if let Some(language) = language {
2139 for adapter in self.languages.lsp_adapters(&language) {
2140 let language_id = adapter.language_ids.get(language.name().as_ref()).cloned();
2141 let server = self
2142 .language_server_ids
2143 .get(&(worktree_id, adapter.name.clone()))
2144 .and_then(|id| self.language_servers.get(id))
2145 .and_then(|server_state| {
2146 if let LanguageServerState::Running { server, .. } = server_state {
2147 Some(server.clone())
2148 } else {
2149 None
2150 }
2151 });
2152 let server = match server {
2153 Some(server) => server,
2154 None => continue,
2155 };
2156
2157 server
2158 .notify::<lsp::notification::DidOpenTextDocument>(
2159 lsp::DidOpenTextDocumentParams {
2160 text_document: lsp::TextDocumentItem::new(
2161 uri.clone(),
2162 language_id.unwrap_or_default(),
2163 0,
2164 initial_snapshot.text(),
2165 ),
2166 },
2167 )
2168 .log_err();
2169
2170 buffer_handle.update(cx, |buffer, cx| {
2171 buffer.set_completion_triggers(
2172 server
2173 .capabilities()
2174 .completion_provider
2175 .as_ref()
2176 .and_then(|provider| provider.trigger_characters.clone())
2177 .unwrap_or_default(),
2178 cx,
2179 );
2180 });
2181
2182 let snapshot = LspBufferSnapshot {
2183 version: 0,
2184 snapshot: initial_snapshot.clone(),
2185 };
2186 self.buffer_snapshots
2187 .entry(buffer_id)
2188 .or_default()
2189 .insert(server.server_id(), vec![snapshot]);
2190 }
2191 }
2192 }
2193 }
2194
2195 fn unregister_buffer_from_language_servers(
2196 &mut self,
2197 buffer: &Model<Buffer>,
2198 old_file: &File,
2199 cx: &mut ModelContext<Self>,
2200 ) {
2201 let old_path = match old_file.as_local() {
2202 Some(local) => local.abs_path(cx),
2203 None => return,
2204 };
2205
2206 buffer.update(cx, |buffer, cx| {
2207 let worktree_id = old_file.worktree_id(cx);
2208 let ids = &self.language_server_ids;
2209
2210 if let Some(language) = buffer.language().cloned() {
2211 for adapter in self.languages.lsp_adapters(&language) {
2212 if let Some(server_id) = ids.get(&(worktree_id, adapter.name.clone())) {
2213 buffer.update_diagnostics(*server_id, Default::default(), cx);
2214 }
2215 }
2216 }
2217
2218 self.buffer_snapshots.remove(&buffer.remote_id());
2219 let file_url = lsp::Url::from_file_path(old_path).unwrap();
2220 for (_, language_server) in self.language_servers_for_buffer(buffer, cx) {
2221 language_server
2222 .notify::<lsp::notification::DidCloseTextDocument>(
2223 lsp::DidCloseTextDocumentParams {
2224 text_document: lsp::TextDocumentIdentifier::new(file_url.clone()),
2225 },
2226 )
2227 .log_err();
2228 }
2229 });
2230 }
2231
2232 fn register_buffer_with_copilot(
2233 &self,
2234 buffer_handle: &Model<Buffer>,
2235 cx: &mut ModelContext<Self>,
2236 ) {
2237 if let Some(copilot) = Copilot::global(cx) {
2238 copilot.update(cx, |copilot, cx| copilot.register_buffer(buffer_handle, cx));
2239 }
2240 }
2241
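    // Drains `BufferOrderedMessage`s in batches, accumulating buffer operations per buffer
    // and sending them over RPC as `UpdateBuffer` requests. Language server updates force
    // a flush of pending operations first so ordering is preserved, and a failed
    // `UpdateBuffer` on a guest marks the project as needing a resync with the host.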
2242 async fn send_buffer_ordered_messages(
2243 this: WeakModel<Self>,
2244 rx: UnboundedReceiver<BufferOrderedMessage>,
2245 mut cx: AsyncAppContext,
2246 ) -> Result<()> {
2247 const MAX_BATCH_SIZE: usize = 128;
2248
2249 let mut operations_by_buffer_id = HashMap::default();
2250 async fn flush_operations(
2251 this: &WeakModel<Project>,
2252 operations_by_buffer_id: &mut HashMap<BufferId, Vec<proto::Operation>>,
2253 needs_resync_with_host: &mut bool,
2254 is_local: bool,
2255 cx: &mut AsyncAppContext,
2256 ) -> Result<()> {
2257 for (buffer_id, operations) in operations_by_buffer_id.drain() {
2258 let request = this.update(cx, |this, _| {
2259 let project_id = this.remote_id()?;
2260 Some(this.client.request(proto::UpdateBuffer {
2261 buffer_id: buffer_id.into(),
2262 project_id,
2263 operations,
2264 }))
2265 })?;
2266 if let Some(request) = request {
2267 if request.await.is_err() && !is_local {
2268 *needs_resync_with_host = true;
2269 break;
2270 }
2271 }
2272 }
2273 Ok(())
2274 }
2275
2276 let mut needs_resync_with_host = false;
2277 let mut changes = rx.ready_chunks(MAX_BATCH_SIZE);
2278
2279 while let Some(changes) = changes.next().await {
2280 let is_local = this.update(&mut cx, |this, _| this.is_local())?;
2281
2282 for change in changes {
2283 match change {
2284 BufferOrderedMessage::Operation {
2285 buffer_id,
2286 operation,
2287 } => {
2288 if needs_resync_with_host {
2289 continue;
2290 }
2291
2292 operations_by_buffer_id
2293 .entry(buffer_id)
2294 .or_insert(Vec::new())
2295 .push(operation);
2296 }
2297
2298 BufferOrderedMessage::Resync => {
2299 operations_by_buffer_id.clear();
2300 if this
2301 .update(&mut cx, |this, cx| this.synchronize_remote_buffers(cx))?
2302 .await
2303 .is_ok()
2304 {
2305 needs_resync_with_host = false;
2306 }
2307 }
2308
2309 BufferOrderedMessage::LanguageServerUpdate {
2310 language_server_id,
2311 message,
2312 } => {
2313 flush_operations(
2314 &this,
2315 &mut operations_by_buffer_id,
2316 &mut needs_resync_with_host,
2317 is_local,
2318 &mut cx,
2319 )
2320 .await?;
2321
2322 this.update(&mut cx, |this, _| {
2323 if let Some(project_id) = this.remote_id() {
2324 this.client
2325 .send(proto::UpdateLanguageServer {
2326 project_id,
2327 language_server_id: language_server_id.0 as u64,
2328 variant: Some(message),
2329 })
2330 .log_err();
2331 }
2332 })?;
2333 }
2334 }
2335 }
2336
2337 flush_operations(
2338 &this,
2339 &mut operations_by_buffer_id,
2340 &mut needs_resync_with_host,
2341 is_local,
2342 &mut cx,
2343 )
2344 .await?;
2345 }
2346
2347 Ok(())
2348 }
2349
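    // Reacts to buffer events: forwards edit operations to collaborators, sends incremental
    // or full `DidChangeTextDocument` notifications depending on the server's sync
    // capabilities, notifies servers on save (optionally simulating disk-based diagnostics
    // completion), and keeps path and entry-id lookups up to date when a buffer's file
    // handle changes.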
2350 fn on_buffer_event(
2351 &mut self,
2352 buffer: Model<Buffer>,
2353 event: &BufferEvent,
2354 cx: &mut ModelContext<Self>,
2355 ) -> Option<()> {
2356 if matches!(
2357 event,
2358 BufferEvent::Edited { .. } | BufferEvent::Reloaded | BufferEvent::DiffBaseChanged
2359 ) {
2360 self.request_buffer_diff_recalculation(&buffer, cx);
2361 }
2362
2363 match event {
2364 BufferEvent::Operation(operation) => {
2365 self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation {
2366 buffer_id: buffer.read(cx).remote_id(),
2367 operation: language::proto::serialize_operation(operation),
2368 })
2369 .ok();
2370 }
2371
2372 BufferEvent::Edited { .. } => {
2373 let buffer = buffer.read(cx);
2374 let file = File::from_dyn(buffer.file())?;
2375 let abs_path = file.as_local()?.abs_path(cx);
2376 let uri = lsp::Url::from_file_path(abs_path).unwrap();
2377 let next_snapshot = buffer.text_snapshot();
2378
2379 let language_servers: Vec<_> = self
2380 .language_servers_for_buffer(buffer, cx)
2381 .map(|i| i.1.clone())
2382 .collect();
2383
2384 for language_server in language_servers {
2385 let language_server = language_server.clone();
2386
2387 let buffer_snapshots = self
2388 .buffer_snapshots
2389 .get_mut(&buffer.remote_id())
2390 .and_then(|m| m.get_mut(&language_server.server_id()))?;
2391 let previous_snapshot = buffer_snapshots.last()?;
2392
2393 let build_incremental_change = || {
2394 buffer
2395 .edits_since::<(PointUtf16, usize)>(
2396 previous_snapshot.snapshot.version(),
2397 )
2398 .map(|edit| {
2399 let edit_start = edit.new.start.0;
2400 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
2401 let new_text = next_snapshot
2402 .text_for_range(edit.new.start.1..edit.new.end.1)
2403 .collect();
2404 lsp::TextDocumentContentChangeEvent {
2405 range: Some(lsp::Range::new(
2406 point_to_lsp(edit_start),
2407 point_to_lsp(edit_end),
2408 )),
2409 range_length: None,
2410 text: new_text,
2411 }
2412 })
2413 .collect()
2414 };
2415
2416 let document_sync_kind = language_server
2417 .capabilities()
2418 .text_document_sync
2419 .as_ref()
2420 .and_then(|sync| match sync {
2421 lsp::TextDocumentSyncCapability::Kind(kind) => Some(*kind),
2422 lsp::TextDocumentSyncCapability::Options(options) => options.change,
2423 });
2424
2425 let content_changes: Vec<_> = match document_sync_kind {
2426 Some(lsp::TextDocumentSyncKind::FULL) => {
2427 vec![lsp::TextDocumentContentChangeEvent {
2428 range: None,
2429 range_length: None,
2430 text: next_snapshot.text(),
2431 }]
2432 }
2433 Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
2434 _ => {
2435 #[cfg(any(test, feature = "test-support"))]
2436 {
2437 build_incremental_change()
2438 }
2439
2440 #[cfg(not(any(test, feature = "test-support")))]
2441 {
2442 continue;
2443 }
2444 }
2445 };
2446
2447 let next_version = previous_snapshot.version + 1;
2448
2449 buffer_snapshots.push(LspBufferSnapshot {
2450 version: next_version,
2451 snapshot: next_snapshot.clone(),
2452 });
2453
2454 language_server
2455 .notify::<lsp::notification::DidChangeTextDocument>(
2456 lsp::DidChangeTextDocumentParams {
2457 text_document: lsp::VersionedTextDocumentIdentifier::new(
2458 uri.clone(),
2459 next_version,
2460 ),
2461 content_changes,
2462 },
2463 )
2464 .log_err();
2465 }
2466 }
2467
2468 BufferEvent::Saved => {
2469 let file = File::from_dyn(buffer.read(cx).file())?;
2470 let worktree_id = file.worktree_id(cx);
2471 let abs_path = file.as_local()?.abs_path(cx);
2472 let text_document = lsp::TextDocumentIdentifier {
2473 uri: lsp::Url::from_file_path(abs_path).unwrap(),
2474 };
2475
2476 for (_, _, server) in self.language_servers_for_worktree(worktree_id) {
2477 let text = include_text(server.as_ref()).then(|| buffer.read(cx).text());
2478
2479 server
2480 .notify::<lsp::notification::DidSaveTextDocument>(
2481 lsp::DidSaveTextDocumentParams {
2482 text_document: text_document.clone(),
2483 text,
2484 },
2485 )
2486 .log_err();
2487 }
2488
2489 let language_server_ids = self.language_server_ids_for_buffer(buffer.read(cx), cx);
2490 for language_server_id in language_server_ids {
2491 if let Some(LanguageServerState::Running {
2492 adapter,
2493 simulate_disk_based_diagnostics_completion,
2494 ..
2495 }) = self.language_servers.get_mut(&language_server_id)
2496 {
2497 // After saving a buffer using a language server that doesn't provide
2498 // a disk-based progress token, kick off a timer that will reset every
2499 // time the buffer is saved. If the timer eventually fires, simulate
2500 // disk-based diagnostics being finished so that other pieces of UI
2501 // (e.g., project diagnostics view, diagnostic status bar) can update.
2502 // We don't emit an event right away because the language server might take
2503 // some time to publish diagnostics.
2504 if adapter.disk_based_diagnostics_progress_token.is_none() {
2505 const DISK_BASED_DIAGNOSTICS_DEBOUNCE: Duration =
2506 Duration::from_secs(1);
2507
2508 let task = cx.spawn(move |this, mut cx| async move {
2509 cx.background_executor().timer(DISK_BASED_DIAGNOSTICS_DEBOUNCE).await;
2510 if let Some(this) = this.upgrade() {
2511 this.update(&mut cx, |this, cx| {
2512 this.disk_based_diagnostics_finished(
2513 language_server_id,
2514 cx,
2515 );
2516 this.enqueue_buffer_ordered_message(
2517 BufferOrderedMessage::LanguageServerUpdate {
2518 language_server_id,
2519                                     message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(Default::default()),
2520 },
2521 )
2522 .ok();
2523 }).ok();
2524 }
2525 });
2526 *simulate_disk_based_diagnostics_completion = Some(task);
2527 }
2528 }
2529 }
2530 }
2531 BufferEvent::FileHandleChanged => {
2532 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
2533 return None;
2534 };
2535
2536 let remote_id = buffer.read(cx).remote_id();
2537 if let Some(entry_id) = file.entry_id {
2538 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
2539 Some(_) => {
2540 return None;
2541 }
2542 None => {
2543 self.local_buffer_ids_by_entry_id
2544 .insert(entry_id, remote_id);
2545 }
2546 }
2547 };
2548 self.local_buffer_ids_by_path.insert(
2549 ProjectPath {
2550 worktree_id: file.worktree_id(cx),
2551 path: file.path.clone(),
2552 },
2553 remote_id,
2554 );
2555 }
2556 _ => {}
2557 }
2558
2559 None
2560 }
2561
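    // Schedules a git diff recalculation for the buffer. With a configured
    // `git.gutter_debounce`, recalculation is debounced (with a 50ms floor); otherwise a
    // single recalculation is deferred via `cx.defer`.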
2562 fn request_buffer_diff_recalculation(
2563 &mut self,
2564 buffer: &Model<Buffer>,
2565 cx: &mut ModelContext<Self>,
2566 ) {
2567 self.buffers_needing_diff.insert(buffer.downgrade());
2568 let first_insertion = self.buffers_needing_diff.len() == 1;
2569
2570 let settings = ProjectSettings::get_global(cx);
2571 let delay = if let Some(delay) = settings.git.gutter_debounce {
2572 delay
2573 } else {
2574 if first_insertion {
2575 let this = cx.weak_model();
2576 cx.defer(move |cx| {
2577 if let Some(this) = this.upgrade() {
2578 this.update(cx, |this, cx| {
2579 this.recalculate_buffer_diffs(cx).detach();
2580 });
2581 }
2582 });
2583 }
2584 return;
2585 };
2586
2587 const MIN_DELAY: u64 = 50;
2588 let delay = delay.max(MIN_DELAY);
2589 let duration = Duration::from_millis(delay);
2590
2591 self.git_diff_debouncer
2592 .fire_new(duration, cx, move |this, cx| {
2593 this.recalculate_buffer_diffs(cx)
2594 });
2595 }
2596
2597 fn recalculate_buffer_diffs(&mut self, cx: &mut ModelContext<Self>) -> Task<()> {
2598 let buffers = self.buffers_needing_diff.drain().collect::<Vec<_>>();
2599 cx.spawn(move |this, mut cx| async move {
2600 let tasks: Vec<_> = buffers
2601 .iter()
2602 .filter_map(|buffer| {
2603 let buffer = buffer.upgrade()?;
2604 buffer
2605 .update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx))
2606 .ok()
2607 .flatten()
2608 })
2609 .collect();
2610
2611 futures::future::join_all(tasks).await;
2612
2613 this.update(&mut cx, |this, cx| {
2614 if !this.buffers_needing_diff.is_empty() {
2615 this.recalculate_buffer_diffs(cx).detach();
2616 } else {
2617 // TODO: Would a `ModelContext<Project>.notify()` suffice here?
2618 for buffer in buffers {
2619 if let Some(buffer) = buffer.upgrade() {
2620 buffer.update(cx, |_, cx| cx.notify());
2621 }
2622 }
2623 }
2624 })
2625 .ok();
2626 })
2627 }
2628
2629 fn language_servers_for_worktree(
2630 &self,
2631 worktree_id: WorktreeId,
2632 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<Language>, &Arc<LanguageServer>)> {
2633 self.language_server_ids
2634 .iter()
2635 .filter_map(move |((language_server_worktree_id, _), id)| {
2636 if *language_server_worktree_id == worktree_id {
2637 if let Some(LanguageServerState::Running {
2638 adapter,
2639 language,
2640 server,
2641 ..
2642 }) = self.language_servers.get(id)
2643 {
2644 return Some((adapter, language, server));
2645 }
2646 }
2647 None
2648 })
2649 }
2650
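    // Watches the language registry for changes. When the registry is reloaded, languages
    // are unassigned from all open buffers; afterwards, plain-text buffers get language
    // detection re-run and buffers with unknown injections are reparsed.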
2651 fn maintain_buffer_languages(
2652 languages: Arc<LanguageRegistry>,
2653 cx: &mut ModelContext<Project>,
2654 ) -> Task<()> {
2655 let mut subscription = languages.subscribe();
2656 let mut prev_reload_count = languages.reload_count();
2657 cx.spawn(move |project, mut cx| async move {
2658 while let Some(()) = subscription.next().await {
2659 if let Some(project) = project.upgrade() {
2660 // If the language registry has been reloaded, then remove and
2661 // re-assign the languages on all open buffers.
2662 let reload_count = languages.reload_count();
2663 if reload_count > prev_reload_count {
2664 prev_reload_count = reload_count;
2665 project
2666 .update(&mut cx, |this, cx| {
2667 let buffers = this
2668 .opened_buffers
2669 .values()
2670 .filter_map(|b| b.upgrade())
2671 .collect::<Vec<_>>();
2672 for buffer in buffers {
2673 if let Some(f) = File::from_dyn(buffer.read(cx).file()).cloned()
2674 {
2675 this.unregister_buffer_from_language_servers(
2676 &buffer, &f, cx,
2677 );
2678 buffer
2679 .update(cx, |buffer, cx| buffer.set_language(None, cx));
2680 }
2681 }
2682 })
2683 .ok();
2684 }
2685
2686 project
2687 .update(&mut cx, |project, cx| {
2688 let mut plain_text_buffers = Vec::new();
2689 let mut buffers_with_unknown_injections = Vec::new();
2690 for buffer in project.opened_buffers.values() {
2691 if let Some(handle) = buffer.upgrade() {
2692 let buffer = &handle.read(cx);
2693 if buffer.language().is_none()
2694 || buffer.language() == Some(&*language::PLAIN_TEXT)
2695 {
2696 plain_text_buffers.push(handle);
2697 } else if buffer.contains_unknown_injections() {
2698 buffers_with_unknown_injections.push(handle);
2699 }
2700 }
2701 }
2702
2703 for buffer in plain_text_buffers {
2704 project.detect_language_for_buffer(&buffer, cx);
2705 project.register_buffer_with_language_servers(&buffer, cx);
2706 }
2707
2708 for buffer in buffers_with_unknown_injections {
2709 buffer.update(cx, |buffer, cx| buffer.reparse(cx));
2710 }
2711 })
2712 .ok();
2713 }
2714 }
2715 })
2716 }
2717
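    // Observes the global settings store and, on every change, pushes each running
    // language server's workspace configuration to it via `DidChangeConfiguration`.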
2718 fn maintain_workspace_config(cx: &mut ModelContext<Project>) -> Task<Result<()>> {
2719 let (mut settings_changed_tx, mut settings_changed_rx) = watch::channel();
2720 let _ = postage::stream::Stream::try_recv(&mut settings_changed_rx);
2721
2722 let settings_observation = cx.observe_global::<SettingsStore>(move |_, _| {
2723 *settings_changed_tx.borrow_mut() = ();
2724 });
2725
2726 cx.spawn(move |this, mut cx| async move {
2727 while let Some(()) = settings_changed_rx.next().await {
2728 let servers: Vec<_> = this.update(&mut cx, |this, _| {
2729 this.language_servers
2730 .values()
2731 .filter_map(|state| match state {
2732 LanguageServerState::Starting(_) => None,
2733 LanguageServerState::Running {
2734 adapter, server, ..
2735 } => Some((adapter.clone(), server.clone())),
2736 })
2737 .collect()
2738 })?;
2739
2740 for (adapter, server) in servers {
2741 let settings =
2742 cx.update(|cx| adapter.workspace_configuration(server.root_path(), cx))?;
2743
2744 server
2745 .notify::<lsp::notification::DidChangeConfiguration>(
2746 lsp::DidChangeConfigurationParams { settings },
2747 )
2748 .ok();
2749 }
2750 }
2751
2752 drop(settings_observation);
2753 anyhow::Ok(())
2754 })
2755 }
2756
2757 fn detect_language_for_buffer(
2758 &mut self,
2759 buffer_handle: &Model<Buffer>,
2760 cx: &mut ModelContext<Self>,
2761 ) -> Option<()> {
2762 // If the buffer has a language, set it and start the language server if we haven't already.
2763 let buffer = buffer_handle.read(cx);
2764 let full_path = buffer.file()?.full_path(cx);
2765 let content = buffer.as_rope();
2766 let new_language = self
2767 .languages
2768 .language_for_file(&full_path, Some(content))
2769 .now_or_never()?
2770 .ok()?;
2771 self.set_language_for_buffer(buffer_handle, new_language, cx);
2772 None
2773 }
2774
2775 pub fn set_language_for_buffer(
2776 &mut self,
2777 buffer: &Model<Buffer>,
2778 new_language: Arc<Language>,
2779 cx: &mut ModelContext<Self>,
2780 ) {
2781 buffer.update(cx, |buffer, cx| {
2782 if buffer.language().map_or(true, |old_language| {
2783 !Arc::ptr_eq(old_language, &new_language)
2784 }) {
2785 buffer.set_language(Some(new_language.clone()), cx);
2786 }
2787 });
2788
2789 let buffer_file = buffer.read(cx).file().cloned();
2790 let settings = language_settings(Some(&new_language), buffer_file.as_ref(), cx).clone();
2791 let buffer_file = File::from_dyn(buffer_file.as_ref());
2792 let worktree = buffer_file.as_ref().map(|f| f.worktree_id(cx));
2793 if let Some(prettier_plugins) = prettier_support::prettier_plugins_for_language(
2794 &self.languages,
2795 &new_language,
2796 &settings,
2797 ) {
2798 self.install_default_prettier(worktree, prettier_plugins, cx);
2799 };
2800 if let Some(file) = buffer_file {
2801 let worktree = file.worktree.clone();
2802 if worktree.read(cx).is_local() {
2803 self.start_language_servers(&worktree, new_language, cx);
2804 }
2805 }
2806 }
2807
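    // Starts a language server for each LSP adapter registered for the language in the
    // given worktree, unless language servers are disabled in settings.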
2808 fn start_language_servers(
2809 &mut self,
2810 worktree: &Model<Worktree>,
2811 language: Arc<Language>,
2812 cx: &mut ModelContext<Self>,
2813 ) {
2814 let root_file = worktree.update(cx, |tree, cx| tree.root_file(cx));
2815 let settings = language_settings(Some(&language), root_file.map(|f| f as _).as_ref(), cx);
2816 if !settings.enable_language_server {
2817 return;
2818 }
2819
2820 for adapter in self.languages.clone().lsp_adapters(&language) {
2821 self.start_language_server(worktree, adapter.clone(), language.clone(), cx);
2822 }
2823 }
2824
2825 fn start_language_server(
2826 &mut self,
2827 worktree_handle: &Model<Worktree>,
2828 adapter: Arc<CachedLspAdapter>,
2829 language: Arc<Language>,
2830 cx: &mut ModelContext<Self>,
2831 ) {
2832 if adapter.reinstall_attempt_count.load(SeqCst) > MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2833 return;
2834 }
2835
2836 let worktree = worktree_handle.read(cx);
2837 let worktree_id = worktree.id();
2838 let worktree_path = worktree.abs_path();
2839 let key = (worktree_id, adapter.name.clone());
2840 if self.language_server_ids.contains_key(&key) {
2841 return;
2842 }
2843
2844 let stderr_capture = Arc::new(Mutex::new(Some(String::new())));
2845 let pending_server = match self.languages.create_pending_language_server(
2846 stderr_capture.clone(),
2847 language.clone(),
2848 adapter.clone(),
2849 Arc::clone(&worktree_path),
2850 ProjectLspAdapterDelegate::new(self, worktree_handle, cx),
2851 cx,
2852 ) {
2853 Some(pending_server) => pending_server,
2854 None => return,
2855 };
2856
2857 let project_settings =
2858 ProjectSettings::get(Some((worktree_id.to_proto() as usize, Path::new(""))), cx);
2859 let lsp = project_settings.lsp.get(&adapter.name.0);
2860 let override_options = lsp.and_then(|s| s.initialization_options.clone());
2861
2862 let server_id = pending_server.server_id;
2863 let container_dir = pending_server.container_dir.clone();
2864 let state = LanguageServerState::Starting({
2865 let adapter = adapter.clone();
2866 let server_name = adapter.name.0.clone();
2867 let language = language.clone();
2868 let key = key.clone();
2869
2870 cx.spawn(move |this, mut cx| async move {
2871 let result = Self::setup_and_insert_language_server(
2872 this.clone(),
2873 &worktree_path,
2874 override_options,
2875 pending_server,
2876 adapter.clone(),
2877 language.clone(),
2878 server_id,
2879 key,
2880 &mut cx,
2881 )
2882 .await;
2883
2884 match result {
2885 Ok(server) => {
2886 stderr_capture.lock().take();
2887 server
2888 }
2889
2890 Err(err) => {
2891 log::error!("failed to start language server {server_name:?}: {err}");
2892 log::error!("server stderr: {:?}", stderr_capture.lock().take());
2893
2894 let this = this.upgrade()?;
2895 let container_dir = container_dir?;
2896
2897 let attempt_count = adapter.reinstall_attempt_count.fetch_add(1, SeqCst);
2898 if attempt_count >= MAX_SERVER_REINSTALL_ATTEMPT_COUNT {
2899 let max = MAX_SERVER_REINSTALL_ATTEMPT_COUNT;
2900 log::error!("Hit {max} reinstallation attempts for {server_name:?}");
2901 return None;
2902 }
2903
2904 log::info!(
2905 "retrying installation of language server {server_name:?} in {}s",
2906 SERVER_REINSTALL_DEBOUNCE_TIMEOUT.as_secs()
2907 );
2908 cx.background_executor()
2909 .timer(SERVER_REINSTALL_DEBOUNCE_TIMEOUT)
2910 .await;
2911
2912 let installation_test_binary = adapter
2913 .installation_test_binary(container_dir.to_path_buf())
2914 .await;
2915
2916 this.update(&mut cx, |_, cx| {
2917 Self::check_errored_server(
2918 language,
2919 adapter,
2920 server_id,
2921 installation_test_binary,
2922 cx,
2923 )
2924 })
2925 .ok();
2926
2927 None
2928 }
2929 }
2930 })
2931 });
2932
2933 self.language_servers.insert(server_id, state);
2934 self.language_server_ids.insert(key, server_id);
2935 }
2936
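    // Removes the running server (if any) and its worktree associations, shuts it down,
    // deletes its container directory, and then restarts the server for every worktree in
    // the project.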
2937 fn reinstall_language_server(
2938 &mut self,
2939 language: Arc<Language>,
2940 adapter: Arc<CachedLspAdapter>,
2941 server_id: LanguageServerId,
2942 cx: &mut ModelContext<Self>,
2943 ) -> Option<Task<()>> {
2944 log::info!("beginning to reinstall server");
2945
2946 let existing_server = match self.language_servers.remove(&server_id) {
2947 Some(LanguageServerState::Running { server, .. }) => Some(server),
2948 _ => None,
2949 };
2950
2951 for worktree in &self.worktrees {
2952 if let Some(worktree) = worktree.upgrade() {
2953 let key = (worktree.read(cx).id(), adapter.name.clone());
2954 self.language_server_ids.remove(&key);
2955 }
2956 }
2957
2958 Some(cx.spawn(move |this, mut cx| async move {
2959 if let Some(task) = existing_server.and_then(|server| server.shutdown()) {
2960 log::info!("shutting down existing server");
2961 task.await;
2962 }
2963
2964             // TODO: This is race-safe with regards to preventing new instances from
2965             // starting while deleting, but existing instances in other projects will be
2966             // left in a broken state.
2967 let Some(task) = this
2968 .update(&mut cx, |this, cx| {
2969 this.languages.delete_server_container(adapter.clone(), cx)
2970 })
2971 .log_err()
2972 else {
2973 return;
2974 };
2975 task.await;
2976
2977 this.update(&mut cx, |this, cx| {
2978 let worktrees = this.worktrees.clone();
2979 for worktree in worktrees {
2980 if let Some(worktree) = worktree.upgrade() {
2981 this.start_language_server(
2982 &worktree,
2983 adapter.clone(),
2984 language.clone(),
2985 cx,
2986 );
2987 }
2988 }
2989 })
2990 .ok();
2991 }))
2992 }
2993
2994 #[allow(clippy::too_many_arguments)]
2995 async fn setup_and_insert_language_server(
2996 this: WeakModel<Self>,
2997 worktree_path: &Path,
2998 override_initialization_options: Option<serde_json::Value>,
2999 pending_server: PendingLanguageServer,
3000 adapter: Arc<CachedLspAdapter>,
3001 language: Arc<Language>,
3002 server_id: LanguageServerId,
3003 key: (WorktreeId, LanguageServerName),
3004 cx: &mut AsyncAppContext,
3005 ) -> Result<Option<Arc<LanguageServer>>> {
3006 let language_server = Self::setup_pending_language_server(
3007 this.clone(),
3008 override_initialization_options,
3009 pending_server,
3010 worktree_path,
3011 adapter.clone(),
3012 server_id,
3013 cx,
3014 )
3015 .await?;
3016
3017 let this = match this.upgrade() {
3018 Some(this) => this,
3019 None => return Err(anyhow!("failed to upgrade project handle")),
3020 };
3021
3022 this.update(cx, |this, cx| {
3023 this.insert_newly_running_language_server(
3024 language,
3025 adapter,
3026 language_server.clone(),
3027 server_id,
3028 key,
3029 cx,
3030 )
3031 })??;
3032
3033 Ok(Some(language_server))
3034 }
3035
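    // Finishes launching a pending language server: installs handlers for published
    // diagnostics, workspace configuration requests, capability registration for watched
    // files, workspace edits, inlay hint refreshes, message prompts, and server status and
    // progress notifications, then initializes the server and sends the initial
    // `DidChangeConfiguration`.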
3036 async fn setup_pending_language_server(
3037 this: WeakModel<Self>,
3038 override_options: Option<serde_json::Value>,
3039 pending_server: PendingLanguageServer,
3040 worktree_path: &Path,
3041 adapter: Arc<CachedLspAdapter>,
3042 server_id: LanguageServerId,
3043 cx: &mut AsyncAppContext,
3044 ) -> Result<Arc<LanguageServer>> {
3045 let workspace_config =
3046 cx.update(|cx| adapter.workspace_configuration(worktree_path, cx))?;
3047 let language_server = pending_server.task.await?;
3048
3049 let name = language_server.name();
3050 language_server
3051 .on_notification::<lsp::notification::PublishDiagnostics, _>({
3052 let adapter = adapter.clone();
3053 let this = this.clone();
3054 move |mut params, mut cx| {
3055 let adapter = adapter.clone();
3056 if let Some(this) = this.upgrade() {
3057 adapter.process_diagnostics(&mut params);
3058 this.update(&mut cx, |this, cx| {
3059 this.update_diagnostics(
3060 server_id,
3061 params,
3062 &adapter.disk_based_diagnostic_sources,
3063 cx,
3064 )
3065 .log_err();
3066 })
3067 .ok();
3068 }
3069 }
3070 })
3071 .detach();
3072
3073 language_server
3074 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
3075 let adapter = adapter.clone();
3076 let worktree_path = worktree_path.to_path_buf();
3077 move |params, cx| {
3078 let adapter = adapter.clone();
3079 let worktree_path = worktree_path.clone();
3080 async move {
3081 let workspace_config =
3082 cx.update(|cx| adapter.workspace_configuration(&worktree_path, cx))?;
3083 Ok(params
3084 .items
3085 .into_iter()
3086 .map(|item| {
3087 if let Some(section) = &item.section {
3088 workspace_config
3089 .get(section)
3090 .cloned()
3091 .unwrap_or(serde_json::Value::Null)
3092 } else {
3093 workspace_config.clone()
3094 }
3095 })
3096 .collect())
3097 }
3098 }
3099 })
3100 .detach();
3101
3102 // Even though we don't have handling for these requests, respond to them to
3103 // avoid stalling any language server like `gopls` which waits for a response
3104 // to these requests when initializing.
3105 language_server
3106 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
3107 let this = this.clone();
3108 move |params, mut cx| {
3109 let this = this.clone();
3110 async move {
3111 this.update(&mut cx, |this, _| {
3112 if let Some(status) = this.language_server_statuses.get_mut(&server_id)
3113 {
3114 if let lsp::NumberOrString::String(token) = params.token {
3115 status.progress_tokens.insert(token);
3116 }
3117 }
3118 })?;
3119
3120 Ok(())
3121 }
3122 }
3123 })
3124 .detach();
3125
3126 language_server
3127 .on_request::<lsp::request::RegisterCapability, _, _>({
3128 let this = this.clone();
3129 move |params, mut cx| {
3130 let this = this.clone();
3131 async move {
3132 for reg in params.registrations {
3133 if reg.method == "workspace/didChangeWatchedFiles" {
3134 if let Some(options) = reg.register_options {
3135 let options = serde_json::from_value(options)?;
3136 this.update(&mut cx, |this, cx| {
3137 this.on_lsp_did_change_watched_files(
3138 server_id, options, cx,
3139 );
3140 })?;
3141 }
3142 }
3143 }
3144 Ok(())
3145 }
3146 }
3147 })
3148 .detach();
3149
3150 language_server
3151 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
3152 let adapter = adapter.clone();
3153 let this = this.clone();
3154 move |params, cx| {
3155 Self::on_lsp_workspace_edit(
3156 this.clone(),
3157 params,
3158 server_id,
3159 adapter.clone(),
3160 cx,
3161 )
3162 }
3163 })
3164 .detach();
3165
3166 language_server
3167 .on_request::<lsp::request::InlayHintRefreshRequest, _, _>({
3168 let this = this.clone();
3169 move |(), mut cx| {
3170 let this = this.clone();
3171 async move {
3172 this.update(&mut cx, |project, cx| {
3173 cx.emit(Event::RefreshInlayHints);
3174 project.remote_id().map(|project_id| {
3175 project.client.send(proto::RefreshInlayHints { project_id })
3176 })
3177 })?
3178 .transpose()?;
3179 Ok(())
3180 }
3181 }
3182 })
3183 .detach();
3184
3185 language_server
3186 .on_request::<lsp::request::ShowMessageRequest, _, _>({
3187 let this = this.clone();
3188 let name = name.to_string();
3189 move |params, mut cx| {
3190 let this = this.clone();
3191 let name = name.to_string();
3192 async move {
3193 if let Some(actions) = params.actions {
3194 let (tx, mut rx) = smol::channel::bounded(1);
3195 let request = LanguageServerPromptRequest {
3196 level: match params.typ {
3197 lsp::MessageType::ERROR => PromptLevel::Critical,
3198 lsp::MessageType::WARNING => PromptLevel::Warning,
3199 _ => PromptLevel::Info,
3200 },
3201 message: params.message,
3202 actions,
3203 response_channel: tx,
3204 lsp_name: name.clone(),
3205 };
3206
3207 if let Ok(_) = this.update(&mut cx, |_, cx| {
3208 cx.emit(Event::LanguageServerPrompt(request));
3209 }) {
3210 let response = rx.next().await;
3211
3212 Ok(response)
3213 } else {
3214 Ok(None)
3215 }
3216 } else {
3217 Ok(None)
3218 }
3219 }
3220 }
3221 })
3222 .detach();
3223
3224 let disk_based_diagnostics_progress_token =
3225 adapter.disk_based_diagnostics_progress_token.clone();
3226
3227 language_server
3228 .on_notification::<ServerStatus, _>({
3229 let this = this.clone();
3230 let name = name.to_string();
3231 move |params, mut cx| {
3232 let this = this.clone();
3233 let name = name.to_string();
3234 if let Some(ref message) = params.message {
3235 let message = message.trim();
3236 if !message.is_empty() {
3237 let formatted_message = format!(
3238 "Language server {name} (id {server_id}) status update: {message}"
3239 );
3240 match params.health {
3241 ServerHealthStatus::Ok => log::info!("{}", formatted_message),
3242 ServerHealthStatus::Warning => log::warn!("{}", formatted_message),
3243 ServerHealthStatus::Error => {
3244 log::error!("{}", formatted_message);
3245 let (tx, _rx) = smol::channel::bounded(1);
3246 let request = LanguageServerPromptRequest {
3247 level: PromptLevel::Critical,
3248 message: params.message.unwrap_or_default(),
3249 actions: Vec::new(),
3250 response_channel: tx,
3251 lsp_name: name.clone(),
3252 };
3253 let _ = this
3254 .update(&mut cx, |_, cx| {
3255 cx.emit(Event::LanguageServerPrompt(request));
3256 })
3257 .ok();
3258 }
3259 ServerHealthStatus::Other(status) => {
3260 log::info!(
3261 "Unknown server health: {status}\n{formatted_message}"
3262 )
3263 }
3264 }
3265 }
3266 }
3267 }
3268 })
3269 .detach();
3270
3271 language_server
3272 .on_notification::<lsp::notification::Progress, _>(move |params, mut cx| {
3273 if let Some(this) = this.upgrade() {
3274 this.update(&mut cx, |this, cx| {
3275 this.on_lsp_progress(
3276 params,
3277 server_id,
3278 disk_based_diagnostics_progress_token.clone(),
3279 cx,
3280 );
3281 })
3282 .ok();
3283 }
3284 })
3285 .detach();
3286
3287 let mut initialization_options = adapter.adapter.initialization_options();
3288 match (&mut initialization_options, override_options) {
3289 (Some(initialization_options), Some(override_options)) => {
3290 merge_json_value_into(override_options, initialization_options);
3291 }
3292 (None, override_options) => initialization_options = override_options,
3293 _ => {}
3294 }
3295 let language_server = cx
3296 .update(|cx| language_server.initialize(initialization_options, cx))?
3297 .await?;
3298
3299 language_server
3300 .notify::<lsp::notification::DidChangeConfiguration>(
3301 lsp::DidChangeConfigurationParams {
3302 settings: workspace_config,
3303 },
3304 )
3305 .ok();
3306
3307 Ok(language_server)
3308 }
3309
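    // Records the now-running server and its status, announces it to collaborators when
    // the project is shared, and opens every already-open buffer that matches the server's
    // worktree and language with `DidOpenTextDocument`.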
3310 fn insert_newly_running_language_server(
3311 &mut self,
3312 language: Arc<Language>,
3313 adapter: Arc<CachedLspAdapter>,
3314 language_server: Arc<LanguageServer>,
3315 server_id: LanguageServerId,
3316 key: (WorktreeId, LanguageServerName),
3317 cx: &mut ModelContext<Self>,
3318 ) -> Result<()> {
3319         // If the language server for this key doesn't match the server id, don't store
3320         // the server; it will be dropped instead, killing the process.
3321 if self
3322 .language_server_ids
3323 .get(&key)
3324 .map(|id| id != &server_id)
3325 .unwrap_or(false)
3326 {
3327 return Ok(());
3328 }
3329
3330         // Update the language_servers collection with the Running variant of
3331         // LanguageServerState, indicating that the server is up and ready to serve requests.
3332 self.language_servers.insert(
3333 server_id,
3334 LanguageServerState::Running {
3335 adapter: adapter.clone(),
3336 language: language.clone(),
3337 server: language_server.clone(),
3338 simulate_disk_based_diagnostics_completion: None,
3339 },
3340 );
3341
3342 self.language_server_statuses.insert(
3343 server_id,
3344 LanguageServerStatus {
3345 name: language_server.name().to_string(),
3346 pending_work: Default::default(),
3347 has_pending_diagnostic_updates: false,
3348 progress_tokens: Default::default(),
3349 },
3350 );
3351
3352 cx.emit(Event::LanguageServerAdded(server_id));
3353
3354 if let Some(project_id) = self.remote_id() {
3355 self.client.send(proto::StartLanguageServer {
3356 project_id,
3357 server: Some(proto::LanguageServer {
3358 id: server_id.0 as u64,
3359 name: language_server.name().to_string(),
3360 }),
3361 })?;
3362 }
3363
3364 // Tell the language server about every open buffer in the worktree that matches the language.
3365 for buffer in self.opened_buffers.values() {
3366 if let Some(buffer_handle) = buffer.upgrade() {
3367 let buffer = buffer_handle.read(cx);
3368 let file = match File::from_dyn(buffer.file()) {
3369 Some(file) => file,
3370 None => continue,
3371 };
3372 let language = match buffer.language() {
3373 Some(language) => language,
3374 None => continue,
3375 };
3376
3377 if file.worktree.read(cx).id() != key.0
3378 || !self
3379 .languages
3380 .lsp_adapters(&language)
3381 .iter()
3382 .any(|a| a.name == key.1)
3383 {
3384 continue;
3385 }
3386
3387 let file = match file.as_local() {
3388 Some(file) => file,
3389 None => continue,
3390 };
3391
3392 let versions = self
3393 .buffer_snapshots
3394 .entry(buffer.remote_id())
3395 .or_default()
3396 .entry(server_id)
3397 .or_insert_with(|| {
3398 vec![LspBufferSnapshot {
3399 version: 0,
3400 snapshot: buffer.text_snapshot(),
3401 }]
3402 });
3403
3404 let snapshot = versions.last().unwrap();
3405 let version = snapshot.version;
3406 let initial_snapshot = &snapshot.snapshot;
3407 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
3408 language_server.notify::<lsp::notification::DidOpenTextDocument>(
3409 lsp::DidOpenTextDocumentParams {
3410 text_document: lsp::TextDocumentItem::new(
3411 uri,
3412 adapter
3413 .language_ids
3414 .get(language.name().as_ref())
3415 .cloned()
3416 .unwrap_or_default(),
3417 version,
3418 initial_snapshot.text(),
3419 ),
3420 },
3421 )?;
3422
3423 buffer_handle.update(cx, |buffer, cx| {
3424 buffer.set_completion_triggers(
3425 language_server
3426 .capabilities()
3427 .completion_provider
3428 .as_ref()
3429 .and_then(|provider| provider.trigger_characters.clone())
3430 .unwrap_or_default(),
3431 cx,
3432 )
3433 });
3434 }
3435 }
3436
3437 cx.notify();
3438 Ok(())
3439 }
3440
3441     // Stops the language server for the given worktree and adapter, returning a list of
3442     // all of the worktrees which no longer have a language server once it has stopped.
3443 fn stop_language_server(
3444 &mut self,
3445 worktree_id: WorktreeId,
3446 adapter_name: LanguageServerName,
3447 cx: &mut ModelContext<Self>,
3448 ) -> Task<Vec<WorktreeId>> {
3449 let key = (worktree_id, adapter_name);
3450 if let Some(server_id) = self.language_server_ids.remove(&key) {
3451 let name = key.1 .0;
3452 log::info!("stopping language server {name}");
3453
3454 // Remove other entries for this language server as well
3455 let mut orphaned_worktrees = vec![worktree_id];
3456 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
3457 for other_key in other_keys {
3458 if self.language_server_ids.get(&other_key) == Some(&server_id) {
3459 self.language_server_ids.remove(&other_key);
3460 orphaned_worktrees.push(other_key.0);
3461 }
3462 }
3463
3464 for buffer in self.opened_buffers.values() {
3465 if let Some(buffer) = buffer.upgrade() {
3466 buffer.update(cx, |buffer, cx| {
3467 buffer.update_diagnostics(server_id, Default::default(), cx);
3468 });
3469 }
3470 }
3471 for worktree in &self.worktrees {
3472 if let Some(worktree) = worktree.upgrade() {
3473 worktree.update(cx, |worktree, cx| {
3474 if let Some(worktree) = worktree.as_local_mut() {
3475 worktree.clear_diagnostics_for_language_server(server_id, cx);
3476 }
3477 });
3478 }
3479 }
3480
3481 self.language_server_watched_paths.remove(&server_id);
3482 self.language_server_statuses.remove(&server_id);
3483 cx.notify();
3484
3485 let server_state = self.language_servers.remove(&server_id);
3486 cx.emit(Event::LanguageServerRemoved(server_id));
3487 cx.spawn(move |_, cx| async move {
3488 Self::shutdown_language_server(server_state, name, cx).await;
3489 orphaned_worktrees
3490 })
3491 } else {
3492 Task::ready(Vec::new())
3493 }
3494 }
3495
3496 async fn shutdown_language_server(
3497 server_state: Option<LanguageServerState>,
3498 name: Arc<str>,
3499 cx: AsyncAppContext,
3500 ) {
3501 let server = match server_state {
3502 Some(LanguageServerState::Starting(task)) => {
3503 let mut timer = cx
3504 .background_executor()
3505 .timer(SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT)
3506 .fuse();
3507
3508 select! {
3509 server = task.fuse() => server,
3510 _ = timer => {
3511 log::info!(
3512 "timeout waiting for language server {} to finish launching before stopping",
3513 name
3514 );
3515 None
3516 },
3517 }
3518 }
3519
3520 Some(LanguageServerState::Running { server, .. }) => Some(server),
3521
3522 None => None,
3523 };
3524
3525 if let Some(server) = server {
3526 if let Some(shutdown) = server.shutdown() {
3527 shutdown.await;
3528 }
3529 }
3530 }
3531
3532 pub fn restart_language_servers_for_buffers(
3533 &mut self,
3534 buffers: impl IntoIterator<Item = Model<Buffer>>,
3535 cx: &mut ModelContext<Self>,
3536 ) -> Option<()> {
3537 let language_server_lookup_info: HashSet<(Model<Worktree>, Arc<Language>)> = buffers
3538 .into_iter()
3539 .filter_map(|buffer| {
3540 let buffer = buffer.read(cx);
3541 let file = File::from_dyn(buffer.file())?;
3542 let full_path = file.full_path(cx);
3543 let language = self
3544 .languages
3545 .language_for_file(&full_path, Some(buffer.as_rope()))
3546 .now_or_never()?
3547 .ok()?;
3548 Some((file.worktree.clone(), language))
3549 })
3550 .collect();
3551 for (worktree, language) in language_server_lookup_info {
3552 self.restart_language_servers(worktree, language, cx);
3553 }
3554
3555 None
3556 }
3557
3558 fn restart_language_servers(
3559 &mut self,
3560 worktree: Model<Worktree>,
3561 language: Arc<Language>,
3562 cx: &mut ModelContext<Self>,
3563 ) {
3564 let worktree_id = worktree.read(cx).id();
3565
3566 let stop_tasks = self
3567 .languages
3568 .clone()
3569 .lsp_adapters(&language)
3570 .iter()
3571 .map(|adapter| {
3572 let stop_task = self.stop_language_server(worktree_id, adapter.name.clone(), cx);
3573 (stop_task, adapter.name.clone())
3574 })
3575 .collect::<Vec<_>>();
3576 if stop_tasks.is_empty() {
3577 return;
3578 }
3579
3580 cx.spawn(move |this, mut cx| async move {
3581 // For each stopped language server, record all of the worktrees with which
3582 // it was associated.
3583 let mut affected_worktrees = Vec::new();
3584 for (stop_task, language_server_name) in stop_tasks {
3585 for affected_worktree_id in stop_task.await {
3586 affected_worktrees.push((affected_worktree_id, language_server_name.clone()));
3587 }
3588 }
3589
3590 this.update(&mut cx, |this, cx| {
3591 // Restart the language server for the given worktree.
3592 this.start_language_servers(&worktree, language.clone(), cx);
3593
3594 // Lookup new server ids and set them for each of the orphaned worktrees
3595 for (affected_worktree_id, language_server_name) in affected_worktrees {
3596 if let Some(new_server_id) = this
3597 .language_server_ids
3598 .get(&(worktree_id, language_server_name.clone()))
3599 .cloned()
3600 {
3601 this.language_server_ids
3602 .insert((affected_worktree_id, language_server_name), new_server_id);
3603 }
3604 }
3605 })
3606 .ok();
3607 })
3608 .detach();
3609 }
3610
3611 fn check_errored_server(
3612 language: Arc<Language>,
3613 adapter: Arc<CachedLspAdapter>,
3614 server_id: LanguageServerId,
3615 installation_test_binary: Option<LanguageServerBinary>,
3616 cx: &mut ModelContext<Self>,
3617 ) {
3618 if !adapter.can_be_reinstalled() {
3619 log::info!(
3620 "Validation check requested for {:?} but it cannot be reinstalled",
3621 adapter.name.0
3622 );
3623 return;
3624 }
3625
3626 cx.spawn(move |this, mut cx| async move {
3627 log::info!("About to spawn test binary");
3628
3629             // A lack of a test binary counts as a failure.
3630 let process = installation_test_binary.and_then(|binary| {
3631 smol::process::Command::new(&binary.path)
3632 .current_dir(&binary.path)
3633 .args(binary.arguments)
3634 .stdin(Stdio::piped())
3635 .stdout(Stdio::piped())
3636 .stderr(Stdio::inherit())
3637 .kill_on_drop(true)
3638 .spawn()
3639 .ok()
3640 });
3641
3642 const PROCESS_TIMEOUT: Duration = Duration::from_secs(5);
3643 let mut timeout = cx.background_executor().timer(PROCESS_TIMEOUT).fuse();
3644
3645 let mut errored = false;
3646 if let Some(mut process) = process {
3647 futures::select! {
3648 status = process.status().fuse() => match status {
3649 Ok(status) => errored = !status.success(),
3650 Err(_) => errored = true,
3651 },
3652
3653 _ = timeout => {
3654 log::info!("test binary time-ed out, this counts as a success");
3655 _ = process.kill();
3656 }
3657 }
3658 } else {
3659 log::warn!("test binary failed to launch");
3660 errored = true;
3661 }
3662
3663 if errored {
3664 log::warn!("test binary check failed");
3665 let task = this
3666 .update(&mut cx, move |this, cx| {
3667 this.reinstall_language_server(language, adapter, server_id, cx)
3668 })
3669 .ok()
3670 .flatten();
3671
3672 if let Some(task) = task {
3673 task.await;
3674 }
3675 }
3676 })
3677 .detach();
3678 }
3679
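    // Debounces language server progress updates: only the most recent pending update is
    // kept, and it is flushed after `SERVER_PROGRESS_DEBOUNCE_TIMEOUT` unless another
    // buffer-ordered message forces it out sooner.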
3680 fn enqueue_language_server_progress(
3681 &mut self,
3682 message: BufferOrderedMessage,
3683 cx: &mut ModelContext<Self>,
3684 ) {
3685 self.pending_language_server_update.replace(message);
3686 self.flush_language_server_update.get_or_insert_with(|| {
3687 cx.spawn(|this, mut cx| async move {
3688 cx.background_executor()
3689 .timer(SERVER_PROGRESS_DEBOUNCE_TIMEOUT)
3690 .await;
3691 this.update(&mut cx, |this, _| {
3692 this.flush_language_server_update.take();
3693 if let Some(update) = this.pending_language_server_update.take() {
3694 this.enqueue_buffer_ordered_message(update).ok();
3695 }
3696 })
3697 .ok();
3698 })
3699 });
3700 }
3701
3702 fn enqueue_buffer_ordered_message(&mut self, message: BufferOrderedMessage) -> Result<()> {
3703 if let Some(pending_message) = self.pending_language_server_update.take() {
3704 self.flush_language_server_update.take();
3705 self.buffer_ordered_messages_tx
3706 .unbounded_send(pending_message)
3707 .map_err(|e| anyhow!(e))?;
3708 }
3709 self.buffer_ordered_messages_tx
3710 .unbounded_send(message)
3711 .map_err(|e| anyhow!(e))
3712 }
3713
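    // Translates LSP `$/progress` notifications into project state. Progress on the
    // adapter's disk-based diagnostics token toggles the pending-diagnostics state, while
    // other registered tokens update per-server work status; both kinds are forwarded to
    // collaborators as `UpdateLanguageServer` messages.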
3714 fn on_lsp_progress(
3715 &mut self,
3716 progress: lsp::ProgressParams,
3717 language_server_id: LanguageServerId,
3718 disk_based_diagnostics_progress_token: Option<String>,
3719 cx: &mut ModelContext<Self>,
3720 ) {
3721 let token = match progress.token {
3722 lsp::NumberOrString::String(token) => token,
3723 lsp::NumberOrString::Number(token) => {
3724 log::info!("skipping numeric progress token {}", token);
3725 return;
3726 }
3727 };
3728 let lsp::ProgressParamsValue::WorkDone(progress) = progress.value;
3729 let language_server_status =
3730 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3731 status
3732 } else {
3733 return;
3734 };
3735
3736 if !language_server_status.progress_tokens.contains(&token) {
3737 return;
3738 }
3739
3740 let is_disk_based_diagnostics_progress = disk_based_diagnostics_progress_token
3741 .as_ref()
3742 .map_or(false, |disk_based_token| {
3743 token.starts_with(disk_based_token)
3744 });
3745
3746 match progress {
3747 lsp::WorkDoneProgress::Begin(report) => {
3748 if is_disk_based_diagnostics_progress {
3749 language_server_status.has_pending_diagnostic_updates = true;
3750 self.disk_based_diagnostics_started(language_server_id, cx);
3751 self.enqueue_buffer_ordered_message(BufferOrderedMessage::LanguageServerUpdate {
3752 language_server_id,
3753 message: proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(Default::default())
3754 })
3755 .ok();
3756 } else {
3757 self.on_lsp_work_start(
3758 language_server_id,
3759 token.clone(),
3760 LanguageServerProgress {
3761 message: report.message.clone(),
3762 percentage: report.percentage.map(|p| p as usize),
3763 last_update_at: Instant::now(),
3764 },
3765 cx,
3766 );
3767 self.enqueue_buffer_ordered_message(
3768 BufferOrderedMessage::LanguageServerUpdate {
3769 language_server_id,
3770 message: proto::update_language_server::Variant::WorkStart(
3771 proto::LspWorkStart {
3772 token,
3773 message: report.message,
3774 percentage: report.percentage,
3775 },
3776 ),
3777 },
3778 )
3779 .ok();
3780 }
3781 }
3782 lsp::WorkDoneProgress::Report(report) => {
3783 if !is_disk_based_diagnostics_progress {
3784 self.on_lsp_work_progress(
3785 language_server_id,
3786 token.clone(),
3787 LanguageServerProgress {
3788 message: report.message.clone(),
3789 percentage: report.percentage.map(|p| p as usize),
3790 last_update_at: Instant::now(),
3791 },
3792 cx,
3793 );
3794 self.enqueue_language_server_progress(
3795 BufferOrderedMessage::LanguageServerUpdate {
3796 language_server_id,
3797 message: proto::update_language_server::Variant::WorkProgress(
3798 proto::LspWorkProgress {
3799 token,
3800 message: report.message,
3801 percentage: report.percentage,
3802 },
3803 ),
3804 },
3805 cx,
3806 );
3807 }
3808 }
3809 lsp::WorkDoneProgress::End(_) => {
3810 language_server_status.progress_tokens.remove(&token);
3811
3812 if is_disk_based_diagnostics_progress {
3813 language_server_status.has_pending_diagnostic_updates = false;
3814 self.disk_based_diagnostics_finished(language_server_id, cx);
3815 self.enqueue_buffer_ordered_message(
3816 BufferOrderedMessage::LanguageServerUpdate {
3817 language_server_id,
3818 message:
3819 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
3820 Default::default(),
3821 ),
3822 },
3823 )
3824 .ok();
3825 } else {
3826 self.on_lsp_work_end(language_server_id, token.clone(), cx);
3827 self.enqueue_buffer_ordered_message(
3828 BufferOrderedMessage::LanguageServerUpdate {
3829 language_server_id,
3830 message: proto::update_language_server::Variant::WorkEnd(
3831 proto::LspWorkEnd { token },
3832 ),
3833 },
3834 )
3835 .ok();
3836 }
3837 }
3838 }
3839 }
3840
3841 fn on_lsp_work_start(
3842 &mut self,
3843 language_server_id: LanguageServerId,
3844 token: String,
3845 progress: LanguageServerProgress,
3846 cx: &mut ModelContext<Self>,
3847 ) {
3848 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3849 status.pending_work.insert(token, progress);
3850 cx.notify();
3851 }
3852 }
3853
3854 fn on_lsp_work_progress(
3855 &mut self,
3856 language_server_id: LanguageServerId,
3857 token: String,
3858 progress: LanguageServerProgress,
3859 cx: &mut ModelContext<Self>,
3860 ) {
3861 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3862 let entry = status
3863 .pending_work
3864 .entry(token)
3865 .or_insert(LanguageServerProgress {
3866 message: Default::default(),
3867 percentage: Default::default(),
3868 last_update_at: progress.last_update_at,
3869 });
3870 if progress.message.is_some() {
3871 entry.message = progress.message;
3872 }
3873 if progress.percentage.is_some() {
3874 entry.percentage = progress.percentage;
3875 }
3876 entry.last_update_at = progress.last_update_at;
3877 cx.notify();
3878 }
3879 }
3880
3881 fn on_lsp_work_end(
3882 &mut self,
3883 language_server_id: LanguageServerId,
3884 token: String,
3885 cx: &mut ModelContext<Self>,
3886 ) {
3887 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
3888 cx.emit(Event::RefreshInlayHints);
3889 status.pending_work.remove(&token);
3890 cx.notify();
3891 }
3892 }
3893
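    // Registers file watchers requested by a language server. Each LSP glob
    // pattern is resolved against the worktree that contains it, its literal
    // prefix is scheduled for scanning, and the patterns are compiled into a
    // per-worktree `GlobSet`.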
3894 fn on_lsp_did_change_watched_files(
3895 &mut self,
3896 language_server_id: LanguageServerId,
3897 params: DidChangeWatchedFilesRegistrationOptions,
3898 cx: &mut ModelContext<Self>,
3899 ) {
3900 let watched_paths = self
3901 .language_server_watched_paths
3902 .entry(language_server_id)
3903 .or_default();
3904
3905 let mut builders = HashMap::default();
3906 for watcher in params.watchers {
3907 for worktree in &self.worktrees {
3908 if let Some(worktree) = worktree.upgrade() {
3909 let glob_is_inside_worktree = worktree.update(cx, |tree, _| {
3910 if let Some(abs_path) = tree.abs_path().to_str() {
3911 let relative_glob_pattern = match &watcher.glob_pattern {
3912 lsp::GlobPattern::String(s) => Some(
3913 s.strip_prefix(abs_path)
3914 .unwrap_or(s)
3915 .strip_prefix(std::path::MAIN_SEPARATOR)
3916 .unwrap_or(s),
3917 ),
3918 lsp::GlobPattern::Relative(rp) => {
3919 let base_uri = match &rp.base_uri {
3920 lsp::OneOf::Left(workspace_folder) => &workspace_folder.uri,
3921 lsp::OneOf::Right(base_uri) => base_uri,
3922 };
3923 base_uri.to_file_path().ok().and_then(|file_path| {
3924 (file_path.to_str() == Some(abs_path))
3925 .then_some(rp.pattern.as_str())
3926 })
3927 }
3928 };
3929 if let Some(relative_glob_pattern) = relative_glob_pattern {
3930 let literal_prefix = glob_literal_prefix(relative_glob_pattern);
3931 tree.as_local_mut()
3932 .unwrap()
3933 .add_path_prefix_to_scan(Path::new(literal_prefix).into());
3934 if let Some(glob) = Glob::new(relative_glob_pattern).log_err() {
3935 builders
3936 .entry(tree.id())
3937 .or_insert_with(|| GlobSetBuilder::new())
3938 .add(glob);
3939 }
3940 return true;
3941 }
3942 }
3943 false
3944 });
3945 if glob_is_inside_worktree {
3946 break;
3947 }
3948 }
3949 }
3950 }
3951
3952 watched_paths.clear();
3953 for (worktree_id, builder) in builders {
3954 if let Ok(globset) = builder.build() {
3955 watched_paths.insert(worktree_id, globset);
3956 }
3957 }
3958
3959 cx.notify();
3960 }
3961
3962 async fn on_lsp_workspace_edit(
3963 this: WeakModel<Self>,
3964 params: lsp::ApplyWorkspaceEditParams,
3965 server_id: LanguageServerId,
3966 adapter: Arc<CachedLspAdapter>,
3967 mut cx: AsyncAppContext,
3968 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
3969 let this = this
3970 .upgrade()
            .ok_or_else(|| anyhow!("project closed"))?;
3972 let language_server = this
3973 .update(&mut cx, |this, _| this.language_server_for_id(server_id))?
3974 .ok_or_else(|| anyhow!("language server not found"))?;
3975 let transaction = Self::deserialize_workspace_edit(
3976 this.clone(),
3977 params.edit,
3978 true,
3979 adapter.clone(),
3980 language_server.clone(),
3981 &mut cx,
3982 )
3983 .await
3984 .log_err();
3985 this.update(&mut cx, |this, _| {
3986 if let Some(transaction) = transaction {
3987 this.last_workspace_edits_by_language_server
3988 .insert(server_id, transaction);
3989 }
3990 })?;
3991 Ok(lsp::ApplyWorkspaceEditResponse {
3992 applied: true,
3993 failed_change: None,
3994 failure_reason: None,
3995 })
3996 }
3997
3998 pub fn language_server_statuses(
3999 &self,
4000 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
4001 self.language_server_statuses.values()
4002 }
4003
4004 pub fn last_formatting_failure(&self) -> Option<&str> {
4005 self.last_formatting_failure.as_deref()
4006 }
4007
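    /// Ingests a `textDocument/publishDiagnostics` notification: related
    /// diagnostics are grouped with their primary entry, disk-based sources
    /// are flagged, and the results are stored for the affected worktree and
    /// any open buffer.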
4008 pub fn update_diagnostics(
4009 &mut self,
4010 language_server_id: LanguageServerId,
4011 mut params: lsp::PublishDiagnosticsParams,
4012 disk_based_sources: &[String],
4013 cx: &mut ModelContext<Self>,
4014 ) -> Result<()> {
4015 let abs_path = params
4016 .uri
4017 .to_file_path()
4018 .map_err(|_| anyhow!("URI is not a file"))?;
4019 let mut diagnostics = Vec::default();
4020 let mut primary_diagnostic_group_ids = HashMap::default();
4021 let mut sources_by_group_id = HashMap::default();
4022 let mut supporting_diagnostics = HashMap::default();
4023
4024 // Ensure that primary diagnostics are always the most severe
4025 params.diagnostics.sort_by_key(|item| item.severity);
4026
        for diagnostic in &params.diagnostics {
4028 let source = diagnostic.source.as_ref();
4029 let code = diagnostic.code.as_ref().map(|code| match code {
4030 lsp::NumberOrString::Number(code) => code.to_string(),
4031 lsp::NumberOrString::String(code) => code.clone(),
4032 });
4033 let range = range_from_lsp(diagnostic.range);
4034 let is_supporting = diagnostic
4035 .related_information
4036 .as_ref()
4037 .map_or(false, |infos| {
4038 infos.iter().any(|info| {
4039 primary_diagnostic_group_ids.contains_key(&(
4040 source,
4041 code.clone(),
4042 range_from_lsp(info.location.range),
4043 ))
4044 })
4045 });
4046
4047 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
4048 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
4049 });
4050
4051 if is_supporting {
4052 supporting_diagnostics.insert(
4053 (source, code.clone(), range),
4054 (diagnostic.severity, is_unnecessary),
4055 );
4056 } else {
4057 let group_id = post_inc(&mut self.next_diagnostic_group_id);
4058 let is_disk_based =
4059 source.map_or(false, |source| disk_based_sources.contains(source));
4060
4061 sources_by_group_id.insert(group_id, source);
4062 primary_diagnostic_group_ids
4063 .insert((source, code.clone(), range.clone()), group_id);
4064
4065 diagnostics.push(DiagnosticEntry {
4066 range,
4067 diagnostic: Diagnostic {
4068 source: diagnostic.source.clone(),
4069 code: code.clone(),
4070 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
4071 message: diagnostic.message.trim().to_string(),
4072 group_id,
4073 is_primary: true,
4074 is_disk_based,
4075 is_unnecessary,
4076 },
4077 });
4078 if let Some(infos) = &diagnostic.related_information {
4079 for info in infos {
4080 if info.location.uri == params.uri && !info.message.is_empty() {
4081 let range = range_from_lsp(info.location.range);
4082 diagnostics.push(DiagnosticEntry {
4083 range,
4084 diagnostic: Diagnostic {
4085 source: diagnostic.source.clone(),
4086 code: code.clone(),
4087 severity: DiagnosticSeverity::INFORMATION,
4088 message: info.message.trim().to_string(),
4089 group_id,
4090 is_primary: false,
4091 is_disk_based,
4092 is_unnecessary: false,
4093 },
4094 });
4095 }
4096 }
4097 }
4098 }
4099 }
4100
4101 for entry in &mut diagnostics {
4102 let diagnostic = &mut entry.diagnostic;
4103 if !diagnostic.is_primary {
4104 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
4105 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
4106 source,
4107 diagnostic.code.clone(),
4108 entry.range.clone(),
4109 )) {
4110 if let Some(severity) = severity {
4111 diagnostic.severity = severity;
4112 }
4113 diagnostic.is_unnecessary = is_unnecessary;
4114 }
4115 }
4116 }
4117
4118 self.update_diagnostic_entries(
4119 language_server_id,
4120 abs_path,
4121 params.version,
4122 diagnostics,
4123 cx,
4124 )?;
4125 Ok(())
4126 }
4127
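    /// Stores diagnostics for a path in the worktree that contains it, updates
    /// the buffer for that path if it is open, and emits
    /// `Event::DiagnosticsUpdated` when the worktree reports a change.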
4128 pub fn update_diagnostic_entries(
4129 &mut self,
4130 server_id: LanguageServerId,
4131 abs_path: PathBuf,
4132 version: Option<i32>,
4133 diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4134 cx: &mut ModelContext<Project>,
4135 ) -> Result<(), anyhow::Error> {
4136 let (worktree, relative_path) = self
4137 .find_local_worktree(&abs_path, cx)
4138 .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
4139
4140 let project_path = ProjectPath {
4141 worktree_id: worktree.read(cx).id(),
4142 path: relative_path.into(),
4143 };
4144
4145 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
4146 self.update_buffer_diagnostics(&buffer, server_id, version, diagnostics.clone(), cx)?;
4147 }
4148
4149 let updated = worktree.update(cx, |worktree, cx| {
4150 worktree
4151 .as_local_mut()
4152 .ok_or_else(|| anyhow!("not a local worktree"))?
4153 .update_diagnostics(server_id, project_path.path.clone(), diagnostics, cx)
4154 })?;
4155 if updated {
4156 cx.emit(Event::DiagnosticsUpdated {
4157 language_server_id: server_id,
4158 path: project_path,
4159 });
4160 }
4161 Ok(())
4162 }
4163
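    // Applies a new set of diagnostics to an open buffer, remapping disk-based
    // diagnostics through edits made since the last save and clipping all
    // ranges to the buffer's current contents.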
4164 fn update_buffer_diagnostics(
4165 &mut self,
4166 buffer: &Model<Buffer>,
4167 server_id: LanguageServerId,
4168 version: Option<i32>,
4169 mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
4170 cx: &mut ModelContext<Self>,
4171 ) -> Result<()> {
4172 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
4173 Ordering::Equal
4174 .then_with(|| b.is_primary.cmp(&a.is_primary))
4175 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
4176 .then_with(|| a.severity.cmp(&b.severity))
4177 .then_with(|| a.message.cmp(&b.message))
4178 }
4179
4180 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
4181
4182 diagnostics.sort_unstable_by(|a, b| {
4183 Ordering::Equal
4184 .then_with(|| a.range.start.cmp(&b.range.start))
4185 .then_with(|| b.range.end.cmp(&a.range.end))
4186 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
4187 });
4188
4189 let mut sanitized_diagnostics = Vec::new();
4190 let edits_since_save = Patch::new(
4191 snapshot
4192 .edits_since::<Unclipped<PointUtf16>>(buffer.read(cx).saved_version())
4193 .collect(),
4194 );
4195 for entry in diagnostics {
4196 let start;
4197 let end;
4198 if entry.diagnostic.is_disk_based {
4199 // Some diagnostics are based on files on disk instead of buffers'
4200 // current contents. Adjust these diagnostics' ranges to reflect
4201 // any unsaved edits.
4202 start = edits_since_save.old_to_new(entry.range.start);
4203 end = edits_since_save.old_to_new(entry.range.end);
4204 } else {
4205 start = entry.range.start;
4206 end = entry.range.end;
4207 }
4208
4209 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
4210 ..snapshot.clip_point_utf16(end, Bias::Right);
4211
4212 // Expand empty ranges by one codepoint
4213 if range.start == range.end {
                // This will move to the next boundary when it is clipped
4215 range.end.column += 1;
4216 range.end = snapshot.clip_point_utf16(Unclipped(range.end), Bias::Right);
4217 if range.start == range.end && range.end.column > 0 {
4218 range.start.column -= 1;
4219 range.start = snapshot.clip_point_utf16(Unclipped(range.start), Bias::Left);
4220 }
4221 }
4222
4223 sanitized_diagnostics.push(DiagnosticEntry {
4224 range,
4225 diagnostic: entry.diagnostic,
4226 });
4227 }
4228 drop(edits_since_save);
4229
4230 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
4231 buffer.update(cx, |buffer, cx| {
4232 buffer.update_diagnostics(server_id, set, cx)
4233 });
4234 Ok(())
4235 }
4236
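    /// Reloads any dirty buffers in the given set from disk. Buffers with
    /// local files are reloaded directly; the rest are reloaded through a
    /// `ReloadBuffers` request.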
4237 pub fn reload_buffers(
4238 &self,
4239 buffers: HashSet<Model<Buffer>>,
4240 push_to_history: bool,
4241 cx: &mut ModelContext<Self>,
4242 ) -> Task<Result<ProjectTransaction>> {
4243 let mut local_buffers = Vec::new();
4244 let mut remote_buffers = None;
4245 for buffer_handle in buffers {
4246 let buffer = buffer_handle.read(cx);
4247 if buffer.is_dirty() {
4248 if let Some(file) = File::from_dyn(buffer.file()) {
4249 if file.is_local() {
4250 local_buffers.push(buffer_handle);
4251 } else {
4252 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
4253 }
4254 }
4255 }
4256 }
4257
4258 let remote_buffers = self.remote_id().zip(remote_buffers);
4259 let client = self.client.clone();
4260
4261 cx.spawn(move |this, mut cx| async move {
4262 let mut project_transaction = ProjectTransaction::default();
4263
4264 if let Some((project_id, remote_buffers)) = remote_buffers {
4265 let response = client
4266 .request(proto::ReloadBuffers {
4267 project_id,
4268 buffer_ids: remote_buffers
4269 .iter()
4270 .filter_map(|buffer| {
4271 buffer
4272 .update(&mut cx, |buffer, _| buffer.remote_id().into())
4273 .ok()
4274 })
4275 .collect(),
4276 })
4277 .await?
4278 .transaction
4279 .ok_or_else(|| anyhow!("missing transaction"))?;
4280 project_transaction = this
4281 .update(&mut cx, |this, cx| {
4282 this.deserialize_project_transaction(response, push_to_history, cx)
4283 })?
4284 .await?;
4285 }
4286
4287 for buffer in local_buffers {
4288 let transaction = buffer
4289 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
4290 .await?;
4291 buffer.update(&mut cx, |buffer, cx| {
4292 if let Some(transaction) = transaction {
4293 if !push_to_history {
4294 buffer.forget_transaction(transaction.id);
4295 }
4296 project_transaction.0.insert(cx.handle(), transaction);
4297 }
4298 })?;
4299 }
4300
4301 Ok(project_transaction)
4302 })
4303 }
4304
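    /// Formats the given buffers. For a local project this runs whitespace
    /// fixes, any code actions configured to run on format, and then the
    /// language server, external command, or prettier formatter selected by
    /// the user's settings; otherwise the request is forwarded to the host as
    /// a `FormatBuffers` message.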
4305 pub fn format(
4306 &mut self,
4307 buffers: HashSet<Model<Buffer>>,
4308 push_to_history: bool,
4309 trigger: FormatTrigger,
4310 cx: &mut ModelContext<Project>,
4311 ) -> Task<anyhow::Result<ProjectTransaction>> {
4312 if self.is_local() {
4313 let buffers_with_paths = buffers
4314 .into_iter()
4315 .filter_map(|buffer_handle| {
4316 let buffer = buffer_handle.read(cx);
4317 let file = File::from_dyn(buffer.file())?;
4318 let buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
4319 Some((buffer_handle, buffer_abs_path))
4320 })
4321 .collect::<Vec<_>>();
4322
4323 cx.spawn(move |project, mut cx| async move {
4324 let result = Self::format_locally(
4325 project.clone(),
4326 buffers_with_paths,
4327 push_to_history,
4328 trigger,
4329 cx.clone(),
4330 )
4331 .await;
4332
4333 project.update(&mut cx, |project, _| match &result {
4334 Ok(_) => project.last_formatting_failure = None,
4335 Err(error) => {
4336 project.last_formatting_failure.replace(error.to_string());
4337 }
4338 })?;
4339
4340 result
4341 })
4342 } else {
4343 let remote_id = self.remote_id();
4344 let client = self.client.clone();
4345 cx.spawn(move |this, mut cx| async move {
4346 let mut project_transaction = ProjectTransaction::default();
4347 if let Some(project_id) = remote_id {
4348 let response = client
4349 .request(proto::FormatBuffers {
4350 project_id,
4351 trigger: trigger as i32,
4352 buffer_ids: buffers
4353 .iter()
4354 .map(|buffer| {
4355 buffer.update(&mut cx, |buffer, _| buffer.remote_id().into())
4356 })
4357 .collect::<Result<_>>()?,
4358 })
4359 .await?
4360 .transaction
4361 .ok_or_else(|| anyhow!("missing transaction"))?;
4362 project_transaction = this
4363 .update(&mut cx, |this, cx| {
4364 this.deserialize_project_transaction(response, push_to_history, cx)
4365 })?
4366 .await?;
4367 }
4368 Ok(project_transaction)
4369 })
4370 }
4371 }
4372
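    // Runs the full local formatting pipeline for each buffer: whitespace
    // cleanup, code actions on format, then language-specific formatting,
    // with the results grouped into a single undo transaction per buffer.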
4373 async fn format_locally(
4374 project: WeakModel<Project>,
4375 mut buffers_with_paths: Vec<(Model<Buffer>, Option<PathBuf>)>,
4376 push_to_history: bool,
4377 trigger: FormatTrigger,
4378 mut cx: AsyncAppContext,
4379 ) -> anyhow::Result<ProjectTransaction> {
4380 // Do not allow multiple concurrent formatting requests for the
4381 // same buffer.
4382 project.update(&mut cx, |this, cx| {
4383 buffers_with_paths.retain(|(buffer, _)| {
4384 this.buffers_being_formatted
4385 .insert(buffer.read(cx).remote_id())
4386 });
4387 })?;
4388
4389 let _cleanup = defer({
4390 let this = project.clone();
4391 let mut cx = cx.clone();
4392 let buffers = &buffers_with_paths;
4393 move || {
4394 this.update(&mut cx, |this, cx| {
4395 for (buffer, _) in buffers {
4396 this.buffers_being_formatted
4397 .remove(&buffer.read(cx).remote_id());
4398 }
4399 })
4400 .ok();
4401 }
4402 });
4403
4404 let mut project_transaction = ProjectTransaction::default();
4405 for (buffer, buffer_abs_path) in &buffers_with_paths {
4406 let adapters_and_servers: Vec<_> = project.update(&mut cx, |project, cx| {
4407 project
4408 .language_servers_for_buffer(&buffer.read(cx), cx)
4409 .map(|(adapter, lsp)| (adapter.clone(), lsp.clone()))
4410 .collect()
4411 })?;
4412
4413 let settings = buffer.update(&mut cx, |buffer, cx| {
4414 language_settings(buffer.language(), buffer.file(), cx).clone()
4415 })?;
4416
4417 let remove_trailing_whitespace = settings.remove_trailing_whitespace_on_save;
4418 let ensure_final_newline = settings.ensure_final_newline_on_save;
4419 let tab_size = settings.tab_size;
4420
4421 // First, format buffer's whitespace according to the settings.
4422 let trailing_whitespace_diff = if remove_trailing_whitespace {
4423 Some(
4424 buffer
4425 .update(&mut cx, |b, cx| b.remove_trailing_whitespace(cx))?
4426 .await,
4427 )
4428 } else {
4429 None
4430 };
4431 let whitespace_transaction_id = buffer.update(&mut cx, |buffer, cx| {
4432 buffer.finalize_last_transaction();
4433 buffer.start_transaction();
4434 if let Some(diff) = trailing_whitespace_diff {
4435 buffer.apply_diff(diff, cx);
4436 }
4437 if ensure_final_newline {
4438 buffer.ensure_final_newline(cx);
4439 }
4440 buffer.end_transaction(cx)
4441 })?;
4442
4443 for (lsp_adapter, language_server) in adapters_and_servers.iter() {
                // Apply any code actions that are configured to run on format.
4445 let code_actions: Vec<lsp::CodeActionKind> = settings
4446 .code_actions_on_format
4447 .iter()
4448 .flat_map(|(kind, enabled)| {
4449 if *enabled {
4450 Some(kind.clone().into())
4451 } else {
4452 None
4453 }
4454 })
4455 .collect();
4456
4457 #[allow(clippy::nonminimal_bool)]
4458 if !code_actions.is_empty()
4459 && !(trigger == FormatTrigger::Save
4460 && settings.format_on_save == FormatOnSave::Off)
4461 {
4462 let actions = project
4463 .update(&mut cx, |this, cx| {
4464 this.request_lsp(
4465 buffer.clone(),
4466 LanguageServerToQuery::Other(language_server.server_id()),
4467 GetCodeActions {
4468 range: text::Anchor::MIN..text::Anchor::MAX,
4469 kinds: Some(code_actions),
4470 },
4471 cx,
4472 )
4473 })?
4474 .await?;
4475
4476 for mut action in actions {
4477 Self::try_resolve_code_action(&language_server, &mut action)
4478 .await
4479 .context("resolving a formatting code action")?;
4480 if let Some(edit) = action.lsp_action.edit {
4481 if edit.changes.is_none() && edit.document_changes.is_none() {
4482 continue;
4483 }
4484
4485 let new = Self::deserialize_workspace_edit(
4486 project
4487 .upgrade()
4488 .ok_or_else(|| anyhow!("project dropped"))?,
4489 edit,
4490 push_to_history,
4491 lsp_adapter.clone(),
4492 language_server.clone(),
4493 &mut cx,
4494 )
4495 .await?;
4496 project_transaction.0.extend(new.0);
4497 }
4498
4499 if let Some(command) = action.lsp_action.command {
4500 project.update(&mut cx, |this, _| {
4501 this.last_workspace_edits_by_language_server
4502 .remove(&language_server.server_id());
4503 })?;
4504
4505 language_server
4506 .request::<lsp::request::ExecuteCommand>(
4507 lsp::ExecuteCommandParams {
4508 command: command.command,
4509 arguments: command.arguments.unwrap_or_default(),
4510 ..Default::default()
4511 },
4512 )
4513 .await?;
4514
4515 project.update(&mut cx, |this, _| {
4516 project_transaction.0.extend(
4517 this.last_workspace_edits_by_language_server
4518 .remove(&language_server.server_id())
4519 .unwrap_or_default()
4520 .0,
4521 )
4522 })?;
4523 }
4524 }
4525 }
4526 }
4527
            // Apply language-specific formatting using either the primary language server
            // or an external command.
            let primary_language_server = adapters_and_servers
                .first()
                .map(|(_, lsp)| lsp.clone());
4534 let server_and_buffer = primary_language_server
4535 .as_ref()
4536 .zip(buffer_abs_path.as_ref());
4537
4538 let mut format_operation = None;
4539 match (&settings.formatter, &settings.format_on_save) {
4540 (_, FormatOnSave::Off) if trigger == FormatTrigger::Save => {}
4541
4542 (Formatter::LanguageServer, FormatOnSave::On | FormatOnSave::Off)
4543 | (_, FormatOnSave::LanguageServer) => {
4544 if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4545 format_operation = Some(FormatOperation::Lsp(
4546 Self::format_via_lsp(
4547 &project,
4548 buffer,
4549 buffer_abs_path,
4550 language_server,
4551 tab_size,
4552 &mut cx,
4553 )
4554 .await
4555 .context("failed to format via language server")?,
4556 ));
4557 }
4558 }
4559
4560 (
4561 Formatter::External { command, arguments },
4562 FormatOnSave::On | FormatOnSave::Off,
4563 )
4564 | (_, FormatOnSave::External { command, arguments }) => {
4565 if let Some(buffer_abs_path) = buffer_abs_path {
4566 format_operation = Self::format_via_external_command(
4567 buffer,
4568 buffer_abs_path,
4569 command,
4570 arguments,
4571 &mut cx,
4572 )
4573 .await
4574 .context(format!(
4575 "failed to format via external command {:?}",
4576 command
4577 ))?
4578 .map(FormatOperation::External);
4579 }
4580 }
4581 (Formatter::Auto, FormatOnSave::On | FormatOnSave::Off) => {
4582 if let Some(new_operation) =
4583 prettier_support::format_with_prettier(&project, buffer, &mut cx).await
4584 {
4585 format_operation = Some(new_operation);
4586 } else if let Some((language_server, buffer_abs_path)) = server_and_buffer {
4587 format_operation = Some(FormatOperation::Lsp(
4588 Self::format_via_lsp(
4589 &project,
4590 buffer,
4591 buffer_abs_path,
4592 language_server,
4593 tab_size,
4594 &mut cx,
4595 )
4596 .await
4597 .context("failed to format via language server")?,
4598 ));
4599 }
4600 }
4601 (Formatter::Prettier, FormatOnSave::On | FormatOnSave::Off) => {
4602 if let Some(new_operation) =
4603 prettier_support::format_with_prettier(&project, buffer, &mut cx).await
4604 {
4605 format_operation = Some(new_operation);
4606 }
4607 }
4608 };
4609
4610 buffer.update(&mut cx, |b, cx| {
4611 // If the buffer had its whitespace formatted and was edited while the language-specific
4612 // formatting was being computed, avoid applying the language-specific formatting, because
4613 // it can't be grouped with the whitespace formatting in the undo history.
4614 if let Some(transaction_id) = whitespace_transaction_id {
4615 if b.peek_undo_stack()
4616 .map_or(true, |e| e.transaction_id() != transaction_id)
4617 {
4618 format_operation.take();
4619 }
4620 }
4621
4622 // Apply any language-specific formatting, and group the two formatting operations
4623 // in the buffer's undo history.
4624 if let Some(operation) = format_operation {
4625 match operation {
4626 FormatOperation::Lsp(edits) => {
4627 b.edit(edits, None, cx);
4628 }
4629 FormatOperation::External(diff) => {
4630 b.apply_diff(diff, cx);
4631 }
4632 FormatOperation::Prettier(diff) => {
4633 b.apply_diff(diff, cx);
4634 }
4635 }
4636
4637 if let Some(transaction_id) = whitespace_transaction_id {
4638 b.group_until_transaction(transaction_id);
4639 } else if let Some(transaction) = project_transaction.0.get(buffer) {
4640 b.group_until_transaction(transaction.id)
4641 }
4642 }
4643
4644 if let Some(transaction) = b.finalize_last_transaction().cloned() {
4645 if !push_to_history {
4646 b.forget_transaction(transaction.id);
4647 }
4648 project_transaction.0.insert(buffer.clone(), transaction);
4649 }
4650 })?;
4651 }
4652
4653 Ok(project_transaction)
4654 }
4655
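    // Requests formatting edits from a language server, preferring full
    // document formatting and falling back to range formatting over the
    // entire buffer when only that capability is available.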
4656 async fn format_via_lsp(
4657 this: &WeakModel<Self>,
4658 buffer: &Model<Buffer>,
4659 abs_path: &Path,
4660 language_server: &Arc<LanguageServer>,
4661 tab_size: NonZeroU32,
4662 cx: &mut AsyncAppContext,
4663 ) -> Result<Vec<(Range<Anchor>, String)>> {
4664 let uri = lsp::Url::from_file_path(abs_path)
4665 .map_err(|_| anyhow!("failed to convert abs path to uri"))?;
4666 let text_document = lsp::TextDocumentIdentifier::new(uri);
4667 let capabilities = &language_server.capabilities();
4668
4669 let formatting_provider = capabilities.document_formatting_provider.as_ref();
4670 let range_formatting_provider = capabilities.document_range_formatting_provider.as_ref();
4671
4672 let lsp_edits = if matches!(formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4673 language_server
4674 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
4675 text_document,
4676 options: lsp_command::lsp_formatting_options(tab_size.get()),
4677 work_done_progress_params: Default::default(),
4678 })
4679 .await?
4680 } else if matches!(range_formatting_provider, Some(p) if *p != OneOf::Left(false)) {
4681 let buffer_start = lsp::Position::new(0, 0);
4682 let buffer_end = buffer.update(cx, |b, _| point_to_lsp(b.max_point_utf16()))?;
4683
4684 language_server
4685 .request::<lsp::request::RangeFormatting>(lsp::DocumentRangeFormattingParams {
4686 text_document,
4687 range: lsp::Range::new(buffer_start, buffer_end),
4688 options: lsp_command::lsp_formatting_options(tab_size.get()),
4689 work_done_progress_params: Default::default(),
4690 })
4691 .await?
4692 } else {
4693 None
4694 };
4695
4696 if let Some(lsp_edits) = lsp_edits {
4697 this.update(cx, |this, cx| {
4698 this.edits_from_lsp(buffer, lsp_edits, language_server.server_id(), None, cx)
4699 })?
4700 .await
4701 } else {
4702 Ok(Vec::new())
4703 }
4704 }
4705
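    // Formats a buffer by piping its contents to an external command's stdin,
    // substituting `{buffer_path}` into the command's arguments, and diffing
    // the command's stdout against the current buffer text.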
4706 async fn format_via_external_command(
4707 buffer: &Model<Buffer>,
4708 buffer_abs_path: &Path,
4709 command: &str,
4710 arguments: &[String],
4711 cx: &mut AsyncAppContext,
4712 ) -> Result<Option<Diff>> {
4713 let working_dir_path = buffer.update(cx, |buffer, cx| {
4714 let file = File::from_dyn(buffer.file())?;
4715 let worktree = file.worktree.read(cx).as_local()?;
4716 let mut worktree_path = worktree.abs_path().to_path_buf();
4717 if worktree.root_entry()?.is_file() {
4718 worktree_path.pop();
4719 }
4720 Some(worktree_path)
4721 })?;
4722
4723 if let Some(working_dir_path) = working_dir_path {
4724 let mut child =
4725 smol::process::Command::new(command)
4726 .args(arguments.iter().map(|arg| {
4727 arg.replace("{buffer_path}", &buffer_abs_path.to_string_lossy())
4728 }))
4729 .current_dir(&working_dir_path)
4730 .stdin(smol::process::Stdio::piped())
4731 .stdout(smol::process::Stdio::piped())
4732 .stderr(smol::process::Stdio::piped())
4733 .spawn()?;
4734 let stdin = child
4735 .stdin
4736 .as_mut()
4737 .ok_or_else(|| anyhow!("failed to acquire stdin"))?;
4738 let text = buffer.update(cx, |buffer, _| buffer.as_rope().clone())?;
4739 for chunk in text.chunks() {
4740 stdin.write_all(chunk.as_bytes()).await?;
4741 }
4742 stdin.flush().await?;
4743
4744 let output = child.output().await?;
4745 if !output.status.success() {
4746 return Err(anyhow!(
4747 "command failed with exit code {:?}:\nstdout: {}\nstderr: {}",
4748 output.status.code(),
4749 String::from_utf8_lossy(&output.stdout),
4750 String::from_utf8_lossy(&output.stderr),
4751 ));
4752 }
4753
4754 let stdout = String::from_utf8(output.stdout)?;
4755 Ok(Some(
4756 buffer
4757 .update(cx, |buffer, cx| buffer.diff(stdout, cx))?
4758 .await,
4759 ))
4760 } else {
4761 Ok(None)
4762 }
4763 }
4764
4765 #[inline(never)]
4766 fn definition_impl(
4767 &self,
4768 buffer: &Model<Buffer>,
4769 position: PointUtf16,
4770 cx: &mut ModelContext<Self>,
4771 ) -> Task<Result<Vec<LocationLink>>> {
4772 self.request_lsp(
4773 buffer.clone(),
4774 LanguageServerToQuery::Primary,
4775 GetDefinition { position },
4776 cx,
4777 )
4778 }
4779 pub fn definition<T: ToPointUtf16>(
4780 &self,
4781 buffer: &Model<Buffer>,
4782 position: T,
4783 cx: &mut ModelContext<Self>,
4784 ) -> Task<Result<Vec<LocationLink>>> {
4785 let position = position.to_point_utf16(buffer.read(cx));
4786 self.definition_impl(buffer, position, cx)
4787 }
4788
4789 fn type_definition_impl(
4790 &self,
4791 buffer: &Model<Buffer>,
4792 position: PointUtf16,
4793 cx: &mut ModelContext<Self>,
4794 ) -> Task<Result<Vec<LocationLink>>> {
4795 self.request_lsp(
4796 buffer.clone(),
4797 LanguageServerToQuery::Primary,
4798 GetTypeDefinition { position },
4799 cx,
4800 )
4801 }
4802
4803 pub fn type_definition<T: ToPointUtf16>(
4804 &self,
4805 buffer: &Model<Buffer>,
4806 position: T,
4807 cx: &mut ModelContext<Self>,
4808 ) -> Task<Result<Vec<LocationLink>>> {
4809 let position = position.to_point_utf16(buffer.read(cx));
4810 self.type_definition_impl(buffer, position, cx)
4811 }
4812
4813 fn implementation_impl(
4814 &self,
4815 buffer: &Model<Buffer>,
4816 position: PointUtf16,
4817 cx: &mut ModelContext<Self>,
4818 ) -> Task<Result<Vec<LocationLink>>> {
4819 self.request_lsp(
4820 buffer.clone(),
4821 LanguageServerToQuery::Primary,
4822 GetImplementation { position },
4823 cx,
4824 )
4825 }
4826
4827 pub fn implementation<T: ToPointUtf16>(
4828 &self,
4829 buffer: &Model<Buffer>,
4830 position: T,
4831 cx: &mut ModelContext<Self>,
4832 ) -> Task<Result<Vec<LocationLink>>> {
4833 let position = position.to_point_utf16(buffer.read(cx));
4834 self.implementation_impl(buffer, position, cx)
4835 }
4836
4837 fn references_impl(
4838 &self,
4839 buffer: &Model<Buffer>,
4840 position: PointUtf16,
4841 cx: &mut ModelContext<Self>,
4842 ) -> Task<Result<Vec<Location>>> {
4843 self.request_lsp(
4844 buffer.clone(),
4845 LanguageServerToQuery::Primary,
4846 GetReferences { position },
4847 cx,
4848 )
4849 }
4850 pub fn references<T: ToPointUtf16>(
4851 &self,
4852 buffer: &Model<Buffer>,
4853 position: T,
4854 cx: &mut ModelContext<Self>,
4855 ) -> Task<Result<Vec<Location>>> {
4856 let position = position.to_point_utf16(buffer.read(cx));
4857 self.references_impl(buffer, position, cx)
4858 }
4859
4860 fn document_highlights_impl(
4861 &self,
4862 buffer: &Model<Buffer>,
4863 position: PointUtf16,
4864 cx: &mut ModelContext<Self>,
4865 ) -> Task<Result<Vec<DocumentHighlight>>> {
4866 self.request_lsp(
4867 buffer.clone(),
4868 LanguageServerToQuery::Primary,
4869 GetDocumentHighlights { position },
4870 cx,
4871 )
4872 }
4873
4874 pub fn document_highlights<T: ToPointUtf16>(
4875 &self,
4876 buffer: &Model<Buffer>,
4877 position: T,
4878 cx: &mut ModelContext<Self>,
4879 ) -> Task<Result<Vec<DocumentHighlight>>> {
4880 let position = position.to_point_utf16(buffer.read(cx));
4881 self.document_highlights_impl(buffer, position, cx)
4882 }
4883
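    /// Queries every running language server for workspace symbols matching
    /// `query`, resolving each result to a project path and label. For remote
    /// projects the query is forwarded as a `GetProjectSymbols` request.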
4884 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
4885 if self.is_local() {
4886 let mut requests = Vec::new();
4887 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
4888 let worktree_id = *worktree_id;
4889 let worktree_handle = self.worktree_for_id(worktree_id, cx);
4890 let worktree = match worktree_handle.and_then(|tree| tree.read(cx).as_local()) {
4891 Some(worktree) => worktree,
4892 None => continue,
4893 };
4894 let worktree_abs_path = worktree.abs_path().clone();
4895
4896 let (adapter, language, server) = match self.language_servers.get(server_id) {
4897 Some(LanguageServerState::Running {
4898 adapter,
4899 language,
4900 server,
4901 ..
4902 }) => (adapter.clone(), language.clone(), server),
4903
4904 _ => continue,
4905 };
4906
4907 requests.push(
4908 server
4909 .request::<lsp::request::WorkspaceSymbolRequest>(
4910 lsp::WorkspaceSymbolParams {
4911 query: query.to_string(),
4912 ..Default::default()
4913 },
4914 )
4915 .log_err()
4916 .map(move |response| {
4917 let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
4918 lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
4919 flat_responses.into_iter().map(|lsp_symbol| {
4920 (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
4921 }).collect::<Vec<_>>()
4922 }
4923 lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
4924 nested_responses.into_iter().filter_map(|lsp_symbol| {
4925 let location = match lsp_symbol.location {
4926 OneOf::Left(location) => location,
4927 OneOf::Right(_) => {
4928 error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
4929 return None
4930 }
4931 };
4932 Some((lsp_symbol.name, lsp_symbol.kind, location))
4933 }).collect::<Vec<_>>()
4934 }
4935 }).unwrap_or_default();
4936
4937 (
4938 adapter,
4939 language,
4940 worktree_id,
4941 worktree_abs_path,
4942 lsp_symbols,
4943 )
4944 }),
4945 );
4946 }
4947
4948 cx.spawn(move |this, mut cx| async move {
4949 let responses = futures::future::join_all(requests).await;
4950 let this = match this.upgrade() {
4951 Some(this) => this,
4952 None => return Ok(Vec::new()),
4953 };
4954
4955 let symbols = this.update(&mut cx, |this, cx| {
4956 let mut symbols = Vec::new();
4957 for (
4958 adapter,
4959 adapter_language,
4960 source_worktree_id,
4961 worktree_abs_path,
4962 lsp_symbols,
4963 ) in responses
4964 {
4965 symbols.extend(lsp_symbols.into_iter().filter_map(
4966 |(symbol_name, symbol_kind, symbol_location)| {
4967 let abs_path = symbol_location.uri.to_file_path().ok()?;
4968 let mut worktree_id = source_worktree_id;
4969 let path;
4970 if let Some((worktree, rel_path)) =
4971 this.find_local_worktree(&abs_path, cx)
4972 {
4973 worktree_id = worktree.read(cx).id();
4974 path = rel_path;
4975 } else {
4976 path = relativize_path(&worktree_abs_path, &abs_path);
4977 }
4978
4979 let project_path = ProjectPath {
4980 worktree_id,
4981 path: path.into(),
4982 };
4983 let signature = this.symbol_signature(&project_path);
4984 let adapter_language = adapter_language.clone();
4985 let language = this
4986 .languages
4987 .language_for_file(&project_path.path, None)
4988 .unwrap_or_else(move |_| adapter_language);
4989 let adapter = adapter.clone();
4990 Some(async move {
4991 let language = language.await;
4992 let label = adapter
4993 .label_for_symbol(&symbol_name, symbol_kind, &language)
4994 .await;
4995
4996 Symbol {
4997 language_server_name: adapter.name.clone(),
4998 source_worktree_id,
4999 path: project_path,
5000 label: label.unwrap_or_else(|| {
5001 CodeLabel::plain(symbol_name.clone(), None)
5002 }),
5003 kind: symbol_kind,
5004 name: symbol_name,
5005 range: range_from_lsp(symbol_location.range),
5006 signature,
5007 }
5008 })
5009 },
5010 ));
5011 }
5012
5013 symbols
5014 })?;
5015
5016 Ok(futures::future::join_all(symbols).await)
5017 })
5018 } else if let Some(project_id) = self.remote_id() {
5019 let request = self.client.request(proto::GetProjectSymbols {
5020 project_id,
5021 query: query.to_string(),
5022 });
5023 cx.spawn(move |this, mut cx| async move {
5024 let response = request.await?;
5025 let mut symbols = Vec::new();
5026 if let Some(this) = this.upgrade() {
5027 let new_symbols = this.update(&mut cx, |this, _| {
5028 response
5029 .symbols
5030 .into_iter()
5031 .map(|symbol| this.deserialize_symbol(symbol))
5032 .collect::<Vec<_>>()
5033 })?;
5034 symbols = futures::future::join_all(new_symbols)
5035 .await
5036 .into_iter()
5037 .filter_map(|symbol| symbol.log_err())
5038 .collect::<Vec<_>>();
5039 }
5040 Ok(symbols)
5041 })
5042 } else {
5043 Task::ready(Ok(Default::default()))
5044 }
5045 }
5046
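    /// Opens the buffer containing the given symbol, resolving the symbol's
    /// path through the language server that produced it.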
5047 pub fn open_buffer_for_symbol(
5048 &mut self,
5049 symbol: &Symbol,
5050 cx: &mut ModelContext<Self>,
5051 ) -> Task<Result<Model<Buffer>>> {
5052 if self.is_local() {
5053 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
5054 symbol.source_worktree_id,
5055 symbol.language_server_name.clone(),
5056 )) {
5057 *id
5058 } else {
5059 return Task::ready(Err(anyhow!(
5060 "language server for worktree and language not found"
5061 )));
5062 };
5063
5064 let worktree_abs_path = if let Some(worktree_abs_path) = self
5065 .worktree_for_id(symbol.path.worktree_id, cx)
5066 .and_then(|worktree| worktree.read(cx).as_local())
5067 .map(|local_worktree| local_worktree.abs_path())
5068 {
5069 worktree_abs_path
5070 } else {
5071 return Task::ready(Err(anyhow!("worktree not found for symbol")));
5072 };
5073
5074 let symbol_abs_path = resolve_path(worktree_abs_path, &symbol.path.path);
5075 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
5076 uri
5077 } else {
5078 return Task::ready(Err(anyhow!("invalid symbol path")));
5079 };
5080
5081 self.open_local_buffer_via_lsp(
5082 symbol_uri,
5083 language_server_id,
5084 symbol.language_server_name.clone(),
5085 cx,
5086 )
5087 } else if let Some(project_id) = self.remote_id() {
5088 let request = self.client.request(proto::OpenBufferForSymbol {
5089 project_id,
5090 symbol: Some(serialize_symbol(symbol)),
5091 });
5092 cx.spawn(move |this, mut cx| async move {
5093 let response = request.await?;
5094 let buffer_id = BufferId::new(response.buffer_id)?;
5095 this.update(&mut cx, |this, cx| {
5096 this.wait_for_remote_buffer(buffer_id, cx)
5097 })?
5098 .await
5099 })
5100 } else {
5101 Task::ready(Err(anyhow!("project does not have a remote id")))
5102 }
5103 }
5104
5105 fn hover_impl(
5106 &self,
5107 buffer: &Model<Buffer>,
5108 position: PointUtf16,
5109 cx: &mut ModelContext<Self>,
5110 ) -> Task<Result<Option<Hover>>> {
5111 self.request_lsp(
5112 buffer.clone(),
5113 LanguageServerToQuery::Primary,
5114 GetHover { position },
5115 cx,
5116 )
5117 }
5118 pub fn hover<T: ToPointUtf16>(
5119 &self,
5120 buffer: &Model<Buffer>,
5121 position: T,
5122 cx: &mut ModelContext<Self>,
5123 ) -> Task<Result<Option<Hover>>> {
5124 let position = position.to_point_utf16(buffer.read(cx));
5125 self.hover_impl(buffer, position, cx)
5126 }
5127
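    // Requests completions from every language server attached to the buffer
    // that advertises a completion provider and is allowed in the language
    // scope at the cursor, concatenating the results.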
5128 #[inline(never)]
5129 fn completions_impl(
5130 &self,
5131 buffer: &Model<Buffer>,
5132 position: PointUtf16,
5133 cx: &mut ModelContext<Self>,
5134 ) -> Task<Result<Vec<Completion>>> {
5135 if self.is_local() {
5136 let snapshot = buffer.read(cx).snapshot();
5137 let offset = position.to_offset(&snapshot);
5138 let scope = snapshot.language_scope_at(offset);
5139
5140 let server_ids: Vec<_> = self
5141 .language_servers_for_buffer(buffer.read(cx), cx)
5142 .filter(|(_, server)| server.capabilities().completion_provider.is_some())
5143 .filter(|(adapter, _)| {
5144 scope
5145 .as_ref()
5146 .map(|scope| scope.language_allowed(&adapter.name))
5147 .unwrap_or(true)
5148 })
5149 .map(|(_, server)| server.server_id())
5150 .collect();
5151
5152 let buffer = buffer.clone();
5153 cx.spawn(move |this, mut cx| async move {
5154 let mut tasks = Vec::with_capacity(server_ids.len());
5155 this.update(&mut cx, |this, cx| {
5156 for server_id in server_ids {
5157 tasks.push(this.request_lsp(
5158 buffer.clone(),
5159 LanguageServerToQuery::Other(server_id),
5160 GetCompletions { position },
5161 cx,
5162 ));
5163 }
5164 })?;
5165
5166 let mut completions = Vec::new();
5167 for task in tasks {
5168 if let Ok(new_completions) = task.await {
5169 completions.extend_from_slice(&new_completions);
5170 }
5171 }
5172
5173 Ok(completions)
5174 })
5175 } else if let Some(project_id) = self.remote_id() {
5176 self.send_lsp_proto_request(buffer.clone(), project_id, GetCompletions { position }, cx)
5177 } else {
5178 Task::ready(Ok(Default::default()))
5179 }
5180 }
5181 pub fn completions<T: ToOffset + ToPointUtf16>(
5182 &self,
5183 buffer: &Model<Buffer>,
5184 position: T,
5185 cx: &mut ModelContext<Self>,
5186 ) -> Task<Result<Vec<Completion>>> {
5187 let position = position.to_point_utf16(buffer.read(cx));
5188 self.completions_impl(buffer, position, cx)
5189 }
5190
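    /// Resolves documentation for the completions at the given indices,
    /// skipping entries that are already documented. Returns `true` if any
    /// resolution was attempted.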
5191 pub fn resolve_completions(
5192 &self,
5193 completion_indices: Vec<usize>,
5194 completions: Arc<RwLock<Box<[Completion]>>>,
5195 cx: &mut ModelContext<Self>,
5196 ) -> Task<Result<bool>> {
5197 let client = self.client();
5198 let language_registry = self.languages().clone();
5199
5200 let is_remote = self.is_remote();
5201 let project_id = self.remote_id();
5202
5203 cx.spawn(move |this, mut cx| async move {
5204 let mut did_resolve = false;
5205 if is_remote {
5206 let project_id =
5207 project_id.ok_or_else(|| anyhow!("Remote project without remote_id"))?;
5208
5209 for completion_index in completion_indices {
5210 let completions_guard = completions.read();
5211 let completion = &completions_guard[completion_index];
5212 if completion.documentation.is_some() {
5213 continue;
5214 }
5215
5216 did_resolve = true;
5217 let server_id = completion.server_id;
5218 let completion = completion.lsp_completion.clone();
5219 drop(completions_guard);
5220
5221 Self::resolve_completion_documentation_remote(
5222 project_id,
5223 server_id,
5224 completions.clone(),
5225 completion_index,
5226 completion,
5227 client.clone(),
5228 language_registry.clone(),
5229 )
5230 .await;
5231 }
5232 } else {
5233 for completion_index in completion_indices {
5234 let completions_guard = completions.read();
5235 let completion = &completions_guard[completion_index];
5236 if completion.documentation.is_some() {
5237 continue;
5238 }
5239
5240 let server_id = completion.server_id;
5241 let completion = completion.lsp_completion.clone();
5242 drop(completions_guard);
5243
5244 let server = this
5245 .read_with(&mut cx, |project, _| {
5246 project.language_server_for_id(server_id)
5247 })
5248 .ok()
5249 .flatten();
5250 let Some(server) = server else {
5251 continue;
5252 };
5253
5254 did_resolve = true;
5255 Self::resolve_completion_documentation_local(
5256 server,
5257 completions.clone(),
5258 completion_index,
5259 completion,
5260 language_registry.clone(),
5261 )
5262 .await;
5263 }
5264 }
5265
5266 Ok(did_resolve)
5267 })
5268 }
5269
5270 async fn resolve_completion_documentation_local(
5271 server: Arc<lsp::LanguageServer>,
5272 completions: Arc<RwLock<Box<[Completion]>>>,
5273 completion_index: usize,
5274 completion: lsp::CompletionItem,
5275 language_registry: Arc<LanguageRegistry>,
5276 ) {
5277 let can_resolve = server
5278 .capabilities()
5279 .completion_provider
5280 .as_ref()
5281 .and_then(|options| options.resolve_provider)
5282 .unwrap_or(false);
5283 if !can_resolve {
5284 return;
5285 }
5286
5287 let request = server.request::<lsp::request::ResolveCompletionItem>(completion);
5288 let Some(completion_item) = request.await.log_err() else {
5289 return;
5290 };
5291
5292 if let Some(lsp_documentation) = completion_item.documentation {
5293 let documentation = language::prepare_completion_documentation(
5294 &lsp_documentation,
5295 &language_registry,
5296 None, // TODO: Try to reasonably work out which language the completion is for
5297 )
5298 .await;
5299
5300 let mut completions = completions.write();
5301 let completion = &mut completions[completion_index];
5302 completion.documentation = Some(documentation);
5303 } else {
5304 let mut completions = completions.write();
5305 let completion = &mut completions[completion_index];
5306 completion.documentation = Some(Documentation::Undocumented);
5307 }
5308 }
5309
5310 async fn resolve_completion_documentation_remote(
5311 project_id: u64,
5312 server_id: LanguageServerId,
5313 completions: Arc<RwLock<Box<[Completion]>>>,
5314 completion_index: usize,
5315 completion: lsp::CompletionItem,
5316 client: Arc<Client>,
5317 language_registry: Arc<LanguageRegistry>,
5318 ) {
5319 let request = proto::ResolveCompletionDocumentation {
5320 project_id,
5321 language_server_id: server_id.0 as u64,
5322 lsp_completion: serde_json::to_string(&completion).unwrap().into_bytes(),
5323 };
5324
5325 let Some(response) = client
5326 .request(request)
5327 .await
5328 .context("completion documentation resolve proto request")
5329 .log_err()
5330 else {
5331 return;
5332 };
5333
        if response.text.is_empty() {
            let mut completions = completions.write();
            let completion = &mut completions[completion_index];
            completion.documentation = Some(Documentation::Undocumented);
            return;
        }
5339
5340 let documentation = if response.is_markdown {
5341 Documentation::MultiLineMarkdown(
5342 markdown::parse_markdown(&response.text, &language_registry, None).await,
5343 )
5344 } else if response.text.lines().count() <= 1 {
5345 Documentation::SingleLine(response.text)
5346 } else {
5347 Documentation::MultiLinePlainText(response.text)
5348 };
5349
5350 let mut completions = completions.write();
5351 let completion = &mut completions[completion_index];
5352 completion.documentation = Some(documentation);
5353 }
5354
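    /// Applies a completion's additional text edits, resolving the completion
    /// item first when the server supports it and skipping any edit that
    /// overlaps the primary completion range.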
5355 pub fn apply_additional_edits_for_completion(
5356 &self,
5357 buffer_handle: Model<Buffer>,
5358 completion: Completion,
5359 push_to_history: bool,
5360 cx: &mut ModelContext<Self>,
5361 ) -> Task<Result<Option<Transaction>>> {
5362 let buffer = buffer_handle.read(cx);
5363 let buffer_id = buffer.remote_id();
5364
5365 if self.is_local() {
5366 let server_id = completion.server_id;
5367 let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
5368 Some((_, server)) => server.clone(),
5369 _ => return Task::ready(Ok(Default::default())),
5370 };
5371
5372 cx.spawn(move |this, mut cx| async move {
5373 let can_resolve = lang_server
5374 .capabilities()
5375 .completion_provider
5376 .as_ref()
5377 .and_then(|options| options.resolve_provider)
5378 .unwrap_or(false);
5379 let additional_text_edits = if can_resolve {
5380 lang_server
5381 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
5382 .await?
5383 .additional_text_edits
5384 } else {
5385 completion.lsp_completion.additional_text_edits
5386 };
5387 if let Some(edits) = additional_text_edits {
5388 let edits = this
5389 .update(&mut cx, |this, cx| {
5390 this.edits_from_lsp(
5391 &buffer_handle,
5392 edits,
5393 lang_server.server_id(),
5394 None,
5395 cx,
5396 )
5397 })?
5398 .await?;
5399
5400 buffer_handle.update(&mut cx, |buffer, cx| {
5401 buffer.finalize_last_transaction();
5402 buffer.start_transaction();
5403
5404 for (range, text) in edits {
5405 let primary = &completion.old_range;
5406 let start_within = primary.start.cmp(&range.start, buffer).is_le()
5407 && primary.end.cmp(&range.start, buffer).is_ge();
5408 let end_within = range.start.cmp(&primary.end, buffer).is_le()
5409 && range.end.cmp(&primary.end, buffer).is_ge();
5410
                            // Skip additional edits which overlap with the primary completion edit
                            // https://github.com/zed-industries/zed/pull/1871
5413 if !start_within && !end_within {
5414 buffer.edit([(range, text)], None, cx);
5415 }
5416 }
5417
5418 let transaction = if buffer.end_transaction(cx).is_some() {
5419 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5420 if !push_to_history {
5421 buffer.forget_transaction(transaction.id);
5422 }
5423 Some(transaction)
5424 } else {
5425 None
5426 };
5427 Ok(transaction)
5428 })?
5429 } else {
5430 Ok(None)
5431 }
5432 })
5433 } else if let Some(project_id) = self.remote_id() {
5434 let client = self.client.clone();
5435 cx.spawn(move |_, mut cx| async move {
5436 let response = client
5437 .request(proto::ApplyCompletionAdditionalEdits {
5438 project_id,
5439 buffer_id: buffer_id.into(),
5440 completion: Some(language::proto::serialize_completion(&completion)),
5441 })
5442 .await?;
5443
5444 if let Some(transaction) = response.transaction {
5445 let transaction = language::proto::deserialize_transaction(transaction)?;
5446 buffer_handle
5447 .update(&mut cx, |buffer, _| {
5448 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5449 })?
5450 .await?;
5451 if push_to_history {
5452 buffer_handle.update(&mut cx, |buffer, _| {
5453 buffer.push_transaction(transaction.clone(), Instant::now());
5454 })?;
5455 }
5456 Ok(Some(transaction))
5457 } else {
5458 Ok(None)
5459 }
5460 })
5461 } else {
5462 Task::ready(Err(anyhow!("project does not have a remote id")))
5463 }
5464 }
5465
5466 fn code_actions_impl(
5467 &self,
5468 buffer_handle: &Model<Buffer>,
5469 range: Range<Anchor>,
5470 cx: &mut ModelContext<Self>,
5471 ) -> Task<Result<Vec<CodeAction>>> {
5472 self.request_lsp(
5473 buffer_handle.clone(),
5474 LanguageServerToQuery::Primary,
5475 GetCodeActions { range, kinds: None },
5476 cx,
5477 )
5478 }
5479
5480 pub fn code_actions<T: Clone + ToOffset>(
5481 &self,
5482 buffer_handle: &Model<Buffer>,
5483 range: Range<T>,
5484 cx: &mut ModelContext<Self>,
5485 ) -> Task<Result<Vec<CodeAction>>> {
5486 let buffer = buffer_handle.read(cx);
5487 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5488 self.code_actions_impl(buffer_handle, range, cx)
5489 }
5490
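    /// Applies a code action: the action is resolved if necessary, its
    /// workspace edit is applied, and its command is executed, capturing any
    /// workspace edits the server sends back while the command runs.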
5491 pub fn apply_code_action(
5492 &self,
5493 buffer_handle: Model<Buffer>,
5494 mut action: CodeAction,
5495 push_to_history: bool,
5496 cx: &mut ModelContext<Self>,
5497 ) -> Task<Result<ProjectTransaction>> {
5498 if self.is_local() {
5499 let buffer = buffer_handle.read(cx);
5500 let (lsp_adapter, lang_server) = if let Some((adapter, server)) =
5501 self.language_server_for_buffer(buffer, action.server_id, cx)
5502 {
5503 (adapter.clone(), server.clone())
5504 } else {
5505 return Task::ready(Ok(Default::default()));
5506 };
5507 cx.spawn(move |this, mut cx| async move {
5508 Self::try_resolve_code_action(&lang_server, &mut action)
5509 .await
5510 .context("resolving a code action")?;
5511 if let Some(edit) = action.lsp_action.edit {
5512 if edit.changes.is_some() || edit.document_changes.is_some() {
5513 return Self::deserialize_workspace_edit(
5514 this.upgrade().ok_or_else(|| anyhow!("no app present"))?,
5515 edit,
5516 push_to_history,
5517 lsp_adapter.clone(),
5518 lang_server.clone(),
5519 &mut cx,
5520 )
5521 .await;
5522 }
5523 }
5524
5525 if let Some(command) = action.lsp_action.command {
5526 this.update(&mut cx, |this, _| {
5527 this.last_workspace_edits_by_language_server
5528 .remove(&lang_server.server_id());
5529 })?;
5530
5531 let result = lang_server
5532 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
5533 command: command.command,
5534 arguments: command.arguments.unwrap_or_default(),
5535 ..Default::default()
5536 })
5537 .await;
5538
5539 if let Err(err) = result {
5540 // TODO: LSP ERROR
5541 return Err(err);
5542 }
5543
5544 return this.update(&mut cx, |this, _| {
5545 this.last_workspace_edits_by_language_server
5546 .remove(&lang_server.server_id())
5547 .unwrap_or_default()
5548 });
5549 }
5550
5551 Ok(ProjectTransaction::default())
5552 })
5553 } else if let Some(project_id) = self.remote_id() {
5554 let client = self.client.clone();
5555 let request = proto::ApplyCodeAction {
5556 project_id,
5557 buffer_id: buffer_handle.read(cx).remote_id().into(),
5558 action: Some(language::proto::serialize_code_action(&action)),
5559 };
5560 cx.spawn(move |this, mut cx| async move {
5561 let response = client
5562 .request(request)
5563 .await?
5564 .transaction
5565 .ok_or_else(|| anyhow!("missing transaction"))?;
5566 this.update(&mut cx, |this, cx| {
5567 this.deserialize_project_transaction(response, push_to_history, cx)
5568 })?
5569 .await
5570 })
5571 } else {
5572 Task::ready(Err(anyhow!("project does not have a remote id")))
5573 }
5574 }
5575
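    // Runs on-type formatting at the given position, guarding against
    // concurrent formatting requests for the same buffer.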
5576 fn apply_on_type_formatting(
5577 &self,
5578 buffer: Model<Buffer>,
5579 position: Anchor,
5580 trigger: String,
5581 cx: &mut ModelContext<Self>,
5582 ) -> Task<Result<Option<Transaction>>> {
5583 if self.is_local() {
5584 cx.spawn(move |this, mut cx| async move {
5585 // Do not allow multiple concurrent formatting requests for the
5586 // same buffer.
5587 this.update(&mut cx, |this, cx| {
5588 this.buffers_being_formatted
5589 .insert(buffer.read(cx).remote_id())
5590 })?;
5591
5592 let _cleanup = defer({
5593 let this = this.clone();
5594 let mut cx = cx.clone();
5595 let closure_buffer = buffer.clone();
5596 move || {
5597 this.update(&mut cx, |this, cx| {
5598 this.buffers_being_formatted
5599 .remove(&closure_buffer.read(cx).remote_id());
5600 })
5601 .ok();
5602 }
5603 });
5604
5605 buffer
5606 .update(&mut cx, |buffer, _| {
5607 buffer.wait_for_edits(Some(position.timestamp))
5608 })?
5609 .await?;
5610 this.update(&mut cx, |this, cx| {
5611 let position = position.to_point_utf16(buffer.read(cx));
5612 this.on_type_format(buffer, position, trigger, false, cx)
5613 })?
5614 .await
5615 })
5616 } else if let Some(project_id) = self.remote_id() {
5617 let client = self.client.clone();
5618 let request = proto::OnTypeFormatting {
5619 project_id,
5620 buffer_id: buffer.read(cx).remote_id().into(),
5621 position: Some(serialize_anchor(&position)),
5622 trigger,
5623 version: serialize_version(&buffer.read(cx).version()),
5624 };
5625 cx.spawn(move |_, _| async move {
5626 client
5627 .request(request)
5628 .await?
5629 .transaction
5630 .map(language::proto::deserialize_transaction)
5631 .transpose()
5632 })
5633 } else {
5634 Task::ready(Err(anyhow!("project does not have a remote id")))
5635 }
5636 }
5637
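    // Converts LSP text edits into buffer edits and applies them inside a
    // single transaction, which is omitted from the undo history unless
    // `push_to_history` is set.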
5638 async fn deserialize_edits(
5639 this: Model<Self>,
5640 buffer_to_edit: Model<Buffer>,
5641 edits: Vec<lsp::TextEdit>,
5642 push_to_history: bool,
5643 _: Arc<CachedLspAdapter>,
5644 language_server: Arc<LanguageServer>,
5645 cx: &mut AsyncAppContext,
5646 ) -> Result<Option<Transaction>> {
5647 let edits = this
5648 .update(cx, |this, cx| {
5649 this.edits_from_lsp(
5650 &buffer_to_edit,
5651 edits,
5652 language_server.server_id(),
5653 None,
5654 cx,
5655 )
5656 })?
5657 .await?;
5658
5659 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5660 buffer.finalize_last_transaction();
5661 buffer.start_transaction();
5662 for (range, text) in edits {
5663 buffer.edit([(range, text)], None, cx);
5664 }
5665
5666 if buffer.end_transaction(cx).is_some() {
5667 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5668 if !push_to_history {
5669 buffer.forget_transaction(transaction.id);
5670 }
5671 Some(transaction)
5672 } else {
5673 None
5674 }
5675 })?;
5676
5677 Ok(transaction)
5678 }
5679
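    // Applies an LSP workspace edit: resource create/rename/delete operations
    // go through the project's `Fs`, and text edits are applied to buffers
    // opened via the originating language server, with the resulting
    // transactions collected into a `ProjectTransaction`.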
5680 async fn deserialize_workspace_edit(
5681 this: Model<Self>,
5682 edit: lsp::WorkspaceEdit,
5683 push_to_history: bool,
5684 lsp_adapter: Arc<CachedLspAdapter>,
5685 language_server: Arc<LanguageServer>,
5686 cx: &mut AsyncAppContext,
5687 ) -> Result<ProjectTransaction> {
5688 let fs = this.update(cx, |this, _| this.fs.clone())?;
5689 let mut operations = Vec::new();
5690 if let Some(document_changes) = edit.document_changes {
5691 match document_changes {
5692 lsp::DocumentChanges::Edits(edits) => {
5693 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
5694 }
5695 lsp::DocumentChanges::Operations(ops) => operations = ops,
5696 }
5697 } else if let Some(changes) = edit.changes {
5698 operations.extend(changes.into_iter().map(|(uri, edits)| {
5699 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
5700 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
5701 uri,
5702 version: None,
5703 },
5704 edits: edits.into_iter().map(OneOf::Left).collect(),
5705 })
5706 }));
5707 }
5708
5709 let mut project_transaction = ProjectTransaction::default();
5710 for operation in operations {
5711 match operation {
5712 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
5713 let abs_path = op
5714 .uri
5715 .to_file_path()
5716 .map_err(|_| anyhow!("can't convert URI to path"))?;
5717
5718 if let Some(parent_path) = abs_path.parent() {
5719 fs.create_dir(parent_path).await?;
5720 }
5721 if abs_path.ends_with("/") {
5722 fs.create_dir(&abs_path).await?;
5723 } else {
5724 fs.create_file(
5725 &abs_path,
5726 op.options
5727 .map(|options| fs::CreateOptions {
5728 overwrite: options.overwrite.unwrap_or(false),
5729 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5730 })
5731 .unwrap_or_default(),
5732 )
5733 .await?;
5734 }
5735 }
5736
5737 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
5738 let source_abs_path = op
5739 .old_uri
5740 .to_file_path()
5741 .map_err(|_| anyhow!("can't convert URI to path"))?;
5742 let target_abs_path = op
5743 .new_uri
5744 .to_file_path()
5745 .map_err(|_| anyhow!("can't convert URI to path"))?;
5746 fs.rename(
5747 &source_abs_path,
5748 &target_abs_path,
5749 op.options
5750 .map(|options| fs::RenameOptions {
5751 overwrite: options.overwrite.unwrap_or(false),
5752 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5753 })
5754 .unwrap_or_default(),
5755 )
5756 .await?;
5757 }
5758
5759 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
5760 let abs_path = op
5761 .uri
5762 .to_file_path()
5763 .map_err(|_| anyhow!("can't convert URI to path"))?;
5764 let options = op
5765 .options
5766 .map(|options| fs::RemoveOptions {
5767 recursive: options.recursive.unwrap_or(false),
5768 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5769 })
5770 .unwrap_or_default();
5771 if abs_path.ends_with("/") {
5772 fs.remove_dir(&abs_path, options).await?;
5773 } else {
5774 fs.remove_file(&abs_path, options).await?;
5775 }
5776 }
5777
5778 lsp::DocumentChangeOperation::Edit(op) => {
5779 let buffer_to_edit = this
5780 .update(cx, |this, cx| {
5781 this.open_local_buffer_via_lsp(
5782 op.text_document.uri,
5783 language_server.server_id(),
5784 lsp_adapter.name.clone(),
5785 cx,
5786 )
5787 })?
5788 .await?;
5789
5790 let edits = this
5791 .update(cx, |this, cx| {
5792 let edits = op.edits.into_iter().map(|edit| match edit {
5793 OneOf::Left(edit) => edit,
5794 OneOf::Right(edit) => edit.text_edit,
5795 });
5796 this.edits_from_lsp(
5797 &buffer_to_edit,
5798 edits,
5799 language_server.server_id(),
5800 op.text_document.version,
5801 cx,
5802 )
5803 })?
5804 .await?;
5805
5806 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
5807 buffer.finalize_last_transaction();
5808 buffer.start_transaction();
5809 for (range, text) in edits {
5810 buffer.edit([(range, text)], None, cx);
5811 }
5812 let transaction = if buffer.end_transaction(cx).is_some() {
5813 let transaction = buffer.finalize_last_transaction().unwrap().clone();
5814 if !push_to_history {
5815 buffer.forget_transaction(transaction.id);
5816 }
5817 Some(transaction)
5818 } else {
5819 None
5820 };
5821
5822 transaction
5823 })?;
5824 if let Some(transaction) = transaction {
5825 project_transaction.0.insert(buffer_to_edit, transaction);
5826 }
5827 }
5828 }
5829 }
5830
5831 Ok(project_transaction)
5832 }
5833
5834 fn prepare_rename_impl(
5835 &self,
5836 buffer: Model<Buffer>,
5837 position: PointUtf16,
5838 cx: &mut ModelContext<Self>,
5839 ) -> Task<Result<Option<Range<Anchor>>>> {
5840 self.request_lsp(
5841 buffer,
5842 LanguageServerToQuery::Primary,
5843 PrepareRename { position },
5844 cx,
5845 )
5846 }
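
    /// Asks the buffer's primary language server whether the symbol at `position` can be
    /// renamed, returning the range that a rename would affect (LSP `textDocument/prepareRename`).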
5847 pub fn prepare_rename<T: ToPointUtf16>(
5848 &self,
5849 buffer: Model<Buffer>,
5850 position: T,
5851 cx: &mut ModelContext<Self>,
5852 ) -> Task<Result<Option<Range<Anchor>>>> {
5853 let position = position.to_point_utf16(buffer.read(cx));
5854 self.prepare_rename_impl(buffer, position, cx)
5855 }
5856
5857 fn perform_rename_impl(
5858 &self,
5859 buffer: Model<Buffer>,
5860 position: PointUtf16,
5861 new_name: String,
5862 push_to_history: bool,
5863 cx: &mut ModelContext<Self>,
5864 ) -> Task<Result<ProjectTransaction>> {
5866 self.request_lsp(
5867 buffer,
5868 LanguageServerToQuery::Primary,
5869 PerformRename {
5870 position,
5871 new_name,
5872 push_to_history,
5873 },
5874 cx,
5875 )
5876 }
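
    /// Renames the symbol at `position` to `new_name` via the buffer's primary language server,
    /// returning the resulting `ProjectTransaction`. When `push_to_history` is false, the edits
    /// are applied without being recorded in the affected buffers' undo histories.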
5877 pub fn perform_rename<T: ToPointUtf16>(
5878 &self,
5879 buffer: Model<Buffer>,
5880 position: T,
5881 new_name: String,
5882 push_to_history: bool,
5883 cx: &mut ModelContext<Self>,
5884 ) -> Task<Result<ProjectTransaction>> {
5885 let position = position.to_point_utf16(buffer.read(cx));
5886 self.perform_rename_impl(buffer, position, new_name, push_to_history, cx)
5887 }
5888
5889 pub fn on_type_format_impl(
5890 &self,
5891 buffer: Model<Buffer>,
5892 position: PointUtf16,
5893 trigger: String,
5894 push_to_history: bool,
5895 cx: &mut ModelContext<Self>,
5896 ) -> Task<Result<Option<Transaction>>> {
5897 let tab_size = buffer.update(cx, |buffer, cx| {
5898 language_settings(buffer.language_at(position).as_ref(), buffer.file(), cx).tab_size
5899 });
5900 self.request_lsp(
5901 buffer.clone(),
5902 LanguageServerToQuery::Primary,
5903 OnTypeFormatting {
5904 position,
5905 trigger,
5906 options: lsp_command::lsp_formatting_options(tab_size.get()).into(),
5907 push_to_history,
5908 },
5909 cx,
5910 )
5911 }
5912
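    /// Requests on-type formatting from the primary language server after `trigger` (the
    /// just-typed text) was entered at `position`, using the buffer's configured tab size for
    /// the formatting options and returning the transaction containing any resulting edits.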
5913 pub fn on_type_format<T: ToPointUtf16>(
5914 &self,
5915 buffer: Model<Buffer>,
5916 position: T,
5917 trigger: String,
5918 push_to_history: bool,
5919 cx: &mut ModelContext<Self>,
5920 ) -> Task<Result<Option<Transaction>>> {
5921 let position = position.to_point_utf16(buffer.read(cx));
5922 self.on_type_format_impl(buffer, position, trigger, push_to_history, cx)
5923 }
5924
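    /// Fetches inlay hints for the given buffer range. Locally this waits for the range's edits
    /// to land and then queries the buffer's primary language server; for remote projects the
    /// request is forwarded to the host over RPC.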
5925 pub fn inlay_hints<T: ToOffset>(
5926 &self,
5927 buffer_handle: Model<Buffer>,
5928 range: Range<T>,
5929 cx: &mut ModelContext<Self>,
5930 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5931 let buffer = buffer_handle.read(cx);
5932 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
5933 self.inlay_hints_impl(buffer_handle, range, cx)
5934 }
5935 fn inlay_hints_impl(
5936 &self,
5937 buffer_handle: Model<Buffer>,
5938 range: Range<Anchor>,
5939 cx: &mut ModelContext<Self>,
5940 ) -> Task<anyhow::Result<Vec<InlayHint>>> {
5941 let buffer = buffer_handle.read(cx);
5942 let range_start = range.start;
5943 let range_end = range.end;
5944 let buffer_id = buffer.remote_id().into();
5945 let lsp_request = InlayHints { range };
5946
5947 if self.is_local() {
5948 let lsp_request_task = self.request_lsp(
5949 buffer_handle.clone(),
5950 LanguageServerToQuery::Primary,
5951 lsp_request,
5952 cx,
5953 );
5954 cx.spawn(move |_, mut cx| async move {
5955 buffer_handle
5956 .update(&mut cx, |buffer, _| {
5957 buffer.wait_for_edits(vec![range_start.timestamp, range_end.timestamp])
5958 })?
5959 .await
5960 .context("waiting for inlay hint request range edits")?;
5961 lsp_request_task.await.context("inlay hints LSP request")
5962 })
5963 } else if let Some(project_id) = self.remote_id() {
5964 let client = self.client.clone();
5965 let request = proto::InlayHints {
5966 project_id,
5967 buffer_id,
5968 start: Some(serialize_anchor(&range_start)),
5969 end: Some(serialize_anchor(&range_end)),
5970 version: serialize_version(&buffer_handle.read(cx).version()),
5971 };
5972 cx.spawn(move |project, cx| async move {
5973 let response = client
5974 .request(request)
5975 .await
5976 .context("inlay hints proto request")?;
5977 LspCommand::response_from_proto(
5978 lsp_request,
5979 response,
5980 project.upgrade().ok_or_else(|| anyhow!("No project"))?,
5981 buffer_handle.clone(),
5982 cx.clone(),
5983 )
5984 .await
5985 .context("inlay hints proto response conversion")
5986 })
5987 } else {
5988 Task::ready(Err(anyhow!("project does not have a remote id")))
5989 }
5990 }
5991
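    /// Resolves additional data for an inlay hint (e.g. tooltip or label details) via the
    /// language server that produced it, returning the hint unchanged when the server does not
    /// support inlay hint resolution.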
5992 pub fn resolve_inlay_hint(
5993 &self,
5994 hint: InlayHint,
5995 buffer_handle: Model<Buffer>,
5996 server_id: LanguageServerId,
5997 cx: &mut ModelContext<Self>,
5998 ) -> Task<anyhow::Result<InlayHint>> {
5999 if self.is_local() {
6000 let buffer = buffer_handle.read(cx);
            let lang_server = match self.language_server_for_buffer(buffer, server_id, cx) {
                Some((_, server)) => server.clone(),
                None => return Task::ready(Ok(hint)),
            };
6008 if !InlayHints::can_resolve_inlays(lang_server.capabilities()) {
6009 return Task::ready(Ok(hint));
6010 }
6011
6012 let buffer_snapshot = buffer.snapshot();
6013 cx.spawn(move |_, mut cx| async move {
6014 let resolve_task = lang_server.request::<lsp::request::InlayHintResolveRequest>(
6015 InlayHints::project_to_lsp_hint(hint, &buffer_snapshot),
6016 );
6017 let resolved_hint = resolve_task
6018 .await
6019 .context("inlay hint resolve LSP request")?;
6020 let resolved_hint = InlayHints::lsp_to_project_hint(
6021 resolved_hint,
6022 &buffer_handle,
6023 server_id,
6024 ResolveState::Resolved,
6025 false,
6026 &mut cx,
6027 )
6028 .await?;
6029 Ok(resolved_hint)
6030 })
6031 } else if let Some(project_id) = self.remote_id() {
6032 let client = self.client.clone();
6033 let request = proto::ResolveInlayHint {
6034 project_id,
6035 buffer_id: buffer_handle.read(cx).remote_id().into(),
6036 language_server_id: server_id.0 as u64,
6037 hint: Some(InlayHints::project_to_proto_hint(hint.clone())),
6038 };
6039 cx.spawn(move |_, _| async move {
6040 let response = client
6041 .request(request)
6042 .await
6043 .context("inlay hints proto request")?;
6044 match response.hint {
6045 Some(resolved_hint) => InlayHints::proto_to_project_hint(resolved_hint)
6046 .context("inlay hints proto resolve response conversion"),
6047 None => Ok(hint),
6048 }
6049 })
6050 } else {
6051 Task::ready(Err(anyhow!("project does not have a remote id")))
6052 }
6053 }
6054
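    /// Searches the project for `query`. Local projects are searched directly; for remote
    /// projects the query is sent over RPC and the reported locations are resolved back into
    /// buffers. Results are streamed through the returned channel as `(buffer, ranges)` pairs.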
6055 #[allow(clippy::type_complexity)]
6056 pub fn search(
6057 &self,
6058 query: SearchQuery,
6059 cx: &mut ModelContext<Self>,
6060 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
6061 if self.is_local() {
6062 self.search_local(query, cx)
6063 } else if let Some(project_id) = self.remote_id() {
6064 let (tx, rx) = smol::channel::unbounded();
6065 let request = self.client.request(query.to_proto(project_id));
6066 cx.spawn(move |this, mut cx| async move {
6067 let response = request.await?;
6068 let mut result = HashMap::default();
6069 for location in response.locations {
6070 let buffer_id = BufferId::new(location.buffer_id)?;
6071 let target_buffer = this
6072 .update(&mut cx, |this, cx| {
6073 this.wait_for_remote_buffer(buffer_id, cx)
6074 })?
6075 .await?;
6076 let start = location
6077 .start
6078 .and_then(deserialize_anchor)
6079 .ok_or_else(|| anyhow!("missing target start"))?;
6080 let end = location
6081 .end
6082 .and_then(deserialize_anchor)
6083 .ok_or_else(|| anyhow!("missing target end"))?;
6084 result
6085 .entry(target_buffer)
6086 .or_insert(Vec::new())
6087 .push(start..end)
6088 }
6089 for (buffer, ranges) in result {
6090 let _ = tx.send((buffer, ranges)).await;
6091 }
6092 Result::<(), anyhow::Error>::Ok(())
6093 })
6094 .detach_and_log_err(cx);
6095 rx
6096 } else {
6097 unimplemented!();
6098 }
6099 }
6100
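    /// Searches all local worktrees and open buffers for `query`, streaming matches sorted by
    /// buffer path (see the description of the individual phases below).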
6101 pub fn search_local(
6102 &self,
6103 query: SearchQuery,
6104 cx: &mut ModelContext<Self>,
6105 ) -> Receiver<(Model<Buffer>, Vec<Range<Anchor>>)> {
        // Local search is split into several phases.
        // In short, we run two passes: an initial pass that picks the files containing at least one match,
        // and a second pass that finds the positions of all matches within those candidate files.
        // The Receiver returned by this function yields matches sorted by buffer path; files without a buffer path are reported first.
        //
        // It gets a bit hairy, though, because we must also account for files that have no persistent representation
        // on the FS. Namely, untitled buffers and buffers with unsaved changes need to be scanned too.
        //
        // 1. We initialize a queue of match candidates and feed all opened buffers into it (i.e. unsaved files / untitled buffers).
        //    Then we walk the worktrees and check for files that match the query's predicate. If a file has an opened version, we skip scanning
        //    its FS version altogether - after all, what we have in memory is more up-to-date than what's on the FS.
        // 2. At this point, we have a list of all potentially matching buffers/files.
        //    We sort that list by buffer path - this list is retained for later use.
        //    We ensure that all candidate buffers are opened and available in the project.
        // 3. We scan all the candidate buffers on multiple background threads.
        //    We cannot assume that there will even be a match - while at least one match
        //    is guaranteed for files obtained from the FS, the buffers we got from memory (unsaved files / unnamed buffers) might not match at all.
        //    There is also an auxiliary background thread responsible for gathering results.
        //    This is where the sorted list of buffers comes into play: whenever that thread receives a notification (buffer has / doesn't have matches),
        //    it keeps it around. It accepts results in arbitrary order, but reports matches in sorted order.
        //    As soon as the match info for the next position in sorted order becomes available, it reports it (if it's a match) or skips to the next
        //    entry - which might already be available thanks to out-of-order processing.
        //
        // We could also report matches fully out-of-order, without maintaining a sorted list of matching paths.
        // That, however, would mean that project search (the main user of this function) would have to do the sorting itself, on the fly.
        // Sadly, that isn't as straightforward as running an insertion sort, and it would also have to maintain a stable match index
        // in the face of a constantly updating list of sorted matches.
        // Meanwhile, this implementation offers index stability, since matches are already reported in sorted order.
6134 let snapshots = self
6135 .visible_worktrees(cx)
6136 .filter_map(|tree| {
6137 let tree = tree.read(cx).as_local()?;
6138 Some(tree.snapshot())
6139 })
6140 .collect::<Vec<_>>();
6141
6142 let background = cx.background_executor().clone();
6143 let path_count: usize = snapshots
6144 .iter()
6145 .map(|s| {
6146 if query.include_ignored() {
6147 s.file_count()
6148 } else {
6149 s.visible_file_count()
6150 }
6151 })
6152 .sum();
6153 if path_count == 0 {
6154 let (_, rx) = smol::channel::bounded(1024);
6155 return rx;
6156 }
6157 let workers = background.num_cpus().min(path_count);
6158 let (matching_paths_tx, matching_paths_rx) = smol::channel::bounded(1024);
6159 let mut unnamed_files = vec![];
6160 let opened_buffers = self
6161 .opened_buffers
6162 .iter()
6163 .filter_map(|(_, b)| {
6164 let buffer = b.upgrade()?;
6165 let (is_ignored, snapshot) = buffer.update(cx, |buffer, cx| {
6166 let is_ignored = buffer
6167 .project_path(cx)
6168 .and_then(|path| self.entry_for_path(&path, cx))
6169 .map_or(false, |entry| entry.is_ignored);
6170 (is_ignored, buffer.snapshot())
6171 });
6172 if is_ignored && !query.include_ignored() {
6173 return None;
6174 } else if let Some(path) = snapshot.file().map(|file| file.path()) {
6175 Some((path.clone(), (buffer, snapshot)))
6176 } else {
6177 unnamed_files.push(buffer);
6178 None
6179 }
6180 })
6181 .collect();
6182 cx.background_executor()
6183 .spawn(Self::background_search(
6184 unnamed_files,
6185 opened_buffers,
6186 cx.background_executor().clone(),
6187 self.fs.clone(),
6188 workers,
6189 query.clone(),
6190 path_count,
6191 snapshots,
6192 matching_paths_tx,
6193 ))
6194 .detach();
6195
6196 let (buffers, buffers_rx) = Self::sort_candidates_and_open_buffers(matching_paths_rx, cx);
6197 let background = cx.background_executor().clone();
6198 let (result_tx, result_rx) = smol::channel::bounded(1024);
6199 cx.background_executor()
6200 .spawn(async move {
6201 let Ok(buffers) = buffers.await else {
6202 return;
6203 };
6204
6205 let buffers_len = buffers.len();
6206 if buffers_len == 0 {
6207 return;
6208 }
6209 let query = &query;
6210 let (finished_tx, mut finished_rx) = smol::channel::unbounded();
6211 background
6212 .scoped(|scope| {
6213 #[derive(Clone)]
6214 struct FinishedStatus {
6215 entry: Option<(Model<Buffer>, Vec<Range<Anchor>>)>,
6216 buffer_index: SearchMatchCandidateIndex,
6217 }
6218
6219 for _ in 0..workers {
6220 let finished_tx = finished_tx.clone();
6221 let mut buffers_rx = buffers_rx.clone();
6222 scope.spawn(async move {
6223 while let Some((entry, buffer_index)) = buffers_rx.next().await {
6224 let buffer_matches = if let Some((_, snapshot)) = entry.as_ref()
6225 {
6226 if query.file_matches(
6227 snapshot.file().map(|file| file.path().as_ref()),
6228 ) {
6229 query
6230 .search(snapshot, None)
6231 .await
6232 .iter()
6233 .map(|range| {
6234 snapshot.anchor_before(range.start)
6235 ..snapshot.anchor_after(range.end)
6236 })
6237 .collect()
6238 } else {
6239 Vec::new()
6240 }
6241 } else {
6242 Vec::new()
6243 };
6244
6245 let status = if !buffer_matches.is_empty() {
6246 let entry = if let Some((buffer, _)) = entry.as_ref() {
6247 Some((buffer.clone(), buffer_matches))
6248 } else {
6249 None
6250 };
6251 FinishedStatus {
6252 entry,
6253 buffer_index,
6254 }
6255 } else {
6256 FinishedStatus {
6257 entry: None,
6258 buffer_index,
6259 }
6260 };
6261 if finished_tx.send(status).await.is_err() {
6262 break;
6263 }
6264 }
6265 });
6266 }
6267 // Report sorted matches
6268 scope.spawn(async move {
6269 let mut current_index = 0;
6270 let mut scratch = vec![None; buffers_len];
6271 while let Some(status) = finished_rx.next().await {
6272 debug_assert!(
6273 scratch[status.buffer_index].is_none(),
6274 "Got match status of position {} twice",
6275 status.buffer_index
6276 );
6277 let index = status.buffer_index;
6278 scratch[index] = Some(status);
6279 while current_index < buffers_len {
6280 let Some(current_entry) = scratch[current_index].take() else {
                                    // We intentionally **do not** increment `current_index` here. When the next element
                                    // arrives from `finished_rx`, we will inspect the same position again, hoping for it
                                    // to be `Some(_)` this time.
6284 break;
6285 };
6286 if let Some(entry) = current_entry.entry {
6287 result_tx.send(entry).await.log_err();
6288 }
6289 current_index += 1;
6290 }
6291 if current_index == buffers_len {
6292 break;
6293 }
6294 }
6295 });
6296 })
6297 .await;
6298 })
6299 .detach();
6300 result_rx
6301 }
6302
    /// Pick the paths that might contain a match for the given search query.
6304 #[allow(clippy::too_many_arguments)]
6305 async fn background_search(
6306 unnamed_buffers: Vec<Model<Buffer>>,
6307 opened_buffers: HashMap<Arc<Path>, (Model<Buffer>, BufferSnapshot)>,
6308 executor: BackgroundExecutor,
6309 fs: Arc<dyn Fs>,
6310 workers: usize,
6311 query: SearchQuery,
6312 path_count: usize,
6313 snapshots: Vec<LocalSnapshot>,
6314 matching_paths_tx: Sender<SearchMatchCandidate>,
6315 ) {
6316 let fs = &fs;
6317 let query = &query;
6318 let matching_paths_tx = &matching_paths_tx;
6319 let snapshots = &snapshots;
6320 let paths_per_worker = (path_count + workers - 1) / workers;
6321 for buffer in unnamed_buffers {
6322 matching_paths_tx
6323 .send(SearchMatchCandidate::OpenBuffer {
6324 buffer: buffer.clone(),
6325 path: None,
6326 })
6327 .await
6328 .log_err();
6329 }
6330 for (path, (buffer, _)) in opened_buffers.iter() {
6331 matching_paths_tx
6332 .send(SearchMatchCandidate::OpenBuffer {
6333 buffer: buffer.clone(),
6334 path: Some(path.clone()),
6335 })
6336 .await
6337 .log_err();
6338 }
6339 executor
6340 .scoped(|scope| {
6341 let max_concurrent_workers = Arc::new(Semaphore::new(workers));
6342
6343 for worker_ix in 0..workers {
6344 let worker_start_ix = worker_ix * paths_per_worker;
6345 let worker_end_ix = worker_start_ix + paths_per_worker;
                    let opened_buffers = opened_buffers.clone();
6347 let limiter = Arc::clone(&max_concurrent_workers);
6348 scope.spawn(async move {
6349 let _guard = limiter.acquire().await;
6350 let mut snapshot_start_ix = 0;
6351 let mut abs_path = PathBuf::new();
6352 for snapshot in snapshots {
6353 let snapshot_end_ix = snapshot_start_ix
6354 + if query.include_ignored() {
6355 snapshot.file_count()
6356 } else {
6357 snapshot.visible_file_count()
6358 };
6359 if worker_end_ix <= snapshot_start_ix {
6360 break;
6361 } else if worker_start_ix > snapshot_end_ix {
6362 snapshot_start_ix = snapshot_end_ix;
6363 continue;
6364 } else {
6365 let start_in_snapshot =
6366 worker_start_ix.saturating_sub(snapshot_start_ix);
6367 let end_in_snapshot =
6368 cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
6369
6370 for entry in snapshot
6371 .files(query.include_ignored(), start_in_snapshot)
6372 .take(end_in_snapshot - start_in_snapshot)
6373 {
6374 if matching_paths_tx.is_closed() {
6375 break;
6376 }
                                    if opened_buffers.contains_key(&entry.path) {
6378 continue;
6379 }
6380 let matches = if query.file_matches(Some(&entry.path)) {
6381 abs_path.clear();
6382 abs_path.push(&snapshot.abs_path());
6383 abs_path.push(&entry.path);
6384 if let Some(file) = fs.open_sync(&abs_path).await.log_err()
6385 {
6386 query.detect(file).unwrap_or(false)
6387 } else {
6388 false
6389 }
6390 } else {
6391 false
6392 };
6393
6394 if matches {
6395 let project_path = SearchMatchCandidate::Path {
6396 worktree_id: snapshot.id(),
6397 path: entry.path.clone(),
6398 is_ignored: entry.is_ignored,
6399 };
6400 if matching_paths_tx.send(project_path).await.is_err() {
6401 break;
6402 }
6403 }
6404 }
6405
6406 snapshot_start_ix = snapshot_end_ix;
6407 }
6408 }
6409 });
6410 }
6411
6412 if query.include_ignored() {
6413 for snapshot in snapshots {
6414 for ignored_entry in snapshot
6415 .entries(query.include_ignored())
6416 .filter(|e| e.is_ignored)
6417 {
6418 let limiter = Arc::clone(&max_concurrent_workers);
6419 scope.spawn(async move {
6420 let _guard = limiter.acquire().await;
6421 let mut ignored_paths_to_process =
6422 VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
6423 while let Some(ignored_abs_path) =
6424 ignored_paths_to_process.pop_front()
6425 {
6426 if let Some(fs_metadata) = fs
6427 .metadata(&ignored_abs_path)
6428 .await
6429 .with_context(|| {
6430 format!("fetching fs metadata for {ignored_abs_path:?}")
6431 })
6432 .log_err()
6433 .flatten()
6434 {
6435 if fs_metadata.is_dir {
6436 if let Some(mut subfiles) = fs
6437 .read_dir(&ignored_abs_path)
6438 .await
6439 .with_context(|| {
6440 format!(
6441 "listing ignored path {ignored_abs_path:?}"
6442 )
6443 })
6444 .log_err()
6445 {
6446 while let Some(subfile) = subfiles.next().await {
6447 if let Some(subfile) = subfile.log_err() {
6448 ignored_paths_to_process.push_back(subfile);
6449 }
6450 }
6451 }
6452 } else if !fs_metadata.is_symlink {
6453 if !query.file_matches(Some(&ignored_abs_path))
6454 || snapshot.is_path_excluded(
6455 ignored_entry.path.to_path_buf(),
6456 )
6457 {
6458 continue;
6459 }
6460 let matches = if let Some(file) = fs
6461 .open_sync(&ignored_abs_path)
6462 .await
6463 .with_context(|| {
6464 format!(
6465 "Opening ignored path {ignored_abs_path:?}"
6466 )
6467 })
6468 .log_err()
6469 {
6470 query.detect(file).unwrap_or(false)
6471 } else {
6472 false
6473 };
6474 if matches {
6475 let project_path = SearchMatchCandidate::Path {
6476 worktree_id: snapshot.id(),
6477 path: Arc::from(
6478 ignored_abs_path
6479 .strip_prefix(snapshot.abs_path())
6480 .expect(
6481 "scanning worktree-related files",
6482 ),
6483 ),
6484 is_ignored: true,
6485 };
6486 if matching_paths_tx
6487 .send(project_path)
6488 .await
6489 .is_err()
6490 {
6491 return;
6492 }
6493 }
6494 }
6495 }
6496 }
6497 });
6498 }
6499 }
6500 }
6501 })
6502 .await;
6503 }
6504
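    /// Sends an `LspCommand` to the language server selected by `server` for the given buffer.
    /// Locally the request goes straight to the server; for remote projects it is proxied to the
    /// host via `send_lsp_proto_request`. Returns a default response when no matching server is
    /// available or the server lacks the required capability.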
6505 pub fn request_lsp<R: LspCommand>(
6506 &self,
6507 buffer_handle: Model<Buffer>,
6508 server: LanguageServerToQuery,
6509 request: R,
6510 cx: &mut ModelContext<Self>,
6511 ) -> Task<Result<R::Response>>
6512 where
6513 <R::LspRequest as lsp::request::Request>::Result: Send,
6514 <R::LspRequest as lsp::request::Request>::Params: Send,
6515 {
6516 let buffer = buffer_handle.read(cx);
6517 if self.is_local() {
6518 let language_server = match server {
6519 LanguageServerToQuery::Primary => {
6520 match self.primary_language_server_for_buffer(buffer, cx) {
6521 Some((_, server)) => Some(Arc::clone(server)),
6522 None => return Task::ready(Ok(Default::default())),
6523 }
6524 }
6525 LanguageServerToQuery::Other(id) => self
6526 .language_server_for_buffer(buffer, id, cx)
6527 .map(|(_, server)| Arc::clone(server)),
6528 };
6529 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
6530 if let (Some(file), Some(language_server)) = (file, language_server) {
6531 let lsp_params = request.to_lsp(&file.abs_path(cx), buffer, &language_server, cx);
6532 return cx.spawn(move |this, cx| async move {
6533 if !request.check_capabilities(language_server.capabilities()) {
6534 return Ok(Default::default());
6535 }
6536
6537 let result = language_server.request::<R::LspRequest>(lsp_params).await;
6538 let response = match result {
6539 Ok(response) => response,
6540
6541 Err(err) => {
6542 log::warn!(
6543 "Generic lsp request to {} failed: {}",
6544 language_server.name(),
6545 err
6546 );
6547 return Err(err);
6548 }
6549 };
6550
6551 request
6552 .response_from_lsp(
6553 response,
6554 this.upgrade().ok_or_else(|| anyhow!("no app context"))?,
6555 buffer_handle,
6556 language_server.server_id(),
6557 cx,
6558 )
6559 .await
6560 });
6561 }
6562 } else if let Some(project_id) = self.remote_id() {
6563 return self.send_lsp_proto_request(buffer_handle, project_id, request, cx);
6564 }
6565
6566 Task::ready(Ok(Default::default()))
6567 }
6568
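    /// Forwards an `LspCommand` to the remote host over RPC and converts the proto response back
    /// into the command's native response type, failing if the project is disconnected in the
    /// meantime.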
6569 fn send_lsp_proto_request<R: LspCommand>(
6570 &self,
6571 buffer: Model<Buffer>,
6572 project_id: u64,
6573 request: R,
6574 cx: &mut ModelContext<'_, Project>,
6575 ) -> Task<anyhow::Result<<R as LspCommand>::Response>> {
6576 let rpc = self.client.clone();
6577 let message = request.to_proto(project_id, buffer.read(cx));
6578 cx.spawn(move |this, mut cx| async move {
6579 // Ensure the project is still alive by the time the task
6580 // is scheduled.
6581 this.upgrade().context("project dropped")?;
6582 let response = rpc.request(message).await?;
6583 let this = this.upgrade().context("project dropped")?;
6584 if this.update(&mut cx, |this, _| this.is_disconnected())? {
6585 Err(anyhow!("disconnected before completing request"))
6586 } else {
6587 request
6588 .response_from_proto(response, this, buffer, cx)
6589 .await
6590 }
6591 })
6592 }
6593
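    /// Collects search-match candidates from `matching_paths_rx`, sorts them by path (ignored
    /// entries last), and opens a buffer for each one. Returns the sorted candidate list via a
    /// oneshot channel, plus a stream of `(buffer, snapshot)` pairs tagged with their index in
    /// that sorted order.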
6594 fn sort_candidates_and_open_buffers(
6595 mut matching_paths_rx: Receiver<SearchMatchCandidate>,
6596 cx: &mut ModelContext<Self>,
6597 ) -> (
6598 futures::channel::oneshot::Receiver<Vec<SearchMatchCandidate>>,
6599 Receiver<(
6600 Option<(Model<Buffer>, BufferSnapshot)>,
6601 SearchMatchCandidateIndex,
6602 )>,
6603 ) {
6604 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
6605 let (sorted_buffers_tx, sorted_buffers_rx) = futures::channel::oneshot::channel();
6606 cx.spawn(move |this, cx| async move {
6607 let mut buffers = Vec::new();
6608 let mut ignored_buffers = Vec::new();
6609 while let Some(entry) = matching_paths_rx.next().await {
6610 if matches!(
6611 entry,
6612 SearchMatchCandidate::Path {
6613 is_ignored: true,
6614 ..
6615 }
6616 ) {
6617 ignored_buffers.push(entry);
6618 } else {
6619 buffers.push(entry);
6620 }
6621 }
6622 buffers.sort_by_key(|candidate| candidate.path());
6623 ignored_buffers.sort_by_key(|candidate| candidate.path());
6624 buffers.extend(ignored_buffers);
6625 let matching_paths = buffers.clone();
6626 let _ = sorted_buffers_tx.send(buffers);
6627 for (index, candidate) in matching_paths.into_iter().enumerate() {
6628 if buffers_tx.is_closed() {
6629 break;
6630 }
6631 let this = this.clone();
6632 let buffers_tx = buffers_tx.clone();
6633 cx.spawn(move |mut cx| async move {
6634 let buffer = match candidate {
6635 SearchMatchCandidate::OpenBuffer { buffer, .. } => Some(buffer),
6636 SearchMatchCandidate::Path {
6637 worktree_id, path, ..
6638 } => this
6639 .update(&mut cx, |this, cx| {
6640 this.open_buffer((worktree_id, path), cx)
6641 })?
6642 .await
6643 .log_err(),
6644 };
6645 if let Some(buffer) = buffer {
6646 let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
6647 buffers_tx
6648 .send((Some((buffer, snapshot)), index))
6649 .await
6650 .log_err();
6651 } else {
6652 buffers_tx.send((None, index)).await.log_err();
6653 }
6654
6655 Ok::<_, anyhow::Error>(())
6656 })
6657 .detach();
6658 }
6659 })
6660 .detach();
6661 (sorted_buffers_rx, buffers_rx)
6662 }
6663
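    /// Returns the local worktree containing `abs_path` together with the path relative to that
    /// worktree's root, creating a new worktree rooted at `abs_path` (with an empty relative
    /// path) if no existing worktree contains it.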
6664 pub fn find_or_create_local_worktree(
6665 &mut self,
6666 abs_path: impl AsRef<Path>,
6667 visible: bool,
6668 cx: &mut ModelContext<Self>,
6669 ) -> Task<Result<(Model<Worktree>, PathBuf)>> {
6670 let abs_path = abs_path.as_ref();
6671 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
6672 Task::ready(Ok((tree, relative_path)))
6673 } else {
6674 let worktree = self.create_local_worktree(abs_path, visible, cx);
6675 cx.background_executor()
6676 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
6677 }
6678 }
6679
6680 pub fn find_local_worktree(
6681 &self,
6682 abs_path: &Path,
6683 cx: &AppContext,
6684 ) -> Option<(Model<Worktree>, PathBuf)> {
6685 for tree in &self.worktrees {
6686 if let Some(tree) = tree.upgrade() {
6687 if let Some(relative_path) = tree
6688 .read(cx)
6689 .as_local()
6690 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
6691 {
6692 return Some((tree.clone(), relative_path.into()));
6693 }
6694 }
6695 }
6696 None
6697 }
6698
6699 pub fn is_shared(&self) -> bool {
6700 match &self.client_state {
6701 ProjectClientState::Shared { .. } => true,
6702 ProjectClientState::Local | ProjectClientState::Remote { .. } => false,
6703 }
6704 }
6705
6706 fn create_local_worktree(
6707 &mut self,
6708 abs_path: impl AsRef<Path>,
6709 visible: bool,
6710 cx: &mut ModelContext<Self>,
6711 ) -> Task<Result<Model<Worktree>>> {
6712 let fs = self.fs.clone();
6713 let client = self.client.clone();
6714 let next_entry_id = self.next_entry_id.clone();
6715 let path: Arc<Path> = abs_path.as_ref().into();
6716 let task = self
6717 .loading_local_worktrees
6718 .entry(path.clone())
6719 .or_insert_with(|| {
6720 cx.spawn(move |project, mut cx| {
6721 async move {
6722 let worktree = Worktree::local(
6723 client.clone(),
6724 path.clone(),
6725 visible,
6726 fs,
6727 next_entry_id,
6728 &mut cx,
6729 )
6730 .await;
6731
6732 project.update(&mut cx, |project, _| {
6733 project.loading_local_worktrees.remove(&path);
6734 })?;
6735
6736 let worktree = worktree?;
6737 project
6738 .update(&mut cx, |project, cx| project.add_worktree(&worktree, cx))?;
6739 Ok(worktree)
6740 }
6741 .map_err(Arc::new)
6742 })
6743 .shared()
6744 })
6745 .clone();
6746 cx.background_executor().spawn(async move {
6747 match task.await {
6748 Ok(worktree) => Ok(worktree),
6749 Err(err) => Err(anyhow!("{}", err)),
6750 }
6751 })
6752 }
6753
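    /// Removes the given worktree from the project, dropping language servers and prettier
    /// instances that served only that worktree along with the worktree's task sources.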
6754 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
6755 let mut servers_to_remove = HashMap::default();
6756 let mut servers_to_preserve = HashSet::default();
6757 for ((worktree_id, server_name), &server_id) in &self.language_server_ids {
6758 if worktree_id == &id_to_remove {
6759 servers_to_remove.insert(server_id, server_name.clone());
6760 } else {
6761 servers_to_preserve.insert(server_id);
6762 }
6763 }
6764 servers_to_remove.retain(|server_id, _| !servers_to_preserve.contains(server_id));
6765 for (server_id_to_remove, server_name) in servers_to_remove {
6766 self.language_server_ids
6767 .remove(&(id_to_remove, server_name));
6768 self.language_server_statuses.remove(&server_id_to_remove);
6769 self.language_server_watched_paths
6770 .remove(&server_id_to_remove);
6771 self.last_workspace_edits_by_language_server
6772 .remove(&server_id_to_remove);
6773 self.language_servers.remove(&server_id_to_remove);
6774 cx.emit(Event::LanguageServerRemoved(server_id_to_remove));
6775 }
6776
6777 let mut prettier_instances_to_clean = FuturesUnordered::new();
6778 if let Some(prettier_paths) = self.prettiers_per_worktree.remove(&id_to_remove) {
6779 for path in prettier_paths.iter().flatten() {
6780 if let Some(prettier_instance) = self.prettier_instances.remove(path) {
6781 prettier_instances_to_clean.push(async move {
6782 prettier_instance
6783 .server()
6784 .await
6785 .map(|server| server.server_id())
6786 });
6787 }
6788 }
6789 }
6790 cx.spawn(|project, mut cx| async move {
6791 while let Some(prettier_server_id) = prettier_instances_to_clean.next().await {
6792 if let Some(prettier_server_id) = prettier_server_id {
6793 project
6794 .update(&mut cx, |project, cx| {
6795 project
6796 .supplementary_language_servers
6797 .remove(&prettier_server_id);
6798 cx.emit(Event::LanguageServerRemoved(prettier_server_id));
6799 })
6800 .ok();
6801 }
6802 }
6803 })
6804 .detach();
6805
6806 self.task_inventory().update(cx, |inventory, _| {
6807 inventory.remove_worktree_sources(id_to_remove);
6808 });
6809
6810 self.worktrees.retain(|worktree| {
6811 if let Some(worktree) = worktree.upgrade() {
6812 let id = worktree.read(cx).id();
6813 if id == id_to_remove {
6814 cx.emit(Event::WorktreeRemoved(id));
6815 false
6816 } else {
6817 true
6818 }
6819 } else {
6820 false
6821 }
6822 });
6823 self.metadata_changed(cx);
6824 }
6825
6826 fn add_worktree(&mut self, worktree: &Model<Worktree>, cx: &mut ModelContext<Self>) {
6827 cx.observe(worktree, |_, _, cx| cx.notify()).detach();
6828 cx.subscribe(worktree, |this, worktree, event, cx| {
6829 let is_local = worktree.read(cx).is_local();
6830 match event {
6831 worktree::Event::UpdatedEntries(changes) => {
6832 if is_local {
6833 this.update_local_worktree_buffers(&worktree, changes, cx);
6834 this.update_local_worktree_language_servers(&worktree, changes, cx);
6835 this.update_local_worktree_settings(&worktree, changes, cx);
6836 this.update_prettier_settings(&worktree, changes, cx);
6837 }
6838
6839 cx.emit(Event::WorktreeUpdatedEntries(
6840 worktree.read(cx).id(),
6841 changes.clone(),
6842 ));
6843 }
6844 worktree::Event::UpdatedGitRepositories(updated_repos) => {
6845 if is_local {
6846 this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
6847 }
6848 }
6849 }
6850 })
6851 .detach();
6852
6853 let push_strong_handle = {
6854 let worktree = worktree.read(cx);
6855 self.is_shared() || worktree.is_visible() || worktree.is_remote()
6856 };
6857 if push_strong_handle {
6858 self.worktrees
6859 .push(WorktreeHandle::Strong(worktree.clone()));
6860 } else {
6861 self.worktrees
6862 .push(WorktreeHandle::Weak(worktree.downgrade()));
6863 }
6864
6865 let handle_id = worktree.entity_id();
6866 cx.observe_release(worktree, move |this, worktree, cx| {
6867 let _ = this.remove_worktree(worktree.id(), cx);
6868 cx.update_global::<SettingsStore, _>(|store, cx| {
6869 store
6870 .clear_local_settings(handle_id.as_u64() as usize, cx)
6871 .log_err()
6872 });
6873 })
6874 .detach();
6875
6876 cx.emit(Event::WorktreeAdded);
6877 self.metadata_changed(cx);
6878 }
6879
6880 fn update_local_worktree_buffers(
6881 &mut self,
6882 worktree_handle: &Model<Worktree>,
6883 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
6884 cx: &mut ModelContext<Self>,
6885 ) {
6886 let snapshot = worktree_handle.read(cx).snapshot();
6887
6888 let mut renamed_buffers = Vec::new();
6889 for (path, entry_id, _) in changes {
6890 let worktree_id = worktree_handle.read(cx).id();
6891 let project_path = ProjectPath {
6892 worktree_id,
6893 path: path.clone(),
6894 };
6895
6896 let buffer_id = match self.local_buffer_ids_by_entry_id.get(entry_id) {
6897 Some(&buffer_id) => buffer_id,
6898 None => match self.local_buffer_ids_by_path.get(&project_path) {
6899 Some(&buffer_id) => buffer_id,
6900 None => {
6901 continue;
6902 }
6903 },
6904 };
6905
6906 let open_buffer = self.opened_buffers.get(&buffer_id);
6907 let buffer = if let Some(buffer) = open_buffer.and_then(|buffer| buffer.upgrade()) {
6908 buffer
6909 } else {
6910 self.opened_buffers.remove(&buffer_id);
6911 self.local_buffer_ids_by_path.remove(&project_path);
6912 self.local_buffer_ids_by_entry_id.remove(entry_id);
6913 continue;
6914 };
6915
6916 buffer.update(cx, |buffer, cx| {
6917 if let Some(old_file) = File::from_dyn(buffer.file()) {
6918 if old_file.worktree != *worktree_handle {
6919 return;
6920 }
6921
6922 let new_file = if let Some(entry) = old_file
6923 .entry_id
6924 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
6925 {
6926 File {
6927 is_local: true,
6928 entry_id: Some(entry.id),
6929 mtime: entry.mtime,
6930 path: entry.path.clone(),
6931 worktree: worktree_handle.clone(),
6932 is_deleted: false,
6933 is_private: entry.is_private,
6934 }
6935 } else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
6936 File {
6937 is_local: true,
6938 entry_id: Some(entry.id),
6939 mtime: entry.mtime,
6940 path: entry.path.clone(),
6941 worktree: worktree_handle.clone(),
6942 is_deleted: false,
6943 is_private: entry.is_private,
6944 }
6945 } else {
6946 File {
6947 is_local: true,
6948 entry_id: old_file.entry_id,
6949 path: old_file.path().clone(),
6950 mtime: old_file.mtime(),
6951 worktree: worktree_handle.clone(),
6952 is_deleted: true,
6953 is_private: old_file.is_private,
6954 }
6955 };
6956
6957 let old_path = old_file.abs_path(cx);
6958 if new_file.abs_path(cx) != old_path {
6959 renamed_buffers.push((cx.handle(), old_file.clone()));
6960 self.local_buffer_ids_by_path.remove(&project_path);
6961 self.local_buffer_ids_by_path.insert(
6962 ProjectPath {
6963 worktree_id,
6964 path: path.clone(),
6965 },
6966 buffer_id,
6967 );
6968 }
6969
6970 if new_file.entry_id != Some(*entry_id) {
6971 self.local_buffer_ids_by_entry_id.remove(entry_id);
6972 if let Some(entry_id) = new_file.entry_id {
6973 self.local_buffer_ids_by_entry_id
6974 .insert(entry_id, buffer_id);
6975 }
6976 }
6977
6978 if new_file != *old_file {
6979 if let Some(project_id) = self.remote_id() {
6980 self.client
6981 .send(proto::UpdateBufferFile {
6982 project_id,
6983 buffer_id: buffer_id.into(),
6984 file: Some(new_file.to_proto()),
6985 })
6986 .log_err();
6987 }
6988
6989 buffer.file_updated(Arc::new(new_file), cx);
6990 }
6991 }
6992 });
6993 }
6994
6995 for (buffer, old_file) in renamed_buffers {
6996 self.unregister_buffer_from_language_servers(&buffer, &old_file, cx);
6997 self.detect_language_for_buffer(&buffer, cx);
6998 self.register_buffer_with_language_servers(&buffer, cx);
6999 }
7000 }
7001
7002 fn update_local_worktree_language_servers(
7003 &mut self,
7004 worktree_handle: &Model<Worktree>,
7005 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
7006 cx: &mut ModelContext<Self>,
7007 ) {
7008 if changes.is_empty() {
7009 return;
7010 }
7011
7012 let worktree_id = worktree_handle.read(cx).id();
7013 let mut language_server_ids = self
7014 .language_server_ids
7015 .iter()
7016 .filter_map(|((server_worktree_id, _), server_id)| {
7017 (*server_worktree_id == worktree_id).then_some(*server_id)
7018 })
7019 .collect::<Vec<_>>();
7020 language_server_ids.sort();
7021 language_server_ids.dedup();
7022
7023 let abs_path = worktree_handle.read(cx).abs_path();
7024 for server_id in &language_server_ids {
7025 if let Some(LanguageServerState::Running { server, .. }) =
7026 self.language_servers.get(server_id)
7027 {
7028 if let Some(watched_paths) = self
7029 .language_server_watched_paths
7030 .get(&server_id)
7031 .and_then(|paths| paths.get(&worktree_id))
7032 {
7033 let params = lsp::DidChangeWatchedFilesParams {
7034 changes: changes
7035 .iter()
7036 .filter_map(|(path, _, change)| {
7037 if !watched_paths.is_match(&path) {
7038 return None;
7039 }
7040 let typ = match change {
7041 PathChange::Loaded => return None,
7042 PathChange::Added => lsp::FileChangeType::CREATED,
7043 PathChange::Removed => lsp::FileChangeType::DELETED,
7044 PathChange::Updated => lsp::FileChangeType::CHANGED,
7045 PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
7046 };
7047 Some(lsp::FileEvent {
7048 uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
7049 typ,
7050 })
7051 })
7052 .collect(),
7053 };
7054 if !params.changes.is_empty() {
7055 server
7056 .notify::<lsp::notification::DidChangeWatchedFiles>(params)
7057 .log_err();
7058 }
7059 }
7060 }
7061 }
7062 }
7063
7064 fn update_local_worktree_buffers_git_repos(
7065 &mut self,
7066 worktree_handle: Model<Worktree>,
7067 changed_repos: &UpdatedGitRepositoriesSet,
7068 cx: &mut ModelContext<Self>,
7069 ) {
7070 debug_assert!(worktree_handle.read(cx).is_local());
7071
        // Identify the loading buffers whose containing repository has changed.
7073 let future_buffers = self
7074 .loading_buffers_by_path
7075 .iter()
7076 .filter_map(|(project_path, receiver)| {
7077 if project_path.worktree_id != worktree_handle.read(cx).id() {
7078 return None;
7079 }
7080 let path = &project_path.path;
7081 changed_repos
7082 .iter()
7083 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7084 let receiver = receiver.clone();
7085 let path = path.clone();
7086 Some(async move {
7087 wait_for_loading_buffer(receiver)
7088 .await
7089 .ok()
7090 .map(|buffer| (buffer, path))
7091 })
7092 })
7093 .collect::<FuturesUnordered<_>>();
7094
7095 // Identify the current buffers whose containing repository has changed.
7096 let current_buffers = self
7097 .opened_buffers
7098 .values()
7099 .filter_map(|buffer| {
7100 let buffer = buffer.upgrade()?;
7101 let file = File::from_dyn(buffer.read(cx).file())?;
7102 if file.worktree != worktree_handle {
7103 return None;
7104 }
7105 let path = file.path();
7106 changed_repos
7107 .iter()
7108 .find(|(work_dir, _)| path.starts_with(work_dir))?;
7109 Some((buffer, path.clone()))
7110 })
7111 .collect::<Vec<_>>();
7112
7113 if future_buffers.len() + current_buffers.len() == 0 {
7114 return;
7115 }
7116
7117 let remote_id = self.remote_id();
7118 let client = self.client.clone();
7119 cx.spawn(move |_, mut cx| async move {
7120 // Wait for all of the buffers to load.
7121 let future_buffers = future_buffers.collect::<Vec<_>>().await;
7122
7123 // Reload the diff base for every buffer whose containing git repository has changed.
7124 let snapshot =
7125 worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
7126 let diff_bases_by_buffer = cx
7127 .background_executor()
7128 .spawn(async move {
7129 future_buffers
7130 .into_iter()
7131 .flatten()
7132 .chain(current_buffers)
7133 .filter_map(|(buffer, path)| {
7134 let (work_directory, repo) =
7135 snapshot.repository_and_work_directory_for_path(&path)?;
7136 let repo = snapshot.get_local_repo(&repo)?;
7137 let relative_path = path.strip_prefix(&work_directory).ok()?;
7138 let base_text = repo.load_index_text(relative_path);
7139 Some((buffer, base_text))
7140 })
7141 .collect::<Vec<_>>()
7142 })
7143 .await;
7144
7145 // Assign the new diff bases on all of the buffers.
7146 for (buffer, diff_base) in diff_bases_by_buffer {
7147 let buffer_id = buffer.update(&mut cx, |buffer, cx| {
7148 buffer.set_diff_base(diff_base.clone(), cx);
7149 buffer.remote_id().into()
7150 })?;
7151 if let Some(project_id) = remote_id {
7152 client
7153 .send(proto::UpdateDiffBase {
7154 project_id,
7155 buffer_id,
7156 diff_base,
7157 })
7158 .log_err();
7159 }
7160 }
7161
7162 anyhow::Ok(())
7163 })
7164 .detach();
7165 }
7166
7167 fn update_local_worktree_settings(
7168 &mut self,
7169 worktree: &Model<Worktree>,
7170 changes: &UpdatedEntriesSet,
7171 cx: &mut ModelContext<Self>,
7172 ) {
7173 if worktree.read(cx).as_local().is_none() {
7174 return;
7175 }
7176 let project_id = self.remote_id();
7177 let worktree_id = worktree.entity_id();
7178 let remote_worktree_id = worktree.read(cx).id();
7179
7180 let mut settings_contents = Vec::new();
7181 for (path, _, change) in changes.iter() {
7182 let removed = change == &PathChange::Removed;
7183 let abs_path = match worktree.read(cx).absolutize(path) {
7184 Ok(abs_path) => abs_path,
7185 Err(e) => {
7186 log::warn!("Cannot absolutize {path:?} received as {change:?} FS change: {e}");
7187 continue;
7188 }
7189 };
7190
7191 if abs_path.ends_with(&*LOCAL_SETTINGS_RELATIVE_PATH) {
7192 let settings_dir = Arc::from(
7193 path.ancestors()
7194 .nth(LOCAL_SETTINGS_RELATIVE_PATH.components().count())
7195 .unwrap(),
7196 );
7197 let fs = self.fs.clone();
7198 settings_contents.push(async move {
7199 (
7200 settings_dir,
7201 if removed {
7202 None
7203 } else {
                            Some(fs.load(&abs_path).await)
7205 },
7206 )
7207 });
7208 } else if abs_path.ends_with(&*LOCAL_TASKS_RELATIVE_PATH) {
7209 self.task_inventory().update(cx, |task_inventory, cx| {
7210 if removed {
7211 task_inventory.remove_local_static_source(&abs_path);
7212 } else {
7213 let fs = self.fs.clone();
7214 let task_abs_path = abs_path.clone();
7215 task_inventory.add_source(
7216 TaskSourceKind::Worktree {
7217 id: remote_worktree_id,
7218 abs_path,
7219 },
7220 |cx| {
7221 let tasks_file_rx =
7222 watch_config_file(&cx.background_executor(), fs, task_abs_path);
7223 StaticSource::new(
7224 format!("local_tasks_for_workspace_{remote_worktree_id}"),
7225 tasks_file_rx,
7226 cx,
7227 )
7228 },
7229 cx,
7230 );
7231 }
7232 })
7233 }
7234 }
7235
7236 if settings_contents.is_empty() {
7237 return;
7238 }
7239
7240 let client = self.client.clone();
7241 cx.spawn(move |_, cx| async move {
7242 let settings_contents: Vec<(Arc<Path>, _)> =
7243 futures::future::join_all(settings_contents).await;
7244 cx.update(|cx| {
7245 cx.update_global::<SettingsStore, _>(|store, cx| {
7246 for (directory, file_content) in settings_contents {
7247 let file_content = file_content.and_then(|content| content.log_err());
7248 store
7249 .set_local_settings(
7250 worktree_id.as_u64() as usize,
7251 directory.clone(),
7252 file_content.as_deref(),
7253 cx,
7254 )
7255 .log_err();
7256 if let Some(remote_id) = project_id {
7257 client
7258 .send(proto::UpdateWorktreeSettings {
7259 project_id: remote_id,
7260 worktree_id: remote_worktree_id.to_proto(),
7261 path: directory.to_string_lossy().into_owned(),
7262 content: file_content,
7263 })
7264 .log_err();
7265 }
7266 }
7267 });
7268 })
7269 .ok();
7270 })
7271 .detach();
7272 }
7273
7274 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
7275 let new_active_entry = entry.and_then(|project_path| {
7276 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
7277 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
7278 Some(entry.id)
7279 });
7280 if new_active_entry != self.active_entry {
7281 self.active_entry = new_active_entry;
7282 cx.emit(Event::ActiveEntryChanged(new_active_entry));
7283 }
7284 }
7285
7286 pub fn language_servers_running_disk_based_diagnostics(
7287 &self,
7288 ) -> impl Iterator<Item = LanguageServerId> + '_ {
7289 self.language_server_statuses
7290 .iter()
7291 .filter_map(|(id, status)| {
7292 if status.has_pending_diagnostic_updates {
7293 Some(*id)
7294 } else {
7295 None
7296 }
7297 })
7298 }
7299
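    /// Returns the aggregate error and warning counts across the diagnostics of all visible
    /// worktrees, skipping ignored entries unless `include_ignored` is true.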
7300 pub fn diagnostic_summary(&self, include_ignored: bool, cx: &AppContext) -> DiagnosticSummary {
7301 let mut summary = DiagnosticSummary::default();
7302 for (_, _, path_summary) in
7303 self.diagnostic_summaries(include_ignored, cx)
7304 .filter(|(path, _, _)| {
                    let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                    include_ignored || is_ignored == Some(false)
7307 })
7308 {
7309 summary.error_count += path_summary.error_count;
7310 summary.warning_count += path_summary.warning_count;
7311 }
7312 summary
7313 }
7314
7315 pub fn diagnostic_summaries<'a>(
7316 &'a self,
7317 include_ignored: bool,
7318 cx: &'a AppContext,
7319 ) -> impl Iterator<Item = (ProjectPath, LanguageServerId, DiagnosticSummary)> + 'a {
7320 self.visible_worktrees(cx)
7321 .flat_map(move |worktree| {
7322 let worktree = worktree.read(cx);
7323 let worktree_id = worktree.id();
7324 worktree
7325 .diagnostic_summaries()
7326 .map(move |(path, server_id, summary)| {
7327 (ProjectPath { worktree_id, path }, server_id, summary)
7328 })
7329 })
7330 .filter(move |(path, _, _)| {
                let is_ignored = self.entry_for_path(path, cx).map(|entry| entry.is_ignored);
                include_ignored || is_ignored == Some(false)
7333 })
7334 }
7335
7336 pub fn disk_based_diagnostics_started(
7337 &mut self,
7338 language_server_id: LanguageServerId,
7339 cx: &mut ModelContext<Self>,
7340 ) {
7341 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
7342 }
7343
7344 pub fn disk_based_diagnostics_finished(
7345 &mut self,
7346 language_server_id: LanguageServerId,
7347 cx: &mut ModelContext<Self>,
7348 ) {
7349 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
7350 }
7351
7352 pub fn active_entry(&self) -> Option<ProjectEntryId> {
7353 self.active_entry
7354 }
7355
7356 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<Entry> {
7357 self.worktree_for_id(path.worktree_id, cx)?
7358 .read(cx)
7359 .entry_for_path(&path.path)
7360 .cloned()
7361 }
7362
7363 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
7364 let worktree = self.worktree_for_entry(entry_id, cx)?;
7365 let worktree = worktree.read(cx);
7366 let worktree_id = worktree.id();
7367 let path = worktree.entry_for_id(entry_id)?.path.clone();
7368 Some(ProjectPath { worktree_id, path })
7369 }
7370
7371 pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
7372 let workspace_root = self
7373 .worktree_for_id(project_path.worktree_id, cx)?
7374 .read(cx)
7375 .abs_path();
7376 let project_path = project_path.path.as_ref();
7377
7378 Some(if project_path == Path::new("") {
7379 workspace_root.to_path_buf()
7380 } else {
7381 workspace_root.join(project_path)
7382 })
7383 }
7384
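    /// Returns the git repository containing the given project path, if the path belongs to a
    /// local worktree and lies within a repository's work directory.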
7385 pub fn get_repo(
7386 &self,
7387 project_path: &ProjectPath,
7388 cx: &AppContext,
7389 ) -> Option<Arc<Mutex<dyn GitRepository>>> {
7390 self.worktree_for_id(project_path.worktree_id, cx)?
7391 .read(cx)
7392 .as_local()?
7393 .snapshot()
7394 .local_git_repo(&project_path.path)
7395 }
7396
7397 // RPC message handlers
7398
7399 async fn handle_unshare_project(
7400 this: Model<Self>,
7401 _: TypedEnvelope<proto::UnshareProject>,
7402 _: Arc<Client>,
7403 mut cx: AsyncAppContext,
7404 ) -> Result<()> {
7405 this.update(&mut cx, |this, cx| {
7406 if this.is_local() {
7407 this.unshare(cx)?;
7408 } else {
7409 this.disconnected_from_host(cx);
7410 }
7411 Ok(())
7412 })?
7413 }
7414
7415 async fn handle_add_collaborator(
7416 this: Model<Self>,
7417 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
7418 _: Arc<Client>,
7419 mut cx: AsyncAppContext,
7420 ) -> Result<()> {
7421 let collaborator = envelope
7422 .payload
7423 .collaborator
7424 .take()
7425 .ok_or_else(|| anyhow!("empty collaborator"))?;
7426
7427 let collaborator = Collaborator::from_proto(collaborator)?;
7428 this.update(&mut cx, |this, cx| {
7429 this.shared_buffers.remove(&collaborator.peer_id);
7430 cx.emit(Event::CollaboratorJoined(collaborator.peer_id));
7431 this.collaborators
7432 .insert(collaborator.peer_id, collaborator);
7433 cx.notify();
7434 })?;
7435
7436 Ok(())
7437 }
7438
7439 async fn handle_update_project_collaborator(
7440 this: Model<Self>,
7441 envelope: TypedEnvelope<proto::UpdateProjectCollaborator>,
7442 _: Arc<Client>,
7443 mut cx: AsyncAppContext,
7444 ) -> Result<()> {
7445 let old_peer_id = envelope
7446 .payload
7447 .old_peer_id
7448 .ok_or_else(|| anyhow!("missing old peer id"))?;
7449 let new_peer_id = envelope
7450 .payload
7451 .new_peer_id
7452 .ok_or_else(|| anyhow!("missing new peer id"))?;
7453 this.update(&mut cx, |this, cx| {
7454 let collaborator = this
7455 .collaborators
7456 .remove(&old_peer_id)
7457 .ok_or_else(|| anyhow!("received UpdateProjectCollaborator for unknown peer"))?;
7458 let is_host = collaborator.replica_id == 0;
7459 this.collaborators.insert(new_peer_id, collaborator);
7460
7461 let buffers = this.shared_buffers.remove(&old_peer_id);
7462 log::info!(
7463 "peer {} became {}. moving buffers {:?}",
7464 old_peer_id,
7465 new_peer_id,
7466 &buffers
7467 );
7468 if let Some(buffers) = buffers {
7469 this.shared_buffers.insert(new_peer_id, buffers);
7470 }
7471
7472 if is_host {
7473 this.opened_buffers
7474 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
7475 this.enqueue_buffer_ordered_message(BufferOrderedMessage::Resync)
7476 .unwrap();
7477 }
7478
7479 cx.emit(Event::CollaboratorUpdated {
7480 old_peer_id,
7481 new_peer_id,
7482 });
7483 cx.notify();
7484 Ok(())
7485 })?
7486 }
7487
7488 async fn handle_remove_collaborator(
7489 this: Model<Self>,
7490 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
7491 _: Arc<Client>,
7492 mut cx: AsyncAppContext,
7493 ) -> Result<()> {
7494 this.update(&mut cx, |this, cx| {
7495 let peer_id = envelope
7496 .payload
7497 .peer_id
7498 .ok_or_else(|| anyhow!("invalid peer id"))?;
7499 let replica_id = this
7500 .collaborators
7501 .remove(&peer_id)
7502 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
7503 .replica_id;
7504 for buffer in this.opened_buffers.values() {
7505 if let Some(buffer) = buffer.upgrade() {
7506 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
7507 }
7508 }
7509 this.shared_buffers.remove(&peer_id);
7510
7511 cx.emit(Event::CollaboratorLeft(peer_id));
7512 cx.notify();
7513 Ok(())
7514 })?
7515 }
7516
7517 async fn handle_update_project(
7518 this: Model<Self>,
7519 envelope: TypedEnvelope<proto::UpdateProject>,
7520 _: Arc<Client>,
7521 mut cx: AsyncAppContext,
7522 ) -> Result<()> {
7523 this.update(&mut cx, |this, cx| {
7524 // Don't handle messages that were sent before the response to us joining the project
7525 if envelope.message_id > this.join_project_response_message_id {
7526 this.set_worktrees_from_proto(envelope.payload.worktrees, cx)?;
7527 }
7528 Ok(())
7529 })?
7530 }
7531
7532 async fn handle_update_worktree(
7533 this: Model<Self>,
7534 envelope: TypedEnvelope<proto::UpdateWorktree>,
7535 _: Arc<Client>,
7536 mut cx: AsyncAppContext,
7537 ) -> Result<()> {
7538 this.update(&mut cx, |this, cx| {
7539 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7540 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7541 worktree.update(cx, |worktree, _| {
7542 let worktree = worktree.as_remote_mut().unwrap();
7543 worktree.update_from_remote(envelope.payload);
7544 });
7545 }
7546 Ok(())
7547 })?
7548 }
7549
7550 async fn handle_update_worktree_settings(
7551 this: Model<Self>,
7552 envelope: TypedEnvelope<proto::UpdateWorktreeSettings>,
7553 _: Arc<Client>,
7554 mut cx: AsyncAppContext,
7555 ) -> Result<()> {
7556 this.update(&mut cx, |this, cx| {
7557 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7558 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7559 cx.update_global::<SettingsStore, _>(|store, cx| {
7560 store
7561 .set_local_settings(
7562 worktree.entity_id().as_u64() as usize,
7563 PathBuf::from(&envelope.payload.path).into(),
7564 envelope.payload.content.as_deref(),
7565 cx,
7566 )
7567 .log_err();
7568 });
7569 }
7570 Ok(())
7571 })?
7572 }
7573
7574 async fn handle_create_project_entry(
7575 this: Model<Self>,
7576 envelope: TypedEnvelope<proto::CreateProjectEntry>,
7577 _: Arc<Client>,
7578 mut cx: AsyncAppContext,
7579 ) -> Result<proto::ProjectEntryResponse> {
7580 let worktree = this.update(&mut cx, |this, cx| {
7581 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7582 this.worktree_for_id(worktree_id, cx)
7583 .ok_or_else(|| anyhow!("worktree not found"))
7584 })??;
7585 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7586 let entry = worktree
7587 .update(&mut cx, |worktree, cx| {
7588 let worktree = worktree.as_local_mut().unwrap();
7589 let path = PathBuf::from(envelope.payload.path);
7590 worktree.create_entry(path, envelope.payload.is_directory, cx)
7591 })?
7592 .await?;
7593 Ok(proto::ProjectEntryResponse {
7594 entry: entry.as_ref().map(|e| e.into()),
7595 worktree_scan_id: worktree_scan_id as u64,
7596 })
7597 }
7598
7599 async fn handle_rename_project_entry(
7600 this: Model<Self>,
7601 envelope: TypedEnvelope<proto::RenameProjectEntry>,
7602 _: Arc<Client>,
7603 mut cx: AsyncAppContext,
7604 ) -> Result<proto::ProjectEntryResponse> {
7605 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7606 let worktree = this.update(&mut cx, |this, cx| {
7607 this.worktree_for_entry(entry_id, cx)
7608 .ok_or_else(|| anyhow!("worktree not found"))
7609 })??;
7610 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7611 let entry = worktree
7612 .update(&mut cx, |worktree, cx| {
7613 let new_path = PathBuf::from(envelope.payload.new_path);
7614 worktree
7615 .as_local_mut()
7616 .unwrap()
7617 .rename_entry(entry_id, new_path, cx)
7618 })?
7619 .await?;
7620 Ok(proto::ProjectEntryResponse {
7621 entry: entry.as_ref().map(|e| e.into()),
7622 worktree_scan_id: worktree_scan_id as u64,
7623 })
7624 }
7625
7626 async fn handle_copy_project_entry(
7627 this: Model<Self>,
7628 envelope: TypedEnvelope<proto::CopyProjectEntry>,
7629 _: Arc<Client>,
7630 mut cx: AsyncAppContext,
7631 ) -> Result<proto::ProjectEntryResponse> {
7632 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7633 let worktree = this.update(&mut cx, |this, cx| {
7634 this.worktree_for_entry(entry_id, cx)
7635 .ok_or_else(|| anyhow!("worktree not found"))
7636 })??;
7637 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7638 let entry = worktree
7639 .update(&mut cx, |worktree, cx| {
7640 let new_path = PathBuf::from(envelope.payload.new_path);
7641 worktree
7642 .as_local_mut()
7643 .unwrap()
7644 .copy_entry(entry_id, new_path, cx)
7645 })?
7646 .await?;
7647 Ok(proto::ProjectEntryResponse {
7648 entry: entry.as_ref().map(|e| e.into()),
7649 worktree_scan_id: worktree_scan_id as u64,
7650 })
7651 }
7652
7653 async fn handle_delete_project_entry(
7654 this: Model<Self>,
7655 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
7656 _: Arc<Client>,
7657 mut cx: AsyncAppContext,
7658 ) -> Result<proto::ProjectEntryResponse> {
7659 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7660
7661 this.update(&mut cx, |_, cx| cx.emit(Event::DeletedEntry(entry_id)))?;
7662
7663 let worktree = this.update(&mut cx, |this, cx| {
7664 this.worktree_for_entry(entry_id, cx)
7665 .ok_or_else(|| anyhow!("worktree not found"))
7666 })??;
7667 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())?;
7668 worktree
7669 .update(&mut cx, |worktree, cx| {
7670 worktree
7671 .as_local_mut()
7672 .unwrap()
7673 .delete_entry(entry_id, cx)
7674 .ok_or_else(|| anyhow!("invalid entry"))
7675 })??
7676 .await?;
7677 Ok(proto::ProjectEntryResponse {
7678 entry: None,
7679 worktree_scan_id: worktree_scan_id as u64,
7680 })
7681 }
7682
7683 async fn handle_expand_project_entry(
7684 this: Model<Self>,
7685 envelope: TypedEnvelope<proto::ExpandProjectEntry>,
7686 _: Arc<Client>,
7687 mut cx: AsyncAppContext,
7688 ) -> Result<proto::ExpandProjectEntryResponse> {
7689 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
7690 let worktree = this
7691 .update(&mut cx, |this, cx| this.worktree_for_entry(entry_id, cx))?
7692 .ok_or_else(|| anyhow!("invalid request"))?;
7693 worktree
7694 .update(&mut cx, |worktree, cx| {
7695 worktree
7696 .as_local_mut()
7697 .unwrap()
7698 .expand_entry(entry_id, cx)
7699 .ok_or_else(|| anyhow!("invalid entry"))
7700 })??
7701 .await?;
7702 let worktree_scan_id = worktree.update(&mut cx, |worktree, _| worktree.scan_id())? as u64;
7703 Ok(proto::ExpandProjectEntryResponse { worktree_scan_id })
7704 }
7705
7706 async fn handle_update_diagnostic_summary(
7707 this: Model<Self>,
7708 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
7709 _: Arc<Client>,
7710 mut cx: AsyncAppContext,
7711 ) -> Result<()> {
7712 this.update(&mut cx, |this, cx| {
7713 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
7714 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
7715 if let Some(summary) = envelope.payload.summary {
7716 let project_path = ProjectPath {
7717 worktree_id,
7718 path: Path::new(&summary.path).into(),
7719 };
7720 worktree.update(cx, |worktree, _| {
7721 worktree
7722 .as_remote_mut()
7723 .unwrap()
7724 .update_diagnostic_summary(project_path.path.clone(), &summary);
7725 });
7726 cx.emit(Event::DiagnosticsUpdated {
7727 language_server_id: LanguageServerId(summary.language_server_id as usize),
7728 path: project_path,
7729 });
7730 }
7731 }
7732 Ok(())
7733 })?
7734 }
7735
7736 async fn handle_start_language_server(
7737 this: Model<Self>,
7738 envelope: TypedEnvelope<proto::StartLanguageServer>,
7739 _: Arc<Client>,
7740 mut cx: AsyncAppContext,
7741 ) -> Result<()> {
7742 let server = envelope
7743 .payload
7744 .server
7745 .ok_or_else(|| anyhow!("invalid server"))?;
7746 this.update(&mut cx, |this, cx| {
7747 this.language_server_statuses.insert(
7748 LanguageServerId(server.id as usize),
7749 LanguageServerStatus {
7750 name: server.name,
7751 pending_work: Default::default(),
7752 has_pending_diagnostic_updates: false,
7753 progress_tokens: Default::default(),
7754 },
7755 );
7756 cx.notify();
7757 })?;
7758 Ok(())
7759 }
7760
7761 async fn handle_update_language_server(
7762 this: Model<Self>,
7763 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
7764 _: Arc<Client>,
7765 mut cx: AsyncAppContext,
7766 ) -> Result<()> {
7767 this.update(&mut cx, |this, cx| {
7768 let language_server_id = LanguageServerId(envelope.payload.language_server_id as usize);
7769
7770 match envelope
7771 .payload
7772 .variant
7773 .ok_or_else(|| anyhow!("invalid variant"))?
7774 {
7775 proto::update_language_server::Variant::WorkStart(payload) => {
7776 this.on_lsp_work_start(
7777 language_server_id,
7778 payload.token,
7779 LanguageServerProgress {
7780 message: payload.message,
7781 percentage: payload.percentage.map(|p| p as usize),
7782 last_update_at: Instant::now(),
7783 },
7784 cx,
7785 );
7786 }
7787
7788 proto::update_language_server::Variant::WorkProgress(payload) => {
7789 this.on_lsp_work_progress(
7790 language_server_id,
7791 payload.token,
7792 LanguageServerProgress {
7793 message: payload.message,
7794 percentage: payload.percentage.map(|p| p as usize),
7795 last_update_at: Instant::now(),
7796 },
7797 cx,
7798 );
7799 }
7800
7801 proto::update_language_server::Variant::WorkEnd(payload) => {
7802 this.on_lsp_work_end(language_server_id, payload.token, cx);
7803 }
7804
7805 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
7806 this.disk_based_diagnostics_started(language_server_id, cx);
7807 }
7808
7809 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
7810 this.disk_based_diagnostics_finished(language_server_id, cx)
7811 }
7812 }
7813
7814 Ok(())
7815 })?
7816 }
7817
7818 async fn handle_update_buffer(
7819 this: Model<Self>,
7820 envelope: TypedEnvelope<proto::UpdateBuffer>,
7821 _: Arc<Client>,
7822 mut cx: AsyncAppContext,
7823 ) -> Result<proto::Ack> {
7824 this.update(&mut cx, |this, cx| {
7825 let payload = envelope.payload.clone();
7826 let buffer_id = BufferId::new(payload.buffer_id)?;
7827 let ops = payload
7828 .operations
7829 .into_iter()
7830 .map(language::proto::deserialize_operation)
7831 .collect::<Result<Vec<_>, _>>()?;
7832 let is_remote = this.is_remote();
7833 match this.opened_buffers.entry(buffer_id) {
7834 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
7835 OpenBuffer::Strong(buffer) => {
7836 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
7837 }
7838 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
7839 OpenBuffer::Weak(_) => {}
7840 },
7841 hash_map::Entry::Vacant(e) => {
7842 assert!(
7843 is_remote,
7844 "received buffer update from {:?}",
7845 envelope.original_sender_id
7846 );
7847 e.insert(OpenBuffer::Operations(ops));
7848 }
7849 }
7850 Ok(proto::Ack {})
7851 })?
7852 }
7853
7854 async fn handle_create_buffer_for_peer(
7855 this: Model<Self>,
7856 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
7857 _: Arc<Client>,
7858 mut cx: AsyncAppContext,
7859 ) -> Result<()> {
7860 this.update(&mut cx, |this, cx| {
7861 match envelope
7862 .payload
7863 .variant
7864 .ok_or_else(|| anyhow!("missing variant"))?
7865 {
7866 proto::create_buffer_for_peer::Variant::State(mut state) => {
7867 let mut buffer_file = None;
7868 if let Some(file) = state.file.take() {
7869 let worktree_id = WorktreeId::from_proto(file.worktree_id);
7870 let worktree = this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
7871 anyhow!("no worktree found for id {}", file.worktree_id)
7872 })?;
7873 buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
7874 as Arc<dyn language::File>);
7875 }
7876
7877 let buffer_id = BufferId::new(state.id)?;
7878 let buffer = cx.new_model(|_| {
7879 Buffer::from_proto(this.replica_id(), this.capability(), state, buffer_file)
7880 .unwrap()
7881 });
7882 this.incomplete_remote_buffers
7883 .insert(buffer_id, Some(buffer));
7884 }
7885 proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
7886 let buffer_id = BufferId::new(chunk.buffer_id)?;
7887 let buffer = this
7888 .incomplete_remote_buffers
7889 .get(&buffer_id)
7890 .cloned()
7891 .flatten()
7892 .ok_or_else(|| {
7893 anyhow!(
7894 "received chunk for buffer {} without initial state",
7895 chunk.buffer_id
7896 )
7897 })?;
7898 let operations = chunk
7899 .operations
7900 .into_iter()
7901 .map(language::proto::deserialize_operation)
7902 .collect::<Result<Vec<_>>>()?;
7903 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;
7904
7905 if chunk.is_last {
7906 this.incomplete_remote_buffers.remove(&buffer_id);
7907 this.register_buffer(&buffer, cx)?;
7908 }
7909 }
7910 }
7911
7912 Ok(())
7913 })?
7914 }
7915
7916 async fn handle_update_diff_base(
7917 this: Model<Self>,
7918 envelope: TypedEnvelope<proto::UpdateDiffBase>,
7919 _: Arc<Client>,
7920 mut cx: AsyncAppContext,
7921 ) -> Result<()> {
7922 this.update(&mut cx, |this, cx| {
7923 let buffer_id = envelope.payload.buffer_id;
7924 let buffer_id = BufferId::new(buffer_id)?;
7925 let diff_base = envelope.payload.diff_base;
7926 if let Some(buffer) = this
7927 .opened_buffers
7928 .get_mut(&buffer_id)
7929 .and_then(|b| b.upgrade())
7930 .or_else(|| {
7931 this.incomplete_remote_buffers
7932 .get(&buffer_id)
7933 .cloned()
7934 .flatten()
7935 })
7936 {
7937 buffer.update(cx, |buffer, cx| buffer.set_diff_base(diff_base, cx));
7938 }
7939 Ok(())
7940 })?
7941 }
7942
7943 async fn handle_update_buffer_file(
7944 this: Model<Self>,
7945 envelope: TypedEnvelope<proto::UpdateBufferFile>,
7946 _: Arc<Client>,
7947 mut cx: AsyncAppContext,
7948 ) -> Result<()> {
7949 let buffer_id = envelope.payload.buffer_id;
7950 let buffer_id = BufferId::new(buffer_id)?;
7951
7952 this.update(&mut cx, |this, cx| {
7953 let payload = envelope.payload.clone();
7954 if let Some(buffer) = this
7955 .opened_buffers
7956 .get(&buffer_id)
7957 .and_then(|b| b.upgrade())
7958 .or_else(|| {
7959 this.incomplete_remote_buffers
7960 .get(&buffer_id)
7961 .cloned()
7962 .flatten()
7963 })
7964 {
7965 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
7966 let worktree = this
7967 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
7968 .ok_or_else(|| anyhow!("no such worktree"))?;
7969 let file = File::from_proto(file, worktree, cx)?;
7970 buffer.update(cx, |buffer, cx| {
7971 buffer.file_updated(Arc::new(file), cx);
7972 });
7973 this.detect_language_for_buffer(&buffer, cx);
7974 }
7975 Ok(())
7976 })?
7977 }
7978
7979 async fn handle_save_buffer(
7980 this: Model<Self>,
7981 envelope: TypedEnvelope<proto::SaveBuffer>,
7982 _: Arc<Client>,
7983 mut cx: AsyncAppContext,
7984 ) -> Result<proto::BufferSaved> {
7985 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
7986 let (project_id, buffer) = this.update(&mut cx, |this, _cx| {
7987 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
7988 let buffer = this
7989 .opened_buffers
7990 .get(&buffer_id)
7991 .and_then(|buffer| buffer.upgrade())
7992 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
7993 anyhow::Ok((project_id, buffer))
7994 })??;
7995 buffer
7996 .update(&mut cx, |buffer, _| {
7997 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
7998 })?
7999 .await?;
8000 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
8001
8002 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
8003 .await?;
8004 buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
8005 project_id,
8006 buffer_id: buffer_id.into(),
8007 version: serialize_version(buffer.saved_version()),
8008 mtime: Some(buffer.saved_mtime().into()),
8009 fingerprint: language::proto::serialize_fingerprint(buffer.saved_version_fingerprint()),
8010 })
8011 }
8012
8013 async fn handle_reload_buffers(
8014 this: Model<Self>,
8015 envelope: TypedEnvelope<proto::ReloadBuffers>,
8016 _: Arc<Client>,
8017 mut cx: AsyncAppContext,
8018 ) -> Result<proto::ReloadBuffersResponse> {
8019 let sender_id = envelope.original_sender_id()?;
8020 let reload = this.update(&mut cx, |this, cx| {
8021 let mut buffers = HashSet::default();
8022 for buffer_id in &envelope.payload.buffer_ids {
8023 let buffer_id = BufferId::new(*buffer_id)?;
8024 buffers.insert(
8025 this.opened_buffers
8026 .get(&buffer_id)
8027 .and_then(|buffer| buffer.upgrade())
8028 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8029 );
8030 }
8031 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
8032 })??;
8033
8034 let project_transaction = reload.await?;
8035 let project_transaction = this.update(&mut cx, |this, cx| {
8036 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8037 })?;
8038 Ok(proto::ReloadBuffersResponse {
8039 transaction: Some(project_transaction),
8040 })
8041 }
8042
8043 async fn handle_synchronize_buffers(
8044 this: Model<Self>,
8045 envelope: TypedEnvelope<proto::SynchronizeBuffers>,
8046 _: Arc<Client>,
8047 mut cx: AsyncAppContext,
8048 ) -> Result<proto::SynchronizeBuffersResponse> {
8049 let project_id = envelope.payload.project_id;
8050 let mut response = proto::SynchronizeBuffersResponse {
8051 buffers: Default::default(),
8052 };
8053
8054 this.update(&mut cx, |this, cx| {
8055 let Some(guest_id) = envelope.original_sender_id else {
8056 error!("missing original_sender_id on SynchronizeBuffers request");
8057 bail!("missing original_sender_id on SynchronizeBuffers request");
8058 };
8059
8060 this.shared_buffers.entry(guest_id).or_default().clear();
8061 for buffer in envelope.payload.buffers {
8062 let buffer_id = BufferId::new(buffer.id)?;
8063 let remote_version = language::proto::deserialize_version(&buffer.version);
8064 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8065 this.shared_buffers
8066 .entry(guest_id)
8067 .or_default()
8068 .insert(buffer_id);
8069
8070 let buffer = buffer.read(cx);
8071 response.buffers.push(proto::BufferVersion {
8072 id: buffer_id.into(),
8073 version: language::proto::serialize_version(&buffer.version),
8074 });
8075
8076 let operations = buffer.serialize_ops(Some(remote_version), cx);
8077 let client = this.client.clone();
8078 if let Some(file) = buffer.file() {
8079 client
8080 .send(proto::UpdateBufferFile {
8081 project_id,
8082 buffer_id: buffer_id.into(),
8083 file: Some(file.to_proto()),
8084 })
8085 .log_err();
8086 }
8087
8088 client
8089 .send(proto::UpdateDiffBase {
8090 project_id,
8091 buffer_id: buffer_id.into(),
8092 diff_base: buffer.diff_base().map(Into::into),
8093 })
8094 .log_err();
8095
8096 client
8097 .send(proto::BufferReloaded {
8098 project_id,
8099 buffer_id: buffer_id.into(),
8100 version: language::proto::serialize_version(buffer.saved_version()),
8101 mtime: Some(buffer.saved_mtime().into()),
8102 fingerprint: language::proto::serialize_fingerprint(
8103 buffer.saved_version_fingerprint(),
8104 ),
8105 line_ending: language::proto::serialize_line_ending(
8106 buffer.line_ending(),
8107 ) as i32,
8108 })
8109 .log_err();
8110
8111 cx.background_executor()
8112 .spawn(
8113 async move {
8114 let operations = operations.await;
8115 for chunk in split_operations(operations) {
8116 client
8117 .request(proto::UpdateBuffer {
8118 project_id,
8119 buffer_id: buffer_id.into(),
8120 operations: chunk,
8121 })
8122 .await?;
8123 }
8124 anyhow::Ok(())
8125 }
8126 .log_err(),
8127 )
8128 .detach();
8129 }
8130 }
8131 Ok(())
8132 })??;
8133
8134 Ok(response)
8135 }
8136
8137 async fn handle_format_buffers(
8138 this: Model<Self>,
8139 envelope: TypedEnvelope<proto::FormatBuffers>,
8140 _: Arc<Client>,
8141 mut cx: AsyncAppContext,
8142 ) -> Result<proto::FormatBuffersResponse> {
8143 let sender_id = envelope.original_sender_id()?;
8144 let format = this.update(&mut cx, |this, cx| {
8145 let mut buffers = HashSet::default();
8146 for buffer_id in &envelope.payload.buffer_ids {
8147 let buffer_id = BufferId::new(*buffer_id)?;
8148 buffers.insert(
8149 this.opened_buffers
8150 .get(&buffer_id)
8151 .and_then(|buffer| buffer.upgrade())
8152 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
8153 );
8154 }
8155 let trigger = FormatTrigger::from_proto(envelope.payload.trigger);
8156 Ok::<_, anyhow::Error>(this.format(buffers, false, trigger, cx))
8157 })??;
8158
8159 let project_transaction = format.await?;
8160 let project_transaction = this.update(&mut cx, |this, cx| {
8161 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8162 })?;
8163 Ok(proto::FormatBuffersResponse {
8164 transaction: Some(project_transaction),
8165 })
8166 }
8167
8168 async fn handle_apply_additional_edits_for_completion(
8169 this: Model<Self>,
8170 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
8171 _: Arc<Client>,
8172 mut cx: AsyncAppContext,
8173 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
8174 let languages = this.update(&mut cx, |this, _| this.languages.clone())?;
8175 let (buffer, completion) = this.update(&mut cx, |this, cx| {
8176 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8177 let buffer = this
8178 .opened_buffers
8179 .get(&buffer_id)
8180 .and_then(|buffer| buffer.upgrade())
8181 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8182 let language = buffer.read(cx).language();
8183 let completion = language::proto::deserialize_completion(
8184 envelope
8185 .payload
8186 .completion
8187 .ok_or_else(|| anyhow!("invalid completion"))?,
8188 language.cloned(),
8189 &languages,
8190 );
8191 Ok::<_, anyhow::Error>((buffer, completion))
8192 })??;
8193
8194 let completion = completion.await?;
8195
8196 let apply_additional_edits = this.update(&mut cx, |this, cx| {
8197 this.apply_additional_edits_for_completion(buffer, completion, false, cx)
8198 })?;
8199
8200 Ok(proto::ApplyCompletionAdditionalEditsResponse {
8201 transaction: apply_additional_edits
8202 .await?
8203 .as_ref()
8204 .map(language::proto::serialize_transaction),
8205 })
8206 }
8207
8208 async fn handle_resolve_completion_documentation(
8209 this: Model<Self>,
8210 envelope: TypedEnvelope<proto::ResolveCompletionDocumentation>,
8211 _: Arc<Client>,
8212 mut cx: AsyncAppContext,
8213 ) -> Result<proto::ResolveCompletionDocumentationResponse> {
8214 let lsp_completion = serde_json::from_slice(&envelope.payload.lsp_completion)?;
8215
8216 let completion = this
8217 .read_with(&mut cx, |this, _| {
8218 let id = LanguageServerId(envelope.payload.language_server_id as usize);
8219 let Some(server) = this.language_server_for_id(id) else {
8220 return Err(anyhow!("No language server {id}"));
8221 };
8222
8223 Ok(server.request::<lsp::request::ResolveCompletionItem>(lsp_completion))
8224 })??
8225 .await?;
8226
8227 let mut is_markdown = false;
8228 let text = match completion.documentation {
8229 Some(lsp::Documentation::String(text)) => text,
8230
8231 Some(lsp::Documentation::MarkupContent(lsp::MarkupContent { kind, value })) => {
8232 is_markdown = kind == lsp::MarkupKind::Markdown;
8233 value
8234 }
8235
8236 _ => String::new(),
8237 };
8238
8239 Ok(proto::ResolveCompletionDocumentationResponse { text, is_markdown })
8240 }
8241
8242 async fn handle_apply_code_action(
8243 this: Model<Self>,
8244 envelope: TypedEnvelope<proto::ApplyCodeAction>,
8245 _: Arc<Client>,
8246 mut cx: AsyncAppContext,
8247 ) -> Result<proto::ApplyCodeActionResponse> {
8248 let sender_id = envelope.original_sender_id()?;
8249 let action = language::proto::deserialize_code_action(
8250 envelope
8251 .payload
8252 .action
8253 .ok_or_else(|| anyhow!("invalid action"))?,
8254 )?;
8255 let apply_code_action = this.update(&mut cx, |this, cx| {
8256 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8257 let buffer = this
8258 .opened_buffers
8259 .get(&buffer_id)
8260 .and_then(|buffer| buffer.upgrade())
8261 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
8262 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
8263 })??;
8264
8265 let project_transaction = apply_code_action.await?;
8266 let project_transaction = this.update(&mut cx, |this, cx| {
8267 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
8268 })?;
8269 Ok(proto::ApplyCodeActionResponse {
8270 transaction: Some(project_transaction),
8271 })
8272 }
8273
8274 async fn handle_on_type_formatting(
8275 this: Model<Self>,
8276 envelope: TypedEnvelope<proto::OnTypeFormatting>,
8277 _: Arc<Client>,
8278 mut cx: AsyncAppContext,
8279 ) -> Result<proto::OnTypeFormattingResponse> {
8280 let on_type_formatting = this.update(&mut cx, |this, cx| {
8281 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8282 let buffer = this
8283 .opened_buffers
8284 .get(&buffer_id)
8285 .and_then(|buffer| buffer.upgrade())
8286 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
8287 let position = envelope
8288 .payload
8289 .position
8290 .and_then(deserialize_anchor)
8291 .ok_or_else(|| anyhow!("invalid position"))?;
8292 Ok::<_, anyhow::Error>(this.apply_on_type_formatting(
8293 buffer,
8294 position,
8295 envelope.payload.trigger.clone(),
8296 cx,
8297 ))
8298 })??;
8299
8300 let transaction = on_type_formatting
8301 .await?
8302 .as_ref()
8303 .map(language::proto::serialize_transaction);
8304 Ok(proto::OnTypeFormattingResponse { transaction })
8305 }
8306
8307 async fn handle_inlay_hints(
8308 this: Model<Self>,
8309 envelope: TypedEnvelope<proto::InlayHints>,
8310 _: Arc<Client>,
8311 mut cx: AsyncAppContext,
8312 ) -> Result<proto::InlayHintsResponse> {
8313 let sender_id = envelope.original_sender_id()?;
8314 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8315 let buffer = this.update(&mut cx, |this, _| {
8316 this.opened_buffers
8317 .get(&buffer_id)
8318 .and_then(|buffer| buffer.upgrade())
8319 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
8320 })??;
8321 buffer
8322 .update(&mut cx, |buffer, _| {
8323 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
8324 })?
8325 .await
8326 .with_context(|| format!("waiting for version for buffer {}", buffer.entity_id()))?;
8327
8328 let start = envelope
8329 .payload
8330 .start
8331 .and_then(deserialize_anchor)
8332 .context("missing range start")?;
8333 let end = envelope
8334 .payload
8335 .end
8336 .and_then(deserialize_anchor)
8337 .context("missing range end")?;
8338 let buffer_hints = this
8339 .update(&mut cx, |project, cx| {
8340 project.inlay_hints(buffer.clone(), start..end, cx)
8341 })?
8342 .await
8343 .context("inlay hints fetch")?;
8344
8345 this.update(&mut cx, |project, cx| {
8346 InlayHints::response_to_proto(
8347 buffer_hints,
8348 project,
8349 sender_id,
8350 &buffer.read(cx).version(),
8351 cx,
8352 )
8353 })
8354 }
8355
8356 async fn handle_resolve_inlay_hint(
8357 this: Model<Self>,
8358 envelope: TypedEnvelope<proto::ResolveInlayHint>,
8359 _: Arc<Client>,
8360 mut cx: AsyncAppContext,
8361 ) -> Result<proto::ResolveInlayHintResponse> {
        let proto_hint = envelope
            .payload
            .hint
            .ok_or_else(|| {
                anyhow!("incorrect protobuf resolve inlay hint message: missing the inlay hint")
            })?;
8366 let hint = InlayHints::proto_to_project_hint(proto_hint)
8367 .context("resolved proto inlay hint conversion")?;
8368 let buffer = this.update(&mut cx, |this, _cx| {
8369 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8370 this.opened_buffers
8371 .get(&buffer_id)
8372 .and_then(|buffer| buffer.upgrade())
8373 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8374 })??;
8375 let response_hint = this
8376 .update(&mut cx, |project, cx| {
8377 project.resolve_inlay_hint(
8378 hint,
8379 buffer,
8380 LanguageServerId(envelope.payload.language_server_id as usize),
8381 cx,
8382 )
8383 })?
8384 .await
            .context("inlay hint resolution")?;
8386 Ok(proto::ResolveInlayHintResponse {
8387 hint: Some(InlayHints::project_to_proto_hint(response_hint)),
8388 })
8389 }
8390
8391 async fn try_resolve_code_action(
8392 lang_server: &LanguageServer,
8393 action: &mut CodeAction,
8394 ) -> anyhow::Result<()> {
8395 if GetCodeActions::can_resolve_actions(&lang_server.capabilities()) {
8396 if action.lsp_action.data.is_some()
8397 && (action.lsp_action.command.is_none() || action.lsp_action.edit.is_none())
8398 {
8399 action.lsp_action = lang_server
8400 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action.clone())
8401 .await?;
8402 }
8403 }
8404
8405 anyhow::Ok(())
8406 }
8407
8408 async fn handle_refresh_inlay_hints(
8409 this: Model<Self>,
8410 _: TypedEnvelope<proto::RefreshInlayHints>,
8411 _: Arc<Client>,
8412 mut cx: AsyncAppContext,
8413 ) -> Result<proto::Ack> {
8414 this.update(&mut cx, |_, cx| {
8415 cx.emit(Event::RefreshInlayHints);
8416 })?;
8417 Ok(proto::Ack {})
8418 }
8419
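    /// Generic handler for peer requests that wrap an LSP command: it looks up
    /// the target buffer, reconstructs the typed command with `T::from_proto`,
    /// runs it against the buffer's primary language server via `request_lsp`,
    /// and serializes the result back into the proto response for the sender.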
8420 async fn handle_lsp_command<T: LspCommand>(
8421 this: Model<Self>,
8422 envelope: TypedEnvelope<T::ProtoRequest>,
8423 _: Arc<Client>,
8424 mut cx: AsyncAppContext,
8425 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
8426 where
8427 <T::LspRequest as lsp::request::Request>::Params: Send,
8428 <T::LspRequest as lsp::request::Request>::Result: Send,
8429 {
8430 let sender_id = envelope.original_sender_id()?;
8431 let buffer_id = T::buffer_id_from_proto(&envelope.payload)?;
8432 let buffer_handle = this.update(&mut cx, |this, _cx| {
8433 this.opened_buffers
8434 .get(&buffer_id)
8435 .and_then(|buffer| buffer.upgrade())
8436 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
8437 })??;
8438 let request = T::from_proto(
8439 envelope.payload,
8440 this.clone(),
8441 buffer_handle.clone(),
8442 cx.clone(),
8443 )
8444 .await?;
8445 let response = this
8446 .update(&mut cx, |this, cx| {
8447 this.request_lsp(
8448 buffer_handle.clone(),
8449 LanguageServerToQuery::Primary,
8450 request,
8451 cx,
8452 )
8453 })?
8454 .await?;
8455 this.update(&mut cx, |this, cx| {
8456 Ok(T::response_to_proto(
8457 response,
8458 this,
8459 sender_id,
8460 &buffer_handle.read(cx).version(),
8461 cx,
8462 ))
8463 })?
8464 }
8465
8466 async fn handle_get_project_symbols(
8467 this: Model<Self>,
8468 envelope: TypedEnvelope<proto::GetProjectSymbols>,
8469 _: Arc<Client>,
8470 mut cx: AsyncAppContext,
8471 ) -> Result<proto::GetProjectSymbolsResponse> {
8472 let symbols = this
8473 .update(&mut cx, |this, cx| {
8474 this.symbols(&envelope.payload.query, cx)
8475 })?
8476 .await?;
8477
8478 Ok(proto::GetProjectSymbolsResponse {
8479 symbols: symbols.iter().map(serialize_symbol).collect(),
8480 })
8481 }
8482
8483 async fn handle_search_project(
8484 this: Model<Self>,
8485 envelope: TypedEnvelope<proto::SearchProject>,
8486 _: Arc<Client>,
8487 mut cx: AsyncAppContext,
8488 ) -> Result<proto::SearchProjectResponse> {
8489 let peer_id = envelope.original_sender_id()?;
8490 let query = SearchQuery::from_proto(envelope.payload)?;
8491 let mut result = this.update(&mut cx, |this, cx| this.search(query, cx))?;
8492
8493 cx.spawn(move |mut cx| async move {
8494 let mut locations = Vec::new();
8495 while let Some((buffer, ranges)) = result.next().await {
8496 for range in ranges {
8497 let start = serialize_anchor(&range.start);
8498 let end = serialize_anchor(&range.end);
8499 let buffer_id = this.update(&mut cx, |this, cx| {
8500 this.create_buffer_for_peer(&buffer, peer_id, cx).into()
8501 })?;
8502 locations.push(proto::Location {
8503 buffer_id,
8504 start: Some(start),
8505 end: Some(end),
8506 });
8507 }
8508 }
8509 Ok(proto::SearchProjectResponse { locations })
8510 })
8511 .await
8512 }
8513
8514 async fn handle_open_buffer_for_symbol(
8515 this: Model<Self>,
8516 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
8517 _: Arc<Client>,
8518 mut cx: AsyncAppContext,
8519 ) -> Result<proto::OpenBufferForSymbolResponse> {
8520 let peer_id = envelope.original_sender_id()?;
8521 let symbol = envelope
8522 .payload
8523 .symbol
8524 .ok_or_else(|| anyhow!("invalid symbol"))?;
8525 let symbol = this
8526 .update(&mut cx, |this, _| this.deserialize_symbol(symbol))?
8527 .await?;
8528 let symbol = this.update(&mut cx, |this, _| {
8529 let signature = this.symbol_signature(&symbol.path);
8530 if signature == symbol.signature {
8531 Ok(symbol)
8532 } else {
8533 Err(anyhow!("invalid symbol signature"))
8534 }
8535 })??;
8536 let buffer = this
8537 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))?
8538 .await?;
8539
8540 this.update(&mut cx, |this, cx| {
8541 let is_private = buffer
8542 .read(cx)
8543 .file()
8544 .map(|f| f.is_private())
8545 .unwrap_or_default();
8546 if is_private {
8547 Err(anyhow!(ErrorCode::UnsharedItem))
8548 } else {
8549 Ok(proto::OpenBufferForSymbolResponse {
8550 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
8551 })
8552 }
8553 })?
8554 }
8555
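    /// Hashes the symbol's worktree id, path, and this project's random nonce
    /// with SHA-256. Peers must echo this signature back when opening a buffer
    /// for a symbol (see `handle_open_buffer_for_symbol`), so requests naming
    /// paths this project never offered as symbols can be rejected.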
8556 fn symbol_signature(&self, project_path: &ProjectPath) -> [u8; 32] {
8557 let mut hasher = Sha256::new();
8558 hasher.update(project_path.worktree_id.to_proto().to_be_bytes());
8559 hasher.update(project_path.path.to_string_lossy().as_bytes());
8560 hasher.update(self.nonce.to_be_bytes());
8561 hasher.finalize().as_slice().try_into().unwrap()
8562 }
8563
8564 async fn handle_open_buffer_by_id(
8565 this: Model<Self>,
8566 envelope: TypedEnvelope<proto::OpenBufferById>,
8567 _: Arc<Client>,
8568 mut cx: AsyncAppContext,
8569 ) -> Result<proto::OpenBufferResponse> {
8570 let peer_id = envelope.original_sender_id()?;
8571 let buffer_id = BufferId::new(envelope.payload.id)?;
8572 let buffer = this
8573 .update(&mut cx, |this, cx| this.open_buffer_by_id(buffer_id, cx))?
8574 .await?;
8575 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
8576 }
8577
8578 async fn handle_open_buffer_by_path(
8579 this: Model<Self>,
8580 envelope: TypedEnvelope<proto::OpenBufferByPath>,
8581 _: Arc<Client>,
8582 mut cx: AsyncAppContext,
8583 ) -> Result<proto::OpenBufferResponse> {
8584 let peer_id = envelope.original_sender_id()?;
8585 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
8586 let open_buffer = this.update(&mut cx, |this, cx| {
8587 this.open_buffer(
8588 ProjectPath {
8589 worktree_id,
8590 path: PathBuf::from(envelope.payload.path).into(),
8591 },
8592 cx,
8593 )
8594 })?;
8595
8596 let buffer = open_buffer.await?;
8597 Project::respond_to_open_buffer_request(this, buffer, peer_id, &mut cx)
8598 }
8599
8600 fn respond_to_open_buffer_request(
8601 this: Model<Self>,
8602 buffer: Model<Buffer>,
8603 peer_id: proto::PeerId,
8604 cx: &mut AsyncAppContext,
8605 ) -> Result<proto::OpenBufferResponse> {
8606 this.update(cx, |this, cx| {
8607 let is_private = buffer
8608 .read(cx)
8609 .file()
8610 .map(|f| f.is_private())
8611 .unwrap_or_default();
8612 if is_private {
8613 Err(anyhow!(ErrorCode::UnsharedItem))
8614 } else {
8615 Ok(proto::OpenBufferResponse {
8616 buffer_id: this.create_buffer_for_peer(&buffer, peer_id, cx).into(),
8617 })
8618 }
8619 })?
8620 }
8621
8622 fn serialize_project_transaction_for_peer(
8623 &mut self,
8624 project_transaction: ProjectTransaction,
8625 peer_id: proto::PeerId,
8626 cx: &mut AppContext,
8627 ) -> proto::ProjectTransaction {
8628 let mut serialized_transaction = proto::ProjectTransaction {
8629 buffer_ids: Default::default(),
8630 transactions: Default::default(),
8631 };
8632 for (buffer, transaction) in project_transaction.0 {
8633 serialized_transaction
8634 .buffer_ids
8635 .push(self.create_buffer_for_peer(&buffer, peer_id, cx).into());
8636 serialized_transaction
8637 .transactions
8638 .push(language::proto::serialize_transaction(&transaction));
8639 }
8640 serialized_transaction
8641 }
8642
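    /// Rebuilds a `ProjectTransaction` received from a peer: waits for each
    /// referenced remote buffer to arrive, deserializes its transaction, waits
    /// for the transaction's edits to be applied, and optionally pushes the
    /// transaction onto the buffer's undo history.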
8643 fn deserialize_project_transaction(
8644 &mut self,
8645 message: proto::ProjectTransaction,
8646 push_to_history: bool,
8647 cx: &mut ModelContext<Self>,
8648 ) -> Task<Result<ProjectTransaction>> {
8649 cx.spawn(move |this, mut cx| async move {
8650 let mut project_transaction = ProjectTransaction::default();
8651 for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
8652 {
8653 let buffer_id = BufferId::new(buffer_id)?;
8654 let buffer = this
8655 .update(&mut cx, |this, cx| {
8656 this.wait_for_remote_buffer(buffer_id, cx)
8657 })?
8658 .await?;
8659 let transaction = language::proto::deserialize_transaction(transaction)?;
8660 project_transaction.0.insert(buffer, transaction);
8661 }
8662
8663 for (buffer, transaction) in &project_transaction.0 {
8664 buffer
8665 .update(&mut cx, |buffer, _| {
8666 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
8667 })?
8668 .await?;
8669
8670 if push_to_history {
8671 buffer.update(&mut cx, |buffer, _| {
8672 buffer.push_transaction(transaction.clone(), Instant::now());
8673 })?;
8674 }
8675 }
8676
8677 Ok(project_transaction)
8678 })
8679 }
8680
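    /// Returns the buffer's remote id and, if this project is currently
    /// shared, queues a `CreateBufferForPeer` update on the shared project's
    /// update channel so the buffer's state is streamed to the given peer.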
8681 fn create_buffer_for_peer(
8682 &mut self,
8683 buffer: &Model<Buffer>,
8684 peer_id: proto::PeerId,
8685 cx: &mut AppContext,
8686 ) -> BufferId {
8687 let buffer_id = buffer.read(cx).remote_id();
8688 if let ProjectClientState::Shared { updates_tx, .. } = &self.client_state {
8689 updates_tx
8690 .unbounded_send(LocalProjectUpdate::CreateBufferForPeer { peer_id, buffer_id })
8691 .ok();
8692 }
8693 buffer_id
8694 }
8695
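    /// Waits for the buffer with the given remote id to be registered, which
    /// happens once the host's `CreateBufferForPeer` messages for it have been
    /// handled. Marks the buffer as incomplete so it can be re-requested after
    /// a reconnect, and fails if the project is disconnected first.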
8696 fn wait_for_remote_buffer(
8697 &mut self,
8698 id: BufferId,
8699 cx: &mut ModelContext<Self>,
8700 ) -> Task<Result<Model<Buffer>>> {
8701 let mut opened_buffer_rx = self.opened_buffer.1.clone();
8702
8703 cx.spawn(move |this, mut cx| async move {
8704 let buffer = loop {
8705 let Some(this) = this.upgrade() else {
8706 return Err(anyhow!("project dropped"));
8707 };
8708
8709 let buffer = this.update(&mut cx, |this, _cx| {
8710 this.opened_buffers
8711 .get(&id)
8712 .and_then(|buffer| buffer.upgrade())
8713 })?;
8714
8715 if let Some(buffer) = buffer {
8716 break buffer;
8717 } else if this.update(&mut cx, |this, _| this.is_disconnected())? {
8718 return Err(anyhow!("disconnected before buffer {} could be opened", id));
8719 }
8720
8721 this.update(&mut cx, |this, _| {
8722 this.incomplete_remote_buffers.entry(id).or_default();
8723 })?;
8724 drop(this);
8725
8726 opened_buffer_rx
8727 .next()
8728 .await
8729 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
8730 };
8731
8732 Ok(buffer)
8733 })
8734 }
8735
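    /// After reconnecting to a host, exchanges buffer versions with the host
    /// so that each side can send the other any operations it is missing, and
    /// re-requests any buffers whose initial state never fully arrived.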
8736 fn synchronize_remote_buffers(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
8737 let project_id = match self.client_state {
8738 ProjectClientState::Remote {
8739 sharing_has_stopped,
8740 remote_id,
8741 ..
8742 } => {
8743 if sharing_has_stopped {
8744 return Task::ready(Err(anyhow!(
8745 "can't synchronize remote buffers on a readonly project"
8746 )));
8747 } else {
8748 remote_id
8749 }
8750 }
8751 ProjectClientState::Shared { .. } | ProjectClientState::Local => {
8752 return Task::ready(Err(anyhow!(
8753 "can't synchronize remote buffers on a local project"
8754 )))
8755 }
8756 };
8757
8758 let client = self.client.clone();
8759 cx.spawn(move |this, mut cx| async move {
8760 let (buffers, incomplete_buffer_ids) = this.update(&mut cx, |this, cx| {
8761 let buffers = this
8762 .opened_buffers
8763 .iter()
8764 .filter_map(|(id, buffer)| {
8765 let buffer = buffer.upgrade()?;
8766 Some(proto::BufferVersion {
8767 id: (*id).into(),
8768 version: language::proto::serialize_version(&buffer.read(cx).version),
8769 })
8770 })
8771 .collect();
8772 let incomplete_buffer_ids = this
8773 .incomplete_remote_buffers
8774 .keys()
8775 .copied()
8776 .collect::<Vec<_>>();
8777
8778 (buffers, incomplete_buffer_ids)
8779 })?;
8780 let response = client
8781 .request(proto::SynchronizeBuffers {
8782 project_id,
8783 buffers,
8784 })
8785 .await?;
8786
8787 let send_updates_for_buffers = this.update(&mut cx, |this, cx| {
8788 response
8789 .buffers
8790 .into_iter()
8791 .map(|buffer| {
8792 let client = client.clone();
8793 let buffer_id = match BufferId::new(buffer.id) {
8794 Ok(id) => id,
8795 Err(e) => {
8796 return Task::ready(Err(e));
8797 }
8798 };
8799 let remote_version = language::proto::deserialize_version(&buffer.version);
8800 if let Some(buffer) = this.buffer_for_id(buffer_id) {
8801 let operations =
8802 buffer.read(cx).serialize_ops(Some(remote_version), cx);
8803 cx.background_executor().spawn(async move {
8804 let operations = operations.await;
8805 for chunk in split_operations(operations) {
8806 client
8807 .request(proto::UpdateBuffer {
8808 project_id,
8809 buffer_id: buffer_id.into(),
8810 operations: chunk,
8811 })
8812 .await?;
8813 }
8814 anyhow::Ok(())
8815 })
8816 } else {
8817 Task::ready(Ok(()))
8818 }
8819 })
8820 .collect::<Vec<_>>()
8821 })?;
8822
            // Any incomplete buffers have open requests waiting. Ask the host to create
            // these buffers for us again to unblock any waiting futures.
8825 for id in incomplete_buffer_ids {
8826 cx.background_executor()
8827 .spawn(client.request(proto::OpenBufferById {
8828 project_id,
8829 id: id.into(),
8830 }))
8831 .detach();
8832 }
8833
8834 futures::future::join_all(send_updates_for_buffers)
8835 .await
8836 .into_iter()
8837 .collect()
8838 })
8839 }
8840
8841 pub fn worktree_metadata_protos(&self, cx: &AppContext) -> Vec<proto::WorktreeMetadata> {
8842 self.worktrees()
8843 .map(|worktree| {
8844 let worktree = worktree.read(cx);
8845 proto::WorktreeMetadata {
8846 id: worktree.id().to_proto(),
8847 root_name: worktree.root_name().into(),
8848 visible: worktree.is_visible(),
8849 abs_path: worktree.abs_path().to_string_lossy().into(),
8850 }
8851 })
8852 .collect()
8853 }
8854
8855 fn set_worktrees_from_proto(
8856 &mut self,
8857 worktrees: Vec<proto::WorktreeMetadata>,
8858 cx: &mut ModelContext<Project>,
8859 ) -> Result<()> {
8860 let replica_id = self.replica_id();
8861 let remote_id = self.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
8862
8863 let mut old_worktrees_by_id = self
8864 .worktrees
8865 .drain(..)
8866 .filter_map(|worktree| {
8867 let worktree = worktree.upgrade()?;
8868 Some((worktree.read(cx).id(), worktree))
8869 })
8870 .collect::<HashMap<_, _>>();
8871
8872 for worktree in worktrees {
8873 if let Some(old_worktree) =
8874 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
8875 {
8876 self.worktrees.push(WorktreeHandle::Strong(old_worktree));
8877 } else {
8878 let worktree =
8879 Worktree::remote(remote_id, replica_id, worktree, self.client.clone(), cx);
8880 let _ = self.add_worktree(&worktree, cx);
8881 }
8882 }
8883
8884 self.metadata_changed(cx);
8885 for id in old_worktrees_by_id.keys() {
8886 cx.emit(Event::WorktreeRemoved(*id));
8887 }
8888
8889 Ok(())
8890 }
8891
8892 fn set_collaborators_from_proto(
8893 &mut self,
8894 messages: Vec<proto::Collaborator>,
8895 cx: &mut ModelContext<Self>,
8896 ) -> Result<()> {
8897 let mut collaborators = HashMap::default();
8898 for message in messages {
8899 let collaborator = Collaborator::from_proto(message)?;
8900 collaborators.insert(collaborator.peer_id, collaborator);
8901 }
8902 for old_peer_id in self.collaborators.keys() {
8903 if !collaborators.contains_key(old_peer_id) {
8904 cx.emit(Event::CollaboratorLeft(*old_peer_id));
8905 }
8906 }
8907 self.collaborators = collaborators;
8908 Ok(())
8909 }
8910
8911 fn deserialize_symbol(
8912 &self,
8913 serialized_symbol: proto::Symbol,
8914 ) -> impl Future<Output = Result<Symbol>> {
8915 let languages = self.languages.clone();
8916 async move {
8917 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
8918 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
8919 let start = serialized_symbol
8920 .start
8921 .ok_or_else(|| anyhow!("invalid start"))?;
8922 let end = serialized_symbol
8923 .end
8924 .ok_or_else(|| anyhow!("invalid end"))?;
            // The kind was produced by the matching transmute in `serialize_symbol` on the
            // sending side, so it is assumed to be a valid discriminant of the symbol kind enum.
            let kind = unsafe { mem::transmute(serialized_symbol.kind) };
8926 let path = ProjectPath {
8927 worktree_id,
8928 path: PathBuf::from(serialized_symbol.path).into(),
8929 };
8930 let language = languages
8931 .language_for_file(&path.path, None)
8932 .await
8933 .log_err();
8934 let adapter = language
8935 .as_ref()
8936 .and_then(|language| languages.lsp_adapters(language).first().cloned());
8937 Ok(Symbol {
8938 language_server_name: LanguageServerName(
8939 serialized_symbol.language_server_name.into(),
8940 ),
8941 source_worktree_id,
8942 path,
8943 label: {
8944 match language.as_ref().zip(adapter.as_ref()) {
8945 Some((language, adapter)) => {
8946 adapter
8947 .label_for_symbol(&serialized_symbol.name, kind, language)
8948 .await
8949 }
8950 None => None,
8951 }
8952 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None))
8953 },
8954
8955 name: serialized_symbol.name,
8956 range: Unclipped(PointUtf16::new(start.row, start.column))
8957 ..Unclipped(PointUtf16::new(end.row, end.column)),
8958 kind,
8959 signature: serialized_symbol
8960 .signature
8961 .try_into()
8962 .map_err(|_| anyhow!("invalid signature"))?,
8963 })
8964 }
8965 }
8966
8967 async fn handle_buffer_saved(
8968 this: Model<Self>,
8969 envelope: TypedEnvelope<proto::BufferSaved>,
8970 _: Arc<Client>,
8971 mut cx: AsyncAppContext,
8972 ) -> Result<()> {
8973 let fingerprint = deserialize_fingerprint(&envelope.payload.fingerprint)?;
8974 let version = deserialize_version(&envelope.payload.version);
8975 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
8976 let mtime = envelope
8977 .payload
8978 .mtime
8979 .ok_or_else(|| anyhow!("missing mtime"))?
8980 .into();
8981
8982 this.update(&mut cx, |this, cx| {
8983 let buffer = this
8984 .opened_buffers
8985 .get(&buffer_id)
8986 .and_then(|buffer| buffer.upgrade())
8987 .or_else(|| {
8988 this.incomplete_remote_buffers
8989 .get(&buffer_id)
8990 .and_then(|b| b.clone())
8991 });
8992 if let Some(buffer) = buffer {
8993 buffer.update(cx, |buffer, cx| {
8994 buffer.did_save(version, fingerprint, mtime, cx);
8995 });
8996 }
8997 Ok(())
8998 })?
8999 }
9000
9001 async fn handle_buffer_reloaded(
9002 this: Model<Self>,
9003 envelope: TypedEnvelope<proto::BufferReloaded>,
9004 _: Arc<Client>,
9005 mut cx: AsyncAppContext,
9006 ) -> Result<()> {
9007 let payload = envelope.payload;
9008 let version = deserialize_version(&payload.version);
9009 let fingerprint = deserialize_fingerprint(&payload.fingerprint)?;
9010 let line_ending = deserialize_line_ending(
9011 proto::LineEnding::from_i32(payload.line_ending)
9012 .ok_or_else(|| anyhow!("missing line ending"))?,
9013 );
9014 let mtime = payload
9015 .mtime
9016 .ok_or_else(|| anyhow!("missing mtime"))?
9017 .into();
9018 let buffer_id = BufferId::new(payload.buffer_id)?;
9019 this.update(&mut cx, |this, cx| {
9020 let buffer = this
9021 .opened_buffers
9022 .get(&buffer_id)
9023 .and_then(|buffer| buffer.upgrade())
9024 .or_else(|| {
9025 this.incomplete_remote_buffers
9026 .get(&buffer_id)
9027 .cloned()
9028 .flatten()
9029 });
9030 if let Some(buffer) = buffer {
9031 buffer.update(cx, |buffer, cx| {
9032 buffer.did_reload(version, fingerprint, line_ending, mtime, cx);
9033 });
9034 }
9035 Ok(())
9036 })?
9037 }
9038
9039 #[allow(clippy::type_complexity)]
9040 fn edits_from_lsp(
9041 &mut self,
9042 buffer: &Model<Buffer>,
9043 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
9044 server_id: LanguageServerId,
9045 version: Option<i32>,
9046 cx: &mut ModelContext<Self>,
9047 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
9048 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx);
9049 cx.background_executor().spawn(async move {
9050 let snapshot = snapshot?;
9051 let mut lsp_edits = lsp_edits
9052 .into_iter()
9053 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
9054 .collect::<Vec<_>>();
9055 lsp_edits.sort_by_key(|(range, _)| range.start);
9056
9057 let mut lsp_edits = lsp_edits.into_iter().peekable();
9058 let mut edits = Vec::new();
9059 while let Some((range, mut new_text)) = lsp_edits.next() {
9060 // Clip invalid ranges provided by the language server.
9061 let mut range = snapshot.clip_point_utf16(range.start, Bias::Left)
9062 ..snapshot.clip_point_utf16(range.end, Bias::Left);
9063
9064 // Combine any LSP edits that are adjacent.
9065 //
9066 // Also, combine LSP edits that are separated from each other by only
9067 // a newline. This is important because for some code actions,
9068 // Rust-analyzer rewrites the entire buffer via a series of edits that
9069 // are separated by unchanged newline characters.
9070 //
9071 // In order for the diffing logic below to work properly, any edits that
9072 // cancel each other out must be combined into one.
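                // For example (hypothetical): if the server sends one edit that rewrites
                // all of line 0 and another that rewrites line 1, with only the unchanged
                // newline at the end of line 0 between them, the two are merged into a
                // single edit covering both lines and a '\n' is re-inserted between their
                // replacement texts.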
9073 while let Some((next_range, next_text)) = lsp_edits.peek() {
9074 if next_range.start.0 > range.end {
9075 if next_range.start.0.row > range.end.row + 1
9076 || next_range.start.0.column > 0
9077 || snapshot.clip_point_utf16(
9078 Unclipped(PointUtf16::new(range.end.row, u32::MAX)),
9079 Bias::Left,
9080 ) > range.end
9081 {
9082 break;
9083 }
9084 new_text.push('\n');
9085 }
9086 range.end = snapshot.clip_point_utf16(next_range.end, Bias::Left);
9087 new_text.push_str(next_text);
9088 lsp_edits.next();
9089 }
9090
9091 // For multiline edits, perform a diff of the old and new text so that
9092 // we can identify the changes more precisely, preserving the locations
9093 // of any anchors positioned in the unchanged regions.
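                // For example (hypothetical): if a server edit replaces an entire function
                // but only one line inside it actually differs, the diff yields a single
                // small edit for that line, so anchors positioned elsewhere in the function
                // keep their locations.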
9094 if range.end.row > range.start.row {
9095 let mut offset = range.start.to_offset(&snapshot);
9096 let old_text = snapshot.text_for_range(range).collect::<String>();
9097
9098 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
9099 let mut moved_since_edit = true;
9100 for change in diff.iter_all_changes() {
9101 let tag = change.tag();
9102 let value = change.value();
9103 match tag {
9104 ChangeTag::Equal => {
9105 offset += value.len();
9106 moved_since_edit = true;
9107 }
9108 ChangeTag::Delete => {
9109 let start = snapshot.anchor_after(offset);
9110 let end = snapshot.anchor_before(offset + value.len());
9111 if moved_since_edit {
9112 edits.push((start..end, String::new()));
9113 } else {
9114 edits.last_mut().unwrap().0.end = end;
9115 }
9116 offset += value.len();
9117 moved_since_edit = false;
9118 }
9119 ChangeTag::Insert => {
9120 if moved_since_edit {
9121 let anchor = snapshot.anchor_after(offset);
9122 edits.push((anchor..anchor, value.to_string()));
9123 } else {
9124 edits.last_mut().unwrap().1.push_str(value);
9125 }
9126 moved_since_edit = false;
9127 }
9128 }
9129 }
9130 } else if range.end == range.start {
9131 let anchor = snapshot.anchor_after(range.start);
9132 edits.push((anchor..anchor, new_text));
9133 } else {
9134 let edit_start = snapshot.anchor_after(range.start);
9135 let edit_end = snapshot.anchor_before(range.end);
9136 edits.push((edit_start..edit_end, new_text));
9137 }
9138 }
9139
9140 Ok(edits)
9141 })
9142 }
9143
9144 fn buffer_snapshot_for_lsp_version(
9145 &mut self,
9146 buffer: &Model<Buffer>,
9147 server_id: LanguageServerId,
9148 version: Option<i32>,
9149 cx: &AppContext,
9150 ) -> Result<TextBufferSnapshot> {
9151 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
9152
9153 if let Some(version) = version {
9154 let buffer_id = buffer.read(cx).remote_id();
9155 let snapshots = self
9156 .buffer_snapshots
9157 .get_mut(&buffer_id)
9158 .and_then(|m| m.get_mut(&server_id))
9159 .ok_or_else(|| {
9160 anyhow!("no snapshots found for buffer {buffer_id} and server {server_id}")
9161 })?;
9162
9163 let found_snapshot = snapshots
9164 .binary_search_by_key(&version, |e| e.version)
9165 .map(|ix| snapshots[ix].snapshot.clone())
9166 .map_err(|_| {
9167 anyhow!("snapshot not found for buffer {buffer_id} server {server_id} at version {version}")
9168 })?;
9169
9170 snapshots.retain(|snapshot| snapshot.version + OLD_VERSIONS_TO_RETAIN >= version);
9171 Ok(found_snapshot)
9172 } else {
9173 Ok((buffer.read(cx)).text_snapshot())
9174 }
9175 }
9176
9177 pub fn language_servers(
9178 &self,
9179 ) -> impl '_ + Iterator<Item = (LanguageServerId, LanguageServerName, WorktreeId)> {
9180 self.language_server_ids
9181 .iter()
9182 .map(|((worktree_id, server_name), server_id)| {
9183 (*server_id, server_name.clone(), *worktree_id)
9184 })
9185 }
9186
9187 pub fn supplementary_language_servers(
9188 &self,
9189 ) -> impl '_
9190 + Iterator<
9191 Item = (
9192 &LanguageServerId,
9193 &(LanguageServerName, Arc<LanguageServer>),
9194 ),
9195 > {
9196 self.supplementary_language_servers.iter()
9197 }
9198
9199 pub fn language_server_adapter_for_id(
9200 &self,
9201 id: LanguageServerId,
9202 ) -> Option<Arc<CachedLspAdapter>> {
9203 if let Some(LanguageServerState::Running { adapter, .. }) = self.language_servers.get(&id) {
9204 Some(adapter.clone())
9205 } else {
9206 None
9207 }
9208 }
9209
9210 pub fn language_server_for_id(&self, id: LanguageServerId) -> Option<Arc<LanguageServer>> {
9211 if let Some(LanguageServerState::Running { server, .. }) = self.language_servers.get(&id) {
9212 Some(server.clone())
9213 } else if let Some((_, server)) = self.supplementary_language_servers.get(&id) {
9214 Some(Arc::clone(server))
9215 } else {
9216 None
9217 }
9218 }
9219
9220 pub fn language_servers_for_buffer(
9221 &self,
9222 buffer: &Buffer,
9223 cx: &AppContext,
9224 ) -> impl Iterator<Item = (&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9225 self.language_server_ids_for_buffer(buffer, cx)
9226 .into_iter()
9227 .filter_map(|server_id| match self.language_servers.get(&server_id)? {
9228 LanguageServerState::Running {
9229 adapter, server, ..
9230 } => Some((adapter, server)),
9231 _ => None,
9232 })
9233 }
9234
9235 fn primary_language_server_for_buffer(
9236 &self,
9237 buffer: &Buffer,
9238 cx: &AppContext,
9239 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9240 self.language_servers_for_buffer(buffer, cx).next()
9241 }
9242
9243 pub fn language_server_for_buffer(
9244 &self,
9245 buffer: &Buffer,
9246 server_id: LanguageServerId,
9247 cx: &AppContext,
9248 ) -> Option<(&Arc<CachedLspAdapter>, &Arc<LanguageServer>)> {
9249 self.language_servers_for_buffer(buffer, cx)
9250 .find(|(_, s)| s.server_id() == server_id)
9251 }
9252
9253 fn language_server_ids_for_buffer(
9254 &self,
9255 buffer: &Buffer,
9256 cx: &AppContext,
9257 ) -> Vec<LanguageServerId> {
9258 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
9259 let worktree_id = file.worktree_id(cx);
9260 self.languages
9261 .lsp_adapters(&language)
9262 .iter()
9263 .flat_map(|adapter| {
9264 let key = (worktree_id, adapter.name.clone());
9265 self.language_server_ids.get(&key).copied()
9266 })
9267 .collect()
9268 } else {
9269 Vec::new()
9270 }
9271 }
9272}
9273
9274fn subscribe_for_copilot_events(
9275 copilot: &Model<Copilot>,
9276 cx: &mut ModelContext<'_, Project>,
9277) -> gpui::Subscription {
9278 cx.subscribe(
9279 copilot,
9280 |project, copilot, copilot_event, cx| match copilot_event {
9281 copilot::Event::CopilotLanguageServerStarted => {
9282 match copilot.read(cx).language_server() {
9283 Some((name, copilot_server)) => {
                        // An earlier event may have already added and subscribed to this server; avoid registering it again.
9285 if !copilot_server.has_notification_handler::<copilot::request::LogMessage>() {
9286 let new_server_id = copilot_server.server_id();
9287 let weak_project = cx.weak_model();
9288 let copilot_log_subscription = copilot_server
9289 .on_notification::<copilot::request::LogMessage, _>(
9290 move |params, mut cx| {
9291 weak_project.update(&mut cx, |_, cx| {
9292 cx.emit(Event::LanguageServerLog(
9293 new_server_id,
9294 params.message,
9295 ));
9296 }).ok();
9297 },
9298 );
9299 project.supplementary_language_servers.insert(new_server_id, (name.clone(), Arc::clone(copilot_server)));
9300 project.copilot_log_subscription = Some(copilot_log_subscription);
9301 cx.emit(Event::LanguageServerAdded(new_server_id));
9302 }
9303 }
9304 None => debug_panic!("Received Copilot language server started event, but no language server is running"),
9305 }
9306 }
9307 },
9308 )
9309}
9310
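/// Returns the longest leading run of path components in `glob` that contain
/// no glob metacharacters (`*`, `?`, `{`, `}`).
///
/// For example, with `/` as the platform path separator,
/// `glob_literal_prefix("node_modules/**/*.js")` returns `"node_modules"`,
/// while a pattern whose first component is a wildcard returns the empty string.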
9311fn glob_literal_prefix(glob: &str) -> &str {
9312 let mut literal_end = 0;
9313 for (i, part) in glob.split(path::MAIN_SEPARATOR).enumerate() {
9314 if part.contains(&['*', '?', '{', '}']) {
9315 break;
9316 } else {
9317 if i > 0 {
9318 // Account for separator prior to this part
9319 literal_end += path::MAIN_SEPARATOR.len_utf8();
9320 }
9321 literal_end += part.len();
9322 }
9323 }
9324 &glob[..literal_end]
9325}
9326
9327impl WorktreeHandle {
9328 pub fn upgrade(&self) -> Option<Model<Worktree>> {
9329 match self {
9330 WorktreeHandle::Strong(handle) => Some(handle.clone()),
9331 WorktreeHandle::Weak(handle) => handle.upgrade(),
9332 }
9333 }
9334
9335 pub fn handle_id(&self) -> usize {
9336 match self {
9337 WorktreeHandle::Strong(handle) => handle.entity_id().as_u64() as usize,
9338 WorktreeHandle::Weak(handle) => handle.entity_id().as_u64() as usize,
9339 }
9340 }
9341}
9342
9343impl OpenBuffer {
9344 pub fn upgrade(&self) -> Option<Model<Buffer>> {
9345 match self {
9346 OpenBuffer::Strong(handle) => Some(handle.clone()),
9347 OpenBuffer::Weak(handle) => handle.upgrade(),
9348 OpenBuffer::Operations(_) => None,
9349 }
9350 }
9351}
9352
9353pub struct PathMatchCandidateSet {
9354 pub snapshot: Snapshot,
9355 pub include_ignored: bool,
9356 pub include_root_name: bool,
9357}
9358
9359impl<'a> fuzzy::PathMatchCandidateSet<'a> for PathMatchCandidateSet {
9360 type Candidates = PathMatchCandidateSetIter<'a>;
9361
9362 fn id(&self) -> usize {
9363 self.snapshot.id().to_usize()
9364 }
9365
9366 fn len(&self) -> usize {
9367 if self.include_ignored {
9368 self.snapshot.file_count()
9369 } else {
9370 self.snapshot.visible_file_count()
9371 }
9372 }
9373
9374 fn prefix(&self) -> Arc<str> {
9375 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
9376 self.snapshot.root_name().into()
9377 } else if self.include_root_name {
9378 format!("{}/", self.snapshot.root_name()).into()
9379 } else {
9380 "".into()
9381 }
9382 }
9383
9384 fn candidates(&'a self, start: usize) -> Self::Candidates {
9385 PathMatchCandidateSetIter {
9386 traversal: self.snapshot.files(self.include_ignored, start),
9387 }
9388 }
9389}
9390
9391pub struct PathMatchCandidateSetIter<'a> {
9392 traversal: Traversal<'a>,
9393}
9394
9395impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
9396 type Item = fuzzy::PathMatchCandidate<'a>;
9397
9398 fn next(&mut self) -> Option<Self::Item> {
9399 self.traversal.next().map(|entry| {
9400 if let EntryKind::File(char_bag) = entry.kind {
9401 fuzzy::PathMatchCandidate {
9402 path: &entry.path,
9403 char_bag,
9404 }
9405 } else {
9406 unreachable!()
9407 }
9408 })
9409 }
9410}
9411
9412impl EventEmitter<Event> for Project {}
9413
9414impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
9415 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
9416 Self {
9417 worktree_id,
9418 path: path.as_ref().into(),
9419 }
9420 }
9421}
9422
9423struct ProjectLspAdapterDelegate {
9424 project: WeakModel<Project>,
9425 worktree: worktree::Snapshot,
9426 fs: Arc<dyn Fs>,
9427 http_client: Arc<dyn HttpClient>,
9428 language_registry: Arc<LanguageRegistry>,
9429 shell_env: Mutex<Option<HashMap<String, String>>>,
9430}
9431
9432impl ProjectLspAdapterDelegate {
9433 fn new(project: &Project, worktree: &Model<Worktree>, cx: &ModelContext<Project>) -> Arc<Self> {
9434 Arc::new(Self {
9435 project: cx.weak_model(),
9436 worktree: worktree.read(cx).snapshot(),
9437 fs: project.fs.clone(),
9438 http_client: project.client.http_client(),
9439 language_registry: project.languages.clone(),
9440 shell_env: Default::default(),
9441 })
9442 }
9443
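    /// Loads the login shell's environment from the worktree root and caches
    /// it for later `shell_env` and `which` calls; on failure the error is
    /// logged and an empty environment is cached instead.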
9444 async fn load_shell_env(&self) {
9445 let worktree_abs_path = self.worktree.abs_path();
9446 let shell_env = load_shell_environment(&worktree_abs_path)
9447 .await
9448 .with_context(|| {
                format!("failed to load login shell environment in {worktree_abs_path:?}")
9450 })
9451 .log_err()
9452 .unwrap_or_default();
9453 *self.shell_env.lock() = Some(shell_env);
9454 }
9455}
9456
9457#[async_trait]
9458impl LspAdapterDelegate for ProjectLspAdapterDelegate {
9459 fn show_notification(&self, message: &str, cx: &mut AppContext) {
9460 self.project
9461 .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())))
9462 .ok();
9463 }
9464
9465 fn http_client(&self) -> Arc<dyn HttpClient> {
9466 self.http_client.clone()
9467 }
9468
9469 async fn shell_env(&self) -> HashMap<String, String> {
9470 self.load_shell_env().await;
9471 self.shell_env.lock().as_ref().cloned().unwrap_or_default()
9472 }
9473
    async fn which(&self, command: &OsStr) -> Option<PathBuf> {
        let worktree_abs_path = self.worktree.abs_path();
        self.load_shell_env().await;
        let shell_path = self
            .shell_env
            .lock()
            .as_ref()
            .and_then(|shell_env| shell_env.get("PATH").cloned());
        which::which_in(command, shell_path.as_ref(), &worktree_abs_path).ok()
    }

    fn update_status(
        &self,
        server_name: LanguageServerName,
        status: language::LanguageServerBinaryStatus,
    ) {
        self.language_registry
            .update_lsp_status(server_name, status);
    }

    async fn read_text_file(&self, path: PathBuf) -> Result<String> {
        if self.worktree.entry_for_path(&path).is_none() {
            return Err(anyhow!("no such path {path:?}"));
        }
        let path = self.worktree.absolutize(path.as_ref())?;
        let content = self.fs.load(&path).await?;
        Ok(content)
    }
}

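/// Converts a `Symbol` into its protobuf representation for transmission over
/// RPC.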
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        language_server_name: symbol.language_server_name.0.to_string(),
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.path.worktree_id.to_proto(),
        path: symbol.path.path.to_string_lossy().to_string(),
        name: symbol.name.clone(),
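        // NOTE: assumes `lsp::SymbolKind` shares its in-memory representation
        // with the integer `kind` field of the proto message.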
        kind: unsafe { mem::transmute(symbol.kind) },
        start: Some(proto::PointUtf16 {
            row: symbol.range.start.0.row,
            column: symbol.range.start.0.column,
        }),
        end: Some(proto::PointUtf16 {
            row: symbol.range.end.0.row,
            column: symbol.range.end.0.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}

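/// Computes a purely lexical relative path from `base` to `path`: shared
/// leading components are dropped and each remaining `base` component becomes
/// a `..`. For example, relativizing `/a/b/c` against `/a/b` yields `c`, while
/// relativizing `/a/x` against `/a/b` yields `../x`.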
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut path_components = path.components();
    let mut base_components = base.components();
    let mut components: Vec<Component> = Vec::new();
    loop {
        match (path_components.next(), base_components.next()) {
            (None, None) => break,
            (Some(a), None) => {
                components.push(a);
                components.extend(path_components.by_ref());
                break;
            }
            (None, _) => components.push(Component::ParentDir),
            (Some(a), Some(b)) if components.is_empty() && a == b => (),
            (Some(a), Some(Component::CurDir)) => components.push(a),
            (Some(a), Some(_)) => {
                components.push(Component::ParentDir);
                for _ in base_components {
                    components.push(Component::ParentDir);
                }
                components.push(a);
                components.extend(path_components.by_ref());
                break;
            }
        }
    }
    components.iter().map(|c| c.as_os_str()).collect()
}

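/// Appends `path` to `base`, resolving `.` and `..` components lexically,
/// without consulting the filesystem. For example, resolving `../c` against
/// `/a/b` yields `/a/c`.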
fn resolve_path(base: &Path, path: &Path) -> PathBuf {
    let mut result = base.to_path_buf();
    for component in path.components() {
        match component {
            Component::ParentDir => {
                result.pop();
            }
            Component::CurDir => (),
            _ => result.push(component),
        }
    }
    result
}

impl Item for Buffer {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
        File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
    }

    fn project_path(&self, cx: &AppContext) -> Option<ProjectPath> {
        File::from_dyn(self.file()).map(|file| ProjectPath {
            worktree_id: file.worktree_id(cx),
            path: file.path().clone(),
        })
    }
}

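/// Polls the watch channel until the in-flight buffer load completes,
/// returning either the loaded buffer or the load error.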
async fn wait_for_loading_buffer(
    mut receiver: postage::watch::Receiver<Option<Result<Model<Buffer>, Arc<anyhow::Error>>>>,
) -> Result<Model<Buffer>, Arc<anyhow::Error>> {
    loop {
        if let Some(result) = receiver.borrow().as_ref() {
            match result {
                Ok(buffer) => return Ok(buffer.to_owned()),
                Err(e) => return Err(e.to_owned()),
            }
        }
        receiver.next().await;
    }
}

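/// Returns whether the language server's save options request that the full
/// document text be included in `textDocument/didSave` notifications.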
fn include_text(server: &lsp::LanguageServer) -> bool {
    server
        .capabilities()
        .text_document_sync
        .as_ref()
        .and_then(|sync| match sync {
            lsp::TextDocumentSyncCapability::Kind(_) => None,
            lsp::TextDocumentSyncCapability::Options(options) => options.save.as_ref(),
        })
        .and_then(|save_options| match save_options {
            lsp::TextDocumentSyncSaveOptions::Supported(_) => None,
            lsp::TextDocumentSyncSaveOptions::SaveOptions(options) => options.include_text,
        })
        .unwrap_or(false)
}

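/// Spawns the user's `$SHELL` in `dir` and captures the resulting environment
/// by running `/usr/bin/env -0`, parsing its NUL-separated output into a map.
/// See the comments below for why the command is built the way it is.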
async fn load_shell_environment(dir: &Path) -> Result<HashMap<String, String>> {
    let marker = "ZED_SHELL_START";
    let shell = env::var("SHELL").context(
        "SHELL environment variable is not set, so we can't source login environment variables",
    )?;

    // What we're doing here is to spawn a shell and then `cd` into
    // the project directory to get the env in there as if the user
    // `cd`'d into it. We do that because tools like direnv, asdf, ...
    // hook into `cd` and only set up the env after that.
    //
    // In certain shells we need to execute additional_command in order to
    // trigger the behavior of direnv, etc.
    //
    // The `exit 0` is the result of hours of debugging, trying to find out
    // why running this command here, without `exit 0`, would mess
    // up signal handling for our process so that `ctrl-c` doesn't work
    // anymore.
    //
    // We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would
    // do that, but it does, and `exit 0` helps.
    let additional_command = PathBuf::from(&shell)
        .file_name()
        .and_then(|f| f.to_str())
        .and_then(|shell| match shell {
            "fish" => Some("emit fish_prompt;"),
            _ => None,
        });

    let command = format!(
        "cd '{}';{} echo {marker}; /usr/bin/env -0; exit 0;",
        dir.display(),
        additional_command.unwrap_or("")
    );

    let output = smol::process::Command::new(&shell)
        .args(["-i", "-c", &command])
        .output()
        .await
        .context("failed to spawn login shell to source login environment variables")?;

    anyhow::ensure!(
        output.status.success(),
        "login shell exited with error {:?}",
        output.status
    );

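    // Everything after the marker is the output of `/usr/bin/env -0`: parse
    // the NUL-separated `KEY=VALUE` entries into a map.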
    let stdout = String::from_utf8_lossy(&output.stdout);
    let env_output_start = stdout.find(marker).ok_or_else(|| {
        anyhow!(
            "failed to parse output of `env` command in login shell: {}",
            stdout
        )
    })?;

    let mut parsed_env = HashMap::default();
    let env_output = &stdout[env_output_start + marker.len()..];
    for line in env_output.split_terminator('\0') {
        if let Some(separator_index) = line.find('=') {
            let key = line[..separator_index].to_string();
            let value = line[separator_index + 1..].to_string();
            parsed_env.insert(key, value);
        }
    }
    Ok(parsed_env)
}