mod db;
pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
    MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
};
use language::{
    point_to_lsp,
    proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
    range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CharKind, CodeAction, CodeLabel,
    Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _,
    Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt,
    Operation, Patch, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{
    DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer, LanguageString,
    MarkedString,
};
use lsp_command::*;
use parking_lot::Mutex;
use postage::stream::Stream;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use serde::Serialize;
use settings::Settings;
use sha2::{Digest, Sha256};
use similar::{ChangeTag, TextDiff};
use std::{
    cell::RefCell,
    cmp::{self, Ordering},
    convert::TryInto,
    ffi::OsString,
    hash::Hash,
    mem,
    ops::Range,
    os::unix::{ffi::OsStrExt, prelude::OsStringExt},
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Arc,
    },
    time::Instant,
};
use thiserror::Error;
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use db::Db;
pub use fs::*;
pub use worktree::*;

pub trait Item: Entity {
    fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
}

pub struct ProjectStore {
    db: Arc<Db>,
    projects: Vec<WeakModelHandle<Project>>,
}

// Language server state is stored across 3 collections:
//     language_servers =>
//         a mapping from unique server id to LanguageServerState, which can either be a task for a
//         server in the process of starting, or a running server with adapter and language server arcs
//     language_server_ids => a mapping from worktree id and server name to the unique server id
//     language_server_statuses => a mapping from unique server id to the current server status
//
// Multiple worktrees can map to the same language server, for example when you jump to the definition
// of a file in the standard library. So language_server_ids is used to look up which server is active
// for a given worktree and language server name.
//
// When starting a language server, the id map is first checked to make sure a server isn't already available
// for that worktree. If there is one, it finishes early. Otherwise, a new id is allocated and
// the Starting variant of LanguageServerState is stored in the language_servers map.
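//
// A minimal sketch of the lookup flow described above (a hypothetical helper written for
// illustration only, not part of this file's API): given a worktree id and an adapter name,
// resolve the running server, if any.
//
//     fn running_server_for(
//         project: &Project,
//         worktree_id: WorktreeId,
//         name: LanguageServerName,
//     ) -> Option<Arc<LanguageServer>> {
//         // Map (worktree, server name) to the unique server id, then look up its state.
//         let id = project.language_server_ids.get(&(worktree_id, name))?;
//         match project.language_servers.get(id)? {
//             LanguageServerState::Running { server, .. } => Some(server.clone()),
//             LanguageServerState::Starting(_) => None,
//         }
//     }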
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntryId>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<usize, LanguageServerState>,
    language_server_ids: HashMap<(WorktreeId, LanguageServerName), usize>,
    language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
    language_server_settings: Arc<Mutex<serde_json::Value>>,
    last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
    next_language_server_id: usize,
    client: Arc<client::Client>,
    next_entry_id: Arc<AtomicUsize>,
    next_diagnostic_group_id: usize,
    user_store: ModelHandle<UserStore>,
    project_store: ModelHandle<ProjectStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    client_subscriptions: Vec<client::Subscription>,
    _subscriptions: Vec<gpui::Subscription>,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
    nonce: u128,
    initialized_persistent_state: bool,
}

#[derive(Error, Debug)]
pub enum JoinProjectError {
    #[error("host declined join request")]
    HostDeclined,
    #[error("host closed the project")]
    HostClosedProject,
    #[error("host went offline")]
    HostWentOffline,
    #[error("{0}")]
    Other(#[from] anyhow::Error),
}

enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        online_tx: watch::Sender<bool>,
        online_rx: watch::Receiver<bool>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
        _detect_unshare_task: Task<Option<()>>,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntryId>),
    WorktreeAdded,
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted {
        language_server_id: usize,
    },
    DiskBasedDiagnosticsFinished {
        language_server_id: usize,
    },
    DiagnosticsUpdated {
        path: ProjectPath,
        language_server_id: usize,
    },
    RemoteIdChanged(Option<u64>),
    CollaboratorLeft(PeerId),
    ContactRequestedJoin(Arc<User>),
    ContactCancelledJoinRequest(Arc<User>),
}

pub enum LanguageServerState {
    Starting(Task<Option<Arc<LanguageServer>>>),
    Running {
        adapter: Arc<dyn LspAdapter>,
        server: Arc<LanguageServer>,
    },
}

#[derive(Serialize)]
pub struct LanguageServerStatus {
    pub name: String,
    pub pending_work: BTreeMap<String, LanguageServerProgress>,
    pub has_pending_diagnostic_updates: bool,
    progress_tokens: HashSet<String>,
}

#[derive(Clone, Debug, Serialize)]
pub struct LanguageServerProgress {
    pub message: Option<String>,
    pub percentage: Option<usize>,
    #[serde(skip_serializing)]
    pub last_update_at: Instant,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
pub struct DiagnosticSummary {
    pub language_server_id: usize,
    pub error_count: usize,
    pub warning_count: usize,
}

#[derive(Debug, Clone)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug, Clone)]
pub struct LocationLink {
    pub origin: Option<Location>,
    pub target: Location,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_server_name: LanguageServerName,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Clone, Debug, PartialEq)]
pub struct HoverBlock {
    pub text: String,
    pub language: Option<String>,
}

impl HoverBlock {
    fn try_new(marked_string: MarkedString) -> Option<Self> {
        let result = match marked_string {
            MarkedString::LanguageString(LanguageString { language, value }) => HoverBlock {
                text: value,
                language: Some(language),
            },
            MarkedString::String(text) => HoverBlock {
                text,
                language: None,
            },
        };
        if result.text.is_empty() {
            None
        } else {
            Some(result)
        }
    }
}

#[derive(Debug)]
pub struct Hover {
    pub contents: Vec<HoverBlock>,
    pub range: Option<Range<language::Anchor>>,
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(
        language_server_id: usize,
        diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>,
    ) -> Self {
        let mut this = Self {
            language_server_id,
            error_count: 0,
            warning_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn is_empty(&self) -> bool {
        self.error_count == 0 && self.warning_count == 0
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            language_server_id: self.language_server_id as u64,
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
pub struct ProjectEntryId(usize);

impl ProjectEntryId {
    pub const MAX: Self = Self(usize::MAX);

    pub fn new(counter: &AtomicUsize) -> Self {
        Self(counter.fetch_add(1, SeqCst))
    }

    pub fn from_proto(id: u64) -> Self {
        Self(id as usize)
    }

    pub fn to_proto(&self) -> u64 {
        self.0 as u64
    }

    pub fn to_usize(&self) -> usize {
        self.0
    }
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_model_message_handler(Self::handle_request_join_project);
        client.add_model_message_handler(Self::handle_add_collaborator);
        client.add_model_message_handler(Self::handle_buffer_reloaded);
        client.add_model_message_handler(Self::handle_buffer_saved);
        client.add_model_message_handler(Self::handle_start_language_server);
        client.add_model_message_handler(Self::handle_update_language_server);
        client.add_model_message_handler(Self::handle_remove_collaborator);
        client.add_model_message_handler(Self::handle_join_project_request_cancelled);
        client.add_model_message_handler(Self::handle_update_project);
        client.add_model_message_handler(Self::handle_unregister_project);
        client.add_model_message_handler(Self::handle_project_unshared);
        client.add_model_message_handler(Self::handle_update_buffer_file);
        client.add_model_message_handler(Self::handle_update_buffer);
        client.add_model_message_handler(Self::handle_update_diagnostic_summary);
        client.add_model_message_handler(Self::handle_update_worktree);
        client.add_model_request_handler(Self::handle_create_project_entry);
        client.add_model_request_handler(Self::handle_rename_project_entry);
        client.add_model_request_handler(Self::handle_copy_project_entry);
        client.add_model_request_handler(Self::handle_delete_project_entry);
        client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_model_request_handler(Self::handle_apply_code_action);
        client.add_model_request_handler(Self::handle_reload_buffers);
        client.add_model_request_handler(Self::handle_format_buffers);
        client.add_model_request_handler(Self::handle_get_code_actions);
        client.add_model_request_handler(Self::handle_get_completions);
        client.add_model_request_handler(Self::handle_lsp_command::<GetHover>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_model_request_handler(Self::handle_search_project);
        client.add_model_request_handler(Self::handle_get_project_symbols);
        client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_model_request_handler(Self::handle_open_buffer_by_id);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_save_buffer);
    }

    pub fn local(
        online: bool,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (online_tx, online_rx) = watch::channel_with(online);
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let status_rx = client.clone().status();
                let online_rx = online_rx.clone();
                move |this, mut cx| async move {
                    let mut stream = Stream::map(status_rx.clone(), drop)
                        .merge(Stream::map(online_rx.clone(), drop));
                    while stream.recv().await.is_some() {
                        let this = this.upgrade(&cx)?;
                        if status_rx.borrow().is_connected() && *online_rx.borrow() {
                            this.update(&mut cx, |this, cx| this.register(cx))
                                .await
                                .log_err()?;
                        } else {
                            this.update(&mut cx, |this, cx| this.unregister(cx))
                                .await
                                .log_err();
                        }
                    }
                    None
                }
            });

            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                buffer_snapshots: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    online_tx,
                    online_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                client_subscriptions: Vec::new(),
                _subscriptions: vec![cx.observe_global::<Settings, _>(Self::on_settings_changed)],
                active_entry: None,
                languages,
                client,
                user_store,
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_statuses: Default::default(),
                last_workspace_edits_by_language_server: Default::default(),
                language_server_settings: Default::default(),
                next_language_server_id: 0,
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        project_store: ModelHandle<ProjectStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        mut cx: AsyncAppContext,
    ) -> Result<ModelHandle<Self>, JoinProjectError> {
        client.authenticate_and_connect(true, &cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
            proto::join_project_response::Variant::Accept(response) => response,
            proto::join_project_response::Variant::Decline(decline) => {
                match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
                    Some(proto::join_project_response::decline::Reason::Declined) => {
                        Err(JoinProjectError::HostDeclined)?
                    }
                    Some(proto::join_project_response::decline::Reason::Closed) => {
                        Err(JoinProjectError::HostClosedProject)?
                    }
                    Some(proto::join_project_response::decline::Reason::WentOffline) => {
                        Err(JoinProjectError::HostWentOffline)?
                    }
                    None => Err(anyhow!("missing decline reason"))?,
                }
            }
        };

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx: &mut ModelContext<Self>| {
            let handle = cx.weak_handle();
            project_store.update(cx, |store, cx| store.add_project(handle, cx));

            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                project_store,
                fs,
                next_entry_id: Default::default(),
                next_diagnostic_group_id: Default::default(),
                client_subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                _subscriptions: Default::default(),
                client: client.clone(),
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                    _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
                        async move {
                            let mut status = client.status();
                            let is_connected =
                                status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any future change of the status means we momentarily disconnected.
                            if !is_connected || status.next().await.is_some() {
                                if let Some(this) = this.upgrade(&cx) {
                                    this.update(&mut cx, |this, cx| this.removed_from_project(cx))
                                }
                            }
                            Ok(())
                        }
                        .log_err()
                    }),
                },
                language_servers: Default::default(),
                language_server_ids: Default::default(),
                language_server_settings: Default::default(),
                language_server_statuses: response
                    .language_servers
                    .into_iter()
                    .map(|server| {
                        (
                            server.id as usize,
                            LanguageServerStatus {
                                name: server.name,
                                pending_work: Default::default(),
                                has_pending_diagnostic_updates: false,
                                progress_tokens: Default::default(),
                            },
                        )
                    })
                    .collect(),
                last_workspace_edits_by_language_server: Default::default(),
                next_language_server_id: 0,
                opened_buffers: Default::default(),
                buffer_snapshots: Default::default(),
                nonce: StdRng::from_entropy().gen(),
                initialized_persistent_state: false,
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(&mut cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub async fn test(
        fs: Arc<dyn Fs>,
        root_paths: impl IntoIterator<Item = &Path>,
        cx: &mut gpui::TestAppContext,
    ) -> ModelHandle<Project> {
        if !cx.read(|cx| cx.has_global::<Settings>()) {
            cx.update(|cx| cx.set_global(Settings::test(cx)));
        }

        let languages = Arc::new(LanguageRegistry::test());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
        let project = cx.update(|cx| {
            Project::local(true, client, user_store, project_store, languages, fs, cx)
        });
        for path in root_paths {
            let (tree, _) = project
                .update(cx, |project, cx| {
                    project.find_or_create_local_worktree(path, true, cx)
                })
                .await
                .unwrap();
            tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
                .await;
        }
        project
    }

    pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let online_by_default = cx.global::<Settings>().projects_online_by_default;
        let read_online = cx.background().spawn(async move {
            let values = db.read(keys)?;
            anyhow::Ok(
                values
                    .into_iter()
                    .all(|e| e.map_or(online_by_default, |e| e == [true as u8])),
            )
        });
        cx.spawn(|this, mut cx| async move {
            let online = read_online.await.log_err().unwrap_or(false);
            this.update(&mut cx, |this, cx| {
                this.initialized_persistent_state = true;
                if let ProjectClientState::Local { online_tx, .. } = &mut this.client_state {
                    let mut online_tx = online_tx.borrow_mut();
                    if *online_tx != online {
                        *online_tx = online;
                        drop(online_tx);
                        this.metadata_changed(false, cx);
                    }
                }
            });
            Ok(())
        })
    }

    fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.is_remote() || !self.initialized_persistent_state {
            return Task::ready(Ok(()));
        }

        let db = self.project_store.read(cx).db.clone();
        let keys = self.db_keys_for_online_state(cx);
        let is_online = self.is_online();
        cx.background().spawn(async move {
            let value = &[is_online as u8];
            db.write(keys.into_iter().map(|key| (key, value)))
        })
    }

    fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
        let settings = cx.global::<Settings>();

        let mut language_servers_to_start = Vec::new();
        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                let buffer = buffer.read(cx);
                if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language())
                {
                    if settings.enable_language_server(Some(&language.name())) {
                        let worktree = file.worktree.read(cx);
                        language_servers_to_start.push((
                            worktree.id(),
                            worktree.as_local().unwrap().abs_path().clone(),
                            language.clone(),
                        ));
                    }
                }
            }
        }

        let mut language_servers_to_stop = Vec::new();
        for language in self.languages.to_vec() {
            if let Some(lsp_adapter) = language.lsp_adapter() {
                if !settings.enable_language_server(Some(&language.name())) {
                    let lsp_name = lsp_adapter.name();
                    for (worktree_id, started_lsp_name) in self.language_server_ids.keys() {
                        if lsp_name == *started_lsp_name {
                            language_servers_to_stop.push((*worktree_id, started_lsp_name.clone()));
                        }
                    }
                }
            }
        }

        // Stop all newly-disabled language servers.
        for (worktree_id, adapter_name) in language_servers_to_stop {
            self.stop_language_server(worktree_id, adapter_name, cx)
                .detach();
        }

        // Start all the newly-enabled language servers.
        for (worktree_id, worktree_path, language) in language_servers_to_start {
            self.start_language_server(worktree_id, worktree_path, language, cx);
        }

        cx.notify();
    }

    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    pub fn client(&self) -> Arc<Client> {
        self.client.clone()
    }

    pub fn user_store(&self) -> ModelHandle<UserStore> {
        self.user_store.clone()
    }

    pub fn project_store(&self) -> ModelHandle<ProjectStore> {
        self.project_store.clone()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    pub fn set_online(&mut self, online: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { online_tx, .. } = &mut self.client_state {
            let mut online_tx = online_tx.borrow_mut();
            if *online_tx != online {
                *online_tx = online;
                drop(online_tx);
                self.metadata_changed(true, cx);
            }
        }
    }

    pub fn is_online(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { online_rx, .. } => *online_rx.borrow(),
            ProjectClientState::Remote { .. } => true,
        }
    }

    fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        self.unshared(cx);
        if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
            if let Some(remote_id) = *remote_id_rx.borrow() {
                let request = self.client.request(proto::UnregisterProject {
                    project_id: remote_id,
                });
                return cx.spawn(|this, mut cx| async move {
                    let response = request.await;

                    // Unregistering the project causes the server to send out a
                    // contact update removing this project from the host's list
                    // of online projects. Wait until this contact update has been
                    // processed before clearing out this project's remote id, so
                    // that there is no moment where this project appears in the
                    // contact metadata and *also* has no remote id.
                    this.update(&mut cx, |this, cx| {
                        this.user_store()
                            .update(cx, |store, _| store.contact_updates_done())
                    })
                    .await;

                    this.update(&mut cx, |this, cx| {
                        if let ProjectClientState::Local { remote_id_tx, .. } =
                            &mut this.client_state
                        {
                            *remote_id_tx.borrow_mut() = None;
                        }
                        this.client_subscriptions.clear();
                        this.metadata_changed(false, cx);
                    });
                    response.map(drop)
                });
            }
        }
        Task::ready(Ok(()))
    }

    fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
            if remote_id_rx.borrow().is_some() {
                return Task::ready(Ok(()));
            }
        }

        let response = self.client.request(proto::RegisterProject {});
        cx.spawn(|this, mut cx| async move {
            let remote_id = response.await?.project_id;
            this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
                    *remote_id_tx.borrow_mut() = Some(remote_id);
                }

                this.metadata_changed(false, cx);
                cx.emit(Event::RemoteIdChanged(Some(remote_id)));
                this.client_subscriptions
                    .push(this.client.add_model_for_remote_entity(remote_id, cx));
                Ok(())
            })
        })
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn shared_remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local {
                remote_id_rx,
                is_shared,
                ..
            } => {
                if *is_shared {
                    *remote_id_rx.borrow()
                } else {
                    None
                }
            }
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local {
            remote_id_rx,
            online_rx,
            ..
        } = &self.client_state
        {
            if let (Some(project_id), true) = (*remote_id_rx.borrow(), *online_rx.borrow()) {
                self.client
                    .send(proto::UpdateProject {
                        project_id,
                        worktrees: self
                            .worktrees
                            .iter()
                            .filter_map(|worktree| {
                                worktree.upgrade(&cx).map(|worktree| {
                                    worktree.read(cx).as_local().unwrap().metadata_proto()
                                })
                            })
                            .collect(),
                    })
                    .log_err();
            }

            self.project_store.update(cx, |_, cx| cx.notify());
            if persist {
                self.persist_state(cx).detach_and_log_err(cx);
            }
            cx.notify();
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
        self.visible_worktrees(cx)
            .map(|tree| tree.read(cx).root_name())
    }

    fn db_keys_for_online_state(&self, cx: &AppContext) -> Vec<String> {
        self.worktrees
            .iter()
            .filter_map(|worktree| {
                let worktree = worktree.upgrade(&cx)?.read(cx);
                if worktree.is_visible() {
                    Some(format!(
                        "project-path-online:{}",
                        worktree.as_local().unwrap().abs_path().to_string_lossy()
                    ))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn worktree_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).contains_entry(entry_id))
    }

    pub fn worktree_id_for_entry(
        &self,
        entry_id: ProjectEntryId,
        cx: &AppContext,
    ) -> Option<WorktreeId> {
        self.worktree_for_entry(entry_id, cx)
            .map(|worktree| worktree.read(cx).id())
    }

    pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
        paths.iter().all(|path| self.contains_path(&path, cx))
    }

    pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
        for worktree in self.worktrees(cx) {
            let worktree = worktree.read(cx).as_local();
            if worktree.map_or(false, |w| w.contains_abs_path(path)) {
                return true;
            }
        }
        false
    }

    pub fn create_entry(
        &mut self,
        project_path: impl Into<ProjectPath>,
        is_directory: bool,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let project_path = project_path.into();
        let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
        if self.is_local() {
            Some(worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .create_entry(project_path.path, is_directory, cx)
            }))
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CreateProjectEntry {
                        worktree_id: project_path.worktree_id.to_proto(),
                        project_id,
                        path: project_path.path.as_os_str().as_bytes().to_vec(),
                        is_directory,
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn copy_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .copy_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::CopyProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn rename_entry(
        &mut self,
        entry_id: ProjectEntryId,
        new_path: impl Into<Arc<Path>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<Entry>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        let new_path = new_path.into();
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree
                    .as_local_mut()
                    .unwrap()
                    .rename_entry(entry_id, new_path, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();

            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::RenameProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                        new_path: new_path.as_os_str().as_bytes().to_vec(),
                    })
                    .await?;
                let entry = response
                    .entry
                    .ok_or_else(|| anyhow!("missing entry in response"))?;
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_remote().unwrap().insert_entry(
                            entry,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    pub fn delete_entry(
        &mut self,
        entry_id: ProjectEntryId,
        cx: &mut ModelContext<Self>,
    ) -> Option<Task<Result<()>>> {
        let worktree = self.worktree_for_entry(entry_id, cx)?;
        if self.is_local() {
            worktree.update(cx, |worktree, cx| {
                worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
            })
        } else {
            let client = self.client.clone();
            let project_id = self.remote_id().unwrap();
            Some(cx.spawn_weak(|_, mut cx| async move {
                let response = client
                    .request(proto::DeleteProjectEntry {
                        project_id,
                        entry_id: entry_id.to_proto(),
                    })
                    .await?;
                worktree
                    .update(&mut cx, move |worktree, cx| {
                        worktree.as_remote().unwrap().delete_entry(
                            entry_id,
                            response.worktree_scan_id as usize,
                            cx,
                        )
                    })
                    .await
            }))
        }
    }

    fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let project_id;
        if let ProjectClientState::Local {
            remote_id_rx,
            is_shared,
            ..
        } = &mut self.client_state
        {
            if *is_shared {
                return Task::ready(Ok(()));
            }
            *is_shared = true;
            if let Some(id) = *remote_id_rx.borrow() {
                project_id = id;
            } else {
                return Task::ready(Err(anyhow!("project hasn't been registered")));
            }
        } else {
            return Task::ready(Err(anyhow!("can't share a remote project")));
        };

        for open_buffer in self.opened_buffers.values_mut() {
            match open_buffer {
                OpenBuffer::Strong(_) => {}
                OpenBuffer::Weak(buffer) => {
                    if let Some(buffer) = buffer.upgrade(cx) {
                        *open_buffer = OpenBuffer::Strong(buffer);
                    }
                }
                OpenBuffer::Loading(_) => unreachable!(),
            }
        }

        for worktree_handle in self.worktrees.iter_mut() {
            match worktree_handle {
                WorktreeHandle::Strong(_) => {}
                WorktreeHandle::Weak(worktree) => {
                    if let Some(worktree) = worktree.upgrade(cx) {
                        *worktree_handle = WorktreeHandle::Strong(worktree);
                    }
                }
            }
        }

        let mut tasks = Vec::new();
        for worktree in self.worktrees(cx).collect::<Vec<_>>() {
            worktree.update(cx, |worktree, cx| {
                let worktree = worktree.as_local_mut().unwrap();
                tasks.push(worktree.share(project_id, cx));
            });
        }

        for (server_id, status) in &self.language_server_statuses {
            self.client
                .send(proto::StartLanguageServer {
                    project_id,
                    server: Some(proto::LanguageServer {
                        id: *server_id as u64,
                        name: status.name.clone(),
                    }),
                })
                .log_err();
        }

        cx.spawn(|this, mut cx| async move {
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    fn unshared(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
            if !*is_shared {
                return;
            }

            *is_shared = false;
            self.collaborators.clear();
            self.shared_buffers.clear();
            for worktree_handle in self.worktrees.iter_mut() {
                if let WorktreeHandle::Strong(worktree) = worktree_handle {
                    let is_visible = worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                        worktree.is_visible()
                    });
                    if !is_visible {
                        *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                    }
                }
            }

            for open_buffer in self.opened_buffers.values_mut() {
                match open_buffer {
                    OpenBuffer::Strong(buffer) => {
                        *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                    }
                    _ => {}
                }
            }

            cx.notify();
        } else {
            log::error!("attempted to unshare a remote project");
        }
    }

    pub fn respond_to_join_request(
        &mut self,
        requester_id: u64,
        allow: bool,
        cx: &mut ModelContext<Self>,
    ) {
        if let Some(project_id) = self.remote_id() {
            let share = self.share(cx);
            let client = self.client.clone();
            cx.foreground()
                .spawn(async move {
                    share.await?;
                    client.send(proto::RespondToJoinProjectRequest {
                        requester_id,
                        project_id,
                        allow,
                    })
                })
                .detach_and_log_err(cx);
        }
    }

    fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Remote {
            sharing_has_stopped,
            ..
        } = &mut self.client_state
        {
            *sharing_has_stopped = true;
            self.collaborators.clear();
            for worktree in &self.worktrees {
                if let Some(worktree) = worktree.upgrade(cx) {
                    worktree.update(cx, |worktree, _| {
                        if let Some(worktree) = worktree.as_remote_mut() {
                            worktree.disconnected_from_host();
                        }
                    });
                }
            }
            cx.notify();
        }
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

    pub fn create_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<ModelHandle<Buffer>> {
        if self.is_remote() {
            return Err(anyhow!("creating buffers as a guest is not supported yet"));
        }

        let buffer = cx.add_model(|cx| {
            Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
        });
        self.register_buffer(&buffer, cx)?;
        Ok(buffer)
    }

    pub fn open_path(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
        let task = self.open_buffer(path, cx);
        cx.spawn_weak(|_, cx| async move {
            let buffer = task.await?;
            let project_entry_id = buffer
                .read_with(&cx, |buffer, cx| {
                    File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
                })
                .ok_or_else(|| anyhow!("no project entry"))?;
            Ok((project_entry_id, buffer.into()))
        })
    }

    pub fn open_local_buffer(
        &mut self,
        abs_path: impl AsRef<Path>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
            self.open_buffer((worktree.read(cx).id(), relative_path), cx)
        } else {
            Task::ready(Err(anyhow!("no such path")))
        }
    }

    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer_internal(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer_internal(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        language_server_id: usize,
        language_server_name: LanguageServerName,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_server_ids.insert(
                        (worktree.read(cx).id(), language_server_name),
                        language_server_id,
                    );
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn open_buffer_by_id(
        &mut self,
        id: u64,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if let Some(buffer) = self.buffer_for_id(id, cx) {
            Task::ready(Ok(buffer))
        } else if self.is_local() {
            Task::ready(Err(anyhow!("buffer {} does not exist", id)))
        } else if let Some(project_id) = self.remote_id() {
            let request = self
                .client
                .request(proto::OpenBufferById { project_id, id });
            cx.spawn(|this, mut cx| async move {
                let buffer = request
                    .await?
                    .buffer
                    .ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
        }
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        let old_path =
            File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
        cx.spawn(|this, mut cx| async move {
            if let Some(old_path) = old_path {
                this.update(&mut cx, |this, cx| {
                    this.unregister_buffer_from_language_server(&buffer, old_path, cx);
                });
            }
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, cx);
                this.register_buffer_with_language_server(&buffer, cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();

        self.assign_language_to_buffer(buffer, cx);
        self.register_buffer_with_language_server(buffer, cx);
        cx.observe_release(buffer, |this, buffer, cx| {
            if let Some(file) = File::from_dyn(buffer.file()) {
                if file.is_local() {
                    let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                    if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
                        server
                            .notify::<lsp::notification::DidCloseTextDocument>(
                                lsp::DidCloseTextDocumentParams {
                                    text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
                                },
                            )
                            .log_err();
                    }
                }
            }
        })
        .detach();

        Ok(())
    }

    fn register_buffer_with_language_server(
        &mut self,
        buffer_handle: &ModelHandle<Buffer>,
        cx: &mut ModelContext<Self>,
    ) {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        if let Some(file) = File::from_dyn(buffer.file()) {
            if file.is_local() {
                let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();

                let mut language_server = None;
                let mut language_id = None;
                if let Some(language) = buffer.language() {
                    let worktree_id = file.worktree_id(cx);
                    if let Some(adapter) = language.lsp_adapter() {
                        language_id = adapter.id_for_language(language.name().as_ref());
                        language_server = self
                            .language_server_ids
                            .get(&(worktree_id, adapter.name()))
                            .and_then(|id| self.language_servers.get(id))
                            .and_then(|server_state| {
                                if let LanguageServerState::Running { server, .. } = server_state {
                                    Some(server.clone())
                                } else {
                                    None
                                }
                            });
                    }
                }

                if let Some(local_worktree) = file.worktree.read(cx).as_local() {
                    if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
                        self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
                            .log_err();
                    }
                }

                if let Some(server) = language_server {
                    server
                        .notify::<lsp::notification::DidOpenTextDocument>(
                            lsp::DidOpenTextDocumentParams {
                                text_document: lsp::TextDocumentItem::new(
                                    uri,
                                    language_id.unwrap_or_default(),
                                    0,
                                    initial_snapshot.text(),
                                ),
                            }
                            .clone(),
                        )
                        .log_err();
                    buffer_handle.update(cx, |buffer, cx| {
                        buffer.set_completion_triggers(
                            server
                                .capabilities()
                                .completion_provider
                                .as_ref()
                                .and_then(|provider| provider.trigger_characters.clone())
                                .unwrap_or(Vec::new()),
                            cx,
                        )
                    });
                    self.buffer_snapshots
                        .insert(buffer_id, vec![(0, initial_snapshot)]);
                }
            }
        }
    }

    fn unregister_buffer_from_language_server(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        old_path: PathBuf,
        cx: &mut ModelContext<Self>,
    ) {
        buffer.update(cx, |buffer, cx| {
            buffer.update_diagnostics(Default::default(), cx);
            self.buffer_snapshots.remove(&buffer.remote_id());
            if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
                language_server
                    .notify::<lsp::notification::DidCloseTextDocument>(
                        lsp::DidCloseTextDocumentParams {
                            text_document: lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(old_path).unwrap(),
                            ),
                        },
                    )
                    .log_err();
            }
        });
    }

    fn on_buffer_event(
        &mut self,
        buffer: ModelHandle<Buffer>,
        event: &BufferEvent,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        match event {
            BufferEvent::Operation(operation) => {
                if let Some(project_id) = self.shared_remote_id() {
                    let request = self.client.request(proto::UpdateBuffer {
                        project_id,
                        buffer_id: buffer.read(cx).remote_id(),
                        operations: vec![language::proto::serialize_operation(&operation)],
                    });
                    cx.background().spawn(request).detach_and_log_err(cx);
                } else if let Some(project_id) = self.remote_id() {
                    let _ = self
                        .client
                        .send(proto::RegisterProjectActivity { project_id });
                }
            }
            BufferEvent::Edited { .. } => {
                let language_server = self
                    .language_server_for_buffer(buffer.read(cx), cx)
                    .map(|(_, server)| server.clone())?;
                let buffer = buffer.read(cx);
                let file = File::from_dyn(buffer.file())?;
                let abs_path = file.as_local()?.abs_path(cx);
                let uri = lsp::Url::from_file_path(abs_path).unwrap();
                let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
                let (version, prev_snapshot) = buffer_snapshots.last()?;
                let next_snapshot = buffer.text_snapshot();
                let next_version = version + 1;

                let content_changes = buffer
                    .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
                    .map(|edit| {
                        let edit_start = edit.new.start.0;
                        let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
                        let new_text = next_snapshot
                            .text_for_range(edit.new.start.1..edit.new.end.1)
                            .collect();
                        lsp::TextDocumentContentChangeEvent {
                            range: Some(lsp::Range::new(
                                point_to_lsp(edit_start),
                                point_to_lsp(edit_end),
                            )),
                            range_length: None,
                            text: new_text,
                        }
                    })
                    .collect();

                buffer_snapshots.push((next_version, next_snapshot));

                language_server
                    .notify::<lsp::notification::DidChangeTextDocument>(
                        lsp::DidChangeTextDocumentParams {
                            text_document: lsp::VersionedTextDocumentIdentifier::new(
                                uri,
                                next_version,
                            ),
                            content_changes,
                        },
                    )
                    .log_err();
            }
            BufferEvent::Saved => {
                let file = File::from_dyn(buffer.read(cx).file())?;
                let worktree_id = file.worktree_id(cx);
                let abs_path = file.as_local()?.abs_path(cx);
                let text_document = lsp::TextDocumentIdentifier {
                    uri: lsp::Url::from_file_path(abs_path).unwrap(),
                };

                for (_, server) in self.language_servers_for_worktree(worktree_id) {
                    server
                        .notify::<lsp::notification::DidSaveTextDocument>(
                            lsp::DidSaveTextDocumentParams {
                                text_document: text_document.clone(),
                                text: None,
                            },
                        )
                        .log_err();
                }

                // After saving a buffer, simulate disk-based diagnostics being finished for languages
                // that don't support a disk-based progress token.
                let (lsp_adapter, language_server) =
                    self.language_server_for_buffer(buffer.read(cx), cx)?;
                if lsp_adapter
                    .disk_based_diagnostics_progress_token()
                    .is_none()
                {
                    let server_id = language_server.server_id();
                    self.disk_based_diagnostics_finished(server_id, cx);
                    self.broadcast_language_server_update(
                        server_id,
                        proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
                            proto::LspDiskBasedDiagnosticsUpdated {},
                        ),
                    );
                }
            }
            _ => {}
        }

        None
    }

    fn language_servers_for_worktree(
        &self,
        worktree_id: WorktreeId,
    ) -> impl Iterator<Item = (&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
        self.language_server_ids
            .iter()
            .filter_map(move |((language_server_worktree_id, _), id)| {
                if *language_server_worktree_id == worktree_id {
                    if let Some(LanguageServerState::Running { adapter, server }) =
1943 self.language_servers.get(id)
1944 {
1945 return Some((adapter, server));
1946 }
1947 }
1948 None
1949 })
1950 }
1951
1952 fn assign_language_to_buffer(
1953 &mut self,
1954 buffer: &ModelHandle<Buffer>,
1955 cx: &mut ModelContext<Self>,
1956 ) -> Option<()> {
1957 // If the buffer has a language, set it and start the language server if we haven't already.
1958 let full_path = buffer.read(cx).file()?.full_path(cx);
1959 let language = self.languages.select_language(&full_path)?;
1960 buffer.update(cx, |buffer, cx| {
1961 buffer.set_language(Some(language.clone()), cx);
1962 });
1963
1964 let file = File::from_dyn(buffer.read(cx).file())?;
1965 let worktree = file.worktree.read(cx).as_local()?;
1966 let worktree_id = worktree.id();
1967 let worktree_abs_path = worktree.abs_path().clone();
1968 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1969
1970 None
1971 }
1972
1973 fn start_language_server(
1974 &mut self,
1975 worktree_id: WorktreeId,
1976 worktree_path: Arc<Path>,
1977 language: Arc<Language>,
1978 cx: &mut ModelContext<Self>,
1979 ) {
1980 if !cx
1981 .global::<Settings>()
1982 .enable_language_server(Some(&language.name()))
1983 {
1984 return;
1985 }
1986
1987 let adapter = if let Some(adapter) = language.lsp_adapter() {
1988 adapter
1989 } else {
1990 return;
1991 };
1992 let key = (worktree_id, adapter.name());
1993
1994 self.language_server_ids
1995 .entry(key.clone())
1996 .or_insert_with(|| {
1997 let server_id = post_inc(&mut self.next_language_server_id);
1998 let language_server = self.languages.start_language_server(
1999 server_id,
2000 language.clone(),
2001 worktree_path,
2002 self.client.http_client(),
2003 cx,
2004 );
2005 self.language_servers.insert(
2006 server_id,
2007 LanguageServerState::Starting(cx.spawn_weak(|this, mut cx| async move {
2008 let language_server = language_server?.await.log_err()?;
2009 let language_server = language_server
2010 .initialize(adapter.initialization_options())
2011 .await
2012 .log_err()?;
2013 let this = this.upgrade(&cx)?;
2014 let disk_based_diagnostics_progress_token =
2015 adapter.disk_based_diagnostics_progress_token();
2016
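// Route diagnostics published by the server back into the project, so they can be
// attached to open buffers and to the worktree's entries.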
2017 language_server
2018 .on_notification::<lsp::notification::PublishDiagnostics, _>({
2019 let this = this.downgrade();
2020 let adapter = adapter.clone();
2021 move |params, mut cx| {
2022 if let Some(this) = this.upgrade(&cx) {
2023 this.update(&mut cx, |this, cx| {
2024 this.on_lsp_diagnostics_published(
2025 server_id, params, &adapter, cx,
2026 );
2027 });
2028 }
2029 }
2030 })
2031 .detach();
2032
2033 language_server
2034 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
2035 let settings = this.read_with(&cx, |this, _| {
2036 this.language_server_settings.clone()
2037 });
2038 move |params, _| {
2039 let settings = settings.lock().clone();
2040 async move {
2041 Ok(params
2042 .items
2043 .into_iter()
2044 .map(|item| {
2045 if let Some(section) = &item.section {
2046 settings
2047 .get(section)
2048 .cloned()
2049 .unwrap_or(serde_json::Value::Null)
2050 } else {
2051 settings.clone()
2052 }
2053 })
2054 .collect())
2055 }
2056 }
2057 })
2058 .detach();
2059
2060 // Even though we don't have handling for these requests, respond to them to
2061 // avoid stalling any language server like `gopls`, which waits for a response
2062 // to these requests when initializing.
2063 language_server
2064 .on_request::<lsp::request::WorkDoneProgressCreate, _, _>({
2065 let this = this.downgrade();
2066 move |params, mut cx| async move {
2067 if let Some(this) = this.upgrade(&cx) {
2068 this.update(&mut cx, |this, _| {
2069 if let Some(status) =
2070 this.language_server_statuses.get_mut(&server_id)
2071 {
2072 if let lsp::NumberOrString::String(token) =
2073 params.token
2074 {
2075 status.progress_tokens.insert(token);
2076 }
2077 }
2078 });
2079 }
2080 Ok(())
2081 }
2082 })
2083 .detach();
2084 language_server
2085 .on_request::<lsp::request::RegisterCapability, _, _>(|_, _| async {
2086 Ok(())
2087 })
2088 .detach();
2089
2090 language_server
2091 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
2092 let this = this.downgrade();
2093 let adapter = adapter.clone();
2094 let language_server = language_server.clone();
2095 move |params, cx| {
2096 Self::on_lsp_workspace_edit(
2097 this,
2098 params,
2099 server_id,
2100 adapter.clone(),
2101 language_server.clone(),
2102 cx,
2103 )
2104 }
2105 })
2106 .detach();
2107
2108 language_server
2109 .on_notification::<lsp::notification::Progress, _>({
2110 let this = this.downgrade();
2111 move |params, mut cx| {
2112 if let Some(this) = this.upgrade(&cx) {
2113 this.update(&mut cx, |this, cx| {
2114 this.on_lsp_progress(
2115 params,
2116 server_id,
2117 disk_based_diagnostics_progress_token,
2118 cx,
2119 );
2120 });
2121 }
2122 }
2123 })
2124 .detach();
2125
2126 this.update(&mut cx, |this, cx| {
2127 // If the language server for this key no longer matches this server id, don't store
2128 // the server; it will be dropped here, killing the process.
2129 if this
2130 .language_server_ids
2131 .get(&key)
2132 .map(|id| id != &server_id)
2133 .unwrap_or(false)
2134 {
2135 return None;
2136 }
2137
2138 // Update the language_servers collection with the Running variant of LanguageServerState,
2139 // indicating that the server has started and is ready to receive requests.
2140 this.language_servers.insert(
2141 server_id,
2142 LanguageServerState::Running {
2143 adapter: adapter.clone(),
2144 server: language_server.clone(),
2145 },
2146 );
2147 this.language_server_statuses.insert(
2148 server_id,
2149 LanguageServerStatus {
2150 name: language_server.name().to_string(),
2151 pending_work: Default::default(),
2152 has_pending_diagnostic_updates: false,
2153 progress_tokens: Default::default(),
2154 },
2155 );
2156 language_server
2157 .notify::<lsp::notification::DidChangeConfiguration>(
2158 lsp::DidChangeConfigurationParams {
2159 settings: this.language_server_settings.lock().clone(),
2160 },
2161 )
2162 .ok();
2163
2164 if let Some(project_id) = this.shared_remote_id() {
2165 this.client
2166 .send(proto::StartLanguageServer {
2167 project_id,
2168 server: Some(proto::LanguageServer {
2169 id: server_id as u64,
2170 name: language_server.name().to_string(),
2171 }),
2172 })
2173 .log_err();
2174 }
2175
2176 // Tell the language server about every open buffer in the worktree that matches the language.
2177 for buffer in this.opened_buffers.values() {
2178 if let Some(buffer_handle) = buffer.upgrade(cx) {
2179 let buffer = buffer_handle.read(cx);
2180 let file = if let Some(file) = File::from_dyn(buffer.file()) {
2181 file
2182 } else {
2183 continue;
2184 };
2185 let language = if let Some(language) = buffer.language() {
2186 language
2187 } else {
2188 continue;
2189 };
2190 if file.worktree.read(cx).id() != key.0
2191 || language.lsp_adapter().map(|a| a.name())
2192 != Some(key.1.clone())
2193 {
2194 continue;
2195 }
2196
2197 let file = file.as_local()?;
2198 let versions = this
2199 .buffer_snapshots
2200 .entry(buffer.remote_id())
2201 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
2202 let (version, initial_snapshot) = versions.last().unwrap();
2203 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
2204 let language_id =
2205 adapter.id_for_language(language.name().as_ref());
2206 language_server
2207 .notify::<lsp::notification::DidOpenTextDocument>(
2208 lsp::DidOpenTextDocumentParams {
2209 text_document: lsp::TextDocumentItem::new(
2210 uri,
2211 language_id.unwrap_or_default(),
2212 *version,
2213 initial_snapshot.text(),
2214 ),
2215 },
2216 )
2217 .log_err()?;
2218 buffer_handle.update(cx, |buffer, cx| {
2219 buffer.set_completion_triggers(
2220 language_server
2221 .capabilities()
2222 .completion_provider
2223 .as_ref()
2224 .and_then(|provider| {
2225 provider.trigger_characters.clone()
2226 })
2227 .unwrap_or_default(),
2228 cx,
2229 )
2230 });
2231 }
2232 }
2233
2234 cx.notify();
2235 Some(language_server)
2236 })
2237 })),
2238 );
2239
2240 server_id
2241 });
2242 }
2243
2244 // Returns the root path of the stopped server (if any), along with all of the worktrees
2245 // that no longer have a language server as a result.
2246 fn stop_language_server(
2247 &mut self,
2248 worktree_id: WorktreeId,
2249 adapter_name: LanguageServerName,
2250 cx: &mut ModelContext<Self>,
2251 ) -> Task<(Option<PathBuf>, Vec<WorktreeId>)> {
2252 let key = (worktree_id, adapter_name);
2253 if let Some(server_id) = self.language_server_ids.remove(&key) {
2254 // Remove other entries for this language server as well
2255 let mut orphaned_worktrees = vec![worktree_id];
2256 let other_keys = self.language_server_ids.keys().cloned().collect::<Vec<_>>();
2257 for other_key in other_keys {
2258 if self.language_server_ids.get(&other_key) == Some(&server_id) {
2259 self.language_server_ids.remove(&other_key);
2260 orphaned_worktrees.push(other_key.0);
2261 }
2262 }
2263
2264 self.language_server_statuses.remove(&server_id);
2265 cx.notify();
2266
2267 let server_state = self.language_servers.remove(&server_id);
2268 cx.spawn_weak(|this, mut cx| async move {
2269 let mut root_path = None;
2270
2271 let server = match server_state {
2272 Some(LanguageServerState::Starting(started_language_server)) => {
2273 started_language_server.await
2274 }
2275 Some(LanguageServerState::Running { server, .. }) => Some(server),
2276 None => None,
2277 };
2278
2279 if let Some(server) = server {
2280 root_path = Some(server.root_path().clone());
2281 if let Some(shutdown) = server.shutdown() {
2282 shutdown.await;
2283 }
2284 }
2285
2286 if let Some(this) = this.upgrade(&cx) {
2287 this.update(&mut cx, |this, cx| {
2288 this.language_server_statuses.remove(&server_id);
2289 cx.notify();
2290 });
2291 }
2292
2293 (root_path, orphaned_worktrees)
2294 })
2295 } else {
2296 Task::ready((None, Vec::new()))
2297 }
2298 }
2299
2300 pub fn restart_language_servers_for_buffers(
2301 &mut self,
2302 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2303 cx: &mut ModelContext<Self>,
2304 ) -> Option<()> {
2305 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2306 .into_iter()
2307 .filter_map(|buffer| {
2308 let file = File::from_dyn(buffer.read(cx).file())?;
2309 let worktree = file.worktree.read(cx).as_local()?;
2310 let worktree_id = worktree.id();
2311 let worktree_abs_path = worktree.abs_path().clone();
2312 let full_path = file.full_path(cx);
2313 Some((worktree_id, worktree_abs_path, full_path))
2314 })
2315 .collect();
2316 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2317 let language = self.languages.select_language(&full_path)?;
2318 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2319 }
2320
2321 None
2322 }
2323
2324 fn restart_language_server(
2325 &mut self,
2326 worktree_id: WorktreeId,
2327 fallback_path: Arc<Path>,
2328 language: Arc<Language>,
2329 cx: &mut ModelContext<Self>,
2330 ) {
2331 let adapter = if let Some(adapter) = language.lsp_adapter() {
2332 adapter
2333 } else {
2334 return;
2335 };
2336
2337 let server_name = adapter.name();
2338 let stop = self.stop_language_server(worktree_id, server_name.clone(), cx);
2339 cx.spawn_weak(|this, mut cx| async move {
2340 let (original_root_path, orphaned_worktrees) = stop.await;
2341 if let Some(this) = this.upgrade(&cx) {
2342 this.update(&mut cx, |this, cx| {
2343 // Attempt to restart using the original server's root path. Fall back to the
2344 // passed-in path if the root path could not be retrieved.
2345 let root_path = original_root_path
2346 .map(|path_buf| Arc::from(path_buf.as_path()))
2347 .unwrap_or(fallback_path);
2348
2349 this.start_language_server(worktree_id, root_path, language, cx);
2350
2351 // Look up the new server id and assign it to each of the orphaned worktrees.
2352 if let Some(new_server_id) = this
2353 .language_server_ids
2354 .get(&(worktree_id, server_name.clone()))
2355 .cloned()
2356 {
2357 for orphaned_worktree in orphaned_worktrees {
2358 this.language_server_ids.insert(
2359 (orphaned_worktree, server_name.clone()),
2360 new_server_id.clone(),
2361 );
2362 }
2363 }
2364 });
2365 }
2366 })
2367 .detach();
2368 }
2369
2370 fn on_lsp_diagnostics_published(
2371 &mut self,
2372 server_id: usize,
2373 mut params: lsp::PublishDiagnosticsParams,
2374 adapter: &Arc<dyn LspAdapter>,
2375 cx: &mut ModelContext<Self>,
2376 ) {
2377 adapter.process_diagnostics(&mut params);
2378 self.update_diagnostics(
2379 server_id,
2380 params,
2381 adapter.disk_based_diagnostic_sources(),
2382 cx,
2383 )
2384 .log_err();
2385 }
2386
2387 fn on_lsp_progress(
2388 &mut self,
2389 progress: lsp::ProgressParams,
2390 server_id: usize,
2391 disk_based_diagnostics_progress_token: Option<&str>,
2392 cx: &mut ModelContext<Self>,
2393 ) {
2394 let token = match progress.token {
2395 lsp::NumberOrString::String(token) => token,
2396 lsp::NumberOrString::Number(token) => {
2397 log::info!("skipping numeric progress token {}", token);
2398 return;
2399 }
2400 };
2401 let progress = match progress.value {
2402 lsp::ProgressParamsValue::WorkDone(value) => value,
2403 };
2404 let language_server_status =
2405 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2406 status
2407 } else {
2408 return;
2409 };
2410
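// Only honor progress for tokens that the server previously registered via a
// window/workDoneProgress/create request (see the handler in start_language_server).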
2411 if !language_server_status.progress_tokens.contains(&token) {
2412 return;
2413 }
2414
2415 match progress {
2416 lsp::WorkDoneProgress::Begin(report) => {
2417 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2418 language_server_status.has_pending_diagnostic_updates = true;
2419 self.disk_based_diagnostics_started(server_id, cx);
2420 self.broadcast_language_server_update(
2421 server_id,
2422 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2423 proto::LspDiskBasedDiagnosticsUpdating {},
2424 ),
2425 );
2426 } else {
2427 self.on_lsp_work_start(
2428 server_id,
2429 token.clone(),
2430 LanguageServerProgress {
2431 message: report.message.clone(),
2432 percentage: report.percentage.map(|p| p as usize),
2433 last_update_at: Instant::now(),
2434 },
2435 cx,
2436 );
2437 self.broadcast_language_server_update(
2438 server_id,
2439 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2440 token,
2441 message: report.message,
2442 percentage: report.percentage.map(|p| p as u32),
2443 }),
2444 );
2445 }
2446 }
2447 lsp::WorkDoneProgress::Report(report) => {
2448 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2449 self.on_lsp_work_progress(
2450 server_id,
2451 token.clone(),
2452 LanguageServerProgress {
2453 message: report.message.clone(),
2454 percentage: report.percentage.map(|p| p as usize),
2455 last_update_at: Instant::now(),
2456 },
2457 cx,
2458 );
2459 self.broadcast_language_server_update(
2460 server_id,
2461 proto::update_language_server::Variant::WorkProgress(
2462 proto::LspWorkProgress {
2463 token,
2464 message: report.message,
2465 percentage: report.percentage.map(|p| p as u32),
2466 },
2467 ),
2468 );
2469 }
2470 }
2471 lsp::WorkDoneProgress::End(_) => {
2472 language_server_status.progress_tokens.remove(&token);
2473
2474 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2475 language_server_status.has_pending_diagnostic_updates = false;
2476 self.disk_based_diagnostics_finished(server_id, cx);
2477 self.broadcast_language_server_update(
2478 server_id,
2479 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2480 proto::LspDiskBasedDiagnosticsUpdated {},
2481 ),
2482 );
2483 } else {
2484 self.on_lsp_work_end(server_id, token.clone(), cx);
2485 self.broadcast_language_server_update(
2486 server_id,
2487 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2488 token,
2489 }),
2490 );
2491 }
2492 }
2493 }
2494 }
2495
2496 fn on_lsp_work_start(
2497 &mut self,
2498 language_server_id: usize,
2499 token: String,
2500 progress: LanguageServerProgress,
2501 cx: &mut ModelContext<Self>,
2502 ) {
2503 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2504 status.pending_work.insert(token, progress);
2505 cx.notify();
2506 }
2507 }
2508
2509 fn on_lsp_work_progress(
2510 &mut self,
2511 language_server_id: usize,
2512 token: String,
2513 progress: LanguageServerProgress,
2514 cx: &mut ModelContext<Self>,
2515 ) {
2516 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2517 let entry = status
2518 .pending_work
2519 .entry(token)
2520 .or_insert(LanguageServerProgress {
2521 message: Default::default(),
2522 percentage: Default::default(),
2523 last_update_at: progress.last_update_at,
2524 });
2525 if progress.message.is_some() {
2526 entry.message = progress.message;
2527 }
2528 if progress.percentage.is_some() {
2529 entry.percentage = progress.percentage;
2530 }
2531 entry.last_update_at = progress.last_update_at;
2532 cx.notify();
2533 }
2534 }
2535
2536 fn on_lsp_work_end(
2537 &mut self,
2538 language_server_id: usize,
2539 token: String,
2540 cx: &mut ModelContext<Self>,
2541 ) {
2542 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2543 status.pending_work.remove(&token);
2544 cx.notify();
2545 }
2546 }
2547
2548 async fn on_lsp_workspace_edit(
2549 this: WeakModelHandle<Self>,
2550 params: lsp::ApplyWorkspaceEditParams,
2551 server_id: usize,
2552 adapter: Arc<dyn LspAdapter>,
2553 language_server: Arc<LanguageServer>,
2554 mut cx: AsyncAppContext,
2555 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2556 let this = this
2557 .upgrade(&cx)
2558 .ok_or_else(|| anyhow!("project closed"))?;
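// Apply the edit locally and stash the resulting transaction keyed by server id, so that
// the request that triggered this edit (e.g. an executeCommand) can hand it back to its caller.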
2559 let transaction = Self::deserialize_workspace_edit(
2560 this.clone(),
2561 params.edit,
2562 true,
2563 adapter.clone(),
2564 language_server.clone(),
2565 &mut cx,
2566 )
2567 .await
2568 .log_err();
2569 this.update(&mut cx, |this, _| {
2570 if let Some(transaction) = transaction {
2571 this.last_workspace_edits_by_language_server
2572 .insert(server_id, transaction);
2573 }
2574 });
2575 Ok(lsp::ApplyWorkspaceEditResponse {
2576 applied: true,
2577 failed_change: None,
2578 failure_reason: None,
2579 })
2580 }
2581
2582 fn broadcast_language_server_update(
2583 &self,
2584 language_server_id: usize,
2585 event: proto::update_language_server::Variant,
2586 ) {
2587 if let Some(project_id) = self.shared_remote_id() {
2588 self.client
2589 .send(proto::UpdateLanguageServer {
2590 project_id,
2591 language_server_id: language_server_id as u64,
2592 variant: Some(event),
2593 })
2594 .log_err();
2595 }
2596 }
2597
2598 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2599 for server_state in self.language_servers.values() {
2600 if let LanguageServerState::Running { server, .. } = server_state {
2601 server
2602 .notify::<lsp::notification::DidChangeConfiguration>(
2603 lsp::DidChangeConfigurationParams {
2604 settings: settings.clone(),
2605 },
2606 )
2607 .ok();
2608 }
2609 }
2610 *self.language_server_settings.lock() = settings;
2611 }
2612
2613 pub fn language_server_statuses(
2614 &self,
2615 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2616 self.language_server_statuses.values()
2617 }
2618
2619 pub fn update_diagnostics(
2620 &mut self,
2621 language_server_id: usize,
2622 params: lsp::PublishDiagnosticsParams,
2623 disk_based_sources: &[&str],
2624 cx: &mut ModelContext<Self>,
2625 ) -> Result<()> {
2626 let abs_path = params
2627 .uri
2628 .to_file_path()
2629 .map_err(|_| anyhow!("URI is not a file"))?;
2630 let mut diagnostics = Vec::default();
2631 let mut primary_diagnostic_group_ids = HashMap::default();
2632 let mut sources_by_group_id = HashMap::default();
2633 let mut supporting_diagnostics = HashMap::default();
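// First pass: allocate a diagnostic group for each primary diagnostic, and record
// supporting diagnostics (those whose related information points at an existing primary)
// so that their severity can be merged in below.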
2634 for diagnostic in &params.diagnostics {
2635 let source = diagnostic.source.as_ref();
2636 let code = diagnostic.code.as_ref().map(|code| match code {
2637 lsp::NumberOrString::Number(code) => code.to_string(),
2638 lsp::NumberOrString::String(code) => code.clone(),
2639 });
2640 let range = range_from_lsp(diagnostic.range);
2641 let is_supporting = diagnostic
2642 .related_information
2643 .as_ref()
2644 .map_or(false, |infos| {
2645 infos.iter().any(|info| {
2646 primary_diagnostic_group_ids.contains_key(&(
2647 source,
2648 code.clone(),
2649 range_from_lsp(info.location.range),
2650 ))
2651 })
2652 });
2653
2654 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2655 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2656 });
2657
2658 if is_supporting {
2659 supporting_diagnostics.insert(
2660 (source, code.clone(), range),
2661 (diagnostic.severity, is_unnecessary),
2662 );
2663 } else {
2664 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2665 let is_disk_based = source.map_or(false, |source| {
2666 disk_based_sources.contains(&source.as_str())
2667 });
2668
2669 sources_by_group_id.insert(group_id, source);
2670 primary_diagnostic_group_ids
2671 .insert((source, code.clone(), range.clone()), group_id);
2672
2673 diagnostics.push(DiagnosticEntry {
2674 range,
2675 diagnostic: Diagnostic {
2676 code: code.clone(),
2677 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2678 message: diagnostic.message.clone(),
2679 group_id,
2680 is_primary: true,
2681 is_valid: true,
2682 is_disk_based,
2683 is_unnecessary,
2684 },
2685 });
2686 if let Some(infos) = &diagnostic.related_information {
2687 for info in infos {
2688 if info.location.uri == params.uri && !info.message.is_empty() {
2689 let range = range_from_lsp(info.location.range);
2690 diagnostics.push(DiagnosticEntry {
2691 range,
2692 diagnostic: Diagnostic {
2693 code: code.clone(),
2694 severity: DiagnosticSeverity::INFORMATION,
2695 message: info.message.clone(),
2696 group_id,
2697 is_primary: false,
2698 is_valid: true,
2699 is_disk_based,
2700 is_unnecessary: false,
2701 },
2702 });
2703 }
2704 }
2705 }
2706 }
2707 }
2708
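// Second pass: for the non-primary entries, merge in the severity and "unnecessary" flag
// reported by the corresponding supporting diagnostic, if one was published.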
2709 for entry in &mut diagnostics {
2710 let diagnostic = &mut entry.diagnostic;
2711 if !diagnostic.is_primary {
2712 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2713 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2714 source,
2715 diagnostic.code.clone(),
2716 entry.range.clone(),
2717 )) {
2718 if let Some(severity) = severity {
2719 diagnostic.severity = severity;
2720 }
2721 diagnostic.is_unnecessary = is_unnecessary;
2722 }
2723 }
2724 }
2725
2726 self.update_diagnostic_entries(
2727 language_server_id,
2728 abs_path,
2729 params.version,
2730 diagnostics,
2731 cx,
2732 )?;
2733 Ok(())
2734 }
2735
2736 pub fn update_diagnostic_entries(
2737 &mut self,
2738 language_server_id: usize,
2739 abs_path: PathBuf,
2740 version: Option<i32>,
2741 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2742 cx: &mut ModelContext<Project>,
2743 ) -> Result<(), anyhow::Error> {
2744 let (worktree, relative_path) = self
2745 .find_local_worktree(&abs_path, cx)
2746 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2747
2748 let project_path = ProjectPath {
2749 worktree_id: worktree.read(cx).id(),
2750 path: relative_path.into(),
2751 };
2752 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2753 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2754 }
2755
2756 let updated = worktree.update(cx, |worktree, cx| {
2757 worktree
2758 .as_local_mut()
2759 .ok_or_else(|| anyhow!("not a local worktree"))?
2760 .update_diagnostics(
2761 language_server_id,
2762 project_path.path.clone(),
2763 diagnostics,
2764 cx,
2765 )
2766 })?;
2767 if updated {
2768 cx.emit(Event::DiagnosticsUpdated {
2769 language_server_id,
2770 path: project_path,
2771 });
2772 }
2773 Ok(())
2774 }
2775
2776 fn update_buffer_diagnostics(
2777 &mut self,
2778 buffer: &ModelHandle<Buffer>,
2779 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2780 version: Option<i32>,
2781 cx: &mut ModelContext<Self>,
2782 ) -> Result<()> {
2783 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2784 Ordering::Equal
2785 .then_with(|| b.is_primary.cmp(&a.is_primary))
2786 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2787 .then_with(|| a.severity.cmp(&b.severity))
2788 .then_with(|| a.message.cmp(&b.message))
2789 }
2790
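// Diagnostics are positioned against the buffer version that the server reported, so
// resolve that snapshot before clipping and translating the ranges.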
2791 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2792
2793 diagnostics.sort_unstable_by(|a, b| {
2794 Ordering::Equal
2795 .then_with(|| a.range.start.cmp(&b.range.start))
2796 .then_with(|| b.range.end.cmp(&a.range.end))
2797 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2798 });
2799
2800 let mut sanitized_diagnostics = Vec::new();
2801 let edits_since_save = Patch::new(
2802 snapshot
2803 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2804 .collect(),
2805 );
2806 for entry in diagnostics {
2807 let start;
2808 let end;
2809 if entry.diagnostic.is_disk_based {
2810 // Some diagnostics are based on files on disk instead of buffers'
2811 // current contents. Adjust these diagnostics' ranges to reflect
2812 // any unsaved edits.
2813 start = edits_since_save.old_to_new(entry.range.start);
2814 end = edits_since_save.old_to_new(entry.range.end);
2815 } else {
2816 start = entry.range.start;
2817 end = entry.range.end;
2818 }
2819
2820 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2821 ..snapshot.clip_point_utf16(end, Bias::Right);
2822
2823 // Expand empty ranges by one character
2824 if range.start == range.end {
2825 range.end.column += 1;
2826 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2827 if range.start == range.end && range.end.column > 0 {
2828 range.start.column -= 1;
2829 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2830 }
2831 }
2832
2833 sanitized_diagnostics.push(DiagnosticEntry {
2834 range,
2835 diagnostic: entry.diagnostic,
2836 });
2837 }
2838 drop(edits_since_save);
2839
2840 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2841 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2842 Ok(())
2843 }
2844
2845 pub fn reload_buffers(
2846 &self,
2847 buffers: HashSet<ModelHandle<Buffer>>,
2848 push_to_history: bool,
2849 cx: &mut ModelContext<Self>,
2850 ) -> Task<Result<ProjectTransaction>> {
2851 let mut local_buffers = Vec::new();
2852 let mut remote_buffers = None;
2853 for buffer_handle in buffers {
2854 let buffer = buffer_handle.read(cx);
2855 if buffer.is_dirty() {
2856 if let Some(file) = File::from_dyn(buffer.file()) {
2857 if file.is_local() {
2858 local_buffers.push(buffer_handle);
2859 } else {
2860 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2861 }
2862 }
2863 }
2864 }
2865
2866 let remote_buffers = self.remote_id().zip(remote_buffers);
2867 let client = self.client.clone();
2868
2869 cx.spawn(|this, mut cx| async move {
2870 let mut project_transaction = ProjectTransaction::default();
2871
2872 if let Some((project_id, remote_buffers)) = remote_buffers {
2873 let response = client
2874 .request(proto::ReloadBuffers {
2875 project_id,
2876 buffer_ids: remote_buffers
2877 .iter()
2878 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2879 .collect(),
2880 })
2881 .await?
2882 .transaction
2883 .ok_or_else(|| anyhow!("missing transaction"))?;
2884 project_transaction = this
2885 .update(&mut cx, |this, cx| {
2886 this.deserialize_project_transaction(response, push_to_history, cx)
2887 })
2888 .await?;
2889 }
2890
2891 for buffer in local_buffers {
2892 let transaction = buffer
2893 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2894 .await?;
2895 buffer.update(&mut cx, |buffer, cx| {
2896 if let Some(transaction) = transaction {
2897 if !push_to_history {
2898 buffer.forget_transaction(transaction.id);
2899 }
2900 project_transaction.0.insert(cx.handle(), transaction);
2901 }
2902 });
2903 }
2904
2905 Ok(project_transaction)
2906 })
2907 }
2908
2909 pub fn format(
2910 &self,
2911 buffers: HashSet<ModelHandle<Buffer>>,
2912 push_to_history: bool,
2913 cx: &mut ModelContext<Project>,
2914 ) -> Task<Result<ProjectTransaction>> {
2915 let mut local_buffers = Vec::new();
2916 let mut remote_buffers = None;
2917 for buffer_handle in buffers {
2918 let buffer = buffer_handle.read(cx);
2919 if let Some(file) = File::from_dyn(buffer.file()) {
2920 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2921 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2922 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2923 }
2924 } else {
2925 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2926 }
2927 } else {
2928 return Task::ready(Ok(Default::default()));
2929 }
2930 }
2931
2932 let remote_buffers = self.remote_id().zip(remote_buffers);
2933 let client = self.client.clone();
2934
2935 cx.spawn(|this, mut cx| async move {
2936 let mut project_transaction = ProjectTransaction::default();
2937
2938 if let Some((project_id, remote_buffers)) = remote_buffers {
2939 let response = client
2940 .request(proto::FormatBuffers {
2941 project_id,
2942 buffer_ids: remote_buffers
2943 .iter()
2944 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2945 .collect(),
2946 })
2947 .await?
2948 .transaction
2949 .ok_or_else(|| anyhow!("missing transaction"))?;
2950 project_transaction = this
2951 .update(&mut cx, |this, cx| {
2952 this.deserialize_project_transaction(response, push_to_history, cx)
2953 })
2954 .await?;
2955 }
2956
2957 for (buffer, buffer_abs_path, language_server) in local_buffers {
2958 let text_document = lsp::TextDocumentIdentifier::new(
2959 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2960 );
2961 let capabilities = &language_server.capabilities();
2962 let tab_size = cx.update(|cx| {
2963 let language_name = buffer.read(cx).language().map(|language| language.name());
2964 cx.global::<Settings>().tab_size(language_name.as_deref())
2965 });
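// Prefer whole-document formatting when the server advertises it; otherwise fall back to
// range formatting over the entire buffer. Buffers whose server supports neither are skipped.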
2966 let lsp_edits = if capabilities
2967 .document_formatting_provider
2968 .as_ref()
2969 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2970 {
2971 language_server
2972 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2973 text_document,
2974 options: lsp::FormattingOptions {
2975 tab_size: tab_size.into(),
2976 insert_spaces: true,
2977 insert_final_newline: Some(true),
2978 ..Default::default()
2979 },
2980 work_done_progress_params: Default::default(),
2981 })
2982 .await?
2983 } else if capabilities
2984 .document_range_formatting_provider
2985 .as_ref()
2986 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2987 {
2988 let buffer_start = lsp::Position::new(0, 0);
2989 let buffer_end =
2990 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2991 language_server
2992 .request::<lsp::request::RangeFormatting>(
2993 lsp::DocumentRangeFormattingParams {
2994 text_document,
2995 range: lsp::Range::new(buffer_start, buffer_end),
2996 options: lsp::FormattingOptions {
2997 tab_size: tab_size.into(),
2998 insert_spaces: true,
2999 insert_final_newline: Some(true),
3000 ..Default::default()
3001 },
3002 work_done_progress_params: Default::default(),
3003 },
3004 )
3005 .await?
3006 } else {
3007 continue;
3008 };
3009
3010 if let Some(lsp_edits) = lsp_edits {
3011 let edits = this
3012 .update(&mut cx, |this, cx| {
3013 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
3014 })
3015 .await?;
3016 buffer.update(&mut cx, |buffer, cx| {
3017 buffer.finalize_last_transaction();
3018 buffer.start_transaction();
3019 for (range, text) in edits {
3020 buffer.edit([(range, text)], cx);
3021 }
3022 if buffer.end_transaction(cx).is_some() {
3023 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3024 if !push_to_history {
3025 buffer.forget_transaction(transaction.id);
3026 }
3027 project_transaction.0.insert(cx.handle(), transaction);
3028 }
3029 });
3030 }
3031 }
3032
3033 Ok(project_transaction)
3034 })
3035 }
3036
3037 pub fn definition<T: ToPointUtf16>(
3038 &self,
3039 buffer: &ModelHandle<Buffer>,
3040 position: T,
3041 cx: &mut ModelContext<Self>,
3042 ) -> Task<Result<Vec<LocationLink>>> {
3043 let position = position.to_point_utf16(buffer.read(cx));
3044 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
3045 }
3046
3047 pub fn references<T: ToPointUtf16>(
3048 &self,
3049 buffer: &ModelHandle<Buffer>,
3050 position: T,
3051 cx: &mut ModelContext<Self>,
3052 ) -> Task<Result<Vec<Location>>> {
3053 let position = position.to_point_utf16(buffer.read(cx));
3054 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
3055 }
3056
3057 pub fn document_highlights<T: ToPointUtf16>(
3058 &self,
3059 buffer: &ModelHandle<Buffer>,
3060 position: T,
3061 cx: &mut ModelContext<Self>,
3062 ) -> Task<Result<Vec<DocumentHighlight>>> {
3063 let position = position.to_point_utf16(buffer.read(cx));
3064
3065 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
3066 }
3067
3068 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
3069 if self.is_local() {
3070 let mut requests = Vec::new();
3071 for ((worktree_id, _), server_id) in self.language_server_ids.iter() {
3072 let worktree_id = *worktree_id;
3073 if let Some(worktree) = self
3074 .worktree_for_id(worktree_id, cx)
3075 .and_then(|worktree| worktree.read(cx).as_local())
3076 {
3077 if let Some(LanguageServerState::Running { adapter, server }) =
3078 self.language_servers.get(server_id)
3079 {
3080 let adapter = adapter.clone();
3081 let worktree_abs_path = worktree.abs_path().clone();
3082 requests.push(
3083 server
3084 .request::<lsp::request::WorkspaceSymbol>(
3085 lsp::WorkspaceSymbolParams {
3086 query: query.to_string(),
3087 ..Default::default()
3088 },
3089 )
3090 .log_err()
3091 .map(move |response| {
3092 (
3093 adapter,
3094 worktree_id,
3095 worktree_abs_path,
3096 response.unwrap_or_default(),
3097 )
3098 }),
3099 );
3100 }
3101 }
3102 }
3103
3104 cx.spawn_weak(|this, cx| async move {
3105 let responses = futures::future::join_all(requests).await;
3106 let this = if let Some(this) = this.upgrade(&cx) {
3107 this
3108 } else {
3109 return Ok(Default::default());
3110 };
3111 this.read_with(&cx, |this, cx| {
3112 let mut symbols = Vec::new();
3113 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
3114 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
3115 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
3116 let mut worktree_id = source_worktree_id;
3117 let path;
3118 if let Some((worktree, rel_path)) =
3119 this.find_local_worktree(&abs_path, cx)
3120 {
3121 worktree_id = worktree.read(cx).id();
3122 path = rel_path;
3123 } else {
3124 path = relativize_path(&worktree_abs_path, &abs_path);
3125 }
3126
3127 let label = this
3128 .languages
3129 .select_language(&path)
3130 .and_then(|language| {
3131 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
3132 })
3133 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
3134 let signature = this.symbol_signature(worktree_id, &path);
3135
3136 Some(Symbol {
3137 source_worktree_id,
3138 worktree_id,
3139 language_server_name: adapter.name(),
3140 name: lsp_symbol.name,
3141 kind: lsp_symbol.kind,
3142 label,
3143 path,
3144 range: range_from_lsp(lsp_symbol.location.range),
3145 signature,
3146 })
3147 }));
3148 }
3149 Ok(symbols)
3150 })
3151 })
3152 } else if let Some(project_id) = self.remote_id() {
3153 let request = self.client.request(proto::GetProjectSymbols {
3154 project_id,
3155 query: query.to_string(),
3156 });
3157 cx.spawn_weak(|this, cx| async move {
3158 let response = request.await?;
3159 let mut symbols = Vec::new();
3160 if let Some(this) = this.upgrade(&cx) {
3161 this.read_with(&cx, |this, _| {
3162 symbols.extend(
3163 response
3164 .symbols
3165 .into_iter()
3166 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
3167 );
3168 })
3169 }
3170 Ok(symbols)
3171 })
3172 } else {
3173 Task::ready(Ok(Default::default()))
3174 }
3175 }
3176
3177 pub fn open_buffer_for_symbol(
3178 &mut self,
3179 symbol: &Symbol,
3180 cx: &mut ModelContext<Self>,
3181 ) -> Task<Result<ModelHandle<Buffer>>> {
3182 if self.is_local() {
3183 let language_server_id = if let Some(id) = self.language_server_ids.get(&(
3184 symbol.source_worktree_id,
3185 symbol.language_server_name.clone(),
3186 )) {
3187 *id
3188 } else {
3189 return Task::ready(Err(anyhow!(
3190 "language server for worktree and language not found"
3191 )));
3192 };
3193
3194 let worktree_abs_path = if let Some(worktree_abs_path) = self
3195 .worktree_for_id(symbol.worktree_id, cx)
3196 .and_then(|worktree| worktree.read(cx).as_local())
3197 .map(|local_worktree| local_worktree.abs_path())
3198 {
3199 worktree_abs_path
3200 } else {
3201 return Task::ready(Err(anyhow!("worktree not found for symbol")));
3202 };
3203 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
3204 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
3205 uri
3206 } else {
3207 return Task::ready(Err(anyhow!("invalid symbol path")));
3208 };
3209
3210 self.open_local_buffer_via_lsp(
3211 symbol_uri,
3212 language_server_id,
3213 symbol.language_server_name.clone(),
3214 cx,
3215 )
3216 } else if let Some(project_id) = self.remote_id() {
3217 let request = self.client.request(proto::OpenBufferForSymbol {
3218 project_id,
3219 symbol: Some(serialize_symbol(symbol)),
3220 });
3221 cx.spawn(|this, mut cx| async move {
3222 let response = request.await?;
3223 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
3224 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3225 .await
3226 })
3227 } else {
3228 Task::ready(Err(anyhow!("project does not have a remote id")))
3229 }
3230 }
3231
3232 pub fn hover<T: ToPointUtf16>(
3233 &self,
3234 buffer: &ModelHandle<Buffer>,
3235 position: T,
3236 cx: &mut ModelContext<Self>,
3237 ) -> Task<Result<Option<Hover>>> {
3238 let position = position.to_point_utf16(buffer.read(cx));
3239 self.request_lsp(buffer.clone(), GetHover { position }, cx)
3240 }
3241
3242 pub fn completions<T: ToPointUtf16>(
3243 &self,
3244 source_buffer_handle: &ModelHandle<Buffer>,
3245 position: T,
3246 cx: &mut ModelContext<Self>,
3247 ) -> Task<Result<Vec<Completion>>> {
3248 let source_buffer_handle = source_buffer_handle.clone();
3249 let source_buffer = source_buffer_handle.read(cx);
3250 let buffer_id = source_buffer.remote_id();
3251 let language = source_buffer.language().cloned();
3252 let worktree;
3253 let buffer_abs_path;
3254 if let Some(file) = File::from_dyn(source_buffer.file()) {
3255 worktree = file.worktree.clone();
3256 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3257 } else {
3258 return Task::ready(Ok(Default::default()));
3259 };
3260
3261 let position = position.to_point_utf16(source_buffer);
3262 let anchor = source_buffer.anchor_after(position);
3263
3264 if worktree.read(cx).as_local().is_some() {
3265 let buffer_abs_path = buffer_abs_path.unwrap();
3266 let lang_server =
3267 if let Some((_, server)) = self.language_server_for_buffer(source_buffer, cx) {
3268 server.clone()
3269 } else {
3270 return Task::ready(Ok(Default::default()));
3271 };
3272
3273 cx.spawn(|_, cx| async move {
3274 let completions = lang_server
3275 .request::<lsp::request::Completion>(lsp::CompletionParams {
3276 text_document_position: lsp::TextDocumentPositionParams::new(
3277 lsp::TextDocumentIdentifier::new(
3278 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3279 ),
3280 point_to_lsp(position),
3281 ),
3282 context: Default::default(),
3283 work_done_progress_params: Default::default(),
3284 partial_result_params: Default::default(),
3285 })
3286 .await
3287 .context("lsp completion request failed")?;
3288
3289 let completions = if let Some(completions) = completions {
3290 match completions {
3291 lsp::CompletionResponse::Array(completions) => completions,
3292 lsp::CompletionResponse::List(list) => list.items,
3293 }
3294 } else {
3295 Default::default()
3296 };
3297
3298 source_buffer_handle.read_with(&cx, |this, _| {
3299 let snapshot = this.snapshot();
3300 let clipped_position = this.clip_point_utf16(position, Bias::Left);
3301 let mut range_for_token = None;
3302 Ok(completions
3303 .into_iter()
3304 .filter_map(|lsp_completion| {
3305 // For now, we can only handle additional edits if they are returned
3306 // when resolving the completion, not if they are present initially.
3307 if lsp_completion
3308 .additional_text_edits
3309 .as_ref()
3310 .map_or(false, |edits| !edits.is_empty())
3311 {
3312 return None;
3313 }
3314
3315 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
3316 // If the language server provides a range to overwrite, then
3317 // check that the range is valid.
3318 Some(lsp::CompletionTextEdit::Edit(edit)) => {
3319 let range = range_from_lsp(edit.range);
3320 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
3321 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
3322 if start != range.start || end != range.end {
3323 log::info!("completion out of expected range");
3324 return None;
3325 }
3326 (
3327 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3328 edit.new_text.clone(),
3329 )
3330 }
3331 // If the language server does not provide a range, then infer
3332 // the range based on the syntax tree.
3333 None => {
3334 if position != clipped_position {
3335 log::info!("completion out of expected range");
3336 return None;
3337 }
3338 let Range { start, end } = range_for_token
3339 .get_or_insert_with(|| {
3340 let offset = position.to_offset(&snapshot);
3341 let (range, kind) = snapshot.surrounding_word(offset);
3342 if kind == Some(CharKind::Word) {
3343 range
3344 } else {
3345 offset..offset
3346 }
3347 })
3348 .clone();
3349 let text = lsp_completion
3350 .insert_text
3351 .as_ref()
3352 .unwrap_or(&lsp_completion.label)
3353 .clone();
3354 (
3355 snapshot.anchor_before(start)..snapshot.anchor_after(end),
3356 text.clone(),
3357 )
3358 }
3359 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
3360 log::info!("unsupported insert/replace completion");
3361 return None;
3362 }
3363 };
3364
3365 Some(Completion {
3366 old_range,
3367 new_text,
3368 label: language
3369 .as_ref()
3370 .and_then(|l| l.label_for_completion(&lsp_completion))
3371 .unwrap_or_else(|| {
3372 CodeLabel::plain(
3373 lsp_completion.label.clone(),
3374 lsp_completion.filter_text.as_deref(),
3375 )
3376 }),
3377 lsp_completion,
3378 })
3379 })
3380 .collect())
3381 })
3382 })
3383 } else if let Some(project_id) = self.remote_id() {
3384 let rpc = self.client.clone();
3385 let message = proto::GetCompletions {
3386 project_id,
3387 buffer_id,
3388 position: Some(language::proto::serialize_anchor(&anchor)),
3389 version: serialize_version(&source_buffer.version()),
3390 };
3391 cx.spawn_weak(|_, mut cx| async move {
3392 let response = rpc.request(message).await?;
3393
3394 source_buffer_handle
3395 .update(&mut cx, |buffer, _| {
3396 buffer.wait_for_version(deserialize_version(response.version))
3397 })
3398 .await;
3399
3400 response
3401 .completions
3402 .into_iter()
3403 .map(|completion| {
3404 language::proto::deserialize_completion(completion, language.as_ref())
3405 })
3406 .collect()
3407 })
3408 } else {
3409 Task::ready(Ok(Default::default()))
3410 }
3411 }
3412
3413 pub fn apply_additional_edits_for_completion(
3414 &self,
3415 buffer_handle: ModelHandle<Buffer>,
3416 completion: Completion,
3417 push_to_history: bool,
3418 cx: &mut ModelContext<Self>,
3419 ) -> Task<Result<Option<Transaction>>> {
3420 let buffer = buffer_handle.read(cx);
3421 let buffer_id = buffer.remote_id();
3422
3423 if self.is_local() {
3424 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3425 {
3426 server.clone()
3427 } else {
3428 return Task::ready(Ok(Default::default()));
3429 };
3430
3431 cx.spawn(|this, mut cx| async move {
3432 let resolved_completion = lang_server
3433 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3434 .await?;
3435 if let Some(edits) = resolved_completion.additional_text_edits {
3436 let edits = this
3437 .update(&mut cx, |this, cx| {
3438 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3439 })
3440 .await?;
3441 buffer_handle.update(&mut cx, |buffer, cx| {
3442 buffer.finalize_last_transaction();
3443 buffer.start_transaction();
3444 for (range, text) in edits {
3445 buffer.edit([(range, text)], cx);
3446 }
3447 let transaction = if buffer.end_transaction(cx).is_some() {
3448 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3449 if !push_to_history {
3450 buffer.forget_transaction(transaction.id);
3451 }
3452 Some(transaction)
3453 } else {
3454 None
3455 };
3456 Ok(transaction)
3457 })
3458 } else {
3459 Ok(None)
3460 }
3461 })
3462 } else if let Some(project_id) = self.remote_id() {
3463 let client = self.client.clone();
3464 cx.spawn(|_, mut cx| async move {
3465 let response = client
3466 .request(proto::ApplyCompletionAdditionalEdits {
3467 project_id,
3468 buffer_id,
3469 completion: Some(language::proto::serialize_completion(&completion)),
3470 })
3471 .await?;
3472
3473 if let Some(transaction) = response.transaction {
3474 let transaction = language::proto::deserialize_transaction(transaction)?;
3475 buffer_handle
3476 .update(&mut cx, |buffer, _| {
3477 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3478 })
3479 .await;
3480 if push_to_history {
3481 buffer_handle.update(&mut cx, |buffer, _| {
3482 buffer.push_transaction(transaction.clone(), Instant::now());
3483 });
3484 }
3485 Ok(Some(transaction))
3486 } else {
3487 Ok(None)
3488 }
3489 })
3490 } else {
3491 Task::ready(Err(anyhow!("project does not have a remote id")))
3492 }
3493 }
3494
3495 pub fn code_actions<T: Clone + ToOffset>(
3496 &self,
3497 buffer_handle: &ModelHandle<Buffer>,
3498 range: Range<T>,
3499 cx: &mut ModelContext<Self>,
3500 ) -> Task<Result<Vec<CodeAction>>> {
3501 let buffer_handle = buffer_handle.clone();
3502 let buffer = buffer_handle.read(cx);
3503 let snapshot = buffer.snapshot();
3504 let relevant_diagnostics = snapshot
3505 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3506 .map(|entry| entry.to_lsp_diagnostic_stub())
3507 .collect();
3508 let buffer_id = buffer.remote_id();
3509 let worktree;
3510 let buffer_abs_path;
3511 if let Some(file) = File::from_dyn(buffer.file()) {
3512 worktree = file.worktree.clone();
3513 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3514 } else {
3515 return Task::ready(Ok(Default::default()));
3516 };
3517 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3518
3519 if worktree.read(cx).as_local().is_some() {
3520 let buffer_abs_path = buffer_abs_path.unwrap();
3521 let lang_server = if let Some((_, server)) = self.language_server_for_buffer(buffer, cx)
3522 {
3523 server.clone()
3524 } else {
3525 return Task::ready(Ok(Default::default()));
3526 };
3527
3528 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3529 cx.foreground().spawn(async move {
3530 if lang_server.capabilities().code_action_provider.is_none() {
3531 return Ok(Default::default());
3532 }
3533
3534 Ok(lang_server
3535 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3536 text_document: lsp::TextDocumentIdentifier::new(
3537 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3538 ),
3539 range: lsp_range,
3540 work_done_progress_params: Default::default(),
3541 partial_result_params: Default::default(),
3542 context: lsp::CodeActionContext {
3543 diagnostics: relevant_diagnostics,
3544 only: Some(vec![
3545 lsp::CodeActionKind::QUICKFIX,
3546 lsp::CodeActionKind::REFACTOR,
3547 lsp::CodeActionKind::REFACTOR_EXTRACT,
3548 lsp::CodeActionKind::SOURCE,
3549 ]),
3550 },
3551 })
3552 .await?
3553 .unwrap_or_default()
3554 .into_iter()
3555 .filter_map(|entry| {
3556 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3557 Some(CodeAction {
3558 range: range.clone(),
3559 lsp_action,
3560 })
3561 } else {
3562 None
3563 }
3564 })
3565 .collect())
3566 })
3567 } else if let Some(project_id) = self.remote_id() {
3568 let rpc = self.client.clone();
3569 let version = buffer.version();
3570 cx.spawn_weak(|_, mut cx| async move {
3571 let response = rpc
3572 .request(proto::GetCodeActions {
3573 project_id,
3574 buffer_id,
3575 start: Some(language::proto::serialize_anchor(&range.start)),
3576 end: Some(language::proto::serialize_anchor(&range.end)),
3577 version: serialize_version(&version),
3578 })
3579 .await?;
3580
3581 buffer_handle
3582 .update(&mut cx, |buffer, _| {
3583 buffer.wait_for_version(deserialize_version(response.version))
3584 })
3585 .await;
3586
3587 response
3588 .actions
3589 .into_iter()
3590 .map(language::proto::deserialize_code_action)
3591 .collect()
3592 })
3593 } else {
3594 Task::ready(Ok(Default::default()))
3595 }
3596 }
3597
3598 pub fn apply_code_action(
3599 &self,
3600 buffer_handle: ModelHandle<Buffer>,
3601 mut action: CodeAction,
3602 push_to_history: bool,
3603 cx: &mut ModelContext<Self>,
3604 ) -> Task<Result<ProjectTransaction>> {
3605 if self.is_local() {
3606 let buffer = buffer_handle.read(cx);
3607 let (lsp_adapter, lang_server) =
3608 if let Some((adapter, server)) = self.language_server_for_buffer(buffer, cx) {
3609 (adapter.clone(), server.clone())
3610 } else {
3611 return Task::ready(Ok(Default::default()));
3612 };
3613 let range = action.range.to_point_utf16(buffer);
3614
3615 cx.spawn(|this, mut cx| async move {
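// If the server returned this action lazily (with a `data` payload), patch in the current
// range and ask the server to resolve it. Otherwise, re-request code actions for the range
// and pick the matching action by title.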
3616 if let Some(lsp_range) = action
3617 .lsp_action
3618 .data
3619 .as_mut()
3620 .and_then(|d| d.get_mut("codeActionParams"))
3621 .and_then(|d| d.get_mut("range"))
3622 {
3623 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3624 action.lsp_action = lang_server
3625 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3626 .await?;
3627 } else {
3628 let actions = this
3629 .update(&mut cx, |this, cx| {
3630 this.code_actions(&buffer_handle, action.range, cx)
3631 })
3632 .await?;
3633 action.lsp_action = actions
3634 .into_iter()
3635 .find(|a| a.lsp_action.title == action.lsp_action.title)
3636 .ok_or_else(|| anyhow!("code action is outdated"))?
3637 .lsp_action;
3638 }
3639
3640 if let Some(edit) = action.lsp_action.edit {
3641 Self::deserialize_workspace_edit(
3642 this,
3643 edit,
3644 push_to_history,
3645 lsp_adapter.clone(),
3646 lang_server.clone(),
3647 &mut cx,
3648 )
3649 .await
3650 } else if let Some(command) = action.lsp_action.command {
3651 this.update(&mut cx, |this, _| {
3652 this.last_workspace_edits_by_language_server
3653 .remove(&lang_server.server_id());
3654 });
3655 lang_server
3656 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3657 command: command.command,
3658 arguments: command.arguments.unwrap_or_default(),
3659 ..Default::default()
3660 })
3661 .await?;
3662 Ok(this.update(&mut cx, |this, _| {
3663 this.last_workspace_edits_by_language_server
3664 .remove(&lang_server.server_id())
3665 .unwrap_or_default()
3666 }))
3667 } else {
3668 Ok(ProjectTransaction::default())
3669 }
3670 })
3671 } else if let Some(project_id) = self.remote_id() {
3672 let client = self.client.clone();
3673 let request = proto::ApplyCodeAction {
3674 project_id,
3675 buffer_id: buffer_handle.read(cx).remote_id(),
3676 action: Some(language::proto::serialize_code_action(&action)),
3677 };
3678 cx.spawn(|this, mut cx| async move {
3679 let response = client
3680 .request(request)
3681 .await?
3682 .transaction
3683 .ok_or_else(|| anyhow!("missing transaction"))?;
3684 this.update(&mut cx, |this, cx| {
3685 this.deserialize_project_transaction(response, push_to_history, cx)
3686 })
3687 .await
3688 })
3689 } else {
3690 Task::ready(Err(anyhow!("project does not have a remote id")))
3691 }
3692 }
3693
3694 async fn deserialize_workspace_edit(
3695 this: ModelHandle<Self>,
3696 edit: lsp::WorkspaceEdit,
3697 push_to_history: bool,
3698 lsp_adapter: Arc<dyn LspAdapter>,
3699 language_server: Arc<LanguageServer>,
3700 cx: &mut AsyncAppContext,
3701 ) -> Result<ProjectTransaction> {
3702 let fs = this.read_with(cx, |this, _| this.fs.clone());
3703 let mut operations = Vec::new();
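// A workspace edit can arrive either as versioned `document_changes` or as a flat `changes`
// map; normalize both forms into a single list of document change operations.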
3704 if let Some(document_changes) = edit.document_changes {
3705 match document_changes {
3706 lsp::DocumentChanges::Edits(edits) => {
3707 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3708 }
3709 lsp::DocumentChanges::Operations(ops) => operations = ops,
3710 }
3711 } else if let Some(changes) = edit.changes {
3712 operations.extend(changes.into_iter().map(|(uri, edits)| {
3713 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3714 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3715 uri,
3716 version: None,
3717 },
3718 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3719 })
3720 }));
3721 }
3722
3723 let mut project_transaction = ProjectTransaction::default();
3724 for operation in operations {
3725 match operation {
3726 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3727 let abs_path = op
3728 .uri
3729 .to_file_path()
3730 .map_err(|_| anyhow!("can't convert URI to path"))?;
3731
3732 if let Some(parent_path) = abs_path.parent() {
3733 fs.create_dir(parent_path).await?;
3734 }
3735 if abs_path.ends_with("/") {
3736 fs.create_dir(&abs_path).await?;
3737 } else {
3738 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3739 .await?;
3740 }
3741 }
3742 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3743 let source_abs_path = op
3744 .old_uri
3745 .to_file_path()
3746 .map_err(|_| anyhow!("can't convert URI to path"))?;
3747 let target_abs_path = op
3748 .new_uri
3749 .to_file_path()
3750 .map_err(|_| anyhow!("can't convert URI to path"))?;
3751 fs.rename(
3752 &source_abs_path,
3753 &target_abs_path,
3754 op.options.map(Into::into).unwrap_or_default(),
3755 )
3756 .await?;
3757 }
3758 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3759 let abs_path = op
3760 .uri
3761 .to_file_path()
3762 .map_err(|_| anyhow!("can't convert URI to path"))?;
3763 let options = op.options.map(Into::into).unwrap_or_default();
3764 if abs_path.ends_with("/") {
3765 fs.remove_dir(&abs_path, options).await?;
3766 } else {
3767 fs.remove_file(&abs_path, options).await?;
3768 }
3769 }
3770 lsp::DocumentChangeOperation::Edit(op) => {
3771 let buffer_to_edit = this
3772 .update(cx, |this, cx| {
3773 this.open_local_buffer_via_lsp(
3774 op.text_document.uri,
3775 language_server.server_id(),
3776 lsp_adapter.name(),
3777 cx,
3778 )
3779 })
3780 .await?;
3781
3782 let edits = this
3783 .update(cx, |this, cx| {
3784 let edits = op.edits.into_iter().map(|edit| match edit {
3785 lsp::OneOf::Left(edit) => edit,
3786 lsp::OneOf::Right(edit) => edit.text_edit,
3787 });
3788 this.edits_from_lsp(
3789 &buffer_to_edit,
3790 edits,
3791 op.text_document.version,
3792 cx,
3793 )
3794 })
3795 .await?;
3796
3797 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3798 buffer.finalize_last_transaction();
3799 buffer.start_transaction();
3800 for (range, text) in edits {
3801 buffer.edit([(range, text)], cx);
3802 }
3803 let transaction = if buffer.end_transaction(cx).is_some() {
3804 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3805 if !push_to_history {
3806 buffer.forget_transaction(transaction.id);
3807 }
3808 Some(transaction)
3809 } else {
3810 None
3811 };
3812
3813 transaction
3814 });
3815 if let Some(transaction) = transaction {
3816 project_transaction.0.insert(buffer_to_edit, transaction);
3817 }
3818 }
3819 }
3820 }
3821
3822 Ok(project_transaction)
3823 }
3824
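    // Rename support. `prepare_rename` asks the language server which range at the given
    // position can be renamed, and `perform_rename` applies the rename across the project,
    // producing a `ProjectTransaction`. A hypothetical call site (names are illustrative,
    // not part of this module) might look like:
    //
    //     let rename_task = project.update(cx, |project, cx| {
    //         project.perform_rename(buffer.clone(), position, "new_name".into(), true, cx)
    //     });
    //
    // Both methods delegate to `request_lsp`, so they work for remote projects as well.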
3825 pub fn prepare_rename<T: ToPointUtf16>(
3826 &self,
3827 buffer: ModelHandle<Buffer>,
3828 position: T,
3829 cx: &mut ModelContext<Self>,
3830 ) -> Task<Result<Option<Range<Anchor>>>> {
3831 let position = position.to_point_utf16(buffer.read(cx));
3832 self.request_lsp(buffer, PrepareRename { position }, cx)
3833 }
3834
3835 pub fn perform_rename<T: ToPointUtf16>(
3836 &self,
3837 buffer: ModelHandle<Buffer>,
3838 position: T,
3839 new_name: String,
3840 push_to_history: bool,
3841 cx: &mut ModelContext<Self>,
3842 ) -> Task<Result<ProjectTransaction>> {
3843 let position = position.to_point_utf16(buffer.read(cx));
3844 self.request_lsp(
3845 buffer,
3846 PerformRename {
3847 position,
3848 new_name,
3849 push_to_history,
3850 },
3851 cx,
3852 )
3853 }
3854
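    // Project-wide search. Locally this runs in stages: background workers scan the visible
    // worktree files for paths whose contents may match the query, matching paths are opened
    // as buffers (already-open buffers are fed in up front), and another set of background
    // workers searches buffer contents, returning anchor ranges per buffer. On remote
    // projects the query is forwarded to the host over RPC instead.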
3855 pub fn search(
3856 &self,
3857 query: SearchQuery,
3858 cx: &mut ModelContext<Self>,
3859 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3860 if self.is_local() {
3861 let snapshots = self
3862 .visible_worktrees(cx)
3863 .filter_map(|tree| {
3864 let tree = tree.read(cx).as_local()?;
3865 Some(tree.snapshot())
3866 })
3867 .collect::<Vec<_>>();
3868
3869 let background = cx.background().clone();
3870 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3871 if path_count == 0 {
3872 return Task::ready(Ok(Default::default()));
3873 }
3874 let workers = background.num_cpus().min(path_count);
3875 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3876 cx.background()
3877 .spawn({
3878 let fs = self.fs.clone();
3879 let background = cx.background().clone();
3880 let query = query.clone();
3881 async move {
3882 let fs = &fs;
3883 let query = &query;
3884 let matching_paths_tx = &matching_paths_tx;
3885 let paths_per_worker = (path_count + workers - 1) / workers;
3886 let snapshots = &snapshots;
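                        // Partition the visible files evenly across workers. For example
                        // (illustrative numbers), with path_count = 10 and workers = 4,
                        // paths_per_worker = 3 and the workers cover index ranges
                        // [0, 3), [3, 6), [6, 9), and [9, 12); ranges past the end simply
                        // yield no entries.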
3887 background
3888 .scoped(|scope| {
3889 for worker_ix in 0..workers {
3890 let worker_start_ix = worker_ix * paths_per_worker;
3891 let worker_end_ix = worker_start_ix + paths_per_worker;
3892 scope.spawn(async move {
3893 let mut snapshot_start_ix = 0;
3894 let mut abs_path = PathBuf::new();
3895 for snapshot in snapshots {
3896 let snapshot_end_ix =
3897 snapshot_start_ix + snapshot.visible_file_count();
3898 if worker_end_ix <= snapshot_start_ix {
3899 break;
3900 } else if worker_start_ix > snapshot_end_ix {
3901 snapshot_start_ix = snapshot_end_ix;
3902 continue;
3903 } else {
3904 let start_in_snapshot = worker_start_ix
3905 .saturating_sub(snapshot_start_ix);
3906 let end_in_snapshot =
3907 cmp::min(worker_end_ix, snapshot_end_ix)
3908 - snapshot_start_ix;
3909
3910 for entry in snapshot
3911 .files(false, start_in_snapshot)
3912 .take(end_in_snapshot - start_in_snapshot)
3913 {
3914 if matching_paths_tx.is_closed() {
3915 break;
3916 }
3917
3918 abs_path.clear();
3919 abs_path.push(&snapshot.abs_path());
3920 abs_path.push(&entry.path);
3921 let matches = if let Some(file) =
3922 fs.open_sync(&abs_path).await.log_err()
3923 {
3924 query.detect(file).unwrap_or(false)
3925 } else {
3926 false
3927 };
3928
3929 if matches {
3930 let project_path =
3931 (snapshot.id(), entry.path.clone());
3932 if matching_paths_tx
3933 .send(project_path)
3934 .await
3935 .is_err()
3936 {
3937 break;
3938 }
3939 }
3940 }
3941
3942 snapshot_start_ix = snapshot_end_ix;
3943 }
3944 }
3945 });
3946 }
3947 })
3948 .await;
3949 }
3950 })
3951 .detach();
3952
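            // Feed buffers to the content-matching stage below: buffers that are already open
            // are sent first, then each path reported by the scanning stage is opened and sent
            // as well. The `open_buffers` set prevents searching the same buffer twice.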
3953 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3954 let open_buffers = self
3955 .opened_buffers
3956 .values()
3957 .filter_map(|b| b.upgrade(cx))
3958 .collect::<HashSet<_>>();
3959 cx.spawn(|this, cx| async move {
3960 for buffer in &open_buffers {
3961 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3962 buffers_tx.send((buffer.clone(), snapshot)).await?;
3963 }
3964
3965 let open_buffers = Rc::new(RefCell::new(open_buffers));
3966 while let Some(project_path) = matching_paths_rx.next().await {
3967 if buffers_tx.is_closed() {
3968 break;
3969 }
3970
3971 let this = this.clone();
3972 let open_buffers = open_buffers.clone();
3973 let buffers_tx = buffers_tx.clone();
3974 cx.spawn(|mut cx| async move {
3975 if let Some(buffer) = this
3976 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3977 .await
3978 .log_err()
3979 {
3980 if open_buffers.borrow_mut().insert(buffer.clone()) {
3981 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3982 buffers_tx.send((buffer, snapshot)).await?;
3983 }
3984 }
3985
3986 Ok::<_, anyhow::Error>(())
3987 })
3988 .detach();
3989 }
3990
3991 Ok::<_, anyhow::Error>(())
3992 })
3993 .detach_and_log_err(cx);
3994
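            // Final stage: each worker drains the buffer channel, runs the query against the
            // buffer's rope, and records the resulting anchor ranges in its own map; the
            // per-worker maps are merged into a single result at the end.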
3995 let background = cx.background().clone();
3996 cx.background().spawn(async move {
3997 let query = &query;
3998 let mut matched_buffers = Vec::new();
3999 for _ in 0..workers {
4000 matched_buffers.push(HashMap::default());
4001 }
4002 background
4003 .scoped(|scope| {
4004 for worker_matched_buffers in matched_buffers.iter_mut() {
4005 let mut buffers_rx = buffers_rx.clone();
4006 scope.spawn(async move {
4007 while let Some((buffer, snapshot)) = buffers_rx.next().await {
4008 let buffer_matches = query
4009 .search(snapshot.as_rope())
4010 .await
4011 .iter()
4012 .map(|range| {
4013 snapshot.anchor_before(range.start)
4014 ..snapshot.anchor_after(range.end)
4015 })
4016 .collect::<Vec<_>>();
4017 if !buffer_matches.is_empty() {
4018 worker_matched_buffers
4019 .insert(buffer.clone(), buffer_matches);
4020 }
4021 }
4022 });
4023 }
4024 })
4025 .await;
4026 Ok(matched_buffers.into_iter().flatten().collect())
4027 })
4028 } else if let Some(project_id) = self.remote_id() {
4029 let request = self.client.request(query.to_proto(project_id));
4030 cx.spawn(|this, mut cx| async move {
4031 let response = request.await?;
4032 let mut result = HashMap::default();
4033 for location in response.locations {
4034 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
4035 let target_buffer = this
4036 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4037 .await?;
4038 let start = location
4039 .start
4040 .and_then(deserialize_anchor)
4041 .ok_or_else(|| anyhow!("missing target start"))?;
4042 let end = location
4043 .end
4044 .and_then(deserialize_anchor)
4045 .ok_or_else(|| anyhow!("missing target end"))?;
4046 result
4047 .entry(target_buffer)
                        .or_default()
4049 .push(start..end)
4050 }
4051 Ok(result)
4052 })
4053 } else {
4054 Task::ready(Ok(Default::default()))
4055 }
4056 }
4057
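    // Dispatch a typed LSP request. For local projects the request is sent to the buffer's
    // language server (after checking the server's capabilities); for remote projects it is
    // proxied to the host as a proto message. If neither applies, a default response is
    // returned.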
4058 fn request_lsp<R: LspCommand>(
4059 &self,
4060 buffer_handle: ModelHandle<Buffer>,
4061 request: R,
4062 cx: &mut ModelContext<Self>,
4063 ) -> Task<Result<R::Response>>
4064 where
4065 <R::LspRequest as lsp::request::Request>::Result: Send,
4066 {
4067 let buffer = buffer_handle.read(cx);
4068 if self.is_local() {
4069 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
4070 if let Some((file, language_server)) = file.zip(
4071 self.language_server_for_buffer(buffer, cx)
4072 .map(|(_, server)| server.clone()),
4073 ) {
4074 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
4075 return cx.spawn(|this, cx| async move {
4076 if !request.check_capabilities(&language_server.capabilities()) {
4077 return Ok(Default::default());
4078 }
4079
4080 let response = language_server
4081 .request::<R::LspRequest>(lsp_params)
4082 .await
4083 .context("lsp request failed")?;
4084 request
4085 .response_from_lsp(response, this, buffer_handle, cx)
4086 .await
4087 });
4088 }
4089 } else if let Some(project_id) = self.remote_id() {
4090 let rpc = self.client.clone();
4091 let message = request.to_proto(project_id, buffer);
4092 return cx.spawn(|this, cx| async move {
4093 let response = rpc.request(message).await?;
4094 request
4095 .response_from_proto(response, this, buffer_handle, cx)
4096 .await
4097 });
4098 }
4099 Task::ready(Ok(Default::default()))
4100 }
4101
4102 pub fn find_or_create_local_worktree(
4103 &mut self,
4104 abs_path: impl AsRef<Path>,
4105 visible: bool,
4106 cx: &mut ModelContext<Self>,
4107 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
4108 let abs_path = abs_path.as_ref();
4109 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
4110 Task::ready(Ok((tree.clone(), relative_path.into())))
4111 } else {
4112 let worktree = self.create_local_worktree(abs_path, visible, cx);
4113 cx.foreground()
4114 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
4115 }
4116 }
4117
4118 pub fn find_local_worktree(
4119 &self,
4120 abs_path: &Path,
4121 cx: &AppContext,
4122 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
4123 for tree in &self.worktrees {
4124 if let Some(tree) = tree.upgrade(cx) {
4125 if let Some(relative_path) = tree
4126 .read(cx)
4127 .as_local()
4128 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
4129 {
4130 return Some((tree.clone(), relative_path.into()));
4131 }
4132 }
4133 }
4134 None
4135 }
4136
4137 pub fn is_shared(&self) -> bool {
4138 match &self.client_state {
4139 ProjectClientState::Local { is_shared, .. } => *is_shared,
4140 ProjectClientState::Remote { .. } => false,
4141 }
4142 }
4143
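    // Creating a local worktree is deduplicated through `loading_local_worktrees`: concurrent
    // requests for the same path share a single `Shared` future. The error is wrapped in an
    // `Arc` because a shared future's output must be cloneable.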
4144 fn create_local_worktree(
4145 &mut self,
4146 abs_path: impl AsRef<Path>,
4147 visible: bool,
4148 cx: &mut ModelContext<Self>,
4149 ) -> Task<Result<ModelHandle<Worktree>>> {
4150 let fs = self.fs.clone();
4151 let client = self.client.clone();
4152 let next_entry_id = self.next_entry_id.clone();
4153 let path: Arc<Path> = abs_path.as_ref().into();
4154 let task = self
4155 .loading_local_worktrees
4156 .entry(path.clone())
4157 .or_insert_with(|| {
4158 cx.spawn(|project, mut cx| {
4159 async move {
4160 let worktree = Worktree::local(
4161 client.clone(),
4162 path.clone(),
4163 visible,
4164 fs,
4165 next_entry_id,
4166 &mut cx,
4167 )
4168 .await;
4169 project.update(&mut cx, |project, _| {
4170 project.loading_local_worktrees.remove(&path);
4171 });
4172 let worktree = worktree?;
4173
4174 let project_id = project.update(&mut cx, |project, cx| {
4175 project.add_worktree(&worktree, cx);
4176 project.shared_remote_id()
4177 });
4178
4179 if let Some(project_id) = project_id {
4180 worktree
4181 .update(&mut cx, |worktree, cx| {
4182 worktree.as_local_mut().unwrap().share(project_id, cx)
4183 })
4184 .await
4185 .log_err();
4186 }
4187
4188 Ok(worktree)
4189 }
                    .map_err(Arc::new)
4191 })
4192 .shared()
4193 })
4194 .clone();
4195 cx.foreground().spawn(async move {
4196 match task.await {
4197 Ok(worktree) => Ok(worktree),
4198 Err(err) => Err(anyhow!("{}", err)),
4199 }
4200 })
4201 }
4202
4203 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
4204 self.worktrees.retain(|worktree| {
4205 if let Some(worktree) = worktree.upgrade(cx) {
4206 let id = worktree.read(cx).id();
4207 if id == id_to_remove {
4208 cx.emit(Event::WorktreeRemoved(id));
4209 false
4210 } else {
4211 true
4212 }
4213 } else {
4214 false
4215 }
4216 });
4217 self.metadata_changed(true, cx);
4218 cx.notify();
4219 }
4220
4221 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
4222 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
4223 if worktree.read(cx).is_local() {
4224 cx.subscribe(&worktree, |this, worktree, _, cx| {
4225 this.update_local_worktree_buffers(worktree, cx);
4226 })
4227 .detach();
4228 }
4229
4230 let push_strong_handle = {
4231 let worktree = worktree.read(cx);
4232 self.is_shared() || worktree.is_visible() || worktree.is_remote()
4233 };
4234 if push_strong_handle {
4235 self.worktrees
4236 .push(WorktreeHandle::Strong(worktree.clone()));
4237 } else {
4238 self.worktrees
4239 .push(WorktreeHandle::Weak(worktree.downgrade()));
4240 }
4241
4242 self.metadata_changed(true, cx);
4243 cx.observe_release(&worktree, |this, worktree, cx| {
4244 this.remove_worktree(worktree.id(), cx);
4245 cx.notify();
4246 })
4247 .detach();
4248
4249 cx.emit(Event::WorktreeAdded);
4250 cx.notify();
4251 }
4252
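    // After a local worktree rescans, re-resolve the `File` for every open buffer in that
    // worktree: prefer the entry with the same id, fall back to the entry at the same path,
    // and otherwise keep the old path with no entry id (the entry is gone from the snapshot).
    // Buffers whose absolute path changed are re-registered with their language server, and
    // remote peers are notified of the updated file when the project is shared.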
4253 fn update_local_worktree_buffers(
4254 &mut self,
4255 worktree_handle: ModelHandle<Worktree>,
4256 cx: &mut ModelContext<Self>,
4257 ) {
4258 let snapshot = worktree_handle.read(cx).snapshot();
4259 let mut buffers_to_delete = Vec::new();
4260 let mut renamed_buffers = Vec::new();
4261 for (buffer_id, buffer) in &self.opened_buffers {
4262 if let Some(buffer) = buffer.upgrade(cx) {
4263 buffer.update(cx, |buffer, cx| {
4264 if let Some(old_file) = File::from_dyn(buffer.file()) {
4265 if old_file.worktree != worktree_handle {
4266 return;
4267 }
4268
4269 let new_file = if let Some(entry) = old_file
4270 .entry_id
4271 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
4272 {
4273 File {
4274 is_local: true,
4275 entry_id: Some(entry.id),
4276 mtime: entry.mtime,
4277 path: entry.path.clone(),
4278 worktree: worktree_handle.clone(),
4279 }
4280 } else if let Some(entry) =
4281 snapshot.entry_for_path(old_file.path().as_ref())
4282 {
4283 File {
4284 is_local: true,
4285 entry_id: Some(entry.id),
4286 mtime: entry.mtime,
4287 path: entry.path.clone(),
4288 worktree: worktree_handle.clone(),
4289 }
4290 } else {
4291 File {
4292 is_local: true,
4293 entry_id: None,
4294 path: old_file.path().clone(),
4295 mtime: old_file.mtime(),
4296 worktree: worktree_handle.clone(),
4297 }
4298 };
4299
4300 let old_path = old_file.abs_path(cx);
4301 if new_file.abs_path(cx) != old_path {
4302 renamed_buffers.push((cx.handle(), old_path));
4303 }
4304
4305 if let Some(project_id) = self.shared_remote_id() {
4306 self.client
4307 .send(proto::UpdateBufferFile {
4308 project_id,
4309 buffer_id: *buffer_id as u64,
4310 file: Some(new_file.to_proto()),
4311 })
4312 .log_err();
4313 }
4314 buffer.file_updated(Arc::new(new_file), cx).detach();
4315 }
4316 });
4317 } else {
4318 buffers_to_delete.push(*buffer_id);
4319 }
4320 }
4321
4322 for buffer_id in buffers_to_delete {
4323 self.opened_buffers.remove(&buffer_id);
4324 }
4325
4326 for (buffer, old_path) in renamed_buffers {
4327 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
4328 self.assign_language_to_buffer(&buffer, cx);
4329 self.register_buffer_with_language_server(&buffer, cx);
4330 }
4331 }
4332
4333 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
4334 let new_active_entry = entry.and_then(|project_path| {
4335 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
4336 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
4337 Some(entry.id)
4338 });
4339 if new_active_entry != self.active_entry {
4340 self.active_entry = new_active_entry;
4341 cx.emit(Event::ActiveEntryChanged(new_active_entry));
4342 }
4343 }
4344
4345 pub fn language_servers_running_disk_based_diagnostics<'a>(
4346 &'a self,
4347 ) -> impl 'a + Iterator<Item = usize> {
4348 self.language_server_statuses
4349 .iter()
4350 .filter_map(|(id, status)| {
4351 if status.has_pending_diagnostic_updates {
4352 Some(*id)
4353 } else {
4354 None
4355 }
4356 })
4357 }
4358
4359 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
4360 let mut summary = DiagnosticSummary::default();
4361 for (_, path_summary) in self.diagnostic_summaries(cx) {
4362 summary.error_count += path_summary.error_count;
4363 summary.warning_count += path_summary.warning_count;
4364 }
4365 summary
4366 }
4367
4368 pub fn diagnostic_summaries<'a>(
4369 &'a self,
4370 cx: &'a AppContext,
4371 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
4372 self.visible_worktrees(cx).flat_map(move |worktree| {
4373 let worktree = worktree.read(cx);
4374 let worktree_id = worktree.id();
4375 worktree
4376 .diagnostic_summaries()
4377 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
4378 })
4379 }
4380
4381 pub fn disk_based_diagnostics_started(
4382 &mut self,
4383 language_server_id: usize,
4384 cx: &mut ModelContext<Self>,
4385 ) {
4386 cx.emit(Event::DiskBasedDiagnosticsStarted { language_server_id });
4387 }
4388
4389 pub fn disk_based_diagnostics_finished(
4390 &mut self,
4391 language_server_id: usize,
4392 cx: &mut ModelContext<Self>,
4393 ) {
4394 cx.emit(Event::DiskBasedDiagnosticsFinished { language_server_id });
4395 }
4396
4397 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4398 self.active_entry
4399 }
4400
4401 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4402 self.worktree_for_id(path.worktree_id, cx)?
4403 .read(cx)
4404 .entry_for_path(&path.path)
4405 .map(|entry| entry.id)
4406 }
4407
4408 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4409 let worktree = self.worktree_for_entry(entry_id, cx)?;
4410 let worktree = worktree.read(cx);
4411 let worktree_id = worktree.id();
4412 let path = worktree.entry_for_id(entry_id)?.path.clone();
4413 Some(ProjectPath { worktree_id, path })
4414 }
4415
4416 // RPC message handlers
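    //
    // These handlers follow a common shape: deserialize the payload, look up the referenced
    // worktree or buffer, perform the corresponding local operation, and serialize the result
    // for the original sender (waiting for the sender's buffer version where necessary).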
4417
4418 async fn handle_request_join_project(
4419 this: ModelHandle<Self>,
4420 message: TypedEnvelope<proto::RequestJoinProject>,
4421 _: Arc<Client>,
4422 mut cx: AsyncAppContext,
4423 ) -> Result<()> {
4424 let user_id = message.payload.requester_id;
4425 if this.read_with(&cx, |project, _| {
4426 project.collaborators.values().any(|c| c.user.id == user_id)
4427 }) {
4428 this.update(&mut cx, |this, cx| {
4429 this.respond_to_join_request(user_id, true, cx)
4430 });
4431 } else {
4432 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4433 let user = user_store
4434 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4435 .await?;
4436 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4437 }
4438 Ok(())
4439 }
4440
4441 async fn handle_unregister_project(
4442 this: ModelHandle<Self>,
4443 _: TypedEnvelope<proto::UnregisterProject>,
4444 _: Arc<Client>,
4445 mut cx: AsyncAppContext,
4446 ) -> Result<()> {
4447 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4448 Ok(())
4449 }
4450
4451 async fn handle_project_unshared(
4452 this: ModelHandle<Self>,
4453 _: TypedEnvelope<proto::ProjectUnshared>,
4454 _: Arc<Client>,
4455 mut cx: AsyncAppContext,
4456 ) -> Result<()> {
4457 this.update(&mut cx, |this, cx| this.unshared(cx));
4458 Ok(())
4459 }
4460
4461 async fn handle_add_collaborator(
4462 this: ModelHandle<Self>,
4463 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4464 _: Arc<Client>,
4465 mut cx: AsyncAppContext,
4466 ) -> Result<()> {
4467 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4468 let collaborator = envelope
4469 .payload
4470 .collaborator
4471 .take()
4472 .ok_or_else(|| anyhow!("empty collaborator"))?;
4473
4474 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4475 this.update(&mut cx, |this, cx| {
4476 this.collaborators
4477 .insert(collaborator.peer_id, collaborator);
4478 cx.notify();
4479 });
4480
4481 Ok(())
4482 }
4483
4484 async fn handle_remove_collaborator(
4485 this: ModelHandle<Self>,
4486 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4487 _: Arc<Client>,
4488 mut cx: AsyncAppContext,
4489 ) -> Result<()> {
4490 this.update(&mut cx, |this, cx| {
4491 let peer_id = PeerId(envelope.payload.peer_id);
4492 let replica_id = this
4493 .collaborators
4494 .remove(&peer_id)
4495 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4496 .replica_id;
4497 for (_, buffer) in &this.opened_buffers {
4498 if let Some(buffer) = buffer.upgrade(cx) {
4499 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4500 }
4501 }
4502
4503 cx.emit(Event::CollaboratorLeft(peer_id));
4504 cx.notify();
4505 Ok(())
4506 })
4507 }
4508
4509 async fn handle_join_project_request_cancelled(
4510 this: ModelHandle<Self>,
4511 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4512 _: Arc<Client>,
4513 mut cx: AsyncAppContext,
4514 ) -> Result<()> {
4515 let user = this
4516 .update(&mut cx, |this, cx| {
4517 this.user_store.update(cx, |user_store, cx| {
4518 user_store.fetch_user(envelope.payload.requester_id, cx)
4519 })
4520 })
4521 .await?;
4522
4523 this.update(&mut cx, |_, cx| {
4524 cx.emit(Event::ContactCancelledJoinRequest(user));
4525 });
4526
4527 Ok(())
4528 }
4529
4530 async fn handle_update_project(
4531 this: ModelHandle<Self>,
4532 envelope: TypedEnvelope<proto::UpdateProject>,
4533 client: Arc<Client>,
4534 mut cx: AsyncAppContext,
4535 ) -> Result<()> {
4536 this.update(&mut cx, |this, cx| {
4537 let replica_id = this.replica_id();
4538 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4539
4540 let mut old_worktrees_by_id = this
4541 .worktrees
4542 .drain(..)
4543 .filter_map(|worktree| {
4544 let worktree = worktree.upgrade(cx)?;
4545 Some((worktree.read(cx).id(), worktree))
4546 })
4547 .collect::<HashMap<_, _>>();
4548
4549 for worktree in envelope.payload.worktrees {
4550 if let Some(old_worktree) =
4551 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4552 {
4553 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4554 } else {
4555 let worktree = proto::Worktree {
4556 id: worktree.id,
4557 root_name: worktree.root_name,
4558 entries: Default::default(),
4559 diagnostic_summaries: Default::default(),
4560 visible: worktree.visible,
4561 scan_id: 0,
4562 };
4563 let (worktree, load_task) =
4564 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4565 this.add_worktree(&worktree, cx);
4566 load_task.detach();
4567 }
4568 }
4569
4570 this.metadata_changed(true, cx);
4571 for (id, _) in old_worktrees_by_id {
4572 cx.emit(Event::WorktreeRemoved(id));
4573 }
4574
4575 Ok(())
4576 })
4577 }
4578
4579 async fn handle_update_worktree(
4580 this: ModelHandle<Self>,
4581 envelope: TypedEnvelope<proto::UpdateWorktree>,
4582 _: Arc<Client>,
4583 mut cx: AsyncAppContext,
4584 ) -> Result<()> {
4585 this.update(&mut cx, |this, cx| {
4586 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4587 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4588 worktree.update(cx, |worktree, _| {
4589 let worktree = worktree.as_remote_mut().unwrap();
4590 worktree.update_from_remote(envelope)
4591 })?;
4592 }
4593 Ok(())
4594 })
4595 }
4596
4597 async fn handle_create_project_entry(
4598 this: ModelHandle<Self>,
4599 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4600 _: Arc<Client>,
4601 mut cx: AsyncAppContext,
4602 ) -> Result<proto::ProjectEntryResponse> {
4603 let worktree = this.update(&mut cx, |this, cx| {
4604 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4605 this.worktree_for_id(worktree_id, cx)
4606 .ok_or_else(|| anyhow!("worktree not found"))
4607 })?;
4608 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4609 let entry = worktree
4610 .update(&mut cx, |worktree, cx| {
4611 let worktree = worktree.as_local_mut().unwrap();
4612 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4613 worktree.create_entry(path, envelope.payload.is_directory, cx)
4614 })
4615 .await?;
4616 Ok(proto::ProjectEntryResponse {
4617 entry: Some((&entry).into()),
4618 worktree_scan_id: worktree_scan_id as u64,
4619 })
4620 }
4621
4622 async fn handle_rename_project_entry(
4623 this: ModelHandle<Self>,
4624 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4625 _: Arc<Client>,
4626 mut cx: AsyncAppContext,
4627 ) -> Result<proto::ProjectEntryResponse> {
4628 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4629 let worktree = this.read_with(&cx, |this, cx| {
4630 this.worktree_for_entry(entry_id, cx)
4631 .ok_or_else(|| anyhow!("worktree not found"))
4632 })?;
4633 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4634 let entry = worktree
4635 .update(&mut cx, |worktree, cx| {
4636 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4637 worktree
4638 .as_local_mut()
4639 .unwrap()
4640 .rename_entry(entry_id, new_path, cx)
4641 .ok_or_else(|| anyhow!("invalid entry"))
4642 })?
4643 .await?;
4644 Ok(proto::ProjectEntryResponse {
4645 entry: Some((&entry).into()),
4646 worktree_scan_id: worktree_scan_id as u64,
4647 })
4648 }
4649
4650 async fn handle_copy_project_entry(
4651 this: ModelHandle<Self>,
4652 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4653 _: Arc<Client>,
4654 mut cx: AsyncAppContext,
4655 ) -> Result<proto::ProjectEntryResponse> {
4656 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4657 let worktree = this.read_with(&cx, |this, cx| {
4658 this.worktree_for_entry(entry_id, cx)
4659 .ok_or_else(|| anyhow!("worktree not found"))
4660 })?;
4661 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4662 let entry = worktree
4663 .update(&mut cx, |worktree, cx| {
4664 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4665 worktree
4666 .as_local_mut()
4667 .unwrap()
4668 .copy_entry(entry_id, new_path, cx)
4669 .ok_or_else(|| anyhow!("invalid entry"))
4670 })?
4671 .await?;
4672 Ok(proto::ProjectEntryResponse {
4673 entry: Some((&entry).into()),
4674 worktree_scan_id: worktree_scan_id as u64,
4675 })
4676 }
4677
4678 async fn handle_delete_project_entry(
4679 this: ModelHandle<Self>,
4680 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4681 _: Arc<Client>,
4682 mut cx: AsyncAppContext,
4683 ) -> Result<proto::ProjectEntryResponse> {
4684 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4685 let worktree = this.read_with(&cx, |this, cx| {
4686 this.worktree_for_entry(entry_id, cx)
4687 .ok_or_else(|| anyhow!("worktree not found"))
4688 })?;
4689 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4690 worktree
4691 .update(&mut cx, |worktree, cx| {
4692 worktree
4693 .as_local_mut()
4694 .unwrap()
4695 .delete_entry(entry_id, cx)
4696 .ok_or_else(|| anyhow!("invalid entry"))
4697 })?
4698 .await?;
4699 Ok(proto::ProjectEntryResponse {
4700 entry: None,
4701 worktree_scan_id: worktree_scan_id as u64,
4702 })
4703 }
4704
4705 async fn handle_update_diagnostic_summary(
4706 this: ModelHandle<Self>,
4707 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4708 _: Arc<Client>,
4709 mut cx: AsyncAppContext,
4710 ) -> Result<()> {
4711 this.update(&mut cx, |this, cx| {
4712 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4713 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4714 if let Some(summary) = envelope.payload.summary {
4715 let project_path = ProjectPath {
4716 worktree_id,
4717 path: Path::new(&summary.path).into(),
4718 };
4719 worktree.update(cx, |worktree, _| {
4720 worktree
4721 .as_remote_mut()
4722 .unwrap()
4723 .update_diagnostic_summary(project_path.path.clone(), &summary);
4724 });
4725 cx.emit(Event::DiagnosticsUpdated {
4726 language_server_id: summary.language_server_id as usize,
4727 path: project_path,
4728 });
4729 }
4730 }
4731 Ok(())
4732 })
4733 }
4734
4735 async fn handle_start_language_server(
4736 this: ModelHandle<Self>,
4737 envelope: TypedEnvelope<proto::StartLanguageServer>,
4738 _: Arc<Client>,
4739 mut cx: AsyncAppContext,
4740 ) -> Result<()> {
4741 let server = envelope
4742 .payload
4743 .server
4744 .ok_or_else(|| anyhow!("invalid server"))?;
4745 this.update(&mut cx, |this, cx| {
4746 this.language_server_statuses.insert(
4747 server.id as usize,
4748 LanguageServerStatus {
4749 name: server.name,
4750 pending_work: Default::default(),
4751 has_pending_diagnostic_updates: false,
4752 progress_tokens: Default::default(),
4753 },
4754 );
4755 cx.notify();
4756 });
4757 Ok(())
4758 }
4759
4760 async fn handle_update_language_server(
4761 this: ModelHandle<Self>,
4762 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4763 _: Arc<Client>,
4764 mut cx: AsyncAppContext,
4765 ) -> Result<()> {
4766 let language_server_id = envelope.payload.language_server_id as usize;
4767 match envelope
4768 .payload
4769 .variant
4770 .ok_or_else(|| anyhow!("invalid variant"))?
4771 {
4772 proto::update_language_server::Variant::WorkStart(payload) => {
4773 this.update(&mut cx, |this, cx| {
4774 this.on_lsp_work_start(
4775 language_server_id,
4776 payload.token,
4777 LanguageServerProgress {
4778 message: payload.message,
4779 percentage: payload.percentage.map(|p| p as usize),
4780 last_update_at: Instant::now(),
4781 },
4782 cx,
4783 );
4784 })
4785 }
4786 proto::update_language_server::Variant::WorkProgress(payload) => {
4787 this.update(&mut cx, |this, cx| {
4788 this.on_lsp_work_progress(
4789 language_server_id,
4790 payload.token,
4791 LanguageServerProgress {
4792 message: payload.message,
4793 percentage: payload.percentage.map(|p| p as usize),
4794 last_update_at: Instant::now(),
4795 },
4796 cx,
4797 );
4798 })
4799 }
4800 proto::update_language_server::Variant::WorkEnd(payload) => {
4801 this.update(&mut cx, |this, cx| {
4802 this.on_lsp_work_end(language_server_id, payload.token, cx);
4803 })
4804 }
4805 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4806 this.update(&mut cx, |this, cx| {
4807 this.disk_based_diagnostics_started(language_server_id, cx);
4808 })
4809 }
4810 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4811 this.update(&mut cx, |this, cx| {
4812 this.disk_based_diagnostics_finished(language_server_id, cx)
4813 });
4814 }
4815 }
4816
4817 Ok(())
4818 }
4819
4820 async fn handle_update_buffer(
4821 this: ModelHandle<Self>,
4822 envelope: TypedEnvelope<proto::UpdateBuffer>,
4823 _: Arc<Client>,
4824 mut cx: AsyncAppContext,
4825 ) -> Result<()> {
4826 this.update(&mut cx, |this, cx| {
4827 let payload = envelope.payload.clone();
4828 let buffer_id = payload.buffer_id;
4829 let ops = payload
4830 .operations
4831 .into_iter()
                .map(language::proto::deserialize_operation)
4833 .collect::<Result<Vec<_>, _>>()?;
4834 let is_remote = this.is_remote();
4835 match this.opened_buffers.entry(buffer_id) {
4836 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4837 OpenBuffer::Strong(buffer) => {
4838 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4839 }
4840 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4841 OpenBuffer::Weak(_) => {}
4842 },
4843 hash_map::Entry::Vacant(e) => {
4844 assert!(
4845 is_remote,
4846 "received buffer update from {:?}",
4847 envelope.original_sender_id
4848 );
4849 e.insert(OpenBuffer::Loading(ops));
4850 }
4851 }
4852 Ok(())
4853 })
4854 }
4855
4856 async fn handle_update_buffer_file(
4857 this: ModelHandle<Self>,
4858 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4859 _: Arc<Client>,
4860 mut cx: AsyncAppContext,
4861 ) -> Result<()> {
4862 this.update(&mut cx, |this, cx| {
4863 let payload = envelope.payload.clone();
4864 let buffer_id = payload.buffer_id;
4865 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4866 let worktree = this
4867 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4868 .ok_or_else(|| anyhow!("no such worktree"))?;
4869 let file = File::from_proto(file, worktree.clone(), cx)?;
4870 let buffer = this
4871 .opened_buffers
4872 .get_mut(&buffer_id)
4873 .and_then(|b| b.upgrade(cx))
4874 .ok_or_else(|| anyhow!("no such buffer"))?;
4875 buffer.update(cx, |buffer, cx| {
4876 buffer.file_updated(Arc::new(file), cx).detach();
4877 });
4878 Ok(())
4879 })
4880 }
4881
4882 async fn handle_save_buffer(
4883 this: ModelHandle<Self>,
4884 envelope: TypedEnvelope<proto::SaveBuffer>,
4885 _: Arc<Client>,
4886 mut cx: AsyncAppContext,
4887 ) -> Result<proto::BufferSaved> {
4888 let buffer_id = envelope.payload.buffer_id;
4889 let requested_version = deserialize_version(envelope.payload.version);
4890
4891 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4892 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4893 let buffer = this
4894 .opened_buffers
4895 .get(&buffer_id)
4896 .and_then(|buffer| buffer.upgrade(cx))
4897 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4898 Ok::<_, anyhow::Error>((project_id, buffer))
4899 })?;
4900 buffer
4901 .update(&mut cx, |buffer, _| {
4902 buffer.wait_for_version(requested_version)
4903 })
4904 .await;
4905
4906 let (saved_version, fingerprint, mtime) =
4907 buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4908 Ok(proto::BufferSaved {
4909 project_id,
4910 buffer_id,
4911 version: serialize_version(&saved_version),
4912 mtime: Some(mtime.into()),
4913 fingerprint,
4914 })
4915 }
4916
4917 async fn handle_reload_buffers(
4918 this: ModelHandle<Self>,
4919 envelope: TypedEnvelope<proto::ReloadBuffers>,
4920 _: Arc<Client>,
4921 mut cx: AsyncAppContext,
4922 ) -> Result<proto::ReloadBuffersResponse> {
4923 let sender_id = envelope.original_sender_id()?;
4924 let reload = this.update(&mut cx, |this, cx| {
4925 let mut buffers = HashSet::default();
4926 for buffer_id in &envelope.payload.buffer_ids {
4927 buffers.insert(
4928 this.opened_buffers
4929 .get(buffer_id)
4930 .and_then(|buffer| buffer.upgrade(cx))
4931 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4932 );
4933 }
4934 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4935 })?;
4936
4937 let project_transaction = reload.await?;
4938 let project_transaction = this.update(&mut cx, |this, cx| {
4939 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4940 });
4941 Ok(proto::ReloadBuffersResponse {
4942 transaction: Some(project_transaction),
4943 })
4944 }
4945
4946 async fn handle_format_buffers(
4947 this: ModelHandle<Self>,
4948 envelope: TypedEnvelope<proto::FormatBuffers>,
4949 _: Arc<Client>,
4950 mut cx: AsyncAppContext,
4951 ) -> Result<proto::FormatBuffersResponse> {
4952 let sender_id = envelope.original_sender_id()?;
4953 let format = this.update(&mut cx, |this, cx| {
4954 let mut buffers = HashSet::default();
4955 for buffer_id in &envelope.payload.buffer_ids {
4956 buffers.insert(
4957 this.opened_buffers
4958 .get(buffer_id)
4959 .and_then(|buffer| buffer.upgrade(cx))
4960 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4961 );
4962 }
4963 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4964 })?;
4965
4966 let project_transaction = format.await?;
4967 let project_transaction = this.update(&mut cx, |this, cx| {
4968 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4969 });
4970 Ok(proto::FormatBuffersResponse {
4971 transaction: Some(project_transaction),
4972 })
4973 }
4974
4975 async fn handle_get_completions(
4976 this: ModelHandle<Self>,
4977 envelope: TypedEnvelope<proto::GetCompletions>,
4978 _: Arc<Client>,
4979 mut cx: AsyncAppContext,
4980 ) -> Result<proto::GetCompletionsResponse> {
4981 let position = envelope
4982 .payload
4983 .position
4984 .and_then(language::proto::deserialize_anchor)
4985 .ok_or_else(|| anyhow!("invalid position"))?;
4986 let version = deserialize_version(envelope.payload.version);
4987 let buffer = this.read_with(&cx, |this, cx| {
4988 this.opened_buffers
4989 .get(&envelope.payload.buffer_id)
4990 .and_then(|buffer| buffer.upgrade(cx))
4991 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4992 })?;
4993 buffer
4994 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4995 .await;
4996 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4997 let completions = this
4998 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4999 .await?;
5000
5001 Ok(proto::GetCompletionsResponse {
5002 completions: completions
5003 .iter()
5004 .map(language::proto::serialize_completion)
5005 .collect(),
5006 version: serialize_version(&version),
5007 })
5008 }
5009
5010 async fn handle_apply_additional_edits_for_completion(
5011 this: ModelHandle<Self>,
5012 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
5013 _: Arc<Client>,
5014 mut cx: AsyncAppContext,
5015 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
5016 let apply_additional_edits = this.update(&mut cx, |this, cx| {
5017 let buffer = this
5018 .opened_buffers
5019 .get(&envelope.payload.buffer_id)
5020 .and_then(|buffer| buffer.upgrade(cx))
5021 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5022 let language = buffer.read(cx).language();
5023 let completion = language::proto::deserialize_completion(
5024 envelope
5025 .payload
5026 .completion
5027 .ok_or_else(|| anyhow!("invalid completion"))?,
5028 language,
5029 )?;
5030 Ok::<_, anyhow::Error>(
5031 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
5032 )
5033 })?;
5034
5035 Ok(proto::ApplyCompletionAdditionalEditsResponse {
5036 transaction: apply_additional_edits
5037 .await?
5038 .as_ref()
5039 .map(language::proto::serialize_transaction),
5040 })
5041 }
5042
5043 async fn handle_get_code_actions(
5044 this: ModelHandle<Self>,
5045 envelope: TypedEnvelope<proto::GetCodeActions>,
5046 _: Arc<Client>,
5047 mut cx: AsyncAppContext,
5048 ) -> Result<proto::GetCodeActionsResponse> {
5049 let start = envelope
5050 .payload
5051 .start
5052 .and_then(language::proto::deserialize_anchor)
5053 .ok_or_else(|| anyhow!("invalid start"))?;
5054 let end = envelope
5055 .payload
5056 .end
5057 .and_then(language::proto::deserialize_anchor)
5058 .ok_or_else(|| anyhow!("invalid end"))?;
5059 let buffer = this.update(&mut cx, |this, cx| {
5060 this.opened_buffers
5061 .get(&envelope.payload.buffer_id)
5062 .and_then(|buffer| buffer.upgrade(cx))
5063 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
5064 })?;
5065 buffer
5066 .update(&mut cx, |buffer, _| {
5067 buffer.wait_for_version(deserialize_version(envelope.payload.version))
5068 })
5069 .await;
5070
5071 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
5072 let code_actions = this.update(&mut cx, |this, cx| {
5073 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
5074 })?;
5075
5076 Ok(proto::GetCodeActionsResponse {
5077 actions: code_actions
5078 .await?
5079 .iter()
5080 .map(language::proto::serialize_code_action)
5081 .collect(),
5082 version: serialize_version(&version),
5083 })
5084 }
5085
5086 async fn handle_apply_code_action(
5087 this: ModelHandle<Self>,
5088 envelope: TypedEnvelope<proto::ApplyCodeAction>,
5089 _: Arc<Client>,
5090 mut cx: AsyncAppContext,
5091 ) -> Result<proto::ApplyCodeActionResponse> {
5092 let sender_id = envelope.original_sender_id()?;
5093 let action = language::proto::deserialize_code_action(
5094 envelope
5095 .payload
5096 .action
5097 .ok_or_else(|| anyhow!("invalid action"))?,
5098 )?;
5099 let apply_code_action = this.update(&mut cx, |this, cx| {
5100 let buffer = this
5101 .opened_buffers
5102 .get(&envelope.payload.buffer_id)
5103 .and_then(|buffer| buffer.upgrade(cx))
5104 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
5105 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
5106 })?;
5107
5108 let project_transaction = apply_code_action.await?;
5109 let project_transaction = this.update(&mut cx, |this, cx| {
5110 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
5111 });
5112 Ok(proto::ApplyCodeActionResponse {
5113 transaction: Some(project_transaction),
5114 })
5115 }
5116
5117 async fn handle_lsp_command<T: LspCommand>(
5118 this: ModelHandle<Self>,
5119 envelope: TypedEnvelope<T::ProtoRequest>,
5120 _: Arc<Client>,
5121 mut cx: AsyncAppContext,
5122 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
5123 where
5124 <T::LspRequest as lsp::request::Request>::Result: Send,
5125 {
5126 let sender_id = envelope.original_sender_id()?;
5127 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
5128 let buffer_handle = this.read_with(&cx, |this, _| {
5129 this.opened_buffers
5130 .get(&buffer_id)
5131 .and_then(|buffer| buffer.upgrade(&cx))
5132 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
5133 })?;
5134 let request = T::from_proto(
5135 envelope.payload,
5136 this.clone(),
5137 buffer_handle.clone(),
5138 cx.clone(),
5139 )
5140 .await?;
5141 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
5142 let response = this
5143 .update(&mut cx, |this, cx| {
5144 this.request_lsp(buffer_handle, request, cx)
5145 })
5146 .await?;
5147 this.update(&mut cx, |this, cx| {
5148 Ok(T::response_to_proto(
5149 response,
5150 this,
5151 sender_id,
5152 &buffer_version,
5153 cx,
5154 ))
5155 })
5156 }
5157
5158 async fn handle_get_project_symbols(
5159 this: ModelHandle<Self>,
5160 envelope: TypedEnvelope<proto::GetProjectSymbols>,
5161 _: Arc<Client>,
5162 mut cx: AsyncAppContext,
5163 ) -> Result<proto::GetProjectSymbolsResponse> {
5164 let symbols = this
5165 .update(&mut cx, |this, cx| {
5166 this.symbols(&envelope.payload.query, cx)
5167 })
5168 .await?;
5169
5170 Ok(proto::GetProjectSymbolsResponse {
5171 symbols: symbols.iter().map(serialize_symbol).collect(),
5172 })
5173 }
5174
5175 async fn handle_search_project(
5176 this: ModelHandle<Self>,
5177 envelope: TypedEnvelope<proto::SearchProject>,
5178 _: Arc<Client>,
5179 mut cx: AsyncAppContext,
5180 ) -> Result<proto::SearchProjectResponse> {
5181 let peer_id = envelope.original_sender_id()?;
5182 let query = SearchQuery::from_proto(envelope.payload)?;
5183 let result = this
5184 .update(&mut cx, |this, cx| this.search(query, cx))
5185 .await?;
5186
5187 this.update(&mut cx, |this, cx| {
5188 let mut locations = Vec::new();
5189 for (buffer, ranges) in result {
5190 for range in ranges {
5191 let start = serialize_anchor(&range.start);
5192 let end = serialize_anchor(&range.end);
5193 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
5194 locations.push(proto::Location {
5195 buffer: Some(buffer),
5196 start: Some(start),
5197 end: Some(end),
5198 });
5199 }
5200 }
5201 Ok(proto::SearchProjectResponse { locations })
5202 })
5203 }
5204
5205 async fn handle_open_buffer_for_symbol(
5206 this: ModelHandle<Self>,
5207 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
5208 _: Arc<Client>,
5209 mut cx: AsyncAppContext,
5210 ) -> Result<proto::OpenBufferForSymbolResponse> {
5211 let peer_id = envelope.original_sender_id()?;
5212 let symbol = envelope
5213 .payload
5214 .symbol
5215 .ok_or_else(|| anyhow!("invalid symbol"))?;
5216 let symbol = this.read_with(&cx, |this, _| {
5217 let symbol = this.deserialize_symbol(symbol)?;
5218 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
5219 if signature == symbol.signature {
5220 Ok(symbol)
5221 } else {
5222 Err(anyhow!("invalid symbol signature"))
5223 }
5224 })?;
5225 let buffer = this
5226 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
5227 .await?;
5228
5229 Ok(proto::OpenBufferForSymbolResponse {
5230 buffer: Some(this.update(&mut cx, |this, cx| {
5231 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
5232 })),
5233 })
5234 }
5235
5236 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
5237 let mut hasher = Sha256::new();
5238 hasher.update(worktree_id.to_proto().to_be_bytes());
5239 hasher.update(path.to_string_lossy().as_bytes());
5240 hasher.update(self.nonce.to_be_bytes());
5241 hasher.finalize().as_slice().try_into().unwrap()
5242 }
5243
5244 async fn handle_open_buffer_by_id(
5245 this: ModelHandle<Self>,
5246 envelope: TypedEnvelope<proto::OpenBufferById>,
5247 _: Arc<Client>,
5248 mut cx: AsyncAppContext,
5249 ) -> Result<proto::OpenBufferResponse> {
5250 let peer_id = envelope.original_sender_id()?;
5251 let buffer = this
5252 .update(&mut cx, |this, cx| {
5253 this.open_buffer_by_id(envelope.payload.id, cx)
5254 })
5255 .await?;
5256 this.update(&mut cx, |this, cx| {
5257 Ok(proto::OpenBufferResponse {
5258 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5259 })
5260 })
5261 }
5262
5263 async fn handle_open_buffer_by_path(
5264 this: ModelHandle<Self>,
5265 envelope: TypedEnvelope<proto::OpenBufferByPath>,
5266 _: Arc<Client>,
5267 mut cx: AsyncAppContext,
5268 ) -> Result<proto::OpenBufferResponse> {
5269 let peer_id = envelope.original_sender_id()?;
5270 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
5271 let open_buffer = this.update(&mut cx, |this, cx| {
5272 this.open_buffer(
5273 ProjectPath {
5274 worktree_id,
5275 path: PathBuf::from(envelope.payload.path).into(),
5276 },
5277 cx,
5278 )
5279 });
5280
5281 let buffer = open_buffer.await?;
5282 this.update(&mut cx, |this, cx| {
5283 Ok(proto::OpenBufferResponse {
5284 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
5285 })
5286 })
5287 }
5288
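    // A `ProjectTransaction` is sent over the wire as two parallel lists: the buffers involved
    // and their corresponding transactions. Deserialization re-associates them by index, waits
    // for the referenced edits to arrive, and optionally pushes each transaction onto the
    // buffer's undo history.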
5289 fn serialize_project_transaction_for_peer(
5290 &mut self,
5291 project_transaction: ProjectTransaction,
5292 peer_id: PeerId,
5293 cx: &AppContext,
5294 ) -> proto::ProjectTransaction {
5295 let mut serialized_transaction = proto::ProjectTransaction {
5296 buffers: Default::default(),
5297 transactions: Default::default(),
5298 };
5299 for (buffer, transaction) in project_transaction.0 {
5300 serialized_transaction
5301 .buffers
5302 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
5303 serialized_transaction
5304 .transactions
5305 .push(language::proto::serialize_transaction(&transaction));
5306 }
5307 serialized_transaction
5308 }
5309
5310 fn deserialize_project_transaction(
5311 &mut self,
5312 message: proto::ProjectTransaction,
5313 push_to_history: bool,
5314 cx: &mut ModelContext<Self>,
5315 ) -> Task<Result<ProjectTransaction>> {
5316 cx.spawn(|this, mut cx| async move {
5317 let mut project_transaction = ProjectTransaction::default();
5318 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
5319 let buffer = this
5320 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
5321 .await?;
5322 let transaction = language::proto::deserialize_transaction(transaction)?;
5323 project_transaction.0.insert(buffer, transaction);
5324 }
5325
5326 for (buffer, transaction) in &project_transaction.0 {
5327 buffer
5328 .update(&mut cx, |buffer, _| {
5329 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
5330 })
5331 .await;
5332
5333 if push_to_history {
5334 buffer.update(&mut cx, |buffer, _| {
5335 buffer.push_transaction(transaction.clone(), Instant::now());
5336 });
5337 }
5338 }
5339
5340 Ok(project_transaction)
5341 })
5342 }
5343
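    // The first time a buffer is sent to a given peer its full state is serialized; afterwards
    // only its id is sent, since `shared_buffers` records which peers already have it.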
5344 fn serialize_buffer_for_peer(
5345 &mut self,
5346 buffer: &ModelHandle<Buffer>,
5347 peer_id: PeerId,
5348 cx: &AppContext,
5349 ) -> proto::Buffer {
5350 let buffer_id = buffer.read(cx).remote_id();
5351 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
5352 if shared_buffers.insert(buffer_id) {
5353 proto::Buffer {
5354 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
5355 }
5356 } else {
5357 proto::Buffer {
5358 variant: Some(proto::buffer::Variant::Id(buffer_id)),
5359 }
5360 }
5361 }
5362
5363 fn deserialize_buffer(
5364 &mut self,
5365 buffer: proto::Buffer,
5366 cx: &mut ModelContext<Self>,
5367 ) -> Task<Result<ModelHandle<Buffer>>> {
5368 let replica_id = self.replica_id();
5369
5370 let opened_buffer_tx = self.opened_buffer.0.clone();
5371 let mut opened_buffer_rx = self.opened_buffer.1.clone();
5372 cx.spawn(|this, mut cx| async move {
5373 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
5374 proto::buffer::Variant::Id(id) => {
5375 let buffer = loop {
5376 let buffer = this.read_with(&cx, |this, cx| {
5377 this.opened_buffers
5378 .get(&id)
5379 .and_then(|buffer| buffer.upgrade(cx))
5380 });
5381 if let Some(buffer) = buffer {
5382 break buffer;
5383 }
5384 opened_buffer_rx
5385 .next()
5386 .await
5387 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
5388 };
5389 Ok(buffer)
5390 }
5391 proto::buffer::Variant::State(mut buffer) => {
5392 let mut buffer_worktree = None;
5393 let mut buffer_file = None;
5394 if let Some(file) = buffer.file.take() {
5395 this.read_with(&cx, |this, cx| {
5396 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5397 let worktree =
5398 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5399 anyhow!("no worktree found for id {}", file.worktree_id)
5400 })?;
5401 buffer_file =
5402 Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
5403 as Arc<dyn language::File>);
5404 buffer_worktree = Some(worktree);
5405 Ok::<_, anyhow::Error>(())
5406 })?;
5407 }
5408
5409 let buffer = cx.add_model(|cx| {
5410 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5411 });
5412
5413 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5414
5415 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5416 Ok(buffer)
5417 }
5418 }
5419 })
5420 }
5421
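    // Note: the symbol kind arrives as a raw integer and is transmuted into `lsp::SymbolKind`
    // below, which assumes the peer sent a valid discriminant. The signature field is verified
    // against `symbol_signature` by callers (see `handle_open_buffer_for_symbol`) before the
    // symbol is trusted.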
5422 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5423 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5424 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5425 let start = serialized_symbol
5426 .start
5427 .ok_or_else(|| anyhow!("invalid start"))?;
5428 let end = serialized_symbol
5429 .end
5430 .ok_or_else(|| anyhow!("invalid end"))?;
5431 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5432 let path = PathBuf::from(serialized_symbol.path);
5433 let language = self.languages.select_language(&path);
5434 Ok(Symbol {
5435 source_worktree_id,
5436 worktree_id,
5437 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5438 label: language
5439 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5440 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5441 name: serialized_symbol.name,
5442 path,
5443 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5444 kind,
5445 signature: serialized_symbol
5446 .signature
5447 .try_into()
5448 .map_err(|_| anyhow!("invalid signature"))?,
5449 })
5450 }
5451
5452 async fn handle_buffer_saved(
5453 this: ModelHandle<Self>,
5454 envelope: TypedEnvelope<proto::BufferSaved>,
5455 _: Arc<Client>,
5456 mut cx: AsyncAppContext,
5457 ) -> Result<()> {
5458 let version = deserialize_version(envelope.payload.version);
5459 let mtime = envelope
5460 .payload
5461 .mtime
5462 .ok_or_else(|| anyhow!("missing mtime"))?
5463 .into();
5464
5465 this.update(&mut cx, |this, cx| {
5466 let buffer = this
5467 .opened_buffers
5468 .get(&envelope.payload.buffer_id)
5469 .and_then(|buffer| buffer.upgrade(cx));
5470 if let Some(buffer) = buffer {
5471 buffer.update(cx, |buffer, cx| {
5472 buffer.did_save(version, envelope.payload.fingerprint, mtime, None, cx);
5473 });
5474 }
5475 Ok(())
5476 })
5477 }
5478
5479 async fn handle_buffer_reloaded(
5480 this: ModelHandle<Self>,
5481 envelope: TypedEnvelope<proto::BufferReloaded>,
5482 _: Arc<Client>,
5483 mut cx: AsyncAppContext,
5484 ) -> Result<()> {
5485 let payload = envelope.payload.clone();
5486 let version = deserialize_version(payload.version);
5487 let mtime = payload
5488 .mtime
5489 .ok_or_else(|| anyhow!("missing mtime"))?
5490 .into();
5491 this.update(&mut cx, |this, cx| {
5492 let buffer = this
5493 .opened_buffers
5494 .get(&payload.buffer_id)
5495 .and_then(|buffer| buffer.upgrade(cx));
5496 if let Some(buffer) = buffer {
5497 buffer.update(cx, |buffer, cx| {
5498 buffer.did_reload(version, payload.fingerprint, mtime, cx);
5499 });
5500 }
5501 Ok(())
5502 })
5503 }
5504
5505 pub fn match_paths<'a>(
5506 &self,
5507 query: &'a str,
5508 include_ignored: bool,
5509 smart_case: bool,
5510 max_results: usize,
5511 cancel_flag: &'a AtomicBool,
5512 cx: &AppContext,
5513 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5514 let worktrees = self
5515 .worktrees(cx)
5516 .filter(|worktree| worktree.read(cx).is_visible())
5517 .collect::<Vec<_>>();
5518 let include_root_name = worktrees.len() > 1;
5519 let candidate_sets = worktrees
5520 .into_iter()
5521 .map(|worktree| CandidateSet {
5522 snapshot: worktree.read(cx).snapshot(),
5523 include_ignored,
5524 include_root_name,
5525 })
5526 .collect::<Vec<_>>();
5527
5528 let background = cx.background().clone();
5529 async move {
5530 fuzzy::match_paths(
5531 candidate_sets.as_slice(),
5532 query,
5533 smart_case,
5534 max_results,
5535 cancel_flag,
5536 background,
5537 )
5538 .await
5539 }
5540 }
5541
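    // Convert a batch of `lsp::TextEdit`s into anchor-based edits against a snapshot pinned to
    // the LSP document version the server was looking at. Adjacent or newline-separated edits
    // are merged, and multiline replacements are diffed line by line so that anchors in
    // unchanged regions keep their positions.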
5542 fn edits_from_lsp(
5543 &mut self,
5544 buffer: &ModelHandle<Buffer>,
5545 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5546 version: Option<i32>,
5547 cx: &mut ModelContext<Self>,
5548 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5549 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5550 cx.background().spawn(async move {
5551 let snapshot = snapshot?;
5552 let mut lsp_edits = lsp_edits
5553 .into_iter()
5554 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5555 .collect::<Vec<_>>();
5556 lsp_edits.sort_by_key(|(range, _)| range.start);
5557
5558 let mut lsp_edits = lsp_edits.into_iter().peekable();
5559 let mut edits = Vec::new();
5560 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5561 // Combine any LSP edits that are adjacent.
5562 //
5563 // Also, combine LSP edits that are separated from each other by only
5564 // a newline. This is important because for some code actions,
5565 // Rust-analyzer rewrites the entire buffer via a series of edits that
5566 // are separated by unchanged newline characters.
5567 //
5568 // In order for the diffing logic below to work properly, any edits that
5569 // cancel each other out must be combined into one.
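                // For example (illustrative): rust-analyzer may rewrite a buffer as a series
                // of single-line replacements that skip over unchanged blank lines. Two such
                // edits separated only by an untouched newline are merged here into one edit
                // spanning both, with that newline re-inserted into the replacement text, so
                // the diff below sees a single contiguous range.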
5570 while let Some((next_range, next_text)) = lsp_edits.peek() {
5571 if next_range.start > range.end {
5572 if next_range.start.row > range.end.row + 1
5573 || next_range.start.column > 0
5574 || snapshot.clip_point_utf16(
5575 PointUtf16::new(range.end.row, u32::MAX),
5576 Bias::Left,
5577 ) > range.end
5578 {
5579 break;
5580 }
5581 new_text.push('\n');
5582 }
5583 range.end = next_range.end;
5584 new_text.push_str(&next_text);
5585 lsp_edits.next();
5586 }
5587
5588 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5589 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5590 {
5591 return Err(anyhow!("invalid edits received from language server"));
5592 }
5593
5594 // For multiline edits, perform a diff of the old and new text so that
5595 // we can identify the changes more precisely, preserving the locations
5596 // of any anchors positioned in the unchanged regions.
5597 if range.end.row > range.start.row {
5598 let mut offset = range.start.to_offset(&snapshot);
5599 let old_text = snapshot.text_for_range(range).collect::<String>();
5600
5601 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5602 let mut moved_since_edit = true;
5603 for change in diff.iter_all_changes() {
5604 let tag = change.tag();
5605 let value = change.value();
5606 match tag {
5607 ChangeTag::Equal => {
5608 offset += value.len();
5609 moved_since_edit = true;
5610 }
5611 ChangeTag::Delete => {
5612 let start = snapshot.anchor_after(offset);
5613 let end = snapshot.anchor_before(offset + value.len());
5614 if moved_since_edit {
5615 edits.push((start..end, String::new()));
5616 } else {
5617 edits.last_mut().unwrap().0.end = end;
5618 }
5619 offset += value.len();
5620 moved_since_edit = false;
5621 }
5622 ChangeTag::Insert => {
5623 if moved_since_edit {
5624 let anchor = snapshot.anchor_after(offset);
5625 edits.push((anchor.clone()..anchor, value.to_string()));
5626 } else {
5627 edits.last_mut().unwrap().1.push_str(value);
5628 }
5629 moved_since_edit = false;
5630 }
5631 }
5632 }
5633 } else if range.end == range.start {
5634 let anchor = snapshot.anchor_after(range.start);
5635 edits.push((anchor.clone()..anchor, new_text));
5636 } else {
5637 let edit_start = snapshot.anchor_after(range.start);
5638 let edit_end = snapshot.anchor_before(range.end);
5639 edits.push((edit_start..edit_end, new_text));
5640 }
5641 }
5642
5643 Ok(edits)
5644 })
5645 }
5646
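// Looks up the text snapshot associated with the given LSP document version,
// discarding retained snapshots that are more than OLD_VERSIONS_TO_RETAIN
// versions older than the requested one. When no version is provided, the
// buffer's current text snapshot is returned.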
5647 fn buffer_snapshot_for_lsp_version(
5648 &mut self,
5649 buffer: &ModelHandle<Buffer>,
5650 version: Option<i32>,
5651 cx: &AppContext,
5652 ) -> Result<TextBufferSnapshot> {
5653 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5654
5655 if let Some(version) = version {
5656 let buffer_id = buffer.read(cx).remote_id();
5657 let snapshots = self
5658 .buffer_snapshots
5659 .get_mut(&buffer_id)
5660 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5661 let mut found_snapshot = None;
5662 snapshots.retain(|(snapshot_version, snapshot)| {
5663 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5664 false
5665 } else {
5666 if *snapshot_version == version {
5667 found_snapshot = Some(snapshot.clone());
5668 }
5669 true
5670 }
5671 });
5672
5673 found_snapshot.ok_or_else(|| {
5674 anyhow!(
5675 "snapshot not found for buffer {} at version {}",
5676 buffer_id,
5677 version
5678 )
5679 })
5680 } else {
5681 Ok(buffer.read(cx).text_snapshot())
5682 }
5683 }
5684
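// Returns the adapter and language server currently running for the given
// buffer's worktree and language, if any. Servers that are still starting
// are not returned.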
5685 fn language_server_for_buffer(
5686 &self,
5687 buffer: &Buffer,
5688 cx: &AppContext,
5689 ) -> Option<(&Arc<dyn LspAdapter>, &Arc<LanguageServer>)> {
5690 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5691 let worktree_id = file.worktree_id(cx);
5692 let key = (worktree_id, language.lsp_adapter()?.name());
5693
5694 if let Some(server_id) = self.language_server_ids.get(&key) {
5695 if let Some(LanguageServerState::Running { adapter, server }) =
5696 self.language_servers.get(server_id)
5697 {
5698 return Some((adapter, server));
5699 }
5700 }
5701 }
5702
5703 None
5704 }
5705}
5706
5707impl ProjectStore {
5708 pub fn new(db: Arc<Db>) -> Self {
5709 Self {
5710 db,
5711 projects: Default::default(),
5712 }
5713 }
5714
5715 pub fn projects<'a>(
5716 &'a self,
5717 cx: &'a AppContext,
5718 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5719 self.projects
5720 .iter()
5721 .filter_map(|project| project.upgrade(cx))
5722 }
5723
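// Registers a project with the store, keeping the list sorted by model id
// (the insertion position is found via binary search).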
5724 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5725 if let Err(ix) = self
5726 .projects
5727 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5728 {
5729 self.projects.insert(ix, project);
5730 }
5731 cx.notify();
5732 }
5733
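// Drops entries for projects whose models have been released, notifying
// observers only if something was actually removed.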
5734 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5735 let mut did_change = false;
5736 self.projects.retain(|project| {
5737 if project.is_upgradable(cx) {
5738 true
5739 } else {
5740 did_change = true;
5741 false
5742 }
5743 });
5744 if did_change {
5745 cx.notify();
5746 }
5747 }
5748}
5749
5750impl WorktreeHandle {
5751 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5752 match self {
5753 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5754 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5755 }
5756 }
5757}
5758
5759impl OpenBuffer {
5760 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5761 match self {
5762 OpenBuffer::Strong(handle) => Some(handle.clone()),
5763 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5764 OpenBuffer::Loading(_) => None,
5765 }
5766 }
5767}
5768
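// Adapts a worktree snapshot to the fuzzy matcher's `PathMatchCandidateSet`
// interface so path searches can run over the worktree's files.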
5769struct CandidateSet {
5770 snapshot: Snapshot,
5771 include_ignored: bool,
5772 include_root_name: bool,
5773}
5774
5775impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5776 type Candidates = CandidateSetIter<'a>;
5777
5778 fn id(&self) -> usize {
5779 self.snapshot.id().to_usize()
5780 }
5781
5782 fn len(&self) -> usize {
5783 if self.include_ignored {
5784 self.snapshot.file_count()
5785 } else {
5786 self.snapshot.visible_file_count()
5787 }
5788 }
5789
5790 fn prefix(&self) -> Arc<str> {
5791 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5792 self.snapshot.root_name().into()
5793 } else if self.include_root_name {
5794 format!("{}/", self.snapshot.root_name()).into()
5795 } else {
5796 "".into()
5797 }
5798 }
5799
5800 fn candidates(&'a self, start: usize) -> Self::Candidates {
5801 CandidateSetIter {
5802 traversal: self.snapshot.files(self.include_ignored, start),
5803 }
5804 }
5805}
5806
5807struct CandidateSetIter<'a> {
5808 traversal: Traversal<'a>,
5809}
5810
5811impl<'a> Iterator for CandidateSetIter<'a> {
5812 type Item = PathMatchCandidate<'a>;
5813
5814 fn next(&mut self) -> Option<Self::Item> {
5815 self.traversal.next().map(|entry| {
5816 if let EntryKind::File(char_bag) = entry.kind {
5817 PathMatchCandidate {
5818 path: &entry.path,
5819 char_bag,
5820 }
5821 } else {
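// The traversal is expected to yield only file entries, so any other
// entry kind here indicates a bug.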
5822 unreachable!()
5823 }
5824 })
5825 }
5826}
5827
5828impl Entity for ProjectStore {
5829 type Event = ();
5830}
5831
5832impl Entity for Project {
5833 type Event = Event;
5834
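// When the project model is released, prune it from the project store and
// tell the server either to unregister it (if this instance registered it)
// or that we are leaving it (if it was a remote project).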
5835 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5836 self.project_store.update(cx, ProjectStore::prune_projects);
5837
5838 match &self.client_state {
5839 ProjectClientState::Local { remote_id_rx, .. } => {
5840 if let Some(project_id) = *remote_id_rx.borrow() {
5841 self.client
5842 .send(proto::UnregisterProject { project_id })
5843 .log_err();
5844 }
5845 }
5846 ProjectClientState::Remote { remote_id, .. } => {
5847 self.client
5848 .send(proto::LeaveProject {
5849 project_id: *remote_id,
5850 })
5851 .log_err();
5852 }
5853 }
5854 }
5855
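// Before the app quits, ask every language server (running or still
// starting) to shut down, and wait for all of the shutdowns to complete.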
5856 fn app_will_quit(
5857 &mut self,
5858 _: &mut MutableAppContext,
5859 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5860 let shutdown_futures = self
5861 .language_servers
5862 .drain()
5863 .map(|(_, server_state)| async {
5864 match server_state {
5865 LanguageServerState::Running { server, .. } => server.shutdown()?.await,
5866 LanguageServerState::Starting(starting_server) => {
5867 starting_server.await?.shutdown()?.await
5868 }
5869 }
5870 })
5871 .collect::<Vec<_>>();
5872
5873 Some(
5874 async move {
5875 futures::future::join_all(shutdown_futures).await;
5876 }
5877 .boxed(),
5878 )
5879 }
5880}
5881
5882impl Collaborator {
5883 fn from_proto(
5884 message: proto::Collaborator,
5885 user_store: &ModelHandle<UserStore>,
5886 cx: &mut AsyncAppContext,
5887 ) -> impl Future<Output = Result<Self>> {
5888 let user = user_store.update(cx, |user_store, cx| {
5889 user_store.fetch_user(message.user_id, cx)
5890 });
5891
5892 async move {
5893 Ok(Self {
5894 peer_id: PeerId(message.peer_id),
5895 user: user.await?,
5896 replica_id: message.replica_id as ReplicaId,
5897 })
5898 }
5899 }
5900}
5901
5902impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5903 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5904 Self {
5905 worktree_id,
5906 path: path.as_ref().into(),
5907 }
5908 }
5909}
5910
5911impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5912 fn from(options: lsp::CreateFileOptions) -> Self {
5913 Self {
5914 overwrite: options.overwrite.unwrap_or(false),
5915 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5916 }
5917 }
5918}
5919
5920impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5921 fn from(options: lsp::RenameFileOptions) -> Self {
5922 Self {
5923 overwrite: options.overwrite.unwrap_or(false),
5924 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5925 }
5926 }
5927}
5928
5929impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5930 fn from(options: lsp::DeleteFileOptions) -> Self {
5931 Self {
5932 recursive: options.recursive.unwrap_or(false),
5933 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5934 }
5935 }
5936}
5937
5938fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5939 proto::Symbol {
5940 source_worktree_id: symbol.source_worktree_id.to_proto(),
5941 worktree_id: symbol.worktree_id.to_proto(),
5942 language_server_name: symbol.language_server_name.0.to_string(),
5943 name: symbol.name.clone(),
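// This transmute assumes `lsp::SymbolKind` and the protobuf `kind` field
// share the same underlying numeric representation.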
5944 kind: unsafe { mem::transmute(symbol.kind) },
5945 path: symbol.path.to_string_lossy().to_string(),
5946 start: Some(proto::Point {
5947 row: symbol.range.start.row,
5948 column: symbol.range.start.column,
5949 }),
5950 end: Some(proto::Point {
5951 row: symbol.range.end.row,
5952 column: symbol.range.end.column,
5953 }),
5954 signature: symbol.signature.to_vec(),
5955 }
5956}
5957
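// Computes the path to `path` relative to `base` by stripping their shared
// prefix and backing out of the remaining `base` components, e.g.
// relativizing `/a/b/c` against a base of `/a/d` yields `../b/c`.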
5958fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5959 let mut path_components = path.components();
5960 let mut base_components = base.components();
5961 let mut components: Vec<Component> = Vec::new();
5962 loop {
5963 match (path_components.next(), base_components.next()) {
5964 (None, None) => break,
5965 (Some(a), None) => {
5966 components.push(a);
5967 components.extend(path_components.by_ref());
5968 break;
5969 }
5970 (None, _) => components.push(Component::ParentDir),
5971 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5972 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5973 (Some(a), Some(_)) => {
5974 components.push(Component::ParentDir);
5975 for _ in base_components {
5976 components.push(Component::ParentDir);
5977 }
5978 components.push(a);
5979 components.extend(path_components.by_ref());
5980 break;
5981 }
5982 }
5983 }
5984 components.iter().map(|c| c.as_os_str()).collect()
5985}
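// A minimal sketch of the behavior described above, kept separate from the
// gpui-based tests below since it needs no app context. The concrete paths
// are illustrative assumptions, not taken from elsewhere in this file.
#[cfg(test)]
mod relativize_path_tests {
    use super::relativize_path;
    use std::path::{Path, PathBuf};

    #[test]
    fn relativizes_against_sibling_and_ancestor_bases() {
        // Shared prefix `/a`, then back out of `d` and descend into `b/c`.
        assert_eq!(
            relativize_path(Path::new("/a/d"), Path::new("/a/b/c")),
            PathBuf::from("../b/c")
        );
        // `base` is an ancestor of `path`, so only the remainder is kept.
        assert_eq!(
            relativize_path(Path::new("/a/b"), Path::new("/a/b/c")),
            PathBuf::from("c")
        );
    }
}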
5986
5987impl Item for Buffer {
5988 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5989 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5990 }
5991}
5992
5993#[cfg(test)]
5994mod tests {
5995 use crate::worktree::WorktreeHandle;
5996
5997 use super::{Event, *};
5998 use fs::RealFs;
5999 use futures::{future, StreamExt};
6000 use gpui::{executor::Deterministic, test::subscribe};
6001 use language::{
6002 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
6003 OffsetRangeExt, Point, ToPoint,
6004 };
6005 use lsp::Url;
6006 use serde_json::json;
6007 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
6008 use unindent::Unindent as _;
6009 use util::{assert_set_eq, test::temp_tree};
6010
6011 #[gpui::test]
6012 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
6013 let dir = temp_tree(json!({
6014 "root": {
6015 "apple": "",
6016 "banana": {
6017 "carrot": {
6018 "date": "",
6019 "endive": "",
6020 }
6021 },
6022 "fennel": {
6023 "grape": "",
6024 }
6025 }
6026 }));
6027
6028 let root_link_path = dir.path().join("root_link");
6029 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
6030 unix::fs::symlink(
6031 &dir.path().join("root/fennel"),
6032 &dir.path().join("root/finnochio"),
6033 )
6034 .unwrap();
6035
6036 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
6037
6038 project.read_with(cx, |project, cx| {
6039 let tree = project.worktrees(cx).next().unwrap().read(cx);
6040 assert_eq!(tree.file_count(), 5);
6041 assert_eq!(
6042 tree.inode_for_path("fennel/grape"),
6043 tree.inode_for_path("finnochio/grape")
6044 );
6045 });
6046
6047 let cancel_flag = Default::default();
6048 let results = project
6049 .read_with(cx, |project, cx| {
6050 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
6051 })
6052 .await;
6053 assert_eq!(
6054 results
6055 .into_iter()
6056 .map(|result| result.path)
6057 .collect::<Vec<Arc<Path>>>(),
6058 vec![
6059 PathBuf::from("banana/carrot/date").into(),
6060 PathBuf::from("banana/carrot/endive").into(),
6061 ]
6062 );
6063 }
6064
6065 #[gpui::test]
6066 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
6067 cx.foreground().forbid_parking();
6068
6069 let mut rust_language = Language::new(
6070 LanguageConfig {
6071 name: "Rust".into(),
6072 path_suffixes: vec!["rs".to_string()],
6073 ..Default::default()
6074 },
6075 Some(tree_sitter_rust::language()),
6076 );
6077 let mut json_language = Language::new(
6078 LanguageConfig {
6079 name: "JSON".into(),
6080 path_suffixes: vec!["json".to_string()],
6081 ..Default::default()
6082 },
6083 None,
6084 );
6085 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
6086 name: "the-rust-language-server",
6087 capabilities: lsp::ServerCapabilities {
6088 completion_provider: Some(lsp::CompletionOptions {
6089 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
6090 ..Default::default()
6091 }),
6092 ..Default::default()
6093 },
6094 ..Default::default()
6095 });
6096 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
6097 name: "the-json-language-server",
6098 capabilities: lsp::ServerCapabilities {
6099 completion_provider: Some(lsp::CompletionOptions {
6100 trigger_characters: Some(vec![":".to_string()]),
6101 ..Default::default()
6102 }),
6103 ..Default::default()
6104 },
6105 ..Default::default()
6106 });
6107
6108 let fs = FakeFs::new(cx.background());
6109 fs.insert_tree(
6110 "/the-root",
6111 json!({
6112 "test.rs": "const A: i32 = 1;",
6113 "test2.rs": "",
6114 "Cargo.toml": "a = 1",
6115 "package.json": "{\"a\": 1}",
6116 }),
6117 )
6118 .await;
6119
6120 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
6121 project.update(cx, |project, _| {
6122 project.languages.add(Arc::new(rust_language));
6123 project.languages.add(Arc::new(json_language));
6124 });
6125
6126 // Open a buffer without an associated language server.
6127 let toml_buffer = project
6128 .update(cx, |project, cx| {
6129 project.open_local_buffer("/the-root/Cargo.toml", cx)
6130 })
6131 .await
6132 .unwrap();
6133
6134 // Open a buffer with an associated language server.
6135 let rust_buffer = project
6136 .update(cx, |project, cx| {
6137 project.open_local_buffer("/the-root/test.rs", cx)
6138 })
6139 .await
6140 .unwrap();
6141
6142 // A server is started up, and it is notified about Rust files.
6143 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6144 assert_eq!(
6145 fake_rust_server
6146 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6147 .await
6148 .text_document,
6149 lsp::TextDocumentItem {
6150 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6151 version: 0,
6152 text: "const A: i32 = 1;".to_string(),
6153 language_id: Default::default()
6154 }
6155 );
6156
6157 // The buffer is configured based on the language server's capabilities.
6158 rust_buffer.read_with(cx, |buffer, _| {
6159 assert_eq!(
6160 buffer.completion_triggers(),
6161 &[".".to_string(), "::".to_string()]
6162 );
6163 });
6164 toml_buffer.read_with(cx, |buffer, _| {
6165 assert!(buffer.completion_triggers().is_empty());
6166 });
6167
6168 // Edit a buffer. The changes are reported to the language server.
6169 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
6170 assert_eq!(
6171 fake_rust_server
6172 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6173 .await
6174 .text_document,
6175 lsp::VersionedTextDocumentIdentifier::new(
6176 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6177 1
6178 )
6179 );
6180
6181 // Open a third buffer with a different associated language server.
6182 let json_buffer = project
6183 .update(cx, |project, cx| {
6184 project.open_local_buffer("/the-root/package.json", cx)
6185 })
6186 .await
6187 .unwrap();
6188
6189 // A json language server is started up and is only notified about the json buffer.
6190 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6191 assert_eq!(
6192 fake_json_server
6193 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6194 .await
6195 .text_document,
6196 lsp::TextDocumentItem {
6197 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6198 version: 0,
6199 text: "{\"a\": 1}".to_string(),
6200 language_id: Default::default()
6201 }
6202 );
6203
6204 // This buffer is configured based on the second language server's
6205 // capabilities.
6206 json_buffer.read_with(cx, |buffer, _| {
6207 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
6208 });
6209
6210 // When opening another buffer whose language server is already running,
6211 // it is also configured based on the existing language server's capabilities.
6212 let rust_buffer2 = project
6213 .update(cx, |project, cx| {
6214 project.open_local_buffer("/the-root/test2.rs", cx)
6215 })
6216 .await
6217 .unwrap();
6218 rust_buffer2.read_with(cx, |buffer, _| {
6219 assert_eq!(
6220 buffer.completion_triggers(),
6221 &[".".to_string(), "::".to_string()]
6222 );
6223 });
6224
6225 // Changes are reported only to servers matching the buffer's language.
6226 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
6227 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
6228 assert_eq!(
6229 fake_rust_server
6230 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6231 .await
6232 .text_document,
6233 lsp::VersionedTextDocumentIdentifier::new(
6234 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
6235 1
6236 )
6237 );
6238
6239 // Save notifications are reported to all servers.
6240 toml_buffer
6241 .update(cx, |buffer, cx| buffer.save(cx))
6242 .await
6243 .unwrap();
6244 assert_eq!(
6245 fake_rust_server
6246 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6247 .await
6248 .text_document,
6249 lsp::TextDocumentIdentifier::new(
6250 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6251 )
6252 );
6253 assert_eq!(
6254 fake_json_server
6255 .receive_notification::<lsp::notification::DidSaveTextDocument>()
6256 .await
6257 .text_document,
6258 lsp::TextDocumentIdentifier::new(
6259 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
6260 )
6261 );
6262
6263 // Renames are reported only to servers matching the buffer's language.
6264 fs.rename(
6265 Path::new("/the-root/test2.rs"),
6266 Path::new("/the-root/test3.rs"),
6267 Default::default(),
6268 )
6269 .await
6270 .unwrap();
6271 assert_eq!(
6272 fake_rust_server
6273 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6274 .await
6275 .text_document,
6276 lsp::TextDocumentIdentifier::new(
6277 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
6278 ),
6279 );
6280 assert_eq!(
6281 fake_rust_server
6282 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6283 .await
6284 .text_document,
6285 lsp::TextDocumentItem {
6286 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6287 version: 0,
6288 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6289 language_id: Default::default()
6290 },
6291 );
6292
6293 rust_buffer2.update(cx, |buffer, cx| {
6294 buffer.update_diagnostics(
6295 DiagnosticSet::from_sorted_entries(
6296 vec![DiagnosticEntry {
6297 diagnostic: Default::default(),
6298 range: Anchor::MIN..Anchor::MAX,
6299 }],
6300 &buffer.snapshot(),
6301 ),
6302 cx,
6303 );
6304 assert_eq!(
6305 buffer
6306 .snapshot()
6307 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6308 .count(),
6309 1
6310 );
6311 });
6312
6313 // When the rename changes the extension of the file, the buffer gets closed on the old
6314 // language server and gets opened on the new one.
6315 fs.rename(
6316 Path::new("/the-root/test3.rs"),
6317 Path::new("/the-root/test3.json"),
6318 Default::default(),
6319 )
6320 .await
6321 .unwrap();
6322 assert_eq!(
6323 fake_rust_server
6324 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6325 .await
6326 .text_document,
6327 lsp::TextDocumentIdentifier::new(
6328 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
6329 ),
6330 );
6331 assert_eq!(
6332 fake_json_server
6333 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6334 .await
6335 .text_document,
6336 lsp::TextDocumentItem {
6337 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6338 version: 0,
6339 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6340 language_id: Default::default()
6341 },
6342 );
6343
6344 // We clear the diagnostics, since the language has changed.
6345 rust_buffer2.read_with(cx, |buffer, _| {
6346 assert_eq!(
6347 buffer
6348 .snapshot()
6349 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
6350 .count(),
6351 0
6352 );
6353 });
6354
6355 // The renamed file's version resets after changing language server.
6356 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
6357 assert_eq!(
6358 fake_json_server
6359 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6360 .await
6361 .text_document,
6362 lsp::VersionedTextDocumentIdentifier::new(
6363 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6364 1
6365 )
6366 );
6367
6368 // Restart language servers
6369 project.update(cx, |project, cx| {
6370 project.restart_language_servers_for_buffers(
6371 vec![rust_buffer.clone(), json_buffer.clone()],
6372 cx,
6373 );
6374 });
6375
6376 let mut rust_shutdown_requests = fake_rust_server
6377 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6378 let mut json_shutdown_requests = fake_json_server
6379 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
6380 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
6381
6382 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
6383 let mut fake_json_server = fake_json_servers.next().await.unwrap();
6384
6385 // Ensure rust document is reopened in new rust language server
6386 assert_eq!(
6387 fake_rust_server
6388 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6389 .await
6390 .text_document,
6391 lsp::TextDocumentItem {
6392 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
6393 version: 1,
6394 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
6395 language_id: Default::default()
6396 }
6397 );
6398
6399 // Ensure json documents are reopened in new json language server
6400 assert_set_eq!(
6401 [
6402 fake_json_server
6403 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6404 .await
6405 .text_document,
6406 fake_json_server
6407 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6408 .await
6409 .text_document,
6410 ],
6411 [
6412 lsp::TextDocumentItem {
6413 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6414 version: 0,
6415 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6416 language_id: Default::default()
6417 },
6418 lsp::TextDocumentItem {
6419 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6420 version: 1,
6421 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6422 language_id: Default::default()
6423 }
6424 ]
6425 );
6426
6427 // Close notifications are reported only to servers matching the buffer's language.
6428 cx.update(|_| drop(json_buffer));
6429 let close_message = lsp::DidCloseTextDocumentParams {
6430 text_document: lsp::TextDocumentIdentifier::new(
6431 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6432 ),
6433 };
6434 assert_eq!(
6435 fake_json_server
6436 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6437 .await,
6438 close_message,
6439 );
6440 }
6441
6442 #[gpui::test]
6443 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6444 cx.foreground().forbid_parking();
6445
6446 let fs = FakeFs::new(cx.background());
6447 fs.insert_tree(
6448 "/dir",
6449 json!({
6450 "a.rs": "let a = 1;",
6451 "b.rs": "let b = 2;"
6452 }),
6453 )
6454 .await;
6455
6456 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6457
6458 let buffer_a = project
6459 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6460 .await
6461 .unwrap();
6462 let buffer_b = project
6463 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6464 .await
6465 .unwrap();
6466
6467 project.update(cx, |project, cx| {
6468 project
6469 .update_diagnostics(
6470 0,
6471 lsp::PublishDiagnosticsParams {
6472 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6473 version: None,
6474 diagnostics: vec![lsp::Diagnostic {
6475 range: lsp::Range::new(
6476 lsp::Position::new(0, 4),
6477 lsp::Position::new(0, 5),
6478 ),
6479 severity: Some(lsp::DiagnosticSeverity::ERROR),
6480 message: "error 1".to_string(),
6481 ..Default::default()
6482 }],
6483 },
6484 &[],
6485 cx,
6486 )
6487 .unwrap();
6488 project
6489 .update_diagnostics(
6490 0,
6491 lsp::PublishDiagnosticsParams {
6492 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6493 version: None,
6494 diagnostics: vec![lsp::Diagnostic {
6495 range: lsp::Range::new(
6496 lsp::Position::new(0, 4),
6497 lsp::Position::new(0, 5),
6498 ),
6499 severity: Some(lsp::DiagnosticSeverity::WARNING),
6500 message: "error 2".to_string(),
6501 ..Default::default()
6502 }],
6503 },
6504 &[],
6505 cx,
6506 )
6507 .unwrap();
6508 });
6509
6510 buffer_a.read_with(cx, |buffer, _| {
6511 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6512 assert_eq!(
6513 chunks
6514 .iter()
6515 .map(|(s, d)| (s.as_str(), *d))
6516 .collect::<Vec<_>>(),
6517 &[
6518 ("let ", None),
6519 ("a", Some(DiagnosticSeverity::ERROR)),
6520 (" = 1;", None),
6521 ]
6522 );
6523 });
6524 buffer_b.read_with(cx, |buffer, _| {
6525 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6526 assert_eq!(
6527 chunks
6528 .iter()
6529 .map(|(s, d)| (s.as_str(), *d))
6530 .collect::<Vec<_>>(),
6531 &[
6532 ("let ", None),
6533 ("b", Some(DiagnosticSeverity::WARNING)),
6534 (" = 2;", None),
6535 ]
6536 );
6537 });
6538 }
6539
6540 #[gpui::test]
6541 async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6542 cx.foreground().forbid_parking();
6543
6544 let fs = FakeFs::new(cx.background());
6545 fs.insert_tree(
6546 "/root",
6547 json!({
6548 "dir": {
6549 "a.rs": "let a = 1;",
6550 },
6551 "other.rs": "let b = c;"
6552 }),
6553 )
6554 .await;
6555
6556 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
6557
6558 let (worktree, _) = project
6559 .update(cx, |project, cx| {
6560 project.find_or_create_local_worktree("/root/other.rs", false, cx)
6561 })
6562 .await
6563 .unwrap();
6564 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6565
6566 project.update(cx, |project, cx| {
6567 project
6568 .update_diagnostics(
6569 0,
6570 lsp::PublishDiagnosticsParams {
6571 uri: Url::from_file_path("/root/other.rs").unwrap(),
6572 version: None,
6573 diagnostics: vec![lsp::Diagnostic {
6574 range: lsp::Range::new(
6575 lsp::Position::new(0, 8),
6576 lsp::Position::new(0, 9),
6577 ),
6578 severity: Some(lsp::DiagnosticSeverity::ERROR),
6579 message: "unknown variable 'c'".to_string(),
6580 ..Default::default()
6581 }],
6582 },
6583 &[],
6584 cx,
6585 )
6586 .unwrap();
6587 });
6588
6589 let buffer = project
6590 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
6591 .await
6592 .unwrap();
6593 buffer.read_with(cx, |buffer, _| {
6594 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6595 assert_eq!(
6596 chunks
6597 .iter()
6598 .map(|(s, d)| (s.as_str(), *d))
6599 .collect::<Vec<_>>(),
6600 &[
6601 ("let b = ", None),
6602 ("c", Some(DiagnosticSeverity::ERROR)),
6603 (";", None),
6604 ]
6605 );
6606 });
6607
6608 project.read_with(cx, |project, cx| {
6609 assert_eq!(project.diagnostic_summaries(cx).next(), None);
6610 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
6611 });
6612 }
6613
6614 #[gpui::test]
6615 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6616 cx.foreground().forbid_parking();
6617
6618 let progress_token = "the-progress-token";
6619 let mut language = Language::new(
6620 LanguageConfig {
6621 name: "Rust".into(),
6622 path_suffixes: vec!["rs".to_string()],
6623 ..Default::default()
6624 },
6625 Some(tree_sitter_rust::language()),
6626 );
6627 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6628 disk_based_diagnostics_progress_token: Some(progress_token),
6629 disk_based_diagnostics_sources: &["disk"],
6630 ..Default::default()
6631 });
6632
6633 let fs = FakeFs::new(cx.background());
6634 fs.insert_tree(
6635 "/dir",
6636 json!({
6637 "a.rs": "fn a() { A }",
6638 "b.rs": "const y: i32 = 1",
6639 }),
6640 )
6641 .await;
6642
6643 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6644 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6645 let worktree_id =
6646 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6647
6648 // Cause worktree to start the fake language server
6649 let _buffer = project
6650 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6651 .await
6652 .unwrap();
6653
6654 let mut events = subscribe(&project, cx);
6655
6656 let fake_server = fake_servers.next().await.unwrap();
6657 fake_server.start_progress(progress_token).await;
6658 assert_eq!(
6659 events.next().await.unwrap(),
6660 Event::DiskBasedDiagnosticsStarted {
6661 language_server_id: 0,
6662 }
6663 );
6664
6665 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6666 lsp::PublishDiagnosticsParams {
6667 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6668 version: None,
6669 diagnostics: vec![lsp::Diagnostic {
6670 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6671 severity: Some(lsp::DiagnosticSeverity::ERROR),
6672 message: "undefined variable 'A'".to_string(),
6673 ..Default::default()
6674 }],
6675 },
6676 );
6677 assert_eq!(
6678 events.next().await.unwrap(),
6679 Event::DiagnosticsUpdated {
6680 language_server_id: 0,
6681 path: (worktree_id, Path::new("a.rs")).into()
6682 }
6683 );
6684
6685 fake_server.end_progress(progress_token);
6686 assert_eq!(
6687 events.next().await.unwrap(),
6688 Event::DiskBasedDiagnosticsFinished {
6689 language_server_id: 0
6690 }
6691 );
6692
6693 let buffer = project
6694 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6695 .await
6696 .unwrap();
6697
6698 buffer.read_with(cx, |buffer, _| {
6699 let snapshot = buffer.snapshot();
6700 let diagnostics = snapshot
6701 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6702 .collect::<Vec<_>>();
6703 assert_eq!(
6704 diagnostics,
6705 &[DiagnosticEntry {
6706 range: Point::new(0, 9)..Point::new(0, 10),
6707 diagnostic: Diagnostic {
6708 severity: lsp::DiagnosticSeverity::ERROR,
6709 message: "undefined variable 'A'".to_string(),
6710 group_id: 0,
6711 is_primary: true,
6712 ..Default::default()
6713 }
6714 }]
6715 )
6716 });
6717
6718 // Ensure publishing empty diagnostics twice only results in one update event.
6719 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6720 lsp::PublishDiagnosticsParams {
6721 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6722 version: None,
6723 diagnostics: Default::default(),
6724 },
6725 );
6726 assert_eq!(
6727 events.next().await.unwrap(),
6728 Event::DiagnosticsUpdated {
6729 language_server_id: 0,
6730 path: (worktree_id, Path::new("a.rs")).into()
6731 }
6732 );
6733
6734 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6735 lsp::PublishDiagnosticsParams {
6736 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6737 version: None,
6738 diagnostics: Default::default(),
6739 },
6740 );
6741 cx.foreground().run_until_parked();
6742 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6743 }
6744
6745 #[gpui::test]
6746 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6747 cx.foreground().forbid_parking();
6748
6749 let progress_token = "the-progress-token";
6750 let mut language = Language::new(
6751 LanguageConfig {
6752 path_suffixes: vec!["rs".to_string()],
6753 ..Default::default()
6754 },
6755 None,
6756 );
6757 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6758 disk_based_diagnostics_sources: &["disk"],
6759 disk_based_diagnostics_progress_token: Some(progress_token),
6760 ..Default::default()
6761 });
6762
6763 let fs = FakeFs::new(cx.background());
6764 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6765
6766 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6767 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6768
6769 let buffer = project
6770 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6771 .await
6772 .unwrap();
6773
6774 // Simulate diagnostics starting to update.
6775 let fake_server = fake_servers.next().await.unwrap();
6776 fake_server.start_progress(progress_token).await;
6777
6778 // Restart the server before the diagnostics finish updating.
6779 project.update(cx, |project, cx| {
6780 project.restart_language_servers_for_buffers([buffer], cx);
6781 });
6782 let mut events = subscribe(&project, cx);
6783
6784 // Simulate the newly started server sending more diagnostics.
6785 let fake_server = fake_servers.next().await.unwrap();
6786 fake_server.start_progress(progress_token).await;
6787 assert_eq!(
6788 events.next().await.unwrap(),
6789 Event::DiskBasedDiagnosticsStarted {
6790 language_server_id: 1
6791 }
6792 );
6793 project.read_with(cx, |project, _| {
6794 assert_eq!(
6795 project
6796 .language_servers_running_disk_based_diagnostics()
6797 .collect::<Vec<_>>(),
6798 [1]
6799 );
6800 });
6801
6802 // All diagnostics are considered done, despite the old server's diagnostic
6803 // task never completing.
6804 fake_server.end_progress(progress_token);
6805 assert_eq!(
6806 events.next().await.unwrap(),
6807 Event::DiskBasedDiagnosticsFinished {
6808 language_server_id: 1
6809 }
6810 );
6811 project.read_with(cx, |project, _| {
6812 assert_eq!(
6813 project
6814 .language_servers_running_disk_based_diagnostics()
6815 .collect::<Vec<_>>(),
6816 [0; 0]
6817 );
6818 });
6819 }
6820
6821 #[gpui::test]
6822 async fn test_toggling_enable_language_server(
6823 deterministic: Arc<Deterministic>,
6824 cx: &mut gpui::TestAppContext,
6825 ) {
6826 deterministic.forbid_parking();
6827
6828 let mut rust = Language::new(
6829 LanguageConfig {
6830 name: Arc::from("Rust"),
6831 path_suffixes: vec!["rs".to_string()],
6832 ..Default::default()
6833 },
6834 None,
6835 );
6836 let mut fake_rust_servers = rust.set_fake_lsp_adapter(FakeLspAdapter {
6837 name: "rust-lsp",
6838 ..Default::default()
6839 });
6840 let mut js = Language::new(
6841 LanguageConfig {
6842 name: Arc::from("JavaScript"),
6843 path_suffixes: vec!["js".to_string()],
6844 ..Default::default()
6845 },
6846 None,
6847 );
6848 let mut fake_js_servers = js.set_fake_lsp_adapter(FakeLspAdapter {
6849 name: "js-lsp",
6850 ..Default::default()
6851 });
6852
6853 let fs = FakeFs::new(cx.background());
6854 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
6855 .await;
6856
6857 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6858 project.update(cx, |project, _| {
6859 project.languages.add(Arc::new(rust));
6860 project.languages.add(Arc::new(js));
6861 });
6862
6863 let _rs_buffer = project
6864 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6865 .await
6866 .unwrap();
6867 let _js_buffer = project
6868 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
6869 .await
6870 .unwrap();
6871
6872 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
6873 assert_eq!(
6874 fake_rust_server_1
6875 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6876 .await
6877 .text_document
6878 .uri
6879 .as_str(),
6880 "file:///dir/a.rs"
6881 );
6882
6883 let mut fake_js_server = fake_js_servers.next().await.unwrap();
6884 assert_eq!(
6885 fake_js_server
6886 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6887 .await
6888 .text_document
6889 .uri
6890 .as_str(),
6891 "file:///dir/b.js"
6892 );
6893
6894 // Disable Rust language server, ensuring only that server gets stopped.
6895 cx.update(|cx| {
6896 cx.update_global(|settings: &mut Settings, _| {
6897 settings.language_overrides.insert(
6898 Arc::from("Rust"),
6899 settings::LanguageSettings {
6900 enable_language_server: Some(false),
6901 ..Default::default()
6902 },
6903 );
6904 })
6905 });
6906 fake_rust_server_1
6907 .receive_notification::<lsp::notification::Exit>()
6908 .await;
6909
6910 // Enable Rust and disable JavaScript language servers, ensuring that the
6911 // former gets started again and that the latter stops.
6912 cx.update(|cx| {
6913 cx.update_global(|settings: &mut Settings, _| {
6914 settings.language_overrides.insert(
6915 Arc::from("Rust"),
6916 settings::LanguageSettings {
6917 enable_language_server: Some(true),
6918 ..Default::default()
6919 },
6920 );
6921 settings.language_overrides.insert(
6922 Arc::from("JavaScript"),
6923 settings::LanguageSettings {
6924 enable_language_server: Some(false),
6925 ..Default::default()
6926 },
6927 );
6928 })
6929 });
6930 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
6931 assert_eq!(
6932 fake_rust_server_2
6933 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6934 .await
6935 .text_document
6936 .uri
6937 .as_str(),
6938 "file:///dir/a.rs"
6939 );
6940 fake_js_server
6941 .receive_notification::<lsp::notification::Exit>()
6942 .await;
6943 }
6944
6945 #[gpui::test]
6946 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6947 cx.foreground().forbid_parking();
6948
6949 let mut language = Language::new(
6950 LanguageConfig {
6951 name: "Rust".into(),
6952 path_suffixes: vec!["rs".to_string()],
6953 ..Default::default()
6954 },
6955 Some(tree_sitter_rust::language()),
6956 );
6957 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6958 disk_based_diagnostics_sources: &["disk"],
6959 ..Default::default()
6960 });
6961
6962 let text = "
6963 fn a() { A }
6964 fn b() { BB }
6965 fn c() { CCC }
6966 "
6967 .unindent();
6968
6969 let fs = FakeFs::new(cx.background());
6970 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6971
6972 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6973 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6974
6975 let buffer = project
6976 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6977 .await
6978 .unwrap();
6979
6980 let mut fake_server = fake_servers.next().await.unwrap();
6981 let open_notification = fake_server
6982 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6983 .await;
6984
6985 // Edit the buffer, moving the content down
6986 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6987 let change_notification_1 = fake_server
6988 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6989 .await;
6990 assert!(
6991 change_notification_1.text_document.version > open_notification.text_document.version
6992 );
6993
6994 // Report some diagnostics for the initial version of the buffer
6995 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6996 lsp::PublishDiagnosticsParams {
6997 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6998 version: Some(open_notification.text_document.version),
6999 diagnostics: vec![
7000 lsp::Diagnostic {
7001 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7002 severity: Some(DiagnosticSeverity::ERROR),
7003 message: "undefined variable 'A'".to_string(),
7004 source: Some("disk".to_string()),
7005 ..Default::default()
7006 },
7007 lsp::Diagnostic {
7008 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7009 severity: Some(DiagnosticSeverity::ERROR),
7010 message: "undefined variable 'BB'".to_string(),
7011 source: Some("disk".to_string()),
7012 ..Default::default()
7013 },
7014 lsp::Diagnostic {
7015 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
7016 severity: Some(DiagnosticSeverity::ERROR),
7017 source: Some("disk".to_string()),
7018 message: "undefined variable 'CCC'".to_string(),
7019 ..Default::default()
7020 },
7021 ],
7022 },
7023 );
7024
7025 // The diagnostics have moved down since they were created.
7026 buffer.next_notification(cx).await;
7027 buffer.read_with(cx, |buffer, _| {
7028 assert_eq!(
7029 buffer
7030 .snapshot()
7031 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
7032 .collect::<Vec<_>>(),
7033 &[
7034 DiagnosticEntry {
7035 range: Point::new(3, 9)..Point::new(3, 11),
7036 diagnostic: Diagnostic {
7037 severity: DiagnosticSeverity::ERROR,
7038 message: "undefined variable 'BB'".to_string(),
7039 is_disk_based: true,
7040 group_id: 1,
7041 is_primary: true,
7042 ..Default::default()
7043 },
7044 },
7045 DiagnosticEntry {
7046 range: Point::new(4, 9)..Point::new(4, 12),
7047 diagnostic: Diagnostic {
7048 severity: DiagnosticSeverity::ERROR,
7049 message: "undefined variable 'CCC'".to_string(),
7050 is_disk_based: true,
7051 group_id: 2,
7052 is_primary: true,
7053 ..Default::default()
7054 }
7055 }
7056 ]
7057 );
7058 assert_eq!(
7059 chunks_with_diagnostics(buffer, 0..buffer.len()),
7060 [
7061 ("\n\nfn a() { ".to_string(), None),
7062 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7063 (" }\nfn b() { ".to_string(), None),
7064 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
7065 (" }\nfn c() { ".to_string(), None),
7066 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
7067 (" }\n".to_string(), None),
7068 ]
7069 );
7070 assert_eq!(
7071 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
7072 [
7073 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
7074 (" }\nfn c() { ".to_string(), None),
7075 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
7076 ]
7077 );
7078 });
7079
7080 // Ensure overlapping diagnostics are highlighted correctly.
7081 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7082 lsp::PublishDiagnosticsParams {
7083 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7084 version: Some(open_notification.text_document.version),
7085 diagnostics: vec![
7086 lsp::Diagnostic {
7087 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7088 severity: Some(DiagnosticSeverity::ERROR),
7089 message: "undefined variable 'A'".to_string(),
7090 source: Some("disk".to_string()),
7091 ..Default::default()
7092 },
7093 lsp::Diagnostic {
7094 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
7095 severity: Some(DiagnosticSeverity::WARNING),
7096 message: "unreachable statement".to_string(),
7097 source: Some("disk".to_string()),
7098 ..Default::default()
7099 },
7100 ],
7101 },
7102 );
7103
7104 buffer.next_notification(cx).await;
7105 buffer.read_with(cx, |buffer, _| {
7106 assert_eq!(
7107 buffer
7108 .snapshot()
7109 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
7110 .collect::<Vec<_>>(),
7111 &[
7112 DiagnosticEntry {
7113 range: Point::new(2, 9)..Point::new(2, 12),
7114 diagnostic: Diagnostic {
7115 severity: DiagnosticSeverity::WARNING,
7116 message: "unreachable statement".to_string(),
7117 is_disk_based: true,
7118 group_id: 4,
7119 is_primary: true,
7120 ..Default::default()
7121 }
7122 },
7123 DiagnosticEntry {
7124 range: Point::new(2, 9)..Point::new(2, 10),
7125 diagnostic: Diagnostic {
7126 severity: DiagnosticSeverity::ERROR,
7127 message: "undefined variable 'A'".to_string(),
7128 is_disk_based: true,
7129 group_id: 3,
7130 is_primary: true,
7131 ..Default::default()
7132 },
7133 }
7134 ]
7135 );
7136 assert_eq!(
7137 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
7138 [
7139 ("fn a() { ".to_string(), None),
7140 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
7141 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7142 ("\n".to_string(), None),
7143 ]
7144 );
7145 assert_eq!(
7146 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
7147 [
7148 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
7149 ("\n".to_string(), None),
7150 ]
7151 );
7152 });
7153
7154 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
7155 // changes since the last save.
7156 buffer.update(cx, |buffer, cx| {
7157 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
7158 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
7159 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
7160 });
7161 let change_notification_2 = fake_server
7162 .receive_notification::<lsp::notification::DidChangeTextDocument>()
7163 .await;
7164 assert!(
7165 change_notification_2.text_document.version
7166 > change_notification_1.text_document.version
7167 );
7168
7169 // Handle out-of-order diagnostics
7170 fake_server.notify::<lsp::notification::PublishDiagnostics>(
7171 lsp::PublishDiagnosticsParams {
7172 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7173 version: Some(change_notification_2.text_document.version),
7174 diagnostics: vec![
7175 lsp::Diagnostic {
7176 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
7177 severity: Some(DiagnosticSeverity::ERROR),
7178 message: "undefined variable 'BB'".to_string(),
7179 source: Some("disk".to_string()),
7180 ..Default::default()
7181 },
7182 lsp::Diagnostic {
7183 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7184 severity: Some(DiagnosticSeverity::WARNING),
7185 message: "undefined variable 'A'".to_string(),
7186 source: Some("disk".to_string()),
7187 ..Default::default()
7188 },
7189 ],
7190 },
7191 );
7192
7193 buffer.next_notification(cx).await;
7194 buffer.read_with(cx, |buffer, _| {
7195 assert_eq!(
7196 buffer
7197 .snapshot()
7198 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7199 .collect::<Vec<_>>(),
7200 &[
7201 DiagnosticEntry {
7202 range: Point::new(2, 21)..Point::new(2, 22),
7203 diagnostic: Diagnostic {
7204 severity: DiagnosticSeverity::WARNING,
7205 message: "undefined variable 'A'".to_string(),
7206 is_disk_based: true,
7207 group_id: 6,
7208 is_primary: true,
7209 ..Default::default()
7210 }
7211 },
7212 DiagnosticEntry {
7213 range: Point::new(3, 9)..Point::new(3, 14),
7214 diagnostic: Diagnostic {
7215 severity: DiagnosticSeverity::ERROR,
7216 message: "undefined variable 'BB'".to_string(),
7217 is_disk_based: true,
7218 group_id: 5,
7219 is_primary: true,
7220 ..Default::default()
7221 },
7222 }
7223 ]
7224 );
7225 });
7226 }
7227
7228 #[gpui::test]
7229 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
7230 cx.foreground().forbid_parking();
7231
7232 let text = concat!(
7233 "let one = ;\n", //
7234 "let two = \n",
7235 "let three = 3;\n",
7236 );
7237
7238 let fs = FakeFs::new(cx.background());
7239 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
7240
7241 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7242 let buffer = project
7243 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7244 .await
7245 .unwrap();
7246
7247 project.update(cx, |project, cx| {
7248 project
7249 .update_buffer_diagnostics(
7250 &buffer,
7251 vec![
7252 DiagnosticEntry {
7253 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
7254 diagnostic: Diagnostic {
7255 severity: DiagnosticSeverity::ERROR,
7256 message: "syntax error 1".to_string(),
7257 ..Default::default()
7258 },
7259 },
7260 DiagnosticEntry {
7261 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
7262 diagnostic: Diagnostic {
7263 severity: DiagnosticSeverity::ERROR,
7264 message: "syntax error 2".to_string(),
7265 ..Default::default()
7266 },
7267 },
7268 ],
7269 None,
7270 cx,
7271 )
7272 .unwrap();
7273 });
7274
7275 // An empty range is extended forward to include the following character.
7276 // At the end of a line, an empty range is extended backward to include
7277 // the preceding character.
7278 buffer.read_with(cx, |buffer, _| {
7279 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
7280 assert_eq!(
7281 chunks
7282 .iter()
7283 .map(|(s, d)| (s.as_str(), *d))
7284 .collect::<Vec<_>>(),
7285 &[
7286 ("let one = ", None),
7287 (";", Some(DiagnosticSeverity::ERROR)),
7288 ("\nlet two =", None),
7289 (" ", Some(DiagnosticSeverity::ERROR)),
7290 ("\nlet three = 3;\n", None)
7291 ]
7292 );
7293 });
7294 }
7295
7296 #[gpui::test]
7297 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
7298 cx.foreground().forbid_parking();
7299
7300 let mut language = Language::new(
7301 LanguageConfig {
7302 name: "Rust".into(),
7303 path_suffixes: vec!["rs".to_string()],
7304 ..Default::default()
7305 },
7306 Some(tree_sitter_rust::language()),
7307 );
7308 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7309
7310 let text = "
7311 fn a() {
7312 f1();
7313 }
7314 fn b() {
7315 f2();
7316 }
7317 fn c() {
7318 f3();
7319 }
7320 "
7321 .unindent();
7322
7323 let fs = FakeFs::new(cx.background());
7324 fs.insert_tree(
7325 "/dir",
7326 json!({
7327 "a.rs": text.clone(),
7328 }),
7329 )
7330 .await;
7331
7332 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7333 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7334 let buffer = project
7335 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7336 .await
7337 .unwrap();
7338
7339 let mut fake_server = fake_servers.next().await.unwrap();
7340 let lsp_document_version = fake_server
7341 .receive_notification::<lsp::notification::DidOpenTextDocument>()
7342 .await
7343 .text_document
7344 .version;
7345
7346 // Simulate editing the buffer after the language server computes some edits.
7347 buffer.update(cx, |buffer, cx| {
7348 buffer.edit(
7349 [(
7350 Point::new(0, 0)..Point::new(0, 0),
7351 "// above first function\n",
7352 )],
7353 cx,
7354 );
7355 buffer.edit(
7356 [(
7357 Point::new(2, 0)..Point::new(2, 0),
7358 " // inside first function\n",
7359 )],
7360 cx,
7361 );
7362 buffer.edit(
7363 [(
7364 Point::new(6, 4)..Point::new(6, 4),
7365 "// inside second function ",
7366 )],
7367 cx,
7368 );
7369
7370 assert_eq!(
7371 buffer.text(),
7372 "
7373 // above first function
7374 fn a() {
7375 // inside first function
7376 f1();
7377 }
7378 fn b() {
7379 // inside second function f2();
7380 }
7381 fn c() {
7382 f3();
7383 }
7384 "
7385 .unindent()
7386 );
7387 });
7388
7389 let edits = project
7390 .update(cx, |project, cx| {
7391 project.edits_from_lsp(
7392 &buffer,
7393 vec![
7394 // replace body of first function
7395 lsp::TextEdit {
7396 range: lsp::Range::new(
7397 lsp::Position::new(0, 0),
7398 lsp::Position::new(3, 0),
7399 ),
7400 new_text: "
7401 fn a() {
7402 f10();
7403 }
7404 "
7405 .unindent(),
7406 },
7407 // edit inside second function
7408 lsp::TextEdit {
7409 range: lsp::Range::new(
7410 lsp::Position::new(4, 6),
7411 lsp::Position::new(4, 6),
7412 ),
7413 new_text: "00".into(),
7414 },
7415 // edit inside third function via two distinct edits
7416 lsp::TextEdit {
7417 range: lsp::Range::new(
7418 lsp::Position::new(7, 5),
7419 lsp::Position::new(7, 5),
7420 ),
7421 new_text: "4000".into(),
7422 },
7423 lsp::TextEdit {
7424 range: lsp::Range::new(
7425 lsp::Position::new(7, 5),
7426 lsp::Position::new(7, 6),
7427 ),
7428 new_text: "".into(),
7429 },
7430 ],
7431 Some(lsp_document_version),
7432 cx,
7433 )
7434 })
7435 .await
7436 .unwrap();
7437
7438 buffer.update(cx, |buffer, cx| {
7439 for (range, new_text) in edits {
7440 buffer.edit([(range, new_text)], cx);
7441 }
7442 assert_eq!(
7443 buffer.text(),
7444 "
7445 // above first function
7446 fn a() {
7447 // inside first function
7448 f10();
7449 }
7450 fn b() {
7451 // inside second function f200();
7452 }
7453 fn c() {
7454 f4000();
7455 }
7456 "
7457 .unindent()
7458 );
7459 });
7460 }
7461
7462 #[gpui::test]
7463 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
7464 cx.foreground().forbid_parking();
7465
7466 let text = "
7467 use a::b;
7468 use a::c;
7469
7470 fn f() {
7471 b();
7472 c();
7473 }
7474 "
7475 .unindent();
7476
7477 let fs = FakeFs::new(cx.background());
7478 fs.insert_tree(
7479 "/dir",
7480 json!({
7481 "a.rs": text.clone(),
7482 }),
7483 )
7484 .await;
7485
7486 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7487 let buffer = project
7488 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7489 .await
7490 .unwrap();
7491
7492 // Simulate the language server sending us a small edit in the form of a very large diff.
7493 // Rust-analyzer does this when performing a merge-imports code action.
7494 let edits = project
7495 .update(cx, |project, cx| {
7496 project.edits_from_lsp(
7497 &buffer,
7498 [
7499 // Replace the first use statement without editing the semicolon.
7500 lsp::TextEdit {
7501 range: lsp::Range::new(
7502 lsp::Position::new(0, 4),
7503 lsp::Position::new(0, 8),
7504 ),
7505 new_text: "a::{b, c}".into(),
7506 },
7507 // Reinsert the remainder of the file between the semicolon and the final
7508 // newline of the file.
7509 lsp::TextEdit {
7510 range: lsp::Range::new(
7511 lsp::Position::new(0, 9),
7512 lsp::Position::new(0, 9),
7513 ),
7514 new_text: "\n\n".into(),
7515 },
7516 lsp::TextEdit {
7517 range: lsp::Range::new(
7518 lsp::Position::new(0, 9),
7519 lsp::Position::new(0, 9),
7520 ),
7521 new_text: "
7522 fn f() {
7523 b();
7524 c();
7525 }"
7526 .unindent(),
7527 },
7528 // Delete everything after the first newline of the file.
7529 lsp::TextEdit {
7530 range: lsp::Range::new(
7531 lsp::Position::new(1, 0),
7532 lsp::Position::new(7, 0),
7533 ),
7534 new_text: "".into(),
7535 },
7536 ],
7537 None,
7538 cx,
7539 )
7540 })
7541 .await
7542 .unwrap();
7543
7544 buffer.update(cx, |buffer, cx| {
7545 let edits = edits
7546 .into_iter()
7547 .map(|(range, text)| {
7548 (
7549 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7550 text,
7551 )
7552 })
7553 .collect::<Vec<_>>();
7554
7555 assert_eq!(
7556 edits,
7557 [
7558 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7559 (Point::new(1, 0)..Point::new(2, 0), "".into())
7560 ]
7561 );
7562
7563 for (range, new_text) in edits {
7564 buffer.edit([(range, new_text)], cx);
7565 }
7566 assert_eq!(
7567 buffer.text(),
7568 "
7569 use a::{b, c};
7570
7571 fn f() {
7572 b();
7573 c();
7574 }
7575 "
7576 .unindent()
7577 );
7578 });
7579 }
7580
7581 #[gpui::test]
7582 async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
7583 cx.foreground().forbid_parking();
7584
7585 let text = "
7586 use a::b;
7587 use a::c;
7588
7589 fn f() {
7590 b();
7591 c();
7592 }
7593 "
7594 .unindent();
7595
7596 let fs = FakeFs::new(cx.background());
7597 fs.insert_tree(
7598 "/dir",
7599 json!({
7600 "a.rs": text.clone(),
7601 }),
7602 )
7603 .await;
7604
7605 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7606 let buffer = project
7607 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
7608 .await
7609 .unwrap();
7610
7611 // Simulate the language server sending us edits in a non-ordered fashion,
7612 // with ranges sometimes being inverted.
7613 let edits = project
7614 .update(cx, |project, cx| {
7615 project.edits_from_lsp(
7616 &buffer,
7617 [
7618 lsp::TextEdit {
7619 range: lsp::Range::new(
7620 lsp::Position::new(0, 9),
7621 lsp::Position::new(0, 9),
7622 ),
7623 new_text: "\n\n".into(),
7624 },
7625 lsp::TextEdit {
7626 range: lsp::Range::new(
7627 lsp::Position::new(0, 8),
7628 lsp::Position::new(0, 4),
7629 ),
7630 new_text: "a::{b, c}".into(),
7631 },
7632 lsp::TextEdit {
7633 range: lsp::Range::new(
7634 lsp::Position::new(1, 0),
7635 lsp::Position::new(7, 0),
7636 ),
7637 new_text: "".into(),
7638 },
7639 lsp::TextEdit {
7640 range: lsp::Range::new(
7641 lsp::Position::new(0, 9),
7642 lsp::Position::new(0, 9),
7643 ),
7644 new_text: "
7645 fn f() {
7646 b();
7647 c();
7648 }"
7649 .unindent(),
7650 },
7651 ],
7652 None,
7653 cx,
7654 )
7655 })
7656 .await
7657 .unwrap();
7658
7659 buffer.update(cx, |buffer, cx| {
7660 let edits = edits
7661 .into_iter()
7662 .map(|(range, text)| {
7663 (
7664 range.start.to_point(&buffer)..range.end.to_point(&buffer),
7665 text,
7666 )
7667 })
7668 .collect::<Vec<_>>();
7669
7670 assert_eq!(
7671 edits,
7672 [
7673 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
7674 (Point::new(1, 0)..Point::new(2, 0), "".into())
7675 ]
7676 );
7677
7678 for (range, new_text) in edits {
7679 buffer.edit([(range, new_text)], cx);
7680 }
7681 assert_eq!(
7682 buffer.text(),
7683 "
7684 use a::{b, c};
7685
7686 fn f() {
7687 b();
7688 c();
7689 }
7690 "
7691 .unindent()
7692 );
7693 });
7694 }
7695
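// Test helper: collects the buffer's highlighted chunks over `range`,
// merging consecutive chunks that share the same diagnostic severity.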
7696 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
7697 buffer: &Buffer,
7698 range: Range<T>,
7699 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
7700 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
7701 for chunk in buffer.snapshot().chunks(range, true) {
7702 if chunks.last().map_or(false, |prev_chunk| {
7703 prev_chunk.1 == chunk.diagnostic_severity
7704 }) {
7705 chunks.last_mut().unwrap().0.push_str(chunk.text);
7706 } else {
7707 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
7708 }
7709 }
7710 chunks
7711 }
7712
7713 #[gpui::test]
7714 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
7715 let dir = temp_tree(json!({
7716 "root": {
7717 "dir1": {},
7718 "dir2": {
7719 "dir3": {}
7720 }
7721 }
7722 }));
7723
7724 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7725 let cancel_flag = Default::default();
7726 let results = project
7727 .read_with(cx, |project, cx| {
7728 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
7729 })
7730 .await;
7731
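        // The worktree contains only directories, so a path search yields no results.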
7732 assert!(results.is_empty());
7733 }
7734
7735 #[gpui::test(iterations = 10)]
7736 async fn test_definition(cx: &mut gpui::TestAppContext) {
7737 let mut language = Language::new(
7738 LanguageConfig {
7739 name: "Rust".into(),
7740 path_suffixes: vec!["rs".to_string()],
7741 ..Default::default()
7742 },
7743 Some(tree_sitter_rust::language()),
7744 );
7745 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7746
7747 let fs = FakeFs::new(cx.background());
7748 fs.insert_tree(
7749 "/dir",
7750 json!({
7751 "a.rs": "const fn a() { A }",
7752 "b.rs": "const y: i32 = crate::a()",
7753 }),
7754 )
7755 .await;
7756
7757 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7758 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7759
7760 let buffer = project
7761 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7762 .await
7763 .unwrap();
7764
7765 let fake_server = fake_servers.next().await.unwrap();
7766 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7767 let params = params.text_document_position_params;
7768 assert_eq!(
7769 params.text_document.uri.to_file_path().unwrap(),
7770 Path::new("/dir/b.rs"),
7771 );
7772 assert_eq!(params.position, lsp::Position::new(0, 22));
7773
7774 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7775 lsp::Location::new(
7776 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7777 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7778 ),
7779 )))
7780 });
7781
7782 let mut definitions = project
7783 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7784 .await
7785 .unwrap();
7786
        // Assert that no additional language server was started to handle the definition's buffer.
7788 cx.foreground().run_until_parked();
7789 assert!(fake_servers.try_next().is_err());
7790
7791 assert_eq!(definitions.len(), 1);
7792 let definition = definitions.pop().unwrap();
7793 cx.update(|cx| {
7794 let target_buffer = definition.target.buffer.read(cx);
7795 assert_eq!(
7796 target_buffer
7797 .file()
7798 .unwrap()
7799 .as_local()
7800 .unwrap()
7801 .abs_path(cx),
7802 Path::new("/dir/a.rs"),
7803 );
7804 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
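            // Opening the definition's buffer added a second, invisible worktree for
            // `/dir/a.rs`; dropping the definition below releases it.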
7805 assert_eq!(
7806 list_worktrees(&project, cx),
7807 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7808 );
7809
7810 drop(definition);
7811 });
7812 cx.read(|cx| {
7813 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7814 });
7815
7816 fn list_worktrees<'a>(
7817 project: &'a ModelHandle<Project>,
7818 cx: &'a AppContext,
7819 ) -> Vec<(&'a Path, bool)> {
7820 project
7821 .read(cx)
7822 .worktrees(cx)
7823 .map(|worktree| {
7824 let worktree = worktree.read(cx);
7825 (
7826 worktree.as_local().unwrap().abs_path().as_ref(),
7827 worktree.is_visible(),
7828 )
7829 })
7830 .collect::<Vec<_>>()
7831 }
7832 }
7833
7834 #[gpui::test]
7835 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7836 let mut language = Language::new(
7837 LanguageConfig {
7838 name: "TypeScript".into(),
7839 path_suffixes: vec!["ts".to_string()],
7840 ..Default::default()
7841 },
7842 Some(tree_sitter_typescript::language_typescript()),
7843 );
7844 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7845
7846 let fs = FakeFs::new(cx.background());
7847 fs.insert_tree(
7848 "/dir",
7849 json!({
7850 "a.ts": "",
7851 }),
7852 )
7853 .await;
7854
7855 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7856 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7857 let buffer = project
7858 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7859 .await
7860 .unwrap();
7861
7862 let fake_server = fake_language_servers.next().await.unwrap();
7863
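        // The completion item below provides no explicit text edit, so the replaced
        // range must be inferred from the word adjacent to the requested position ("fqn").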
7864 let text = "let a = b.fqn";
7865 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7866 let completions = project.update(cx, |project, cx| {
7867 project.completions(&buffer, text.len(), cx)
7868 });
7869
7870 fake_server
7871 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7872 Ok(Some(lsp::CompletionResponse::Array(vec![
7873 lsp::CompletionItem {
7874 label: "fullyQualifiedName?".into(),
7875 insert_text: Some("fullyQualifiedName".into()),
7876 ..Default::default()
7877 },
7878 ])))
7879 })
7880 .next()
7881 .await;
7882 let completions = completions.await.unwrap();
7883 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7884 assert_eq!(completions.len(), 1);
7885 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7886 assert_eq!(
7887 completions[0].old_range.to_offset(&snapshot),
7888 text.len() - 3..text.len()
7889 );
7890
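        // Inside a string literal, the inferred range should cover only the word
        // characters next to the cursor ("cmp"), not the entire string.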
7891 let text = "let a = \"atoms/cmp\"";
7892 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7893 let completions = project.update(cx, |project, cx| {
7894 project.completions(&buffer, text.len() - 1, cx)
7895 });
7896
7897 fake_server
7898 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7899 Ok(Some(lsp::CompletionResponse::Array(vec![
7900 lsp::CompletionItem {
7901 label: "component".into(),
7902 ..Default::default()
7903 },
7904 ])))
7905 })
7906 .next()
7907 .await;
7908 let completions = completions.await.unwrap();
7909 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7910 assert_eq!(completions.len(), 1);
7911 assert_eq!(completions[0].new_text, "component");
7912 assert_eq!(
7913 completions[0].old_range.to_offset(&snapshot),
7914 text.len() - 4..text.len() - 1
7915 );
7916 }
7917
7918 #[gpui::test(iterations = 10)]
7919 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7920 let mut language = Language::new(
7921 LanguageConfig {
7922 name: "TypeScript".into(),
7923 path_suffixes: vec!["ts".to_string()],
7924 ..Default::default()
7925 },
7926 None,
7927 );
7928 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7929
7930 let fs = FakeFs::new(cx.background());
7931 fs.insert_tree(
7932 "/dir",
7933 json!({
7934 "a.ts": "a",
7935 }),
7936 )
7937 .await;
7938
7939 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7940 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7941 let buffer = project
7942 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7943 .await
7944 .unwrap();
7945
7946 let fake_server = fake_language_servers.next().await.unwrap();
7947
        // The language server returns code actions that contain commands but no edits.
7949 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7950 fake_server
7951 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7952 Ok(Some(vec![
7953 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7954 title: "The code action".into(),
7955 command: Some(lsp::Command {
7956 title: "The command".into(),
7957 command: "_the/command".into(),
7958 arguments: Some(vec![json!("the-argument")]),
7959 }),
7960 ..Default::default()
7961 }),
7962 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7963 title: "two".into(),
7964 ..Default::default()
7965 }),
7966 ]))
7967 })
7968 .next()
7969 .await;
7970
7971 let action = actions.await.unwrap()[0].clone();
7972 let apply = project.update(cx, |project, cx| {
7973 project.apply_code_action(buffer.clone(), action, true, cx)
7974 });
7975
        // Resolving the code action does not populate its edits. In the absence
        // of edits, we must execute the action's command instead.
7978 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7979 |action, _| async move { Ok(action) },
7980 );
7981
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7984 fake_server
7985 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7986 let fake = fake_server.clone();
7987 move |params, _| {
7988 assert_eq!(params.command, "_the/command");
7989 let fake = fake.clone();
7990 async move {
7991 fake.server
7992 .request::<lsp::request::ApplyWorkspaceEdit>(
7993 lsp::ApplyWorkspaceEditParams {
7994 label: None,
7995 edit: lsp::WorkspaceEdit {
7996 changes: Some(
7997 [(
7998 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7999 vec![lsp::TextEdit {
8000 range: lsp::Range::new(
8001 lsp::Position::new(0, 0),
8002 lsp::Position::new(0, 0),
8003 ),
8004 new_text: "X".into(),
8005 }],
8006 )]
8007 .into_iter()
8008 .collect(),
8009 ),
8010 ..Default::default()
8011 },
8012 },
8013 )
8014 .await
8015 .unwrap();
8016 Ok(Some(json!(null)))
8017 }
8018 }
8019 })
8020 .next()
8021 .await;
8022
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
8025 let transaction = apply.await.unwrap();
8026 assert!(transaction.0.contains_key(&buffer));
8027 buffer.update(cx, |buffer, cx| {
8028 assert_eq!(buffer.text(), "Xa");
8029 buffer.undo(cx);
8030 assert_eq!(buffer.text(), "a");
8031 });
8032 }
8033
8034 #[gpui::test]
8035 async fn test_save_file(cx: &mut gpui::TestAppContext) {
8036 let fs = FakeFs::new(cx.background());
8037 fs.insert_tree(
8038 "/dir",
8039 json!({
8040 "file1": "the old contents",
8041 }),
8042 )
8043 .await;
8044
8045 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8046 let buffer = project
8047 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8048 .await
8049 .unwrap();
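        // Make a large edit and save; the file on disk should then match the
        // buffer's contents exactly.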
8050 buffer
8051 .update(cx, |buffer, cx| {
8052 assert_eq!(buffer.text(), "the old contents");
8053 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8054 buffer.save(cx)
8055 })
8056 .await
8057 .unwrap();
8058
8059 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8060 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8061 }
8062
8063 #[gpui::test]
8064 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
8065 let fs = FakeFs::new(cx.background());
8066 fs.insert_tree(
8067 "/dir",
8068 json!({
8069 "file1": "the old contents",
8070 }),
8071 )
8072 .await;
8073
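        // Open the file itself as the project root, producing a single-file worktree.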
8074 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
8075 let buffer = project
8076 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8077 .await
8078 .unwrap();
8079 buffer
8080 .update(cx, |buffer, cx| {
8081 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
8082 buffer.save(cx)
8083 })
8084 .await
8085 .unwrap();
8086
8087 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
8088 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
8089 }
8090
8091 #[gpui::test]
8092 async fn test_save_as(cx: &mut gpui::TestAppContext) {
8093 let fs = FakeFs::new(cx.background());
8094 fs.insert_tree("/dir", json!({})).await;
8095
8096 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8097 let buffer = project.update(cx, |project, cx| {
8098 project.create_buffer("", None, cx).unwrap()
8099 });
8100 buffer.update(cx, |buffer, cx| {
8101 buffer.edit([(0..0, "abc")], cx);
8102 assert!(buffer.is_dirty());
8103 assert!(!buffer.has_conflict());
8104 });
8105 project
8106 .update(cx, |project, cx| {
8107 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
8108 })
8109 .await
8110 .unwrap();
8111 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
8112 buffer.read_with(cx, |buffer, cx| {
8113 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
8114 assert!(!buffer.is_dirty());
8115 assert!(!buffer.has_conflict());
8116 });
8117
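        // Reopening the path the buffer was saved to should return the same buffer,
        // not create a new one.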
8118 let opened_buffer = project
8119 .update(cx, |project, cx| {
8120 project.open_local_buffer("/dir/file1", cx)
8121 })
8122 .await
8123 .unwrap();
8124 assert_eq!(opened_buffer, buffer);
8125 }
8126
8127 #[gpui::test(retries = 5)]
8128 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
8129 let dir = temp_tree(json!({
8130 "a": {
8131 "file1": "",
8132 "file2": "",
8133 "file3": "",
8134 },
8135 "b": {
8136 "c": {
8137 "file4": "",
8138 "file5": "",
8139 }
8140 }
8141 }));
8142
8143 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
8144 let rpc = project.read_with(cx, |p, _| p.client.clone());
8145
8146 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
8147 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
8148 async move { buffer.await.unwrap() }
8149 };
8150 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
8151 project.read_with(cx, |project, cx| {
8152 let tree = project.worktrees(cx).next().unwrap();
8153 tree.read(cx)
8154 .entry_for_path(path)
8155 .expect(&format!("no entry for path {}", path))
8156 .id
8157 })
8158 };
8159
8160 let buffer2 = buffer_for_path("a/file2", cx).await;
8161 let buffer3 = buffer_for_path("a/file3", cx).await;
8162 let buffer4 = buffer_for_path("b/c/file4", cx).await;
8163 let buffer5 = buffer_for_path("b/c/file5", cx).await;
8164
8165 let file2_id = id_for_path("a/file2", &cx);
8166 let file3_id = id_for_path("a/file3", &cx);
8167 let file4_id = id_for_path("b/c/file4", &cx);
8168
8169 // Create a remote copy of this worktree.
8170 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8171 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
8172 let (remote, load_task) = cx.update(|cx| {
8173 Worktree::remote(
8174 1,
8175 1,
8176 initial_snapshot.to_proto(&Default::default(), true),
8177 rpc.clone(),
8178 cx,
8179 )
8180 });
        // Wait for the remote worktree to finish loading.
8182 load_task.await;
8183
8184 cx.read(|cx| {
8185 assert!(!buffer2.read(cx).is_dirty());
8186 assert!(!buffer3.read(cx).is_dirty());
8187 assert!(!buffer4.read(cx).is_dirty());
8188 assert!(!buffer5.read(cx).is_dirty());
8189 });
8190
8191 // Rename and delete files and directories.
8192 tree.flush_fs_events(&cx).await;
8193 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
8194 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
8195 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
8196 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
8197 tree.flush_fs_events(&cx).await;
8198
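        // After the renames and deletions above, these are the paths that should
        // remain in the worktree (file5 is gone and `b/c` has become `d`).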
8199 let expected_paths = vec![
8200 "a",
8201 "a/file1",
8202 "a/file2.new",
8203 "b",
8204 "d",
8205 "d/file3",
8206 "d/file4",
8207 ];
8208
8209 cx.read(|app| {
8210 assert_eq!(
8211 tree.read(app)
8212 .paths()
8213 .map(|p| p.to_str().unwrap())
8214 .collect::<Vec<_>>(),
8215 expected_paths
8216 );
8217
8218 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
8219 assert_eq!(id_for_path("d/file3", &cx), file3_id);
8220 assert_eq!(id_for_path("d/file4", &cx), file4_id);
8221
8222 assert_eq!(
8223 buffer2.read(app).file().unwrap().path().as_ref(),
8224 Path::new("a/file2.new")
8225 );
8226 assert_eq!(
8227 buffer3.read(app).file().unwrap().path().as_ref(),
8228 Path::new("d/file3")
8229 );
8230 assert_eq!(
8231 buffer4.read(app).file().unwrap().path().as_ref(),
8232 Path::new("d/file4")
8233 );
8234 assert_eq!(
8235 buffer5.read(app).file().unwrap().path().as_ref(),
8236 Path::new("b/c/file5")
8237 );
8238
8239 assert!(!buffer2.read(app).file().unwrap().is_deleted());
8240 assert!(!buffer3.read(app).file().unwrap().is_deleted());
8241 assert!(!buffer4.read(app).file().unwrap().is_deleted());
8242 assert!(buffer5.read(app).file().unwrap().is_deleted());
8243 });
8244
8245 // Update the remote worktree. Check that it becomes consistent with the
8246 // local worktree.
8247 remote.update(cx, |remote, cx| {
8248 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
8249 &initial_snapshot,
8250 1,
8251 1,
8252 true,
8253 );
8254 remote
8255 .as_remote_mut()
8256 .unwrap()
8257 .snapshot
8258 .apply_remote_update(update_message)
8259 .unwrap();
8260
8261 assert_eq!(
8262 remote
8263 .paths()
8264 .map(|p| p.to_str().unwrap())
8265 .collect::<Vec<_>>(),
8266 expected_paths
8267 );
8268 });
8269 }
8270
8271 #[gpui::test]
8272 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
8273 let fs = FakeFs::new(cx.background());
8274 fs.insert_tree(
8275 "/dir",
8276 json!({
8277 "a.txt": "a-contents",
8278 "b.txt": "b-contents",
8279 }),
8280 )
8281 .await;
8282
8283 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8284
8285 // Spawn multiple tasks to open paths, repeating some paths.
8286 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
8287 (
8288 p.open_local_buffer("/dir/a.txt", cx),
8289 p.open_local_buffer("/dir/b.txt", cx),
8290 p.open_local_buffer("/dir/a.txt", cx),
8291 )
8292 });
8293
8294 let buffer_a_1 = buffer_a_1.await.unwrap();
8295 let buffer_a_2 = buffer_a_2.await.unwrap();
8296 let buffer_b = buffer_b.await.unwrap();
8297 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
8298 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
8299
8300 // There is only one buffer per path.
8301 let buffer_a_id = buffer_a_1.id();
8302 assert_eq!(buffer_a_2.id(), buffer_a_id);
8303
8304 // Open the same path again while it is still open.
8305 drop(buffer_a_1);
8306 let buffer_a_3 = project
8307 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
8308 .await
8309 .unwrap();
8310
8311 // There's still only one buffer per path.
8312 assert_eq!(buffer_a_3.id(), buffer_a_id);
8313 }
8314
8315 #[gpui::test]
8316 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
8317 let fs = FakeFs::new(cx.background());
8318 fs.insert_tree(
8319 "/dir",
8320 json!({
8321 "file1": "abc",
8322 "file2": "def",
8323 "file3": "ghi",
8324 }),
8325 )
8326 .await;
8327
8328 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8329
8330 let buffer1 = project
8331 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
8332 .await
8333 .unwrap();
8334 let events = Rc::new(RefCell::new(Vec::new()));
8335
        // Initially, the buffer isn't dirty.
8337 buffer1.update(cx, |buffer, cx| {
8338 cx.subscribe(&buffer1, {
8339 let events = events.clone();
8340 move |_, _, event, _| match event {
8341 BufferEvent::Operation(_) => {}
8342 _ => events.borrow_mut().push(event.clone()),
8343 }
8344 })
8345 .detach();
8346
8347 assert!(!buffer.is_dirty());
8348 assert!(events.borrow().is_empty());
8349
8350 buffer.edit([(1..2, "")], cx);
8351 });
8352
        // After the first edit, the buffer is dirty and emits both an Edited and a DirtyChanged event.
8354 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
8356 assert!(buffer.is_dirty());
8357 assert_eq!(
8358 *events.borrow(),
8359 &[language::Event::Edited, language::Event::DirtyChanged]
8360 );
8361 events.borrow_mut().clear();
8362 buffer.did_save(
8363 buffer.version(),
8364 buffer.as_rope().fingerprint(),
8365 buffer.file().unwrap().mtime(),
8366 None,
8367 cx,
8368 );
8369 });
8370
        // After saving, the buffer is no longer dirty and emits a Saved event.
8372 buffer1.update(cx, |buffer, cx| {
8373 assert!(!buffer.is_dirty());
8374 assert_eq!(*events.borrow(), &[language::Event::Saved]);
8375 events.borrow_mut().clear();
8376
8377 buffer.edit([(1..1, "B")], cx);
8378 buffer.edit([(2..2, "D")], cx);
8379 });
8380
        // After editing again, the buffer is dirty again and emits Edited and DirtyChanged events.
8382 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
8384 assert!(buffer.is_dirty());
8385 assert_eq!(
8386 *events.borrow(),
8387 &[
8388 language::Event::Edited,
8389 language::Event::DirtyChanged,
8390 language::Event::Edited,
8391 ],
8392 );
8393 events.borrow_mut().clear();
8394
8395 // After restoring the buffer to its previously-saved state,
8396 // the buffer is not considered dirty anymore.
8397 buffer.edit([(1..3, "")], cx);
            assert_eq!(buffer.text(), "ac");
8399 assert!(!buffer.is_dirty());
8400 });
8401
8402 assert_eq!(
8403 *events.borrow(),
8404 &[language::Event::Edited, language::Event::DirtyChanged]
8405 );
8406
8407 // When a file is deleted, the buffer is considered dirty.
8408 let events = Rc::new(RefCell::new(Vec::new()));
8409 let buffer2 = project
8410 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
8411 .await
8412 .unwrap();
8413 buffer2.update(cx, |_, cx| {
8414 cx.subscribe(&buffer2, {
8415 let events = events.clone();
8416 move |_, _, event, _| events.borrow_mut().push(event.clone())
8417 })
8418 .detach();
8419 });
8420
8421 fs.remove_file("/dir/file2".as_ref(), Default::default())
8422 .await
8423 .unwrap();
8424 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
8425 assert_eq!(
8426 *events.borrow(),
8427 &[
8428 language::Event::DirtyChanged,
8429 language::Event::FileHandleChanged
8430 ]
8431 );
8432
        // When a file is deleted while its buffer is already dirty, no DirtyChanged event is emitted.
8434 let events = Rc::new(RefCell::new(Vec::new()));
8435 let buffer3 = project
8436 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
8437 .await
8438 .unwrap();
8439 buffer3.update(cx, |_, cx| {
8440 cx.subscribe(&buffer3, {
8441 let events = events.clone();
8442 move |_, _, event, _| events.borrow_mut().push(event.clone())
8443 })
8444 .detach();
8445 });
8446
8447 buffer3.update(cx, |buffer, cx| {
8448 buffer.edit([(0..0, "x")], cx);
8449 });
8450 events.borrow_mut().clear();
8451 fs.remove_file("/dir/file3".as_ref(), Default::default())
8452 .await
8453 .unwrap();
8454 buffer3
8455 .condition(&cx, |_, _| !events.borrow().is_empty())
8456 .await;
8457 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
8458 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
8459 }
8460
8461 #[gpui::test]
8462 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
8463 let initial_contents = "aaa\nbbbbb\nc\n";
8464 let fs = FakeFs::new(cx.background());
8465 fs.insert_tree(
8466 "/dir",
8467 json!({
8468 "the-file": initial_contents,
8469 }),
8470 )
8471 .await;
8472 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8473 let buffer = project
8474 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
8475 .await
8476 .unwrap();
8477
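        // Create an anchor on each of the first three rows so we can verify that
        // anchor positions are preserved across the reload from disk.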
8478 let anchors = (0..3)
8479 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
8480 .collect::<Vec<_>>();
8481
8482 // Change the file on disk, adding two new lines of text, and removing
8483 // one line.
8484 buffer.read_with(cx, |buffer, _| {
8485 assert!(!buffer.is_dirty());
8486 assert!(!buffer.has_conflict());
8487 });
8488 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
8489 fs.save("/dir/the-file".as_ref(), &new_contents.into())
8490 .await
8491 .unwrap();
8492
8493 // Because the buffer was not modified, it is reloaded from disk. Its
8494 // contents are edited according to the diff between the old and new
8495 // file contents.
8496 buffer
8497 .condition(&cx, |buffer, _| buffer.text() == new_contents)
8498 .await;
8499
8500 buffer.update(cx, |buffer, _| {
8501 assert_eq!(buffer.text(), new_contents);
8502 assert!(!buffer.is_dirty());
8503 assert!(!buffer.has_conflict());
8504
8505 let anchor_positions = anchors
8506 .iter()
8507 .map(|anchor| anchor.to_point(&*buffer))
8508 .collect::<Vec<_>>();
8509 assert_eq!(
8510 anchor_positions,
8511 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
8512 );
8513 });
8514
        // Modify the buffer so that it becomes dirty.
8516 buffer.update(cx, |buffer, cx| {
8517 buffer.edit([(0..0, " ")], cx);
8518 assert!(buffer.is_dirty());
8519 assert!(!buffer.has_conflict());
8520 });
8521
8522 // Change the file on disk again, adding blank lines to the beginning.
8523 fs.save(
8524 "/dir/the-file".as_ref(),
8525 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
8526 )
8527 .await
8528 .unwrap();
8529
8530 // Because the buffer is modified, it doesn't reload from disk, but is
8531 // marked as having a conflict.
8532 buffer
8533 .condition(&cx, |buffer, _| buffer.has_conflict())
8534 .await;
8535 }
8536
8537 #[gpui::test]
8538 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
8539 cx.foreground().forbid_parking();
8540
8541 let fs = FakeFs::new(cx.background());
8542 fs.insert_tree(
8543 "/the-dir",
8544 json!({
8545 "a.rs": "
8546 fn foo(mut v: Vec<usize>) {
8547 for x in &v {
8548 v.push(1);
8549 }
8550 }
8551 "
8552 .unindent(),
8553 }),
8554 )
8555 .await;
8556
8557 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
8558 let buffer = project
8559 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
8560 .await
8561 .unwrap();
8562
8563 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
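        // Publish diagnostics in which the hint entries reference their primary
        // diagnostic (and vice versa) via `related_information`, so that they get
        // grouped together.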
8564 let message = lsp::PublishDiagnosticsParams {
8565 uri: buffer_uri.clone(),
8566 diagnostics: vec![
8567 lsp::Diagnostic {
8568 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8569 severity: Some(DiagnosticSeverity::WARNING),
8570 message: "error 1".to_string(),
8571 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8572 location: lsp::Location {
8573 uri: buffer_uri.clone(),
8574 range: lsp::Range::new(
8575 lsp::Position::new(1, 8),
8576 lsp::Position::new(1, 9),
8577 ),
8578 },
8579 message: "error 1 hint 1".to_string(),
8580 }]),
8581 ..Default::default()
8582 },
8583 lsp::Diagnostic {
8584 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
8585 severity: Some(DiagnosticSeverity::HINT),
8586 message: "error 1 hint 1".to_string(),
8587 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8588 location: lsp::Location {
8589 uri: buffer_uri.clone(),
8590 range: lsp::Range::new(
8591 lsp::Position::new(1, 8),
8592 lsp::Position::new(1, 9),
8593 ),
8594 },
8595 message: "original diagnostic".to_string(),
8596 }]),
8597 ..Default::default()
8598 },
8599 lsp::Diagnostic {
8600 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
8601 severity: Some(DiagnosticSeverity::ERROR),
8602 message: "error 2".to_string(),
8603 related_information: Some(vec![
8604 lsp::DiagnosticRelatedInformation {
8605 location: lsp::Location {
8606 uri: buffer_uri.clone(),
8607 range: lsp::Range::new(
8608 lsp::Position::new(1, 13),
8609 lsp::Position::new(1, 15),
8610 ),
8611 },
8612 message: "error 2 hint 1".to_string(),
8613 },
8614 lsp::DiagnosticRelatedInformation {
8615 location: lsp::Location {
8616 uri: buffer_uri.clone(),
8617 range: lsp::Range::new(
8618 lsp::Position::new(1, 13),
8619 lsp::Position::new(1, 15),
8620 ),
8621 },
8622 message: "error 2 hint 2".to_string(),
8623 },
8624 ]),
8625 ..Default::default()
8626 },
8627 lsp::Diagnostic {
8628 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8629 severity: Some(DiagnosticSeverity::HINT),
8630 message: "error 2 hint 1".to_string(),
8631 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8632 location: lsp::Location {
8633 uri: buffer_uri.clone(),
8634 range: lsp::Range::new(
8635 lsp::Position::new(2, 8),
8636 lsp::Position::new(2, 17),
8637 ),
8638 },
8639 message: "original diagnostic".to_string(),
8640 }]),
8641 ..Default::default()
8642 },
8643 lsp::Diagnostic {
8644 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
8645 severity: Some(DiagnosticSeverity::HINT),
8646 message: "error 2 hint 2".to_string(),
8647 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
8648 location: lsp::Location {
8649 uri: buffer_uri.clone(),
8650 range: lsp::Range::new(
8651 lsp::Position::new(2, 8),
8652 lsp::Position::new(2, 17),
8653 ),
8654 },
8655 message: "original diagnostic".to_string(),
8656 }]),
8657 ..Default::default()
8658 },
8659 ],
8660 version: None,
8661 };
8662
8663 project
8664 .update(cx, |p, cx| p.update_diagnostics(0, message, &[], cx))
8665 .unwrap();
8666 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8667
8668 assert_eq!(
8669 buffer
8670 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
8671 .collect::<Vec<_>>(),
8672 &[
8673 DiagnosticEntry {
8674 range: Point::new(1, 8)..Point::new(1, 9),
8675 diagnostic: Diagnostic {
8676 severity: DiagnosticSeverity::WARNING,
8677 message: "error 1".to_string(),
8678 group_id: 0,
8679 is_primary: true,
8680 ..Default::default()
8681 }
8682 },
8683 DiagnosticEntry {
8684 range: Point::new(1, 8)..Point::new(1, 9),
8685 diagnostic: Diagnostic {
8686 severity: DiagnosticSeverity::HINT,
8687 message: "error 1 hint 1".to_string(),
8688 group_id: 0,
8689 is_primary: false,
8690 ..Default::default()
8691 }
8692 },
8693 DiagnosticEntry {
8694 range: Point::new(1, 13)..Point::new(1, 15),
8695 diagnostic: Diagnostic {
8696 severity: DiagnosticSeverity::HINT,
8697 message: "error 2 hint 1".to_string(),
8698 group_id: 1,
8699 is_primary: false,
8700 ..Default::default()
8701 }
8702 },
8703 DiagnosticEntry {
8704 range: Point::new(1, 13)..Point::new(1, 15),
8705 diagnostic: Diagnostic {
8706 severity: DiagnosticSeverity::HINT,
8707 message: "error 2 hint 2".to_string(),
8708 group_id: 1,
8709 is_primary: false,
8710 ..Default::default()
8711 }
8712 },
8713 DiagnosticEntry {
8714 range: Point::new(2, 8)..Point::new(2, 17),
8715 diagnostic: Diagnostic {
8716 severity: DiagnosticSeverity::ERROR,
8717 message: "error 2".to_string(),
8718 group_id: 1,
8719 is_primary: true,
8720 ..Default::default()
8721 }
8722 }
8723 ]
8724 );
8725
8726 assert_eq!(
8727 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
8728 &[
8729 DiagnosticEntry {
8730 range: Point::new(1, 8)..Point::new(1, 9),
8731 diagnostic: Diagnostic {
8732 severity: DiagnosticSeverity::WARNING,
8733 message: "error 1".to_string(),
8734 group_id: 0,
8735 is_primary: true,
8736 ..Default::default()
8737 }
8738 },
8739 DiagnosticEntry {
8740 range: Point::new(1, 8)..Point::new(1, 9),
8741 diagnostic: Diagnostic {
8742 severity: DiagnosticSeverity::HINT,
8743 message: "error 1 hint 1".to_string(),
8744 group_id: 0,
8745 is_primary: false,
8746 ..Default::default()
8747 }
8748 },
8749 ]
8750 );
8751 assert_eq!(
8752 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
8753 &[
8754 DiagnosticEntry {
8755 range: Point::new(1, 13)..Point::new(1, 15),
8756 diagnostic: Diagnostic {
8757 severity: DiagnosticSeverity::HINT,
8758 message: "error 2 hint 1".to_string(),
8759 group_id: 1,
8760 is_primary: false,
8761 ..Default::default()
8762 }
8763 },
8764 DiagnosticEntry {
8765 range: Point::new(1, 13)..Point::new(1, 15),
8766 diagnostic: Diagnostic {
8767 severity: DiagnosticSeverity::HINT,
8768 message: "error 2 hint 2".to_string(),
8769 group_id: 1,
8770 is_primary: false,
8771 ..Default::default()
8772 }
8773 },
8774 DiagnosticEntry {
8775 range: Point::new(2, 8)..Point::new(2, 17),
8776 diagnostic: Diagnostic {
8777 severity: DiagnosticSeverity::ERROR,
8778 message: "error 2".to_string(),
8779 group_id: 1,
8780 is_primary: true,
8781 ..Default::default()
8782 }
8783 }
8784 ]
8785 );
8786 }
8787
8788 #[gpui::test]
8789 async fn test_rename(cx: &mut gpui::TestAppContext) {
8790 cx.foreground().forbid_parking();
8791
8792 let mut language = Language::new(
8793 LanguageConfig {
8794 name: "Rust".into(),
8795 path_suffixes: vec!["rs".to_string()],
8796 ..Default::default()
8797 },
8798 Some(tree_sitter_rust::language()),
8799 );
8800 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8801 capabilities: lsp::ServerCapabilities {
8802 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8803 prepare_provider: Some(true),
8804 work_done_progress_options: Default::default(),
8805 })),
8806 ..Default::default()
8807 },
8808 ..Default::default()
8809 });
8810
8811 let fs = FakeFs::new(cx.background());
8812 fs.insert_tree(
8813 "/dir",
8814 json!({
8815 "one.rs": "const ONE: usize = 1;",
8816 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8817 }),
8818 )
8819 .await;
8820
8821 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8822 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8823 let buffer = project
8824 .update(cx, |project, cx| {
8825 project.open_local_buffer("/dir/one.rs", cx)
8826 })
8827 .await
8828 .unwrap();
8829
8830 let fake_server = fake_servers.next().await.unwrap();
8831
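        // Prepare a rename at the position of `ONE`. The fake server responds with
        // the range of the symbol that can be renamed.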
8832 let response = project.update(cx, |project, cx| {
8833 project.prepare_rename(buffer.clone(), 7, cx)
8834 });
8835 fake_server
8836 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8837 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8838 assert_eq!(params.position, lsp::Position::new(0, 7));
8839 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8840 lsp::Position::new(0, 6),
8841 lsp::Position::new(0, 9),
8842 ))))
8843 })
8844 .next()
8845 .await
8846 .unwrap();
8847 let range = response.await.unwrap().unwrap();
8848 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8849 assert_eq!(range, 6..9);
8850
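        // Perform the rename. The server returns edits for both files, which should
        // come back as a project transaction covering both buffers.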
8851 let response = project.update(cx, |project, cx| {
8852 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8853 });
8854 fake_server
8855 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8856 assert_eq!(
8857 params.text_document_position.text_document.uri.as_str(),
8858 "file:///dir/one.rs"
8859 );
8860 assert_eq!(
8861 params.text_document_position.position,
8862 lsp::Position::new(0, 7)
8863 );
8864 assert_eq!(params.new_name, "THREE");
8865 Ok(Some(lsp::WorkspaceEdit {
8866 changes: Some(
8867 [
8868 (
8869 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8870 vec![lsp::TextEdit::new(
8871 lsp::Range::new(
8872 lsp::Position::new(0, 6),
8873 lsp::Position::new(0, 9),
8874 ),
8875 "THREE".to_string(),
8876 )],
8877 ),
8878 (
8879 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8880 vec![
8881 lsp::TextEdit::new(
8882 lsp::Range::new(
8883 lsp::Position::new(0, 24),
8884 lsp::Position::new(0, 27),
8885 ),
8886 "THREE".to_string(),
8887 ),
8888 lsp::TextEdit::new(
8889 lsp::Range::new(
8890 lsp::Position::new(0, 35),
8891 lsp::Position::new(0, 38),
8892 ),
8893 "THREE".to_string(),
8894 ),
8895 ],
8896 ),
8897 ]
8898 .into_iter()
8899 .collect(),
8900 ),
8901 ..Default::default()
8902 }))
8903 })
8904 .next()
8905 .await
8906 .unwrap();
8907 let mut transaction = response.await.unwrap().0;
8908 assert_eq!(transaction.len(), 2);
8909 assert_eq!(
8910 transaction
8911 .remove_entry(&buffer)
8912 .unwrap()
8913 .0
8914 .read_with(cx, |buffer, _| buffer.text()),
8915 "const THREE: usize = 1;"
8916 );
8917 assert_eq!(
8918 transaction
8919 .into_keys()
8920 .next()
8921 .unwrap()
8922 .read_with(cx, |buffer, _| buffer.text()),
8923 "const TWO: usize = one::THREE + one::THREE;"
8924 );
8925 }
8926
8927 #[gpui::test]
8928 async fn test_search(cx: &mut gpui::TestAppContext) {
8929 let fs = FakeFs::new(cx.background());
8930 fs.insert_tree(
8931 "/dir",
8932 json!({
8933 "one.rs": "const ONE: usize = 1;",
8934 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8935 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8936 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8937 }),
8938 )
8939 .await;
8940 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8941 assert_eq!(
8942 search(&project, SearchQuery::text("TWO", false, true), cx)
8943 .await
8944 .unwrap(),
8945 HashMap::from_iter([
8946 ("two.rs".to_string(), vec![6..9]),
8947 ("three.rs".to_string(), vec![37..40])
8948 ])
8949 );
8950
8951 let buffer_4 = project
8952 .update(cx, |project, cx| {
8953 project.open_local_buffer("/dir/four.rs", cx)
8954 })
8955 .await
8956 .unwrap();
8957 buffer_4.update(cx, |buffer, cx| {
8958 let text = "two::TWO";
8959 buffer.edit([(20..28, text), (31..43, text)], cx);
8960 });
8961
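        // The search reflects unsaved edits: buffer four now matches even though
        // its file on disk has not changed.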
8962 assert_eq!(
8963 search(&project, SearchQuery::text("TWO", false, true), cx)
8964 .await
8965 .unwrap(),
8966 HashMap::from_iter([
8967 ("two.rs".to_string(), vec![6..9]),
8968 ("three.rs".to_string(), vec![37..40]),
8969 ("four.rs".to_string(), vec![25..28, 36..39])
8970 ])
8971 );
8972
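    // Helper that runs a project-wide search and flattens the results into a map
    // from file path to the matching offset ranges within that file.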
8973 async fn search(
8974 project: &ModelHandle<Project>,
8975 query: SearchQuery,
8976 cx: &mut gpui::TestAppContext,
8977 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8978 let results = project
8979 .update(cx, |project, cx| project.search(query, cx))
8980 .await?;
8981
8982 Ok(results
8983 .into_iter()
8984 .map(|(buffer, ranges)| {
8985 buffer.read_with(cx, |buffer, _| {
8986 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8987 let ranges = ranges
8988 .into_iter()
8989 .map(|range| range.to_offset(buffer))
8990 .collect::<Vec<_>>();
8991 (path, ranges)
8992 })
8993 })
8994 .collect())
8995 }
8996 }
8997}