1mod db;
2pub mod fs;
3mod ignore;
4mod lsp_command;
5pub mod search;
6pub mod worktree;
7
8use anyhow::{anyhow, Context, Result};
9use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
10use clock::ReplicaId;
11use collections::{hash_map, BTreeMap, HashMap, HashSet};
12use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
13use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
14use gpui::{
15 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
16 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
17};
18use language::{
19 point_to_lsp,
20 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
21 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
22 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
23 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
24 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
25};
26use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
27use lsp_command::*;
28use parking_lot::Mutex;
29use postage::stream::Stream;
30use postage::watch;
31use rand::prelude::*;
32use search::SearchQuery;
33use serde::Serialize;
34use settings::Settings;
35use sha2::{Digest, Sha256};
36use similar::{ChangeTag, TextDiff};
37use std::{
38 cell::RefCell,
39 cmp::{self, Ordering},
40 convert::TryInto,
41 ffi::OsString,
42 hash::Hash,
43 mem,
44 ops::Range,
45 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
46 path::{Component, Path, PathBuf},
47 rc::Rc,
48 sync::{
49 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
50 Arc,
51 },
52 time::Instant,
53};
54use thiserror::Error;
55use util::{post_inc, ResultExt, TryFutureExt as _};
56
57pub use db::Db;
58pub use fs::*;
59pub use worktree::*;
60
61pub trait Item: Entity {
62 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
63}
64
65pub struct ProjectStore {
66 db: Arc<Db>,
67 projects: Vec<WeakModelHandle<Project>>,
68}
69
70pub struct Project {
71 worktrees: Vec<WorktreeHandle>,
72 active_entry: Option<ProjectEntryId>,
73 languages: Arc<LanguageRegistry>,
74 language_servers:
75 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
76 started_language_servers:
77 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
78 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
79 language_server_settings: Arc<Mutex<serde_json::Value>>,
80 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
81 next_language_server_id: usize,
82 client: Arc<client::Client>,
83 next_entry_id: Arc<AtomicUsize>,
84 next_diagnostic_group_id: usize,
85 user_store: ModelHandle<UserStore>,
86 project_store: ModelHandle<ProjectStore>,
87 fs: Arc<dyn Fs>,
88 client_state: ProjectClientState,
89 collaborators: HashMap<PeerId, Collaborator>,
90 subscriptions: Vec<client::Subscription>,
91 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
92 shared_buffers: HashMap<PeerId, HashSet<u64>>,
93 loading_buffers: HashMap<
94 ProjectPath,
95 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
96 >,
97 loading_local_worktrees:
98 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
99 opened_buffers: HashMap<u64, OpenBuffer>,
100 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
101 nonce: u128,
102}
103
104#[derive(Error, Debug)]
105pub enum JoinProjectError {
106 #[error("host declined join request")]
107 HostDeclined,
108 #[error("host closed the project")]
109 HostClosedProject,
110 #[error("host went offline")]
111 HostWentOffline,
112 #[error("{0}")]
113 Other(#[from] anyhow::Error),
114}
115
116enum OpenBuffer {
117 Strong(ModelHandle<Buffer>),
118 Weak(WeakModelHandle<Buffer>),
119 Loading(Vec<Operation>),
120}
121
122enum WorktreeHandle {
123 Strong(ModelHandle<Worktree>),
124 Weak(WeakModelHandle<Worktree>),
125}
126
127enum ProjectClientState {
128 Local {
129 is_shared: bool,
130 remote_id_tx: watch::Sender<Option<u64>>,
131 remote_id_rx: watch::Receiver<Option<u64>>,
132 public_tx: watch::Sender<bool>,
133 public_rx: watch::Receiver<bool>,
134 _maintain_remote_id_task: Task<Option<()>>,
135 },
136 Remote {
137 sharing_has_stopped: bool,
138 remote_id: u64,
139 replica_id: ReplicaId,
140 _detect_unshare_task: Task<Option<()>>,
141 },
142}
143
144#[derive(Clone, Debug)]
145pub struct Collaborator {
146 pub user: Arc<User>,
147 pub peer_id: PeerId,
148 pub replica_id: ReplicaId,
149}
150
151#[derive(Clone, Debug, PartialEq, Eq)]
152pub enum Event {
153 ActiveEntryChanged(Option<ProjectEntryId>),
154 WorktreeAdded,
155 WorktreeRemoved(WorktreeId),
156 DiskBasedDiagnosticsStarted,
157 DiskBasedDiagnosticsUpdated,
158 DiskBasedDiagnosticsFinished,
159 DiagnosticsUpdated(ProjectPath),
160 RemoteIdChanged(Option<u64>),
161 CollaboratorLeft(PeerId),
162 ContactRequestedJoin(Arc<User>),
163 ContactCancelledJoinRequest(Arc<User>),
164}
165
166#[derive(Serialize)]
167pub struct LanguageServerStatus {
168 pub name: String,
169 pub pending_work: BTreeMap<String, LanguageServerProgress>,
170 pub pending_diagnostic_updates: isize,
171}
172
173#[derive(Clone, Debug, Serialize)]
174pub struct LanguageServerProgress {
175 pub message: Option<String>,
176 pub percentage: Option<usize>,
177 #[serde(skip_serializing)]
178 pub last_update_at: Instant,
179}
180
181#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
182pub struct ProjectPath {
183 pub worktree_id: WorktreeId,
184 pub path: Arc<Path>,
185}
186
187#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
188pub struct DiagnosticSummary {
189 pub error_count: usize,
190 pub warning_count: usize,
191}
192
193#[derive(Debug)]
194pub struct Location {
195 pub buffer: ModelHandle<Buffer>,
196 pub range: Range<language::Anchor>,
197}
198
199#[derive(Debug)]
200pub struct DocumentHighlight {
201 pub range: Range<language::Anchor>,
202 pub kind: DocumentHighlightKind,
203}
204
205#[derive(Clone, Debug)]
206pub struct Symbol {
207 pub source_worktree_id: WorktreeId,
208 pub worktree_id: WorktreeId,
209 pub language_server_name: LanguageServerName,
210 pub path: PathBuf,
211 pub label: CodeLabel,
212 pub name: String,
213 pub kind: lsp::SymbolKind,
214 pub range: Range<PointUtf16>,
215 pub signature: [u8; 32],
216}
217
218#[derive(Default)]
219pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
220
221impl DiagnosticSummary {
222 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
223 let mut this = Self {
224 error_count: 0,
225 warning_count: 0,
226 };
227
228 for entry in diagnostics {
229 if entry.diagnostic.is_primary {
230 match entry.diagnostic.severity {
231 DiagnosticSeverity::ERROR => this.error_count += 1,
232 DiagnosticSeverity::WARNING => this.warning_count += 1,
233 _ => {}
234 }
235 }
236 }
237
238 this
239 }
240
241 pub fn is_empty(&self) -> bool {
242 self.error_count == 0 && self.warning_count == 0
243 }
244
245 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
246 proto::DiagnosticSummary {
247 path: path.to_string_lossy().to_string(),
248 error_count: self.error_count as u32,
249 warning_count: self.warning_count as u32,
250 }
251 }
252}
253
254#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
255pub struct ProjectEntryId(usize);
256
257impl ProjectEntryId {
258 pub const MAX: Self = Self(usize::MAX);
259
260 pub fn new(counter: &AtomicUsize) -> Self {
261 Self(counter.fetch_add(1, SeqCst))
262 }
263
264 pub fn from_proto(id: u64) -> Self {
265 Self(id as usize)
266 }
267
268 pub fn to_proto(&self) -> u64 {
269 self.0 as u64
270 }
271
272 pub fn to_usize(&self) -> usize {
273 self.0
274 }
275}
276
277impl Project {
278 pub fn init(client: &Arc<Client>) {
279 client.add_model_message_handler(Self::handle_request_join_project);
280 client.add_model_message_handler(Self::handle_add_collaborator);
281 client.add_model_message_handler(Self::handle_buffer_reloaded);
282 client.add_model_message_handler(Self::handle_buffer_saved);
283 client.add_model_message_handler(Self::handle_start_language_server);
284 client.add_model_message_handler(Self::handle_update_language_server);
285 client.add_model_message_handler(Self::handle_remove_collaborator);
286 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
287 client.add_model_message_handler(Self::handle_update_project);
288 client.add_model_message_handler(Self::handle_unregister_project);
289 client.add_model_message_handler(Self::handle_project_unshared);
290 client.add_model_message_handler(Self::handle_update_buffer_file);
291 client.add_model_message_handler(Self::handle_update_buffer);
292 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
293 client.add_model_message_handler(Self::handle_update_worktree);
294 client.add_model_request_handler(Self::handle_create_project_entry);
295 client.add_model_request_handler(Self::handle_rename_project_entry);
296 client.add_model_request_handler(Self::handle_copy_project_entry);
297 client.add_model_request_handler(Self::handle_delete_project_entry);
298 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
299 client.add_model_request_handler(Self::handle_apply_code_action);
300 client.add_model_request_handler(Self::handle_reload_buffers);
301 client.add_model_request_handler(Self::handle_format_buffers);
302 client.add_model_request_handler(Self::handle_get_code_actions);
303 client.add_model_request_handler(Self::handle_get_completions);
304 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
305 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
306 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
307 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
308 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
309 client.add_model_request_handler(Self::handle_search_project);
310 client.add_model_request_handler(Self::handle_get_project_symbols);
311 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
312 client.add_model_request_handler(Self::handle_open_buffer_by_id);
313 client.add_model_request_handler(Self::handle_open_buffer_by_path);
314 client.add_model_request_handler(Self::handle_save_buffer);
315 }
316
317 pub fn local(
318 public: bool,
319 client: Arc<Client>,
320 user_store: ModelHandle<UserStore>,
321 project_store: ModelHandle<ProjectStore>,
322 languages: Arc<LanguageRegistry>,
323 fs: Arc<dyn Fs>,
324 cx: &mut MutableAppContext,
325 ) -> ModelHandle<Self> {
326 cx.add_model(|cx: &mut ModelContext<Self>| {
327 let (public_tx, public_rx) = watch::channel_with(public);
328 let (remote_id_tx, remote_id_rx) = watch::channel();
329 let _maintain_remote_id_task = cx.spawn_weak({
330 let status_rx = client.clone().status();
331 let public_rx = public_rx.clone();
332 move |this, mut cx| async move {
333 let mut stream = Stream::map(status_rx.clone(), drop)
334 .merge(Stream::map(public_rx.clone(), drop));
335 while stream.recv().await.is_some() {
336 let this = this.upgrade(&cx)?;
337 if status_rx.borrow().is_connected() && *public_rx.borrow() {
338 this.update(&mut cx, |this, cx| this.register(cx))
339 .await
340 .log_err()?;
341 } else {
342 this.update(&mut cx, |this, cx| this.unregister(cx))
343 .await
344 .log_err();
345 }
346 }
347 None
348 }
349 });
350
351 let handle = cx.weak_handle();
352 project_store.update(cx, |store, cx| store.add_project(handle, cx));
353
354 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
355 Self {
356 worktrees: Default::default(),
357 collaborators: Default::default(),
358 opened_buffers: Default::default(),
359 shared_buffers: Default::default(),
360 loading_buffers: Default::default(),
361 loading_local_worktrees: Default::default(),
362 buffer_snapshots: Default::default(),
363 client_state: ProjectClientState::Local {
364 is_shared: false,
365 remote_id_tx,
366 remote_id_rx,
367 public_tx,
368 public_rx,
369 _maintain_remote_id_task,
370 },
371 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
372 subscriptions: Vec::new(),
373 active_entry: None,
374 languages,
375 client,
376 user_store,
377 project_store,
378 fs,
379 next_entry_id: Default::default(),
380 next_diagnostic_group_id: Default::default(),
381 language_servers: Default::default(),
382 started_language_servers: Default::default(),
383 language_server_statuses: Default::default(),
384 last_workspace_edits_by_language_server: Default::default(),
385 language_server_settings: Default::default(),
386 next_language_server_id: 0,
387 nonce: StdRng::from_entropy().gen(),
388 }
389 })
390 }
391
392 pub async fn remote(
393 remote_id: u64,
394 client: Arc<Client>,
395 user_store: ModelHandle<UserStore>,
396 project_store: ModelHandle<ProjectStore>,
397 languages: Arc<LanguageRegistry>,
398 fs: Arc<dyn Fs>,
399 mut cx: AsyncAppContext,
400 ) -> Result<ModelHandle<Self>, JoinProjectError> {
401 client.authenticate_and_connect(true, &cx).await?;
402
403 let response = client
404 .request(proto::JoinProject {
405 project_id: remote_id,
406 })
407 .await?;
408
409 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
410 proto::join_project_response::Variant::Accept(response) => response,
411 proto::join_project_response::Variant::Decline(decline) => {
412 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
413 Some(proto::join_project_response::decline::Reason::Declined) => {
414 Err(JoinProjectError::HostDeclined)?
415 }
416 Some(proto::join_project_response::decline::Reason::Closed) => {
417 Err(JoinProjectError::HostClosedProject)?
418 }
419 Some(proto::join_project_response::decline::Reason::WentOffline) => {
420 Err(JoinProjectError::HostWentOffline)?
421 }
422 None => Err(anyhow!("missing decline reason"))?,
423 }
424 }
425 };
426
427 let replica_id = response.replica_id as ReplicaId;
428
429 let mut worktrees = Vec::new();
430 for worktree in response.worktrees {
431 let (worktree, load_task) = cx
432 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
433 worktrees.push(worktree);
434 load_task.detach();
435 }
436
437 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
438 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
439 let handle = cx.weak_handle();
440 project_store.update(cx, |store, cx| store.add_project(handle, cx));
441
442 let mut this = Self {
443 worktrees: Vec::new(),
444 loading_buffers: Default::default(),
445 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
446 shared_buffers: Default::default(),
447 loading_local_worktrees: Default::default(),
448 active_entry: None,
449 collaborators: Default::default(),
450 languages,
451 user_store: user_store.clone(),
452 project_store,
453 fs,
454 next_entry_id: Default::default(),
455 next_diagnostic_group_id: Default::default(),
456 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
457 client: client.clone(),
458 client_state: ProjectClientState::Remote {
459 sharing_has_stopped: false,
460 remote_id,
461 replica_id,
462 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
463 async move {
464 let mut status = client.status();
465 let is_connected =
466 status.next().await.map_or(false, |s| s.is_connected());
467 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
468 if !is_connected || status.next().await.is_some() {
469 if let Some(this) = this.upgrade(&cx) {
470 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
471 }
472 }
473 Ok(())
474 }
475 .log_err()
476 }),
477 },
478 language_servers: Default::default(),
479 started_language_servers: Default::default(),
480 language_server_settings: Default::default(),
481 language_server_statuses: response
482 .language_servers
483 .into_iter()
484 .map(|server| {
485 (
486 server.id as usize,
487 LanguageServerStatus {
488 name: server.name,
489 pending_work: Default::default(),
490 pending_diagnostic_updates: 0,
491 },
492 )
493 })
494 .collect(),
495 last_workspace_edits_by_language_server: Default::default(),
496 next_language_server_id: 0,
497 opened_buffers: Default::default(),
498 buffer_snapshots: Default::default(),
499 nonce: StdRng::from_entropy().gen(),
500 };
501 for worktree in worktrees {
502 this.add_worktree(&worktree, cx);
503 }
504 this
505 });
506
507 let user_ids = response
508 .collaborators
509 .iter()
510 .map(|peer| peer.user_id)
511 .collect();
512 user_store
513 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
514 .await?;
515 let mut collaborators = HashMap::default();
516 for message in response.collaborators {
517 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
518 collaborators.insert(collaborator.peer_id, collaborator);
519 }
520
521 this.update(&mut cx, |this, _| {
522 this.collaborators = collaborators;
523 });
524
525 Ok(this)
526 }
527
528 #[cfg(any(test, feature = "test-support"))]
529 pub async fn test(
530 fs: Arc<dyn Fs>,
531 root_paths: impl IntoIterator<Item = &Path>,
532 cx: &mut gpui::TestAppContext,
533 ) -> ModelHandle<Project> {
534 let languages = Arc::new(LanguageRegistry::test());
535 let http_client = client::test::FakeHttpClient::with_404_response();
536 let client = client::Client::new(http_client.clone());
537 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
538 let project_store = cx.add_model(|_| ProjectStore::new(Db::open_fake()));
539 let project = cx.update(|cx| {
540 Project::local(true, client, user_store, project_store, languages, fs, cx)
541 });
542 for path in root_paths {
543 let (tree, _) = project
544 .update(cx, |project, cx| {
545 project.find_or_create_local_worktree(path, true, cx)
546 })
547 .await
548 .unwrap();
549 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
550 .await;
551 }
552 project
553 }
554
555 pub fn restore_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
556 if self.is_remote() {
557 return Task::ready(Ok(()));
558 }
559
560 let db = self.project_store.read(cx).db.clone();
561 let project_path_keys = self.project_path_keys(cx);
562 let should_be_public = cx.background().spawn(async move {
563 let values = db.read(project_path_keys)?;
564 anyhow::Ok(values.into_iter().all(|e| e.is_some()))
565 });
566 cx.spawn(|this, mut cx| async move {
567 let public = should_be_public.await.log_err().unwrap_or(false);
568 this.update(&mut cx, |this, cx| {
569 if let ProjectClientState::Local { public_tx, .. } = &mut this.client_state {
570 let mut public_tx = public_tx.borrow_mut();
571 if *public_tx != public {
572 *public_tx = public;
573 drop(public_tx);
574 this.metadata_changed(false, cx);
575 }
576 }
577 });
578 Ok(())
579 })
580 }
581
582 fn persist_state(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
583 if self.is_remote() {
584 return Task::ready(Ok(()));
585 }
586
587 let db = self.project_store.read(cx).db.clone();
588 let project_path_keys = self.project_path_keys(cx);
589 let public = self.is_public();
590 cx.background().spawn(async move {
591 if public {
592 db.write(project_path_keys.into_iter().map(|key| (key, &[])))
593 } else {
594 db.delete(project_path_keys)
595 }
596 })
597 }
598
599 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
600 self.opened_buffers
601 .get(&remote_id)
602 .and_then(|buffer| buffer.upgrade(cx))
603 }
604
605 pub fn languages(&self) -> &Arc<LanguageRegistry> {
606 &self.languages
607 }
608
609 pub fn client(&self) -> Arc<Client> {
610 self.client.clone()
611 }
612
613 pub fn user_store(&self) -> ModelHandle<UserStore> {
614 self.user_store.clone()
615 }
616
617 pub fn project_store(&self) -> ModelHandle<ProjectStore> {
618 self.project_store.clone()
619 }
620
621 #[cfg(any(test, feature = "test-support"))]
622 pub fn check_invariants(&self, cx: &AppContext) {
623 if self.is_local() {
624 let mut worktree_root_paths = HashMap::default();
625 for worktree in self.worktrees(cx) {
626 let worktree = worktree.read(cx);
627 let abs_path = worktree.as_local().unwrap().abs_path().clone();
628 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
629 assert_eq!(
630 prev_worktree_id,
631 None,
632 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
633 abs_path,
634 worktree.id(),
635 prev_worktree_id
636 )
637 }
638 } else {
639 let replica_id = self.replica_id();
640 for buffer in self.opened_buffers.values() {
641 if let Some(buffer) = buffer.upgrade(cx) {
642 let buffer = buffer.read(cx);
643 assert_eq!(
644 buffer.deferred_ops_len(),
645 0,
646 "replica {}, buffer {} has deferred operations",
647 replica_id,
648 buffer.remote_id()
649 );
650 }
651 }
652 }
653 }
654
655 #[cfg(any(test, feature = "test-support"))]
656 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
657 let path = path.into();
658 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
659 self.opened_buffers.iter().any(|(_, buffer)| {
660 if let Some(buffer) = buffer.upgrade(cx) {
661 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
662 if file.worktree == worktree && file.path() == &path.path {
663 return true;
664 }
665 }
666 }
667 false
668 })
669 } else {
670 false
671 }
672 }
673
674 pub fn fs(&self) -> &Arc<dyn Fs> {
675 &self.fs
676 }
677
678 pub fn set_public(&mut self, is_public: bool, cx: &mut ModelContext<Self>) {
679 if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
680 let mut public_tx = public_tx.borrow_mut();
681 if *public_tx != is_public {
682 *public_tx = is_public;
683 drop(public_tx);
684 self.metadata_changed(true, cx);
685 }
686 }
687 }
688
689 pub fn is_public(&self) -> bool {
690 match &self.client_state {
691 ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
692 ProjectClientState::Remote { .. } => true,
693 }
694 }
695
696 fn unregister(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
697 self.unshared(cx);
698 if let ProjectClientState::Local { remote_id_rx, .. } = &mut self.client_state {
699 if let Some(remote_id) = *remote_id_rx.borrow() {
700 let request = self.client.request(proto::UnregisterProject {
701 project_id: remote_id,
702 });
703 return cx.spawn(|this, mut cx| async move {
704 let response = request.await;
705
706 // Unregistering the project causes the server to send out a
707 // contact update removing this project from the host's list
708 // of public projects. Wait until this contact update has been
709 // processed before clearing out this project's remote id, so
710 // that there is no moment where this project appears in the
711 // contact metadata and *also* has no remote id.
712 this.update(&mut cx, |this, cx| {
713 this.user_store()
714 .update(cx, |store, _| store.contact_updates_done())
715 })
716 .await;
717
718 this.update(&mut cx, |this, cx| {
719 if let ProjectClientState::Local { remote_id_tx, .. } =
720 &mut this.client_state
721 {
722 *remote_id_tx.borrow_mut() = None;
723 }
724 this.subscriptions.clear();
725 this.metadata_changed(false, cx);
726 });
727 response.map(drop)
728 });
729 }
730 }
731 Task::ready(Ok(()))
732 }
733
734 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
735 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
736 if remote_id_rx.borrow().is_some() {
737 return Task::ready(Ok(()));
738 }
739 }
740
741 let response = self.client.request(proto::RegisterProject {});
742 cx.spawn(|this, mut cx| async move {
743 let remote_id = response.await?.project_id;
744 this.update(&mut cx, |this, cx| {
745 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
746 *remote_id_tx.borrow_mut() = Some(remote_id);
747 }
748
749 this.metadata_changed(false, cx);
750 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
751 this.subscriptions
752 .push(this.client.add_model_for_remote_entity(remote_id, cx));
753 Ok(())
754 })
755 })
756 }
757
758 pub fn remote_id(&self) -> Option<u64> {
759 match &self.client_state {
760 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
761 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
762 }
763 }
764
765 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
766 let mut id = None;
767 let mut watch = None;
768 match &self.client_state {
769 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
770 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
771 }
772
773 async move {
774 if let Some(id) = id {
775 return id;
776 }
777 let mut watch = watch.unwrap();
778 loop {
779 let id = *watch.borrow();
780 if let Some(id) = id {
781 return id;
782 }
783 watch.next().await;
784 }
785 }
786 }
787
788 pub fn shared_remote_id(&self) -> Option<u64> {
789 match &self.client_state {
790 ProjectClientState::Local {
791 remote_id_rx,
792 is_shared,
793 ..
794 } => {
795 if *is_shared {
796 *remote_id_rx.borrow()
797 } else {
798 None
799 }
800 }
801 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
802 }
803 }
804
805 pub fn replica_id(&self) -> ReplicaId {
806 match &self.client_state {
807 ProjectClientState::Local { .. } => 0,
808 ProjectClientState::Remote { replica_id, .. } => *replica_id,
809 }
810 }
811
812 fn metadata_changed(&mut self, persist: bool, cx: &mut ModelContext<Self>) {
813 if let ProjectClientState::Local {
814 remote_id_rx,
815 public_rx,
816 ..
817 } = &self.client_state
818 {
819 if let (Some(project_id), true) = (*remote_id_rx.borrow(), *public_rx.borrow()) {
820 self.client
821 .send(proto::UpdateProject {
822 project_id,
823 worktrees: self
824 .worktrees
825 .iter()
826 .filter_map(|worktree| {
827 worktree.upgrade(&cx).map(|worktree| {
828 worktree.read(cx).as_local().unwrap().metadata_proto()
829 })
830 })
831 .collect(),
832 })
833 .log_err();
834 }
835
836 self.project_store.update(cx, |_, cx| cx.notify());
837 if persist {
838 self.persist_state(cx).detach_and_log_err(cx);
839 }
840 cx.notify();
841 }
842 }
843
844 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
845 &self.collaborators
846 }
847
848 pub fn worktrees<'a>(
849 &'a self,
850 cx: &'a AppContext,
851 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
852 self.worktrees
853 .iter()
854 .filter_map(move |worktree| worktree.upgrade(cx))
855 }
856
857 pub fn visible_worktrees<'a>(
858 &'a self,
859 cx: &'a AppContext,
860 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
861 self.worktrees.iter().filter_map(|worktree| {
862 worktree.upgrade(cx).and_then(|worktree| {
863 if worktree.read(cx).is_visible() {
864 Some(worktree)
865 } else {
866 None
867 }
868 })
869 })
870 }
871
872 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
873 self.visible_worktrees(cx)
874 .map(|tree| tree.read(cx).root_name())
875 }
876
877 fn project_path_keys(&self, cx: &AppContext) -> Vec<String> {
878 self.worktrees
879 .iter()
880 .filter_map(|worktree| {
881 worktree.upgrade(&cx).map(|worktree| {
882 format!(
883 "public-project-path:{}",
884 worktree
885 .read(cx)
886 .as_local()
887 .unwrap()
888 .abs_path()
889 .to_string_lossy()
890 )
891 })
892 })
893 .collect::<Vec<_>>()
894 }
895
896 pub fn worktree_for_id(
897 &self,
898 id: WorktreeId,
899 cx: &AppContext,
900 ) -> Option<ModelHandle<Worktree>> {
901 self.worktrees(cx)
902 .find(|worktree| worktree.read(cx).id() == id)
903 }
904
905 pub fn worktree_for_entry(
906 &self,
907 entry_id: ProjectEntryId,
908 cx: &AppContext,
909 ) -> Option<ModelHandle<Worktree>> {
910 self.worktrees(cx)
911 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
912 }
913
914 pub fn worktree_id_for_entry(
915 &self,
916 entry_id: ProjectEntryId,
917 cx: &AppContext,
918 ) -> Option<WorktreeId> {
919 self.worktree_for_entry(entry_id, cx)
920 .map(|worktree| worktree.read(cx).id())
921 }
922
923 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
924 paths.iter().all(|path| self.contains_path(&path, cx))
925 }
926
927 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
928 for worktree in self.worktrees(cx) {
929 let worktree = worktree.read(cx).as_local();
930 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
931 return true;
932 }
933 }
934 false
935 }
936
937 pub fn create_entry(
938 &mut self,
939 project_path: impl Into<ProjectPath>,
940 is_directory: bool,
941 cx: &mut ModelContext<Self>,
942 ) -> Option<Task<Result<Entry>>> {
943 let project_path = project_path.into();
944 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
945 if self.is_local() {
946 Some(worktree.update(cx, |worktree, cx| {
947 worktree
948 .as_local_mut()
949 .unwrap()
950 .create_entry(project_path.path, is_directory, cx)
951 }))
952 } else {
953 let client = self.client.clone();
954 let project_id = self.remote_id().unwrap();
955 Some(cx.spawn_weak(|_, mut cx| async move {
956 let response = client
957 .request(proto::CreateProjectEntry {
958 worktree_id: project_path.worktree_id.to_proto(),
959 project_id,
960 path: project_path.path.as_os_str().as_bytes().to_vec(),
961 is_directory,
962 })
963 .await?;
964 let entry = response
965 .entry
966 .ok_or_else(|| anyhow!("missing entry in response"))?;
967 worktree
968 .update(&mut cx, |worktree, cx| {
969 worktree.as_remote().unwrap().insert_entry(
970 entry,
971 response.worktree_scan_id as usize,
972 cx,
973 )
974 })
975 .await
976 }))
977 }
978 }
979
980 pub fn copy_entry(
981 &mut self,
982 entry_id: ProjectEntryId,
983 new_path: impl Into<Arc<Path>>,
984 cx: &mut ModelContext<Self>,
985 ) -> Option<Task<Result<Entry>>> {
986 let worktree = self.worktree_for_entry(entry_id, cx)?;
987 let new_path = new_path.into();
988 if self.is_local() {
989 worktree.update(cx, |worktree, cx| {
990 worktree
991 .as_local_mut()
992 .unwrap()
993 .copy_entry(entry_id, new_path, cx)
994 })
995 } else {
996 let client = self.client.clone();
997 let project_id = self.remote_id().unwrap();
998
999 Some(cx.spawn_weak(|_, mut cx| async move {
1000 let response = client
1001 .request(proto::CopyProjectEntry {
1002 project_id,
1003 entry_id: entry_id.to_proto(),
1004 new_path: new_path.as_os_str().as_bytes().to_vec(),
1005 })
1006 .await?;
1007 let entry = response
1008 .entry
1009 .ok_or_else(|| anyhow!("missing entry in response"))?;
1010 worktree
1011 .update(&mut cx, |worktree, cx| {
1012 worktree.as_remote().unwrap().insert_entry(
1013 entry,
1014 response.worktree_scan_id as usize,
1015 cx,
1016 )
1017 })
1018 .await
1019 }))
1020 }
1021 }
1022
1023 pub fn rename_entry(
1024 &mut self,
1025 entry_id: ProjectEntryId,
1026 new_path: impl Into<Arc<Path>>,
1027 cx: &mut ModelContext<Self>,
1028 ) -> Option<Task<Result<Entry>>> {
1029 let worktree = self.worktree_for_entry(entry_id, cx)?;
1030 let new_path = new_path.into();
1031 if self.is_local() {
1032 worktree.update(cx, |worktree, cx| {
1033 worktree
1034 .as_local_mut()
1035 .unwrap()
1036 .rename_entry(entry_id, new_path, cx)
1037 })
1038 } else {
1039 let client = self.client.clone();
1040 let project_id = self.remote_id().unwrap();
1041
1042 Some(cx.spawn_weak(|_, mut cx| async move {
1043 let response = client
1044 .request(proto::RenameProjectEntry {
1045 project_id,
1046 entry_id: entry_id.to_proto(),
1047 new_path: new_path.as_os_str().as_bytes().to_vec(),
1048 })
1049 .await?;
1050 let entry = response
1051 .entry
1052 .ok_or_else(|| anyhow!("missing entry in response"))?;
1053 worktree
1054 .update(&mut cx, |worktree, cx| {
1055 worktree.as_remote().unwrap().insert_entry(
1056 entry,
1057 response.worktree_scan_id as usize,
1058 cx,
1059 )
1060 })
1061 .await
1062 }))
1063 }
1064 }
1065
1066 pub fn delete_entry(
1067 &mut self,
1068 entry_id: ProjectEntryId,
1069 cx: &mut ModelContext<Self>,
1070 ) -> Option<Task<Result<()>>> {
1071 let worktree = self.worktree_for_entry(entry_id, cx)?;
1072 if self.is_local() {
1073 worktree.update(cx, |worktree, cx| {
1074 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
1075 })
1076 } else {
1077 let client = self.client.clone();
1078 let project_id = self.remote_id().unwrap();
1079 Some(cx.spawn_weak(|_, mut cx| async move {
1080 let response = client
1081 .request(proto::DeleteProjectEntry {
1082 project_id,
1083 entry_id: entry_id.to_proto(),
1084 })
1085 .await?;
1086 worktree
1087 .update(&mut cx, move |worktree, cx| {
1088 worktree.as_remote().unwrap().delete_entry(
1089 entry_id,
1090 response.worktree_scan_id as usize,
1091 cx,
1092 )
1093 })
1094 .await
1095 }))
1096 }
1097 }
1098
1099 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1100 let project_id;
1101 if let ProjectClientState::Local {
1102 remote_id_rx,
1103 is_shared,
1104 ..
1105 } = &mut self.client_state
1106 {
1107 if *is_shared {
1108 return Task::ready(Ok(()));
1109 }
1110 *is_shared = true;
1111 if let Some(id) = *remote_id_rx.borrow() {
1112 project_id = id;
1113 } else {
1114 return Task::ready(Err(anyhow!("project hasn't been registered")));
1115 }
1116 } else {
1117 return Task::ready(Err(anyhow!("can't share a remote project")));
1118 };
1119
1120 for open_buffer in self.opened_buffers.values_mut() {
1121 match open_buffer {
1122 OpenBuffer::Strong(_) => {}
1123 OpenBuffer::Weak(buffer) => {
1124 if let Some(buffer) = buffer.upgrade(cx) {
1125 *open_buffer = OpenBuffer::Strong(buffer);
1126 }
1127 }
1128 OpenBuffer::Loading(_) => unreachable!(),
1129 }
1130 }
1131
1132 for worktree_handle in self.worktrees.iter_mut() {
1133 match worktree_handle {
1134 WorktreeHandle::Strong(_) => {}
1135 WorktreeHandle::Weak(worktree) => {
1136 if let Some(worktree) = worktree.upgrade(cx) {
1137 *worktree_handle = WorktreeHandle::Strong(worktree);
1138 }
1139 }
1140 }
1141 }
1142
1143 let mut tasks = Vec::new();
1144 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1145 worktree.update(cx, |worktree, cx| {
1146 let worktree = worktree.as_local_mut().unwrap();
1147 tasks.push(worktree.share(project_id, cx));
1148 });
1149 }
1150
1151 cx.spawn(|this, mut cx| async move {
1152 for task in tasks {
1153 task.await?;
1154 }
1155 this.update(&mut cx, |_, cx| cx.notify());
1156 Ok(())
1157 })
1158 }
1159
1160 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1161 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1162 if !*is_shared {
1163 return;
1164 }
1165
1166 *is_shared = false;
1167 self.collaborators.clear();
1168 self.shared_buffers.clear();
1169 for worktree_handle in self.worktrees.iter_mut() {
1170 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1171 let is_visible = worktree.update(cx, |worktree, _| {
1172 worktree.as_local_mut().unwrap().unshare();
1173 worktree.is_visible()
1174 });
1175 if !is_visible {
1176 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1177 }
1178 }
1179 }
1180
1181 for open_buffer in self.opened_buffers.values_mut() {
1182 match open_buffer {
1183 OpenBuffer::Strong(buffer) => {
1184 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
1185 }
1186 _ => {}
1187 }
1188 }
1189
1190 cx.notify();
1191 } else {
1192 log::error!("attempted to unshare a remote project");
1193 }
1194 }
1195
1196 pub fn respond_to_join_request(
1197 &mut self,
1198 requester_id: u64,
1199 allow: bool,
1200 cx: &mut ModelContext<Self>,
1201 ) {
1202 if let Some(project_id) = self.remote_id() {
1203 let share = self.share(cx);
1204 let client = self.client.clone();
1205 cx.foreground()
1206 .spawn(async move {
1207 share.await?;
1208 client.send(proto::RespondToJoinProjectRequest {
1209 requester_id,
1210 project_id,
1211 allow,
1212 })
1213 })
1214 .detach_and_log_err(cx);
1215 }
1216 }
1217
1218 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1219 if let ProjectClientState::Remote {
1220 sharing_has_stopped,
1221 ..
1222 } = &mut self.client_state
1223 {
1224 *sharing_has_stopped = true;
1225 self.collaborators.clear();
1226 cx.notify();
1227 }
1228 }
1229
1230 pub fn is_read_only(&self) -> bool {
1231 match &self.client_state {
1232 ProjectClientState::Local { .. } => false,
1233 ProjectClientState::Remote {
1234 sharing_has_stopped,
1235 ..
1236 } => *sharing_has_stopped,
1237 }
1238 }
1239
1240 pub fn is_local(&self) -> bool {
1241 match &self.client_state {
1242 ProjectClientState::Local { .. } => true,
1243 ProjectClientState::Remote { .. } => false,
1244 }
1245 }
1246
1247 pub fn is_remote(&self) -> bool {
1248 !self.is_local()
1249 }
1250
1251 pub fn create_buffer(
1252 &mut self,
1253 text: &str,
1254 language: Option<Arc<Language>>,
1255 cx: &mut ModelContext<Self>,
1256 ) -> Result<ModelHandle<Buffer>> {
1257 if self.is_remote() {
1258 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1259 }
1260
1261 let buffer = cx.add_model(|cx| {
1262 Buffer::new(self.replica_id(), text, cx)
1263 .with_language(language.unwrap_or(language::PLAIN_TEXT.clone()), cx)
1264 });
1265 self.register_buffer(&buffer, cx)?;
1266 Ok(buffer)
1267 }
1268
1269 pub fn open_path(
1270 &mut self,
1271 path: impl Into<ProjectPath>,
1272 cx: &mut ModelContext<Self>,
1273 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1274 let task = self.open_buffer(path, cx);
1275 cx.spawn_weak(|_, cx| async move {
1276 let buffer = task.await?;
1277 let project_entry_id = buffer
1278 .read_with(&cx, |buffer, cx| {
1279 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1280 })
1281 .ok_or_else(|| anyhow!("no project entry"))?;
1282 Ok((project_entry_id, buffer.into()))
1283 })
1284 }
1285
1286 pub fn open_local_buffer(
1287 &mut self,
1288 abs_path: impl AsRef<Path>,
1289 cx: &mut ModelContext<Self>,
1290 ) -> Task<Result<ModelHandle<Buffer>>> {
1291 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1292 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1293 } else {
1294 Task::ready(Err(anyhow!("no such path")))
1295 }
1296 }
1297
1298 pub fn open_buffer(
1299 &mut self,
1300 path: impl Into<ProjectPath>,
1301 cx: &mut ModelContext<Self>,
1302 ) -> Task<Result<ModelHandle<Buffer>>> {
1303 let project_path = path.into();
1304 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1305 worktree
1306 } else {
1307 return Task::ready(Err(anyhow!("no such worktree")));
1308 };
1309
1310 // If there is already a buffer for the given path, then return it.
1311 let existing_buffer = self.get_open_buffer(&project_path, cx);
1312 if let Some(existing_buffer) = existing_buffer {
1313 return Task::ready(Ok(existing_buffer));
1314 }
1315
1316 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1317 // If the given path is already being loaded, then wait for that existing
1318 // task to complete and return the same buffer.
1319 hash_map::Entry::Occupied(e) => e.get().clone(),
1320
1321 // Otherwise, record the fact that this path is now being loaded.
1322 hash_map::Entry::Vacant(entry) => {
1323 let (mut tx, rx) = postage::watch::channel();
1324 entry.insert(rx.clone());
1325
1326 let load_buffer = if worktree.read(cx).is_local() {
1327 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1328 } else {
1329 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1330 };
1331
1332 cx.spawn(move |this, mut cx| async move {
1333 let load_result = load_buffer.await;
1334 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1335 // Record the fact that the buffer is no longer loading.
1336 this.loading_buffers.remove(&project_path);
1337 let buffer = load_result.map_err(Arc::new)?;
1338 Ok(buffer)
1339 }));
1340 })
1341 .detach();
1342 rx
1343 }
1344 };
1345
1346 cx.foreground().spawn(async move {
1347 loop {
1348 if let Some(result) = loading_watch.borrow().as_ref() {
1349 match result {
1350 Ok(buffer) => return Ok(buffer.clone()),
1351 Err(error) => return Err(anyhow!("{}", error)),
1352 }
1353 }
1354 loading_watch.next().await;
1355 }
1356 })
1357 }
1358
1359 fn open_local_buffer_internal(
1360 &mut self,
1361 path: &Arc<Path>,
1362 worktree: &ModelHandle<Worktree>,
1363 cx: &mut ModelContext<Self>,
1364 ) -> Task<Result<ModelHandle<Buffer>>> {
1365 let load_buffer = worktree.update(cx, |worktree, cx| {
1366 let worktree = worktree.as_local_mut().unwrap();
1367 worktree.load_buffer(path, cx)
1368 });
1369 cx.spawn(|this, mut cx| async move {
1370 let buffer = load_buffer.await?;
1371 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1372 Ok(buffer)
1373 })
1374 }
1375
1376 fn open_remote_buffer_internal(
1377 &mut self,
1378 path: &Arc<Path>,
1379 worktree: &ModelHandle<Worktree>,
1380 cx: &mut ModelContext<Self>,
1381 ) -> Task<Result<ModelHandle<Buffer>>> {
1382 let rpc = self.client.clone();
1383 let project_id = self.remote_id().unwrap();
1384 let remote_worktree_id = worktree.read(cx).id();
1385 let path = path.clone();
1386 let path_string = path.to_string_lossy().to_string();
1387 cx.spawn(|this, mut cx| async move {
1388 let response = rpc
1389 .request(proto::OpenBufferByPath {
1390 project_id,
1391 worktree_id: remote_worktree_id.to_proto(),
1392 path: path_string,
1393 })
1394 .await?;
1395 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1396 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1397 .await
1398 })
1399 }
1400
1401 fn open_local_buffer_via_lsp(
1402 &mut self,
1403 abs_path: lsp::Url,
1404 lsp_adapter: Arc<dyn LspAdapter>,
1405 lsp_server: Arc<LanguageServer>,
1406 cx: &mut ModelContext<Self>,
1407 ) -> Task<Result<ModelHandle<Buffer>>> {
1408 cx.spawn(|this, mut cx| async move {
1409 let abs_path = abs_path
1410 .to_file_path()
1411 .map_err(|_| anyhow!("can't convert URI to path"))?;
1412 let (worktree, relative_path) = if let Some(result) =
1413 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1414 {
1415 result
1416 } else {
1417 let worktree = this
1418 .update(&mut cx, |this, cx| {
1419 this.create_local_worktree(&abs_path, false, cx)
1420 })
1421 .await?;
1422 this.update(&mut cx, |this, cx| {
1423 this.language_servers.insert(
1424 (worktree.read(cx).id(), lsp_adapter.name()),
1425 (lsp_adapter, lsp_server),
1426 );
1427 });
1428 (worktree, PathBuf::new())
1429 };
1430
1431 let project_path = ProjectPath {
1432 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1433 path: relative_path.into(),
1434 };
1435 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1436 .await
1437 })
1438 }
1439
1440 pub fn open_buffer_by_id(
1441 &mut self,
1442 id: u64,
1443 cx: &mut ModelContext<Self>,
1444 ) -> Task<Result<ModelHandle<Buffer>>> {
1445 if let Some(buffer) = self.buffer_for_id(id, cx) {
1446 Task::ready(Ok(buffer))
1447 } else if self.is_local() {
1448 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1449 } else if let Some(project_id) = self.remote_id() {
1450 let request = self
1451 .client
1452 .request(proto::OpenBufferById { project_id, id });
1453 cx.spawn(|this, mut cx| async move {
1454 let buffer = request
1455 .await?
1456 .buffer
1457 .ok_or_else(|| anyhow!("invalid buffer"))?;
1458 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1459 .await
1460 })
1461 } else {
1462 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1463 }
1464 }
1465
1466 pub fn save_buffer_as(
1467 &mut self,
1468 buffer: ModelHandle<Buffer>,
1469 abs_path: PathBuf,
1470 cx: &mut ModelContext<Project>,
1471 ) -> Task<Result<()>> {
1472 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1473 let old_path =
1474 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1475 cx.spawn(|this, mut cx| async move {
1476 if let Some(old_path) = old_path {
1477 this.update(&mut cx, |this, cx| {
1478 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1479 });
1480 }
1481 let (worktree, path) = worktree_task.await?;
1482 worktree
1483 .update(&mut cx, |worktree, cx| {
1484 worktree
1485 .as_local_mut()
1486 .unwrap()
1487 .save_buffer_as(buffer.clone(), path, cx)
1488 })
1489 .await?;
1490 this.update(&mut cx, |this, cx| {
1491 this.assign_language_to_buffer(&buffer, cx);
1492 this.register_buffer_with_language_server(&buffer, cx);
1493 });
1494 Ok(())
1495 })
1496 }
1497
1498 pub fn get_open_buffer(
1499 &mut self,
1500 path: &ProjectPath,
1501 cx: &mut ModelContext<Self>,
1502 ) -> Option<ModelHandle<Buffer>> {
1503 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1504 self.opened_buffers.values().find_map(|buffer| {
1505 let buffer = buffer.upgrade(cx)?;
1506 let file = File::from_dyn(buffer.read(cx).file())?;
1507 if file.worktree == worktree && file.path() == &path.path {
1508 Some(buffer)
1509 } else {
1510 None
1511 }
1512 })
1513 }
1514
1515 fn register_buffer(
1516 &mut self,
1517 buffer: &ModelHandle<Buffer>,
1518 cx: &mut ModelContext<Self>,
1519 ) -> Result<()> {
1520 let remote_id = buffer.read(cx).remote_id();
1521 let open_buffer = if self.is_remote() || self.is_shared() {
1522 OpenBuffer::Strong(buffer.clone())
1523 } else {
1524 OpenBuffer::Weak(buffer.downgrade())
1525 };
1526
1527 match self.opened_buffers.insert(remote_id, open_buffer) {
1528 None => {}
1529 Some(OpenBuffer::Loading(operations)) => {
1530 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1531 }
1532 Some(OpenBuffer::Weak(existing_handle)) => {
1533 if existing_handle.upgrade(cx).is_some() {
1534 Err(anyhow!(
1535 "already registered buffer with remote id {}",
1536 remote_id
1537 ))?
1538 }
1539 }
1540 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1541 "already registered buffer with remote id {}",
1542 remote_id
1543 ))?,
1544 }
1545 cx.subscribe(buffer, |this, buffer, event, cx| {
1546 this.on_buffer_event(buffer, event, cx);
1547 })
1548 .detach();
1549
1550 self.assign_language_to_buffer(buffer, cx);
1551 self.register_buffer_with_language_server(buffer, cx);
1552 cx.observe_release(buffer, |this, buffer, cx| {
1553 if let Some(file) = File::from_dyn(buffer.file()) {
1554 if file.is_local() {
1555 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1556 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1557 server
1558 .notify::<lsp::notification::DidCloseTextDocument>(
1559 lsp::DidCloseTextDocumentParams {
1560 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1561 },
1562 )
1563 .log_err();
1564 }
1565 }
1566 }
1567 })
1568 .detach();
1569
1570 Ok(())
1571 }
1572
1573 fn register_buffer_with_language_server(
1574 &mut self,
1575 buffer_handle: &ModelHandle<Buffer>,
1576 cx: &mut ModelContext<Self>,
1577 ) {
1578 let buffer = buffer_handle.read(cx);
1579 let buffer_id = buffer.remote_id();
1580 if let Some(file) = File::from_dyn(buffer.file()) {
1581 if file.is_local() {
1582 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1583 let initial_snapshot = buffer.text_snapshot();
1584
1585 let mut language_server = None;
1586 let mut language_id = None;
1587 if let Some(language) = buffer.language() {
1588 let worktree_id = file.worktree_id(cx);
1589 if let Some(adapter) = language.lsp_adapter() {
1590 language_id = adapter.id_for_language(language.name().as_ref());
1591 language_server = self
1592 .language_servers
1593 .get(&(worktree_id, adapter.name()))
1594 .cloned();
1595 }
1596 }
1597
1598 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1599 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1600 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1601 .log_err();
1602 }
1603 }
1604
1605 if let Some((_, server)) = language_server {
1606 server
1607 .notify::<lsp::notification::DidOpenTextDocument>(
1608 lsp::DidOpenTextDocumentParams {
1609 text_document: lsp::TextDocumentItem::new(
1610 uri,
1611 language_id.unwrap_or_default(),
1612 0,
1613 initial_snapshot.text(),
1614 ),
1615 }
1616 .clone(),
1617 )
1618 .log_err();
1619 buffer_handle.update(cx, |buffer, cx| {
1620 buffer.set_completion_triggers(
1621 server
1622 .capabilities()
1623 .completion_provider
1624 .as_ref()
1625 .and_then(|provider| provider.trigger_characters.clone())
1626 .unwrap_or(Vec::new()),
1627 cx,
1628 )
1629 });
1630 self.buffer_snapshots
1631 .insert(buffer_id, vec![(0, initial_snapshot)]);
1632 }
1633 }
1634 }
1635 }
1636
1637 fn unregister_buffer_from_language_server(
1638 &mut self,
1639 buffer: &ModelHandle<Buffer>,
1640 old_path: PathBuf,
1641 cx: &mut ModelContext<Self>,
1642 ) {
1643 buffer.update(cx, |buffer, cx| {
1644 buffer.update_diagnostics(Default::default(), cx);
1645 self.buffer_snapshots.remove(&buffer.remote_id());
1646 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1647 language_server
1648 .notify::<lsp::notification::DidCloseTextDocument>(
1649 lsp::DidCloseTextDocumentParams {
1650 text_document: lsp::TextDocumentIdentifier::new(
1651 lsp::Url::from_file_path(old_path).unwrap(),
1652 ),
1653 },
1654 )
1655 .log_err();
1656 }
1657 });
1658 }
1659
1660 fn on_buffer_event(
1661 &mut self,
1662 buffer: ModelHandle<Buffer>,
1663 event: &BufferEvent,
1664 cx: &mut ModelContext<Self>,
1665 ) -> Option<()> {
1666 match event {
1667 BufferEvent::Operation(operation) => {
1668 if let Some(project_id) = self.shared_remote_id() {
1669 let request = self.client.request(proto::UpdateBuffer {
1670 project_id,
1671 buffer_id: buffer.read(cx).remote_id(),
1672 operations: vec![language::proto::serialize_operation(&operation)],
1673 });
1674 cx.background().spawn(request).detach_and_log_err(cx);
1675 }
1676 }
1677 BufferEvent::Edited { .. } => {
1678 let (_, language_server) = self
1679 .language_server_for_buffer(buffer.read(cx), cx)?
1680 .clone();
1681 let buffer = buffer.read(cx);
1682 let file = File::from_dyn(buffer.file())?;
1683 let abs_path = file.as_local()?.abs_path(cx);
1684 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1685 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1686 let (version, prev_snapshot) = buffer_snapshots.last()?;
1687 let next_snapshot = buffer.text_snapshot();
1688 let next_version = version + 1;
1689
1690 let content_changes = buffer
1691 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1692 .map(|edit| {
1693 let edit_start = edit.new.start.0;
1694 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1695 let new_text = next_snapshot
1696 .text_for_range(edit.new.start.1..edit.new.end.1)
1697 .collect();
1698 lsp::TextDocumentContentChangeEvent {
1699 range: Some(lsp::Range::new(
1700 point_to_lsp(edit_start),
1701 point_to_lsp(edit_end),
1702 )),
1703 range_length: None,
1704 text: new_text,
1705 }
1706 })
1707 .collect();
1708
1709 buffer_snapshots.push((next_version, next_snapshot));
1710
1711 language_server
1712 .notify::<lsp::notification::DidChangeTextDocument>(
1713 lsp::DidChangeTextDocumentParams {
1714 text_document: lsp::VersionedTextDocumentIdentifier::new(
1715 uri,
1716 next_version,
1717 ),
1718 content_changes,
1719 },
1720 )
1721 .log_err();
1722 }
1723 BufferEvent::Saved => {
1724 let file = File::from_dyn(buffer.read(cx).file())?;
1725 let worktree_id = file.worktree_id(cx);
1726 let abs_path = file.as_local()?.abs_path(cx);
1727 let text_document = lsp::TextDocumentIdentifier {
1728 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1729 };
1730
1731 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1732 server
1733 .notify::<lsp::notification::DidSaveTextDocument>(
1734 lsp::DidSaveTextDocumentParams {
1735 text_document: text_document.clone(),
1736 text: None,
1737 },
1738 )
1739 .log_err();
1740 }
1741 }
1742 _ => {}
1743 }
1744
1745 None
1746 }
1747
1748 fn language_servers_for_worktree(
1749 &self,
1750 worktree_id: WorktreeId,
1751 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1752 self.language_servers.iter().filter_map(
1753 move |((language_server_worktree_id, _), server)| {
1754 if *language_server_worktree_id == worktree_id {
1755 Some(server)
1756 } else {
1757 None
1758 }
1759 },
1760 )
1761 }
1762
1763 fn assign_language_to_buffer(
1764 &mut self,
1765 buffer: &ModelHandle<Buffer>,
1766 cx: &mut ModelContext<Self>,
1767 ) -> Option<()> {
1768 // If the buffer has a language, set it and start the language server if we haven't already.
1769 let full_path = buffer.read(cx).file()?.full_path(cx);
1770 let language = self.languages.select_language(&full_path)?;
1771 buffer.update(cx, |buffer, cx| {
1772 buffer.set_language(Some(language.clone()), cx);
1773 });
1774
1775 let file = File::from_dyn(buffer.read(cx).file())?;
1776 let worktree = file.worktree.read(cx).as_local()?;
1777 let worktree_id = worktree.id();
1778 let worktree_abs_path = worktree.abs_path().clone();
1779 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1780
1781 None
1782 }
1783
1784 fn start_language_server(
1785 &mut self,
1786 worktree_id: WorktreeId,
1787 worktree_path: Arc<Path>,
1788 language: Arc<Language>,
1789 cx: &mut ModelContext<Self>,
1790 ) {
1791 let adapter = if let Some(adapter) = language.lsp_adapter() {
1792 adapter
1793 } else {
1794 return;
1795 };
1796 let key = (worktree_id, adapter.name());
1797 self.started_language_servers
1798 .entry(key.clone())
1799 .or_insert_with(|| {
1800 let server_id = post_inc(&mut self.next_language_server_id);
1801 let language_server = self.languages.start_language_server(
1802 server_id,
1803 language.clone(),
1804 worktree_path,
1805 self.client.http_client(),
1806 cx,
1807 );
1808 cx.spawn_weak(|this, mut cx| async move {
1809 let language_server = language_server?.await.log_err()?;
1810 let language_server = language_server
1811 .initialize(adapter.initialization_options())
1812 .await
1813 .log_err()?;
1814 let this = this.upgrade(&cx)?;
1815 let disk_based_diagnostics_progress_token =
1816 adapter.disk_based_diagnostics_progress_token();
1817
1818 language_server
1819 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1820 let this = this.downgrade();
1821 let adapter = adapter.clone();
1822 move |params, mut cx| {
1823 if let Some(this) = this.upgrade(&cx) {
1824 this.update(&mut cx, |this, cx| {
1825 this.on_lsp_diagnostics_published(
1826 server_id,
1827 params,
1828 &adapter,
1829 disk_based_diagnostics_progress_token,
1830 cx,
1831 );
1832 });
1833 }
1834 }
1835 })
1836 .detach();
1837
1838 language_server
1839 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1840 let settings = this
1841 .read_with(&cx, |this, _| this.language_server_settings.clone());
1842 move |params, _| {
1843 let settings = settings.lock().clone();
1844 async move {
1845 Ok(params
1846 .items
1847 .into_iter()
1848 .map(|item| {
1849 if let Some(section) = &item.section {
1850 settings
1851 .get(section)
1852 .cloned()
1853 .unwrap_or(serde_json::Value::Null)
1854 } else {
1855 settings.clone()
1856 }
1857 })
1858 .collect())
1859 }
1860 }
1861 })
1862 .detach();
1863
1864 language_server
1865 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1866 let this = this.downgrade();
1867 let adapter = adapter.clone();
1868 let language_server = language_server.clone();
1869 move |params, cx| {
1870 Self::on_lsp_workspace_edit(
1871 this,
1872 params,
1873 server_id,
1874 adapter.clone(),
1875 language_server.clone(),
1876 cx,
1877 )
1878 }
1879 })
1880 .detach();
1881
1882 language_server
1883 .on_notification::<lsp::notification::Progress, _>({
1884 let this = this.downgrade();
1885 move |params, mut cx| {
1886 if let Some(this) = this.upgrade(&cx) {
1887 this.update(&mut cx, |this, cx| {
1888 this.on_lsp_progress(
1889 params,
1890 server_id,
1891 disk_based_diagnostics_progress_token,
1892 cx,
1893 );
1894 });
1895 }
1896 }
1897 })
1898 .detach();
1899
1900 this.update(&mut cx, |this, cx| {
1901 this.language_servers
1902 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1903 this.language_server_statuses.insert(
1904 server_id,
1905 LanguageServerStatus {
1906 name: language_server.name().to_string(),
1907 pending_work: Default::default(),
1908 pending_diagnostic_updates: 0,
1909 },
1910 );
1911 language_server
1912 .notify::<lsp::notification::DidChangeConfiguration>(
1913 lsp::DidChangeConfigurationParams {
1914 settings: this.language_server_settings.lock().clone(),
1915 },
1916 )
1917 .ok();
1918
1919 if let Some(project_id) = this.shared_remote_id() {
1920 this.client
1921 .send(proto::StartLanguageServer {
1922 project_id,
1923 server: Some(proto::LanguageServer {
1924 id: server_id as u64,
1925 name: language_server.name().to_string(),
1926 }),
1927 })
1928 .log_err();
1929 }
1930
1931 // Tell the language server about every open buffer in the worktree that matches the language.
1932 for buffer in this.opened_buffers.values() {
1933 if let Some(buffer_handle) = buffer.upgrade(cx) {
1934 let buffer = buffer_handle.read(cx);
1935 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1936 file
1937 } else {
1938 continue;
1939 };
1940 let language = if let Some(language) = buffer.language() {
1941 language
1942 } else {
1943 continue;
1944 };
1945 if file.worktree.read(cx).id() != key.0
1946 || language.lsp_adapter().map(|a| a.name())
1947 != Some(key.1.clone())
1948 {
1949 continue;
1950 }
1951
1952 let file = file.as_local()?;
1953 let versions = this
1954 .buffer_snapshots
1955 .entry(buffer.remote_id())
1956 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1957 let (version, initial_snapshot) = versions.last().unwrap();
1958 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1959 let language_id = adapter.id_for_language(language.name().as_ref());
1960 language_server
1961 .notify::<lsp::notification::DidOpenTextDocument>(
1962 lsp::DidOpenTextDocumentParams {
1963 text_document: lsp::TextDocumentItem::new(
1964 uri,
1965 language_id.unwrap_or_default(),
1966 *version,
1967 initial_snapshot.text(),
1968 ),
1969 },
1970 )
1971 .log_err()?;
1972 buffer_handle.update(cx, |buffer, cx| {
1973 buffer.set_completion_triggers(
1974 language_server
1975 .capabilities()
1976 .completion_provider
1977 .as_ref()
1978 .and_then(|provider| {
1979 provider.trigger_characters.clone()
1980 })
1981 .unwrap_or(Vec::new()),
1982 cx,
1983 )
1984 });
1985 }
1986 }
1987
1988 cx.notify();
1989 Some(())
1990 });
1991
1992 Some(language_server)
1993 })
1994 });
1995 }
1996
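    /// Restarts the language servers backing the given buffers, grouping the buffers by
    /// worktree and language so that each affected server is restarted only once.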
1997 pub fn restart_language_servers_for_buffers(
1998 &mut self,
1999 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
2000 cx: &mut ModelContext<Self>,
2001 ) -> Option<()> {
2002 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
2003 .into_iter()
2004 .filter_map(|buffer| {
2005 let file = File::from_dyn(buffer.read(cx).file())?;
2006 let worktree = file.worktree.read(cx).as_local()?;
2007 let worktree_id = worktree.id();
2008 let worktree_abs_path = worktree.abs_path().clone();
2009 let full_path = file.full_path(cx);
2010 Some((worktree_id, worktree_abs_path, full_path))
2011 })
2012 .collect();
2013 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
2014 let language = self.languages.select_language(&full_path)?;
2015 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
2016 }
2017
2018 None
2019 }
2020
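    /// Shuts down the language server registered for this worktree and language (if any)
    /// and starts a fresh instance once the shutdown has completed.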
2021 fn restart_language_server(
2022 &mut self,
2023 worktree_id: WorktreeId,
2024 worktree_path: Arc<Path>,
2025 language: Arc<Language>,
2026 cx: &mut ModelContext<Self>,
2027 ) {
2028 let adapter = if let Some(adapter) = language.lsp_adapter() {
2029 adapter
2030 } else {
2031 return;
2032 };
2033 let key = (worktree_id, adapter.name());
2034 let server_to_shutdown = self.language_servers.remove(&key);
2035 self.started_language_servers.remove(&key);
        if let Some((_, server)) = server_to_shutdown.as_ref() {
            self.language_server_statuses.remove(&server.server_id());
        }
2039 cx.spawn_weak(|this, mut cx| async move {
2040 if let Some(this) = this.upgrade(&cx) {
2041 if let Some((_, server_to_shutdown)) = server_to_shutdown {
2042 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
2043 shutdown_task.await;
2044 }
2045 }
2046
2047 this.update(&mut cx, |this, cx| {
2048 this.start_language_server(worktree_id, worktree_path, language, cx);
2049 });
2050 }
2051 })
2052 .detach();
2053 }
2054
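    /// Handles a `textDocument/publishDiagnostics` notification. For servers that do not
    /// report a disk-based diagnostics progress token, each publish is treated as a
    /// complete disk-based diagnostics pass, bracketed by started/finished events that
    /// are also broadcast to collaborators.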
2055 fn on_lsp_diagnostics_published(
2056 &mut self,
2057 server_id: usize,
2058 mut params: lsp::PublishDiagnosticsParams,
2059 adapter: &Arc<dyn LspAdapter>,
2060 disk_based_diagnostics_progress_token: Option<&str>,
2061 cx: &mut ModelContext<Self>,
2062 ) {
2063 adapter.process_diagnostics(&mut params);
2064 if disk_based_diagnostics_progress_token.is_none() {
2065 self.disk_based_diagnostics_started(cx);
2066 self.broadcast_language_server_update(
2067 server_id,
2068 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2069 proto::LspDiskBasedDiagnosticsUpdating {},
2070 ),
2071 );
2072 }
2073 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
2074 .log_err();
2075 if disk_based_diagnostics_progress_token.is_none() {
2076 self.disk_based_diagnostics_finished(cx);
2077 self.broadcast_language_server_update(
2078 server_id,
2079 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2080 proto::LspDiskBasedDiagnosticsUpdated {},
2081 ),
2082 );
2083 }
2084 }
2085
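    /// Handles `$/progress` notifications. Progress reported under the adapter's
    /// disk-based diagnostics token is translated into disk-based diagnostics
    /// started/finished events; all other work-done progress is tracked per server and
    /// broadcast to collaborators.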
2086 fn on_lsp_progress(
2087 &mut self,
2088 progress: lsp::ProgressParams,
2089 server_id: usize,
2090 disk_based_diagnostics_progress_token: Option<&str>,
2091 cx: &mut ModelContext<Self>,
2092 ) {
2093 let token = match progress.token {
2094 lsp::NumberOrString::String(token) => token,
2095 lsp::NumberOrString::Number(token) => {
2096 log::info!("skipping numeric progress token {}", token);
2097 return;
2098 }
2099 };
2100 let progress = match progress.value {
2101 lsp::ProgressParamsValue::WorkDone(value) => value,
2102 };
2103 let language_server_status =
2104 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2105 status
2106 } else {
2107 return;
2108 };
2109 match progress {
2110 lsp::WorkDoneProgress::Begin(_) => {
2111 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2112 language_server_status.pending_diagnostic_updates += 1;
2113 if language_server_status.pending_diagnostic_updates == 1 {
2114 self.disk_based_diagnostics_started(cx);
2115 self.broadcast_language_server_update(
2116 server_id,
2117 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2118 proto::LspDiskBasedDiagnosticsUpdating {},
2119 ),
2120 );
2121 }
2122 } else {
2123 self.on_lsp_work_start(server_id, token.clone(), cx);
2124 self.broadcast_language_server_update(
2125 server_id,
2126 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2127 token,
2128 }),
2129 );
2130 }
2131 }
2132 lsp::WorkDoneProgress::Report(report) => {
2133 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2134 self.on_lsp_work_progress(
2135 server_id,
2136 token.clone(),
2137 LanguageServerProgress {
2138 message: report.message.clone(),
2139 percentage: report.percentage.map(|p| p as usize),
2140 last_update_at: Instant::now(),
2141 },
2142 cx,
2143 );
2144 self.broadcast_language_server_update(
2145 server_id,
2146 proto::update_language_server::Variant::WorkProgress(
2147 proto::LspWorkProgress {
2148 token,
2149 message: report.message,
2150 percentage: report.percentage.map(|p| p as u32),
2151 },
2152 ),
2153 );
2154 }
2155 }
2156 lsp::WorkDoneProgress::End(_) => {
2157 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2158 language_server_status.pending_diagnostic_updates -= 1;
2159 if language_server_status.pending_diagnostic_updates == 0 {
2160 self.disk_based_diagnostics_finished(cx);
2161 self.broadcast_language_server_update(
2162 server_id,
2163 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2164 proto::LspDiskBasedDiagnosticsUpdated {},
2165 ),
2166 );
2167 }
2168 } else {
2169 self.on_lsp_work_end(server_id, token.clone(), cx);
2170 self.broadcast_language_server_update(
2171 server_id,
2172 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2173 token,
2174 }),
2175 );
2176 }
2177 }
2178 }
2179 }
2180
2181 fn on_lsp_work_start(
2182 &mut self,
2183 language_server_id: usize,
2184 token: String,
2185 cx: &mut ModelContext<Self>,
2186 ) {
2187 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2188 status.pending_work.insert(
2189 token,
2190 LanguageServerProgress {
2191 message: None,
2192 percentage: None,
2193 last_update_at: Instant::now(),
2194 },
2195 );
2196 cx.notify();
2197 }
2198 }
2199
2200 fn on_lsp_work_progress(
2201 &mut self,
2202 language_server_id: usize,
2203 token: String,
2204 progress: LanguageServerProgress,
2205 cx: &mut ModelContext<Self>,
2206 ) {
2207 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2208 status.pending_work.insert(token, progress);
2209 cx.notify();
2210 }
2211 }
2212
2213 fn on_lsp_work_end(
2214 &mut self,
2215 language_server_id: usize,
2216 token: String,
2217 cx: &mut ModelContext<Self>,
2218 ) {
2219 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2220 status.pending_work.remove(&token);
2221 cx.notify();
2222 }
2223 }
2224
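    /// Handles a `workspace/applyEdit` request from a language server by applying the
    /// edit locally and recording the resulting transaction for that server, so callers
    /// such as `apply_code_action` can retrieve what changed.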
2225 async fn on_lsp_workspace_edit(
2226 this: WeakModelHandle<Self>,
2227 params: lsp::ApplyWorkspaceEditParams,
2228 server_id: usize,
2229 adapter: Arc<dyn LspAdapter>,
2230 language_server: Arc<LanguageServer>,
2231 mut cx: AsyncAppContext,
2232 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2233 let this = this
2234 .upgrade(&cx)
            .ok_or_else(|| anyhow!("project closed"))?;
2236 let transaction = Self::deserialize_workspace_edit(
2237 this.clone(),
2238 params.edit,
2239 true,
2240 adapter.clone(),
2241 language_server.clone(),
2242 &mut cx,
2243 )
2244 .await
2245 .log_err();
2246 this.update(&mut cx, |this, _| {
2247 if let Some(transaction) = transaction {
2248 this.last_workspace_edits_by_language_server
2249 .insert(server_id, transaction);
2250 }
2251 });
2252 Ok(lsp::ApplyWorkspaceEditResponse {
2253 applied: true,
2254 failed_change: None,
2255 failure_reason: None,
2256 })
2257 }
2258
2259 fn broadcast_language_server_update(
2260 &self,
2261 language_server_id: usize,
2262 event: proto::update_language_server::Variant,
2263 ) {
2264 if let Some(project_id) = self.shared_remote_id() {
2265 self.client
2266 .send(proto::UpdateLanguageServer {
2267 project_id,
2268 language_server_id: language_server_id as u64,
2269 variant: Some(event),
2270 })
2271 .log_err();
2272 }
2273 }
2274
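    /// Replaces the shared language server settings and pushes the new values to every
    /// running language server via a `workspace/didChangeConfiguration` notification.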
2275 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2276 for (_, server) in self.language_servers.values() {
2277 server
2278 .notify::<lsp::notification::DidChangeConfiguration>(
2279 lsp::DidChangeConfigurationParams {
2280 settings: settings.clone(),
2281 },
2282 )
2283 .ok();
2284 }
2285 *self.language_server_settings.lock() = settings;
2286 }
2287
2288 pub fn language_server_statuses(
2289 &self,
2290 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2291 self.language_server_statuses.values()
2292 }
2293
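    /// Converts an LSP diagnostics publish into `DiagnosticEntry` values, grouping each
    /// primary diagnostic with its related information and merging severity and
    /// "unnecessary" flags from supporting diagnostics, then applies the result to the
    /// affected worktree path.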
2294 pub fn update_diagnostics(
2295 &mut self,
2296 params: lsp::PublishDiagnosticsParams,
2297 disk_based_sources: &[&str],
2298 cx: &mut ModelContext<Self>,
2299 ) -> Result<()> {
2300 let abs_path = params
2301 .uri
2302 .to_file_path()
2303 .map_err(|_| anyhow!("URI is not a file"))?;
2304 let mut diagnostics = Vec::default();
2305 let mut primary_diagnostic_group_ids = HashMap::default();
2306 let mut sources_by_group_id = HashMap::default();
2307 let mut supporting_diagnostics = HashMap::default();
        for diagnostic in &params.diagnostics {
2309 let source = diagnostic.source.as_ref();
2310 let code = diagnostic.code.as_ref().map(|code| match code {
2311 lsp::NumberOrString::Number(code) => code.to_string(),
2312 lsp::NumberOrString::String(code) => code.clone(),
2313 });
2314 let range = range_from_lsp(diagnostic.range);
2315 let is_supporting = diagnostic
2316 .related_information
2317 .as_ref()
2318 .map_or(false, |infos| {
2319 infos.iter().any(|info| {
2320 primary_diagnostic_group_ids.contains_key(&(
2321 source,
2322 code.clone(),
2323 range_from_lsp(info.location.range),
2324 ))
2325 })
2326 });
2327
2328 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2329 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2330 });
2331
2332 if is_supporting {
2333 supporting_diagnostics.insert(
2334 (source, code.clone(), range),
2335 (diagnostic.severity, is_unnecessary),
2336 );
2337 } else {
2338 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2339 let is_disk_based = source.map_or(false, |source| {
2340 disk_based_sources.contains(&source.as_str())
2341 });
2342
2343 sources_by_group_id.insert(group_id, source);
2344 primary_diagnostic_group_ids
2345 .insert((source, code.clone(), range.clone()), group_id);
2346
2347 diagnostics.push(DiagnosticEntry {
2348 range,
2349 diagnostic: Diagnostic {
2350 code: code.clone(),
2351 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2352 message: diagnostic.message.clone(),
2353 group_id,
2354 is_primary: true,
2355 is_valid: true,
2356 is_disk_based,
2357 is_unnecessary,
2358 },
2359 });
2360 if let Some(infos) = &diagnostic.related_information {
2361 for info in infos {
2362 if info.location.uri == params.uri && !info.message.is_empty() {
2363 let range = range_from_lsp(info.location.range);
2364 diagnostics.push(DiagnosticEntry {
2365 range,
2366 diagnostic: Diagnostic {
2367 code: code.clone(),
2368 severity: DiagnosticSeverity::INFORMATION,
2369 message: info.message.clone(),
2370 group_id,
2371 is_primary: false,
2372 is_valid: true,
2373 is_disk_based,
2374 is_unnecessary: false,
2375 },
2376 });
2377 }
2378 }
2379 }
2380 }
2381 }
2382
2383 for entry in &mut diagnostics {
2384 let diagnostic = &mut entry.diagnostic;
2385 if !diagnostic.is_primary {
2386 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2387 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2388 source,
2389 diagnostic.code.clone(),
2390 entry.range.clone(),
2391 )) {
2392 if let Some(severity) = severity {
2393 diagnostic.severity = severity;
2394 }
2395 diagnostic.is_unnecessary = is_unnecessary;
2396 }
2397 }
2398 }
2399
2400 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2401 Ok(())
2402 }
2403
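    /// Routes converted diagnostics to the open buffer for this path (if any) and to the
    /// local worktree that owns the file, emitting `Event::DiagnosticsUpdated` when the
    /// worktree's diagnostics actually change.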
2404 pub fn update_diagnostic_entries(
2405 &mut self,
2406 abs_path: PathBuf,
2407 version: Option<i32>,
2408 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2409 cx: &mut ModelContext<Project>,
2410 ) -> Result<(), anyhow::Error> {
2411 let (worktree, relative_path) = self
2412 .find_local_worktree(&abs_path, cx)
2413 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2414 if !worktree.read(cx).is_visible() {
2415 return Ok(());
2416 }
2417
2418 let project_path = ProjectPath {
2419 worktree_id: worktree.read(cx).id(),
2420 path: relative_path.into(),
2421 };
2422 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2423 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2424 }
2425
2426 let updated = worktree.update(cx, |worktree, cx| {
2427 worktree
2428 .as_local_mut()
2429 .ok_or_else(|| anyhow!("not a local worktree"))?
2430 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2431 })?;
2432 if updated {
2433 cx.emit(Event::DiagnosticsUpdated(project_path));
2434 }
2435 Ok(())
2436 }
2437
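    /// Installs diagnostics on a buffer: entries are sorted, disk-based ranges are
    /// remapped through the edits made since the last save, positions are clipped to the
    /// buffer, and empty ranges are widened to cover at least one character.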
2438 fn update_buffer_diagnostics(
2439 &mut self,
2440 buffer: &ModelHandle<Buffer>,
2441 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2442 version: Option<i32>,
2443 cx: &mut ModelContext<Self>,
2444 ) -> Result<()> {
2445 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2446 Ordering::Equal
2447 .then_with(|| b.is_primary.cmp(&a.is_primary))
2448 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2449 .then_with(|| a.severity.cmp(&b.severity))
2450 .then_with(|| a.message.cmp(&b.message))
2451 }
2452
2453 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2454
2455 diagnostics.sort_unstable_by(|a, b| {
2456 Ordering::Equal
2457 .then_with(|| a.range.start.cmp(&b.range.start))
2458 .then_with(|| b.range.end.cmp(&a.range.end))
2459 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2460 });
2461
2462 let mut sanitized_diagnostics = Vec::new();
2463 let edits_since_save = Patch::new(
2464 snapshot
2465 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2466 .collect(),
2467 );
2468 for entry in diagnostics {
2469 let start;
2470 let end;
2471 if entry.diagnostic.is_disk_based {
2472 // Some diagnostics are based on files on disk instead of buffers'
2473 // current contents. Adjust these diagnostics' ranges to reflect
2474 // any unsaved edits.
2475 start = edits_since_save.old_to_new(entry.range.start);
2476 end = edits_since_save.old_to_new(entry.range.end);
2477 } else {
2478 start = entry.range.start;
2479 end = entry.range.end;
2480 }
2481
2482 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2483 ..snapshot.clip_point_utf16(end, Bias::Right);
2484
2485 // Expand empty ranges by one character
2486 if range.start == range.end {
2487 range.end.column += 1;
2488 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2489 if range.start == range.end && range.end.column > 0 {
2490 range.start.column -= 1;
2491 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2492 }
2493 }
2494
2495 sanitized_diagnostics.push(DiagnosticEntry {
2496 range,
2497 diagnostic: entry.diagnostic,
2498 });
2499 }
2500 drop(edits_since_save);
2501
2502 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2503 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2504 Ok(())
2505 }
2506
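    /// Reloads the given buffers from disk, skipping buffers that are not dirty. Remote
    /// buffers are reloaded through the host via `proto::ReloadBuffers`; local buffers
    /// are reloaded directly, and the resulting transactions are collected into a single
    /// `ProjectTransaction`.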
2507 pub fn reload_buffers(
2508 &self,
2509 buffers: HashSet<ModelHandle<Buffer>>,
2510 push_to_history: bool,
2511 cx: &mut ModelContext<Self>,
2512 ) -> Task<Result<ProjectTransaction>> {
2513 let mut local_buffers = Vec::new();
2514 let mut remote_buffers = None;
2515 for buffer_handle in buffers {
2516 let buffer = buffer_handle.read(cx);
2517 if buffer.is_dirty() {
2518 if let Some(file) = File::from_dyn(buffer.file()) {
2519 if file.is_local() {
2520 local_buffers.push(buffer_handle);
2521 } else {
2522 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2523 }
2524 }
2525 }
2526 }
2527
2528 let remote_buffers = self.remote_id().zip(remote_buffers);
2529 let client = self.client.clone();
2530
2531 cx.spawn(|this, mut cx| async move {
2532 let mut project_transaction = ProjectTransaction::default();
2533
2534 if let Some((project_id, remote_buffers)) = remote_buffers {
2535 let response = client
2536 .request(proto::ReloadBuffers {
2537 project_id,
2538 buffer_ids: remote_buffers
2539 .iter()
2540 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2541 .collect(),
2542 })
2543 .await?
2544 .transaction
2545 .ok_or_else(|| anyhow!("missing transaction"))?;
2546 project_transaction = this
2547 .update(&mut cx, |this, cx| {
2548 this.deserialize_project_transaction(response, push_to_history, cx)
2549 })
2550 .await?;
2551 }
2552
2553 for buffer in local_buffers {
2554 let transaction = buffer
2555 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2556 .await?;
2557 buffer.update(&mut cx, |buffer, cx| {
2558 if let Some(transaction) = transaction {
2559 if !push_to_history {
2560 buffer.forget_transaction(transaction.id);
2561 }
2562 project_transaction.0.insert(cx.handle(), transaction);
2563 }
2564 });
2565 }
2566
2567 Ok(project_transaction)
2568 })
2569 }
2570
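    /// Formats the given buffers. Local buffers are formatted by their language server,
    /// preferring whole-document formatting and falling back to formatting the full
    /// buffer range; remote buffers are formatted by the host via `proto::FormatBuffers`.
    /// All resulting edits are applied and gathered into a `ProjectTransaction`.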
2571 pub fn format(
2572 &self,
2573 buffers: HashSet<ModelHandle<Buffer>>,
2574 push_to_history: bool,
2575 cx: &mut ModelContext<Project>,
2576 ) -> Task<Result<ProjectTransaction>> {
2577 let mut local_buffers = Vec::new();
2578 let mut remote_buffers = None;
2579 for buffer_handle in buffers {
2580 let buffer = buffer_handle.read(cx);
2581 if let Some(file) = File::from_dyn(buffer.file()) {
2582 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2583 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2584 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2585 }
2586 } else {
2587 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2588 }
2589 } else {
2590 return Task::ready(Ok(Default::default()));
2591 }
2592 }
2593
2594 let remote_buffers = self.remote_id().zip(remote_buffers);
2595 let client = self.client.clone();
2596
2597 cx.spawn(|this, mut cx| async move {
2598 let mut project_transaction = ProjectTransaction::default();
2599
2600 if let Some((project_id, remote_buffers)) = remote_buffers {
2601 let response = client
2602 .request(proto::FormatBuffers {
2603 project_id,
2604 buffer_ids: remote_buffers
2605 .iter()
2606 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2607 .collect(),
2608 })
2609 .await?
2610 .transaction
2611 .ok_or_else(|| anyhow!("missing transaction"))?;
2612 project_transaction = this
2613 .update(&mut cx, |this, cx| {
2614 this.deserialize_project_transaction(response, push_to_history, cx)
2615 })
2616 .await?;
2617 }
2618
2619 for (buffer, buffer_abs_path, language_server) in local_buffers {
2620 let text_document = lsp::TextDocumentIdentifier::new(
2621 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2622 );
2623 let capabilities = &language_server.capabilities();
2624 let tab_size = cx.update(|cx| {
2625 let language_name = buffer.read(cx).language().map(|language| language.name());
2626 cx.global::<Settings>().tab_size(language_name.as_deref())
2627 });
2628 let lsp_edits = if capabilities
2629 .document_formatting_provider
2630 .as_ref()
2631 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2632 {
2633 language_server
2634 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2635 text_document,
2636 options: lsp::FormattingOptions {
2637 tab_size,
2638 insert_spaces: true,
2639 insert_final_newline: Some(true),
2640 ..Default::default()
2641 },
2642 work_done_progress_params: Default::default(),
2643 })
2644 .await?
2645 } else if capabilities
2646 .document_range_formatting_provider
2647 .as_ref()
2648 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2649 {
2650 let buffer_start = lsp::Position::new(0, 0);
2651 let buffer_end =
2652 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2653 language_server
2654 .request::<lsp::request::RangeFormatting>(
2655 lsp::DocumentRangeFormattingParams {
2656 text_document,
2657 range: lsp::Range::new(buffer_start, buffer_end),
2658 options: lsp::FormattingOptions {
                                tab_size,
2660 insert_spaces: true,
2661 insert_final_newline: Some(true),
2662 ..Default::default()
2663 },
2664 work_done_progress_params: Default::default(),
2665 },
2666 )
2667 .await?
2668 } else {
2669 continue;
2670 };
2671
2672 if let Some(lsp_edits) = lsp_edits {
2673 let edits = this
2674 .update(&mut cx, |this, cx| {
2675 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2676 })
2677 .await?;
2678 buffer.update(&mut cx, |buffer, cx| {
2679 buffer.finalize_last_transaction();
2680 buffer.start_transaction();
2681 for (range, text) in edits {
2682 buffer.edit([(range, text)], cx);
2683 }
2684 if buffer.end_transaction(cx).is_some() {
2685 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2686 if !push_to_history {
2687 buffer.forget_transaction(transaction.id);
2688 }
2689 project_transaction.0.insert(cx.handle(), transaction);
2690 }
2691 });
2692 }
2693 }
2694
2695 Ok(project_transaction)
2696 })
2697 }
2698
2699 pub fn definition<T: ToPointUtf16>(
2700 &self,
2701 buffer: &ModelHandle<Buffer>,
2702 position: T,
2703 cx: &mut ModelContext<Self>,
2704 ) -> Task<Result<Vec<Location>>> {
2705 let position = position.to_point_utf16(buffer.read(cx));
2706 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2707 }
2708
2709 pub fn references<T: ToPointUtf16>(
2710 &self,
2711 buffer: &ModelHandle<Buffer>,
2712 position: T,
2713 cx: &mut ModelContext<Self>,
2714 ) -> Task<Result<Vec<Location>>> {
2715 let position = position.to_point_utf16(buffer.read(cx));
2716 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2717 }
2718
2719 pub fn document_highlights<T: ToPointUtf16>(
2720 &self,
2721 buffer: &ModelHandle<Buffer>,
2722 position: T,
2723 cx: &mut ModelContext<Self>,
2724 ) -> Task<Result<Vec<DocumentHighlight>>> {
2725 let position = position.to_point_utf16(buffer.read(cx));
2726
2727 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2728 }
2729
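    /// Searches workspace symbols across all running language servers (or via the host
    /// when this project is remote), resolving each result to a worktree-relative path
    /// and a display label.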
2730 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2731 if self.is_local() {
2732 let mut requests = Vec::new();
2733 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2734 let worktree_id = *worktree_id;
2735 if let Some(worktree) = self
2736 .worktree_for_id(worktree_id, cx)
2737 .and_then(|worktree| worktree.read(cx).as_local())
2738 {
2739 let lsp_adapter = lsp_adapter.clone();
2740 let worktree_abs_path = worktree.abs_path().clone();
2741 requests.push(
2742 language_server
2743 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2744 query: query.to_string(),
2745 ..Default::default()
2746 })
2747 .log_err()
2748 .map(move |response| {
2749 (
2750 lsp_adapter,
2751 worktree_id,
2752 worktree_abs_path,
2753 response.unwrap_or_default(),
2754 )
2755 }),
2756 );
2757 }
2758 }
2759
2760 cx.spawn_weak(|this, cx| async move {
2761 let responses = futures::future::join_all(requests).await;
2762 let this = if let Some(this) = this.upgrade(&cx) {
2763 this
2764 } else {
2765 return Ok(Default::default());
2766 };
2767 this.read_with(&cx, |this, cx| {
2768 let mut symbols = Vec::new();
2769 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2770 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2771 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2772 let mut worktree_id = source_worktree_id;
2773 let path;
2774 if let Some((worktree, rel_path)) =
2775 this.find_local_worktree(&abs_path, cx)
2776 {
2777 worktree_id = worktree.read(cx).id();
2778 path = rel_path;
2779 } else {
2780 path = relativize_path(&worktree_abs_path, &abs_path);
2781 }
2782
2783 let label = this
2784 .languages
2785 .select_language(&path)
2786 .and_then(|language| {
2787 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2788 })
2789 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2790 let signature = this.symbol_signature(worktree_id, &path);
2791
2792 Some(Symbol {
2793 source_worktree_id,
2794 worktree_id,
2795 language_server_name: adapter.name(),
2796 name: lsp_symbol.name,
2797 kind: lsp_symbol.kind,
2798 label,
2799 path,
2800 range: range_from_lsp(lsp_symbol.location.range),
2801 signature,
2802 })
2803 }));
2804 }
2805 Ok(symbols)
2806 })
2807 })
2808 } else if let Some(project_id) = self.remote_id() {
2809 let request = self.client.request(proto::GetProjectSymbols {
2810 project_id,
2811 query: query.to_string(),
2812 });
2813 cx.spawn_weak(|this, cx| async move {
2814 let response = request.await?;
2815 let mut symbols = Vec::new();
2816 if let Some(this) = this.upgrade(&cx) {
2817 this.read_with(&cx, |this, _| {
2818 symbols.extend(
2819 response
2820 .symbols
2821 .into_iter()
2822 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2823 );
2824 })
2825 }
2826 Ok(symbols)
2827 })
2828 } else {
2829 Task::ready(Ok(Default::default()))
2830 }
2831 }
2832
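    /// Opens the buffer containing the given workspace symbol, either through the
    /// language server that produced it or, for remote projects, via
    /// `proto::OpenBufferForSymbol`.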
2833 pub fn open_buffer_for_symbol(
2834 &mut self,
2835 symbol: &Symbol,
2836 cx: &mut ModelContext<Self>,
2837 ) -> Task<Result<ModelHandle<Buffer>>> {
2838 if self.is_local() {
2839 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2840 symbol.source_worktree_id,
2841 symbol.language_server_name.clone(),
2842 )) {
2843 server.clone()
2844 } else {
2845 return Task::ready(Err(anyhow!(
2846 "language server for worktree and language not found"
2847 )));
2848 };
2849
2850 let worktree_abs_path = if let Some(worktree_abs_path) = self
2851 .worktree_for_id(symbol.worktree_id, cx)
2852 .and_then(|worktree| worktree.read(cx).as_local())
2853 .map(|local_worktree| local_worktree.abs_path())
2854 {
2855 worktree_abs_path
2856 } else {
2857 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2858 };
2859 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2860 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2861 uri
2862 } else {
2863 return Task::ready(Err(anyhow!("invalid symbol path")));
2864 };
2865
2866 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2867 } else if let Some(project_id) = self.remote_id() {
2868 let request = self.client.request(proto::OpenBufferForSymbol {
2869 project_id,
2870 symbol: Some(serialize_symbol(symbol)),
2871 });
2872 cx.spawn(|this, mut cx| async move {
2873 let response = request.await?;
2874 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2875 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2876 .await
2877 })
2878 } else {
2879 Task::ready(Err(anyhow!("project does not have a remote id")))
2880 }
2881 }
2882
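    /// Requests completions at the given position. For local buffers the language server
    /// is queried directly and each completion's replacement range is validated (or
    /// inferred from the word under the cursor when the server provides none); for
    /// remote buffers the request is forwarded to the host.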
2883 pub fn completions<T: ToPointUtf16>(
2884 &self,
2885 source_buffer_handle: &ModelHandle<Buffer>,
2886 position: T,
2887 cx: &mut ModelContext<Self>,
2888 ) -> Task<Result<Vec<Completion>>> {
2889 let source_buffer_handle = source_buffer_handle.clone();
2890 let source_buffer = source_buffer_handle.read(cx);
2891 let buffer_id = source_buffer.remote_id();
2892 let language = source_buffer.language().cloned();
2893 let worktree;
2894 let buffer_abs_path;
2895 if let Some(file) = File::from_dyn(source_buffer.file()) {
2896 worktree = file.worktree.clone();
2897 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2898 } else {
2899 return Task::ready(Ok(Default::default()));
2900 };
2901
2902 let position = position.to_point_utf16(source_buffer);
2903 let anchor = source_buffer.anchor_after(position);
2904
2905 if worktree.read(cx).as_local().is_some() {
2906 let buffer_abs_path = buffer_abs_path.unwrap();
2907 let (_, lang_server) =
2908 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2909 server.clone()
2910 } else {
2911 return Task::ready(Ok(Default::default()));
2912 };
2913
2914 cx.spawn(|_, cx| async move {
2915 let completions = lang_server
2916 .request::<lsp::request::Completion>(lsp::CompletionParams {
2917 text_document_position: lsp::TextDocumentPositionParams::new(
2918 lsp::TextDocumentIdentifier::new(
2919 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2920 ),
2921 point_to_lsp(position),
2922 ),
2923 context: Default::default(),
2924 work_done_progress_params: Default::default(),
2925 partial_result_params: Default::default(),
2926 })
2927 .await
2928 .context("lsp completion request failed")?;
2929
2930 let completions = if let Some(completions) = completions {
2931 match completions {
2932 lsp::CompletionResponse::Array(completions) => completions,
2933 lsp::CompletionResponse::List(list) => list.items,
2934 }
2935 } else {
2936 Default::default()
2937 };
2938
2939 source_buffer_handle.read_with(&cx, |this, _| {
2940 let snapshot = this.snapshot();
2941 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2942 let mut range_for_token = None;
2943 Ok(completions
2944 .into_iter()
2945 .filter_map(|lsp_completion| {
2946 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2947 // If the language server provides a range to overwrite, then
2948 // check that the range is valid.
2949 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2950 let range = range_from_lsp(edit.range);
2951 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2952 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2953 if start != range.start || end != range.end {
2954 log::info!("completion out of expected range");
2955 return None;
2956 }
2957 (
2958 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2959 edit.new_text.clone(),
2960 )
2961 }
2962 // If the language server does not provide a range, then infer
2963 // the range based on the syntax tree.
2964 None => {
2965 if position != clipped_position {
2966 log::info!("completion out of expected range");
2967 return None;
2968 }
2969 let Range { start, end } = range_for_token
2970 .get_or_insert_with(|| {
2971 let offset = position.to_offset(&snapshot);
2972 snapshot
2973 .range_for_word_token_at(offset)
2974 .unwrap_or_else(|| offset..offset)
2975 })
2976 .clone();
2977 let text = lsp_completion
2978 .insert_text
2979 .as_ref()
2980 .unwrap_or(&lsp_completion.label)
2981 .clone();
2982 (
2983 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2984 text.clone(),
2985 )
2986 }
2987 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2988 log::info!("unsupported insert/replace completion");
2989 return None;
2990 }
2991 };
2992
2993 Some(Completion {
2994 old_range,
2995 new_text,
2996 label: language
2997 .as_ref()
2998 .and_then(|l| l.label_for_completion(&lsp_completion))
2999 .unwrap_or_else(|| {
3000 CodeLabel::plain(
3001 lsp_completion.label.clone(),
3002 lsp_completion.filter_text.as_deref(),
3003 )
3004 }),
3005 lsp_completion,
3006 })
3007 })
3008 .collect())
3009 })
3010 })
3011 } else if let Some(project_id) = self.remote_id() {
3012 let rpc = self.client.clone();
3013 let message = proto::GetCompletions {
3014 project_id,
3015 buffer_id,
3016 position: Some(language::proto::serialize_anchor(&anchor)),
3017 version: serialize_version(&source_buffer.version()),
3018 };
3019 cx.spawn_weak(|_, mut cx| async move {
3020 let response = rpc.request(message).await?;
3021
3022 source_buffer_handle
3023 .update(&mut cx, |buffer, _| {
3024 buffer.wait_for_version(deserialize_version(response.version))
3025 })
3026 .await;
3027
3028 response
3029 .completions
3030 .into_iter()
3031 .map(|completion| {
3032 language::proto::deserialize_completion(completion, language.as_ref())
3033 })
3034 .collect()
3035 })
3036 } else {
3037 Task::ready(Ok(Default::default()))
3038 }
3039 }
3040
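    /// Resolves a completion item and applies any `additionalTextEdits` it carries,
    /// returning the resulting transaction (if any). On remote projects the edits are
    /// applied by the host and replayed locally.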
3041 pub fn apply_additional_edits_for_completion(
3042 &self,
3043 buffer_handle: ModelHandle<Buffer>,
3044 completion: Completion,
3045 push_to_history: bool,
3046 cx: &mut ModelContext<Self>,
3047 ) -> Task<Result<Option<Transaction>>> {
3048 let buffer = buffer_handle.read(cx);
3049 let buffer_id = buffer.remote_id();
3050
3051 if self.is_local() {
3052 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3053 {
3054 server.clone()
3055 } else {
3056 return Task::ready(Ok(Default::default()));
3057 };
3058
3059 cx.spawn(|this, mut cx| async move {
3060 let resolved_completion = lang_server
3061 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
3062 .await?;
3063 if let Some(edits) = resolved_completion.additional_text_edits {
3064 let edits = this
3065 .update(&mut cx, |this, cx| {
3066 this.edits_from_lsp(&buffer_handle, edits, None, cx)
3067 })
3068 .await?;
3069 buffer_handle.update(&mut cx, |buffer, cx| {
3070 buffer.finalize_last_transaction();
3071 buffer.start_transaction();
3072 for (range, text) in edits {
3073 buffer.edit([(range, text)], cx);
3074 }
3075 let transaction = if buffer.end_transaction(cx).is_some() {
3076 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3077 if !push_to_history {
3078 buffer.forget_transaction(transaction.id);
3079 }
3080 Some(transaction)
3081 } else {
3082 None
3083 };
3084 Ok(transaction)
3085 })
3086 } else {
3087 Ok(None)
3088 }
3089 })
3090 } else if let Some(project_id) = self.remote_id() {
3091 let client = self.client.clone();
3092 cx.spawn(|_, mut cx| async move {
3093 let response = client
3094 .request(proto::ApplyCompletionAdditionalEdits {
3095 project_id,
3096 buffer_id,
3097 completion: Some(language::proto::serialize_completion(&completion)),
3098 })
3099 .await?;
3100
3101 if let Some(transaction) = response.transaction {
3102 let transaction = language::proto::deserialize_transaction(transaction)?;
3103 buffer_handle
3104 .update(&mut cx, |buffer, _| {
3105 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3106 })
3107 .await;
3108 if push_to_history {
3109 buffer_handle.update(&mut cx, |buffer, _| {
3110 buffer.push_transaction(transaction.clone(), Instant::now());
3111 });
3112 }
3113 Ok(Some(transaction))
3114 } else {
3115 Ok(None)
3116 }
3117 })
3118 } else {
3119 Task::ready(Err(anyhow!("project does not have a remote id")))
3120 }
3121 }
3122
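    /// Requests code actions for the given range, passing along the diagnostics that
    /// overlap it. Remote projects forward the request to the host; bare LSP commands
    /// (entries without an attached code action) are filtered out of the response.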
3123 pub fn code_actions<T: Clone + ToOffset>(
3124 &self,
3125 buffer_handle: &ModelHandle<Buffer>,
3126 range: Range<T>,
3127 cx: &mut ModelContext<Self>,
3128 ) -> Task<Result<Vec<CodeAction>>> {
3129 let buffer_handle = buffer_handle.clone();
3130 let buffer = buffer_handle.read(cx);
3131 let snapshot = buffer.snapshot();
3132 let relevant_diagnostics = snapshot
3133 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3134 .map(|entry| entry.to_lsp_diagnostic_stub())
3135 .collect();
3136 let buffer_id = buffer.remote_id();
3137 let worktree;
3138 let buffer_abs_path;
3139 if let Some(file) = File::from_dyn(buffer.file()) {
3140 worktree = file.worktree.clone();
3141 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3142 } else {
3143 return Task::ready(Ok(Default::default()));
3144 };
3145 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3146
3147 if worktree.read(cx).as_local().is_some() {
3148 let buffer_abs_path = buffer_abs_path.unwrap();
3149 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3150 {
3151 server.clone()
3152 } else {
3153 return Task::ready(Ok(Default::default()));
3154 };
3155
3156 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3157 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
3159 return Ok(Default::default());
3160 }
3161
3162 Ok(lang_server
3163 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3164 text_document: lsp::TextDocumentIdentifier::new(
3165 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3166 ),
3167 range: lsp_range,
3168 work_done_progress_params: Default::default(),
3169 partial_result_params: Default::default(),
3170 context: lsp::CodeActionContext {
3171 diagnostics: relevant_diagnostics,
3172 only: Some(vec![
3173 lsp::CodeActionKind::QUICKFIX,
3174 lsp::CodeActionKind::REFACTOR,
3175 lsp::CodeActionKind::REFACTOR_EXTRACT,
3176 lsp::CodeActionKind::SOURCE,
3177 ]),
3178 },
3179 })
3180 .await?
3181 .unwrap_or_default()
3182 .into_iter()
3183 .filter_map(|entry| {
3184 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3185 Some(CodeAction {
3186 range: range.clone(),
3187 lsp_action,
3188 })
3189 } else {
3190 None
3191 }
3192 })
3193 .collect())
3194 })
3195 } else if let Some(project_id) = self.remote_id() {
3196 let rpc = self.client.clone();
3197 let version = buffer.version();
3198 cx.spawn_weak(|_, mut cx| async move {
3199 let response = rpc
3200 .request(proto::GetCodeActions {
3201 project_id,
3202 buffer_id,
3203 start: Some(language::proto::serialize_anchor(&range.start)),
3204 end: Some(language::proto::serialize_anchor(&range.end)),
3205 version: serialize_version(&version),
3206 })
3207 .await?;
3208
3209 buffer_handle
3210 .update(&mut cx, |buffer, _| {
3211 buffer.wait_for_version(deserialize_version(response.version))
3212 })
3213 .await;
3214
3215 response
3216 .actions
3217 .into_iter()
3218 .map(language::proto::deserialize_code_action)
3219 .collect()
3220 })
3221 } else {
3222 Task::ready(Ok(Default::default()))
3223 }
3224 }
3225
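    /// Applies a code action: the action is re-resolved (or re-requested when the server
    /// returned no resolve data), its workspace edit is applied, and if the action only
    /// carries a command, that command is executed and any workspace edits the server
    /// sends back are returned as the resulting `ProjectTransaction`.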
3226 pub fn apply_code_action(
3227 &self,
3228 buffer_handle: ModelHandle<Buffer>,
3229 mut action: CodeAction,
3230 push_to_history: bool,
3231 cx: &mut ModelContext<Self>,
3232 ) -> Task<Result<ProjectTransaction>> {
3233 if self.is_local() {
3234 let buffer = buffer_handle.read(cx);
3235 let (lsp_adapter, lang_server) =
3236 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3237 server.clone()
3238 } else {
3239 return Task::ready(Ok(Default::default()));
3240 };
3241 let range = action.range.to_point_utf16(buffer);
3242
3243 cx.spawn(|this, mut cx| async move {
3244 if let Some(lsp_range) = action
3245 .lsp_action
3246 .data
3247 .as_mut()
3248 .and_then(|d| d.get_mut("codeActionParams"))
3249 .and_then(|d| d.get_mut("range"))
3250 {
3251 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3252 action.lsp_action = lang_server
3253 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3254 .await?;
3255 } else {
3256 let actions = this
3257 .update(&mut cx, |this, cx| {
3258 this.code_actions(&buffer_handle, action.range, cx)
3259 })
3260 .await?;
3261 action.lsp_action = actions
3262 .into_iter()
3263 .find(|a| a.lsp_action.title == action.lsp_action.title)
3264 .ok_or_else(|| anyhow!("code action is outdated"))?
3265 .lsp_action;
3266 }
3267
3268 if let Some(edit) = action.lsp_action.edit {
3269 Self::deserialize_workspace_edit(
3270 this,
3271 edit,
3272 push_to_history,
3273 lsp_adapter,
3274 lang_server,
3275 &mut cx,
3276 )
3277 .await
3278 } else if let Some(command) = action.lsp_action.command {
3279 this.update(&mut cx, |this, _| {
3280 this.last_workspace_edits_by_language_server
3281 .remove(&lang_server.server_id());
3282 });
3283 lang_server
3284 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3285 command: command.command,
3286 arguments: command.arguments.unwrap_or_default(),
3287 ..Default::default()
3288 })
3289 .await?;
3290 Ok(this.update(&mut cx, |this, _| {
3291 this.last_workspace_edits_by_language_server
3292 .remove(&lang_server.server_id())
3293 .unwrap_or_default()
3294 }))
3295 } else {
3296 Ok(ProjectTransaction::default())
3297 }
3298 })
3299 } else if let Some(project_id) = self.remote_id() {
3300 let client = self.client.clone();
3301 let request = proto::ApplyCodeAction {
3302 project_id,
3303 buffer_id: buffer_handle.read(cx).remote_id(),
3304 action: Some(language::proto::serialize_code_action(&action)),
3305 };
3306 cx.spawn(|this, mut cx| async move {
3307 let response = client
3308 .request(request)
3309 .await?
3310 .transaction
3311 .ok_or_else(|| anyhow!("missing transaction"))?;
3312 this.update(&mut cx, |this, cx| {
3313 this.deserialize_project_transaction(response, push_to_history, cx)
3314 })
3315 .await
3316 })
3317 } else {
3318 Task::ready(Err(anyhow!("project does not have a remote id")))
3319 }
3320 }
3321
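    /// Applies an LSP `WorkspaceEdit` to the project: resource operations (create,
    /// rename, delete) are performed through the filesystem, text edits are applied to
    /// the corresponding buffers, and every buffer transaction is collected into a
    /// `ProjectTransaction`.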
3322 async fn deserialize_workspace_edit(
3323 this: ModelHandle<Self>,
3324 edit: lsp::WorkspaceEdit,
3325 push_to_history: bool,
3326 lsp_adapter: Arc<dyn LspAdapter>,
3327 language_server: Arc<LanguageServer>,
3328 cx: &mut AsyncAppContext,
3329 ) -> Result<ProjectTransaction> {
3330 let fs = this.read_with(cx, |this, _| this.fs.clone());
3331 let mut operations = Vec::new();
3332 if let Some(document_changes) = edit.document_changes {
3333 match document_changes {
3334 lsp::DocumentChanges::Edits(edits) => {
3335 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3336 }
3337 lsp::DocumentChanges::Operations(ops) => operations = ops,
3338 }
3339 } else if let Some(changes) = edit.changes {
3340 operations.extend(changes.into_iter().map(|(uri, edits)| {
3341 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3342 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3343 uri,
3344 version: None,
3345 },
3346 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3347 })
3348 }));
3349 }
3350
3351 let mut project_transaction = ProjectTransaction::default();
3352 for operation in operations {
3353 match operation {
3354 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3355 let abs_path = op
3356 .uri
3357 .to_file_path()
3358 .map_err(|_| anyhow!("can't convert URI to path"))?;
3359
3360 if let Some(parent_path) = abs_path.parent() {
3361 fs.create_dir(parent_path).await?;
3362 }
3363 if abs_path.ends_with("/") {
3364 fs.create_dir(&abs_path).await?;
3365 } else {
3366 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3367 .await?;
3368 }
3369 }
3370 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3371 let source_abs_path = op
3372 .old_uri
3373 .to_file_path()
3374 .map_err(|_| anyhow!("can't convert URI to path"))?;
3375 let target_abs_path = op
3376 .new_uri
3377 .to_file_path()
3378 .map_err(|_| anyhow!("can't convert URI to path"))?;
3379 fs.rename(
3380 &source_abs_path,
3381 &target_abs_path,
3382 op.options.map(Into::into).unwrap_or_default(),
3383 )
3384 .await?;
3385 }
3386 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3387 let abs_path = op
3388 .uri
3389 .to_file_path()
3390 .map_err(|_| anyhow!("can't convert URI to path"))?;
3391 let options = op.options.map(Into::into).unwrap_or_default();
3392 if abs_path.ends_with("/") {
3393 fs.remove_dir(&abs_path, options).await?;
3394 } else {
3395 fs.remove_file(&abs_path, options).await?;
3396 }
3397 }
3398 lsp::DocumentChangeOperation::Edit(op) => {
3399 let buffer_to_edit = this
3400 .update(cx, |this, cx| {
3401 this.open_local_buffer_via_lsp(
3402 op.text_document.uri,
3403 lsp_adapter.clone(),
3404 language_server.clone(),
3405 cx,
3406 )
3407 })
3408 .await?;
3409
3410 let edits = this
3411 .update(cx, |this, cx| {
3412 let edits = op.edits.into_iter().map(|edit| match edit {
3413 lsp::OneOf::Left(edit) => edit,
3414 lsp::OneOf::Right(edit) => edit.text_edit,
3415 });
3416 this.edits_from_lsp(
3417 &buffer_to_edit,
3418 edits,
3419 op.text_document.version,
3420 cx,
3421 )
3422 })
3423 .await?;
3424
3425 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3426 buffer.finalize_last_transaction();
3427 buffer.start_transaction();
3428 for (range, text) in edits {
3429 buffer.edit([(range, text)], cx);
3430 }
3431 let transaction = if buffer.end_transaction(cx).is_some() {
3432 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3433 if !push_to_history {
3434 buffer.forget_transaction(transaction.id);
3435 }
3436 Some(transaction)
3437 } else {
3438 None
3439 };
3440
3441 transaction
3442 });
3443 if let Some(transaction) = transaction {
3444 project_transaction.0.insert(buffer_to_edit, transaction);
3445 }
3446 }
3447 }
3448 }
3449
3450 Ok(project_transaction)
3451 }
3452
3453 pub fn prepare_rename<T: ToPointUtf16>(
3454 &self,
3455 buffer: ModelHandle<Buffer>,
3456 position: T,
3457 cx: &mut ModelContext<Self>,
3458 ) -> Task<Result<Option<Range<Anchor>>>> {
3459 let position = position.to_point_utf16(buffer.read(cx));
3460 self.request_lsp(buffer, PrepareRename { position }, cx)
3461 }
3462
3463 pub fn perform_rename<T: ToPointUtf16>(
3464 &self,
3465 buffer: ModelHandle<Buffer>,
3466 position: T,
3467 new_name: String,
3468 push_to_history: bool,
3469 cx: &mut ModelContext<Self>,
3470 ) -> Task<Result<ProjectTransaction>> {
3471 let position = position.to_point_utf16(buffer.read(cx));
3472 self.request_lsp(
3473 buffer,
3474 PerformRename {
3475 position,
3476 new_name,
3477 push_to_history,
3478 },
3479 cx,
3480 )
3481 }
3482
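    /// Searches the project for the given query. Local worktrees are scanned on
    /// background threads to find candidate paths, matching files are opened as buffers,
    /// and each buffer is searched to produce anchor ranges. Remote projects delegate
    /// the search to the host.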
3483 pub fn search(
3484 &self,
3485 query: SearchQuery,
3486 cx: &mut ModelContext<Self>,
3487 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3488 if self.is_local() {
3489 let snapshots = self
3490 .visible_worktrees(cx)
3491 .filter_map(|tree| {
3492 let tree = tree.read(cx).as_local()?;
3493 Some(tree.snapshot())
3494 })
3495 .collect::<Vec<_>>();
3496
3497 let background = cx.background().clone();
3498 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3499 if path_count == 0 {
3500 return Task::ready(Ok(Default::default()));
3501 }
3502 let workers = background.num_cpus().min(path_count);
3503 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3504 cx.background()
3505 .spawn({
3506 let fs = self.fs.clone();
3507 let background = cx.background().clone();
3508 let query = query.clone();
3509 async move {
3510 let fs = &fs;
3511 let query = &query;
3512 let matching_paths_tx = &matching_paths_tx;
3513 let paths_per_worker = (path_count + workers - 1) / workers;
3514 let snapshots = &snapshots;
3515 background
3516 .scoped(|scope| {
3517 for worker_ix in 0..workers {
3518 let worker_start_ix = worker_ix * paths_per_worker;
3519 let worker_end_ix = worker_start_ix + paths_per_worker;
3520 scope.spawn(async move {
3521 let mut snapshot_start_ix = 0;
3522 let mut abs_path = PathBuf::new();
3523 for snapshot in snapshots {
3524 let snapshot_end_ix =
3525 snapshot_start_ix + snapshot.visible_file_count();
3526 if worker_end_ix <= snapshot_start_ix {
3527 break;
3528 } else if worker_start_ix > snapshot_end_ix {
3529 snapshot_start_ix = snapshot_end_ix;
3530 continue;
3531 } else {
3532 let start_in_snapshot = worker_start_ix
3533 .saturating_sub(snapshot_start_ix);
3534 let end_in_snapshot =
3535 cmp::min(worker_end_ix, snapshot_end_ix)
3536 - snapshot_start_ix;
3537
3538 for entry in snapshot
3539 .files(false, start_in_snapshot)
3540 .take(end_in_snapshot - start_in_snapshot)
3541 {
3542 if matching_paths_tx.is_closed() {
3543 break;
3544 }
3545
3546 abs_path.clear();
3547 abs_path.push(&snapshot.abs_path());
3548 abs_path.push(&entry.path);
3549 let matches = if let Some(file) =
3550 fs.open_sync(&abs_path).await.log_err()
3551 {
3552 query.detect(file).unwrap_or(false)
3553 } else {
3554 false
3555 };
3556
3557 if matches {
3558 let project_path =
3559 (snapshot.id(), entry.path.clone());
3560 if matching_paths_tx
3561 .send(project_path)
3562 .await
3563 .is_err()
3564 {
3565 break;
3566 }
3567 }
3568 }
3569
3570 snapshot_start_ix = snapshot_end_ix;
3571 }
3572 }
3573 });
3574 }
3575 })
3576 .await;
3577 }
3578 })
3579 .detach();
3580
3581 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3582 let open_buffers = self
3583 .opened_buffers
3584 .values()
3585 .filter_map(|b| b.upgrade(cx))
3586 .collect::<HashSet<_>>();
3587 cx.spawn(|this, cx| async move {
3588 for buffer in &open_buffers {
3589 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3590 buffers_tx.send((buffer.clone(), snapshot)).await?;
3591 }
3592
3593 let open_buffers = Rc::new(RefCell::new(open_buffers));
3594 while let Some(project_path) = matching_paths_rx.next().await {
3595 if buffers_tx.is_closed() {
3596 break;
3597 }
3598
3599 let this = this.clone();
3600 let open_buffers = open_buffers.clone();
3601 let buffers_tx = buffers_tx.clone();
3602 cx.spawn(|mut cx| async move {
3603 if let Some(buffer) = this
3604 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3605 .await
3606 .log_err()
3607 {
3608 if open_buffers.borrow_mut().insert(buffer.clone()) {
3609 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3610 buffers_tx.send((buffer, snapshot)).await?;
3611 }
3612 }
3613
3614 Ok::<_, anyhow::Error>(())
3615 })
3616 .detach();
3617 }
3618
3619 Ok::<_, anyhow::Error>(())
3620 })
3621 .detach_and_log_err(cx);
3622
3623 let background = cx.background().clone();
3624 cx.background().spawn(async move {
3625 let query = &query;
3626 let mut matched_buffers = Vec::new();
3627 for _ in 0..workers {
3628 matched_buffers.push(HashMap::default());
3629 }
3630 background
3631 .scoped(|scope| {
3632 for worker_matched_buffers in matched_buffers.iter_mut() {
3633 let mut buffers_rx = buffers_rx.clone();
3634 scope.spawn(async move {
3635 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3636 let buffer_matches = query
3637 .search(snapshot.as_rope())
3638 .await
3639 .iter()
3640 .map(|range| {
3641 snapshot.anchor_before(range.start)
3642 ..snapshot.anchor_after(range.end)
3643 })
3644 .collect::<Vec<_>>();
3645 if !buffer_matches.is_empty() {
3646 worker_matched_buffers
3647 .insert(buffer.clone(), buffer_matches);
3648 }
3649 }
3650 });
3651 }
3652 })
3653 .await;
3654 Ok(matched_buffers.into_iter().flatten().collect())
3655 })
3656 } else if let Some(project_id) = self.remote_id() {
3657 let request = self.client.request(query.to_proto(project_id));
3658 cx.spawn(|this, mut cx| async move {
3659 let response = request.await?;
3660 let mut result = HashMap::default();
3661 for location in response.locations {
3662 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3663 let target_buffer = this
3664 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3665 .await?;
3666 let start = location
3667 .start
3668 .and_then(deserialize_anchor)
3669 .ok_or_else(|| anyhow!("missing target start"))?;
3670 let end = location
3671 .end
3672 .and_then(deserialize_anchor)
3673 .ok_or_else(|| anyhow!("missing target end"))?;
3674 result
3675 .entry(target_buffer)
3676 .or_insert(Vec::new())
3677 .push(start..end)
3678 }
3679 Ok(result)
3680 })
3681 } else {
3682 Task::ready(Ok(Default::default()))
3683 }
3684 }
3685
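    /// Dispatches an `LspCommand`: locally it is translated into an LSP request for the
    /// buffer's language server (after checking the server's capabilities); on remote
    /// projects it is serialized and sent to the host instead.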
3686 fn request_lsp<R: LspCommand>(
3687 &self,
3688 buffer_handle: ModelHandle<Buffer>,
3689 request: R,
3690 cx: &mut ModelContext<Self>,
3691 ) -> Task<Result<R::Response>>
3692 where
3693 <R::LspRequest as lsp::request::Request>::Result: Send,
3694 {
3695 let buffer = buffer_handle.read(cx);
3696 if self.is_local() {
3697 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3698 if let Some((file, (_, language_server))) =
3699 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3700 {
3701 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3702 return cx.spawn(|this, cx| async move {
3703 if !request.check_capabilities(&language_server.capabilities()) {
3704 return Ok(Default::default());
3705 }
3706
3707 let response = language_server
3708 .request::<R::LspRequest>(lsp_params)
3709 .await
3710 .context("lsp request failed")?;
3711 request
3712 .response_from_lsp(response, this, buffer_handle, cx)
3713 .await
3714 });
3715 }
3716 } else if let Some(project_id) = self.remote_id() {
3717 let rpc = self.client.clone();
3718 let message = request.to_proto(project_id, buffer);
3719 return cx.spawn(|this, cx| async move {
3720 let response = rpc.request(message).await?;
3721 request
3722 .response_from_proto(response, this, buffer_handle, cx)
3723 .await
3724 });
3725 }
3726 Task::ready(Ok(Default::default()))
3727 }
3728
3729 pub fn find_or_create_local_worktree(
3730 &mut self,
3731 abs_path: impl AsRef<Path>,
3732 visible: bool,
3733 cx: &mut ModelContext<Self>,
3734 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3735 let abs_path = abs_path.as_ref();
3736 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3737 Task::ready(Ok((tree.clone(), relative_path.into())))
3738 } else {
3739 let worktree = self.create_local_worktree(abs_path, visible, cx);
3740 cx.foreground()
3741 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3742 }
3743 }
3744
3745 pub fn find_local_worktree(
3746 &self,
3747 abs_path: &Path,
3748 cx: &AppContext,
3749 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3750 for tree in self.worktrees(cx) {
3751 if let Some(relative_path) = tree
3752 .read(cx)
3753 .as_local()
3754 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3755 {
3756 return Some((tree.clone(), relative_path.into()));
3757 }
3758 }
3759 None
3760 }
3761
3762 pub fn is_shared(&self) -> bool {
3763 match &self.client_state {
3764 ProjectClientState::Local { is_shared, .. } => *is_shared,
3765 ProjectClientState::Remote { .. } => false,
3766 }
3767 }
3768
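    /// Creates a local worktree for the given path, deduplicating concurrent loads of
    /// the same path, registering the worktree with the project, and sharing it with
    /// collaborators when the project is currently shared.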
3769 fn create_local_worktree(
3770 &mut self,
3771 abs_path: impl AsRef<Path>,
3772 visible: bool,
3773 cx: &mut ModelContext<Self>,
3774 ) -> Task<Result<ModelHandle<Worktree>>> {
3775 let fs = self.fs.clone();
3776 let client = self.client.clone();
3777 let next_entry_id = self.next_entry_id.clone();
3778 let path: Arc<Path> = abs_path.as_ref().into();
3779 let task = self
3780 .loading_local_worktrees
3781 .entry(path.clone())
3782 .or_insert_with(|| {
3783 cx.spawn(|project, mut cx| {
3784 async move {
3785 let worktree = Worktree::local(
3786 client.clone(),
3787 path.clone(),
3788 visible,
3789 fs,
3790 next_entry_id,
3791 &mut cx,
3792 )
3793 .await;
3794 project.update(&mut cx, |project, _| {
3795 project.loading_local_worktrees.remove(&path);
3796 });
3797 let worktree = worktree?;
3798
3799 let project_id = project.update(&mut cx, |project, cx| {
3800 project.add_worktree(&worktree, cx);
3801 project.shared_remote_id()
3802 });
3803
3804 if let Some(project_id) = project_id {
3805 worktree
3806 .update(&mut cx, |worktree, cx| {
3807 worktree.as_local_mut().unwrap().share(project_id, cx)
3808 })
3809 .await
3810 .log_err();
3811 }
3812
3813 Ok(worktree)
3814 }
                    .map_err(Arc::new)
3816 })
3817 .shared()
3818 })
3819 .clone();
3820 cx.foreground().spawn(async move {
3821 match task.await {
3822 Ok(worktree) => Ok(worktree),
3823 Err(err) => Err(anyhow!("{}", err)),
3824 }
3825 })
3826 }
3827
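    /// Removes the worktree with the given id from the project, emitting
    /// `Event::WorktreeRemoved` and refreshing the project's metadata.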
3828 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3829 self.worktrees.retain(|worktree| {
3830 if let Some(worktree) = worktree.upgrade(cx) {
3831 let id = worktree.read(cx).id();
3832 if id == id_to_remove {
3833 cx.emit(Event::WorktreeRemoved(id));
3834 false
3835 } else {
3836 true
3837 }
3838 } else {
3839 false
3840 }
3841 });
3842 self.metadata_changed(true, cx);
3843 cx.notify();
3844 }
3845
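    /// Adds a worktree to the project. Worktrees that are visible, remote, or part
    /// of a shared project are retained strongly; otherwise only a weak handle is
    /// kept so the worktree can be dropped once it is no longer used elsewhere.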
3846 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3847 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3848 if worktree.read(cx).is_local() {
3849 cx.subscribe(&worktree, |this, worktree, _, cx| {
3850 this.update_local_worktree_buffers(worktree, cx);
3851 })
3852 .detach();
3853 }
3854
3855 let push_strong_handle = {
3856 let worktree = worktree.read(cx);
3857 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3858 };
3859 if push_strong_handle {
3860 self.worktrees
3861 .push(WorktreeHandle::Strong(worktree.clone()));
3862 } else {
3863 cx.observe_release(&worktree, |this, _, cx| {
3864 this.worktrees
3865 .retain(|worktree| worktree.upgrade(cx).is_some());
3866 cx.notify();
3867 })
3868 .detach();
3869 self.worktrees
3870 .push(WorktreeHandle::Weak(worktree.downgrade()));
3871 }
3872 self.metadata_changed(true, cx);
3873 cx.emit(Event::WorktreeAdded);
3874 cx.notify();
3875 }
3876
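    /// Reconciles open buffers with the latest snapshot of a local worktree:
    /// refreshes each buffer's file metadata, notifies collaborators of the changes
    /// when the project is shared, and re-registers renamed buffers with their
    /// language servers.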
3877 fn update_local_worktree_buffers(
3878 &mut self,
3879 worktree_handle: ModelHandle<Worktree>,
3880 cx: &mut ModelContext<Self>,
3881 ) {
3882 let snapshot = worktree_handle.read(cx).snapshot();
3883 let mut buffers_to_delete = Vec::new();
3884 let mut renamed_buffers = Vec::new();
3885 for (buffer_id, buffer) in &self.opened_buffers {
3886 if let Some(buffer) = buffer.upgrade(cx) {
3887 buffer.update(cx, |buffer, cx| {
3888 if let Some(old_file) = File::from_dyn(buffer.file()) {
3889 if old_file.worktree != worktree_handle {
3890 return;
3891 }
3892
3893 let new_file = if let Some(entry) = old_file
3894 .entry_id
3895 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3896 {
3897 File {
3898 is_local: true,
3899 entry_id: Some(entry.id),
3900 mtime: entry.mtime,
3901 path: entry.path.clone(),
3902 worktree: worktree_handle.clone(),
3903 }
3904 } else if let Some(entry) =
3905 snapshot.entry_for_path(old_file.path().as_ref())
3906 {
3907 File {
3908 is_local: true,
3909 entry_id: Some(entry.id),
3910 mtime: entry.mtime,
3911 path: entry.path.clone(),
3912 worktree: worktree_handle.clone(),
3913 }
3914 } else {
3915 File {
3916 is_local: true,
3917 entry_id: None,
3918 path: old_file.path().clone(),
3919 mtime: old_file.mtime(),
3920 worktree: worktree_handle.clone(),
3921 }
3922 };
3923
3924 let old_path = old_file.abs_path(cx);
3925 if new_file.abs_path(cx) != old_path {
3926 renamed_buffers.push((cx.handle(), old_path));
3927 }
3928
3929 if let Some(project_id) = self.shared_remote_id() {
3930 self.client
3931 .send(proto::UpdateBufferFile {
3932 project_id,
3933 buffer_id: *buffer_id as u64,
3934 file: Some(new_file.to_proto()),
3935 })
3936 .log_err();
3937 }
3938 buffer.file_updated(Box::new(new_file), cx).detach();
3939 }
3940 });
3941 } else {
3942 buffers_to_delete.push(*buffer_id);
3943 }
3944 }
3945
3946 for buffer_id in buffers_to_delete {
3947 self.opened_buffers.remove(&buffer_id);
3948 }
3949
3950 for (buffer, old_path) in renamed_buffers {
3951 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3952 self.assign_language_to_buffer(&buffer, cx);
3953 self.register_buffer_with_language_server(&buffer, cx);
3954 }
3955 }
3956
3957 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3958 let new_active_entry = entry.and_then(|project_path| {
3959 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3960 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3961 Some(entry.id)
3962 });
3963 if new_active_entry != self.active_entry {
3964 self.active_entry = new_active_entry;
3965 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3966 }
3967 }
3968
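    /// Whether any language server currently has pending disk-based diagnostic updates.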
3969 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3970 self.language_server_statuses
3971 .values()
3972 .any(|status| status.pending_diagnostic_updates > 0)
3973 }
3974
3975 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3976 let mut summary = DiagnosticSummary::default();
3977 for (_, path_summary) in self.diagnostic_summaries(cx) {
3978 summary.error_count += path_summary.error_count;
3979 summary.warning_count += path_summary.warning_count;
3980 }
3981 summary
3982 }
3983
3984 pub fn diagnostic_summaries<'a>(
3985 &'a self,
3986 cx: &'a AppContext,
3987 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3988 self.worktrees(cx).flat_map(move |worktree| {
3989 let worktree = worktree.read(cx);
3990 let worktree_id = worktree.id();
3991 worktree
3992 .diagnostic_summaries()
3993 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3994 })
3995 }
3996
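    /// Emits `Event::DiskBasedDiagnosticsStarted` when exactly one disk-based
    /// diagnostic update is pending across all language servers, i.e. when the
    /// first such update begins.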
3997 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3998 if self
3999 .language_server_statuses
4000 .values()
4001 .map(|status| status.pending_diagnostic_updates)
4002 .sum::<isize>()
4003 == 1
4004 {
4005 cx.emit(Event::DiskBasedDiagnosticsStarted);
4006 }
4007 }
4008
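    /// Emits `Event::DiskBasedDiagnosticsUpdated`, followed by
    /// `Event::DiskBasedDiagnosticsFinished` once no disk-based diagnostic updates
    /// remain pending on any language server.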
4009 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
4010 cx.emit(Event::DiskBasedDiagnosticsUpdated);
4011 if self
4012 .language_server_statuses
4013 .values()
4014 .map(|status| status.pending_diagnostic_updates)
4015 .sum::<isize>()
4016 == 0
4017 {
4018 cx.emit(Event::DiskBasedDiagnosticsFinished);
4019 }
4020 }
4021
4022 pub fn active_entry(&self) -> Option<ProjectEntryId> {
4023 self.active_entry
4024 }
4025
4026 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
4027 self.worktree_for_id(path.worktree_id, cx)?
4028 .read(cx)
4029 .entry_for_path(&path.path)
4030 .map(|entry| entry.id)
4031 }
4032
4033 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
4034 let worktree = self.worktree_for_entry(entry_id, cx)?;
4035 let worktree = worktree.read(cx);
4036 let worktree_id = worktree.id();
4037 let path = worktree.entry_for_id(entry_id)?.path.clone();
4038 Some(ProjectPath { worktree_id, path })
4039 }
4040
4041 // RPC message handlers
4042
4043 async fn handle_request_join_project(
4044 this: ModelHandle<Self>,
4045 message: TypedEnvelope<proto::RequestJoinProject>,
4046 _: Arc<Client>,
4047 mut cx: AsyncAppContext,
4048 ) -> Result<()> {
4049 let user_id = message.payload.requester_id;
4050 if this.read_with(&cx, |project, _| {
4051 project.collaborators.values().any(|c| c.user.id == user_id)
4052 }) {
4053 this.update(&mut cx, |this, cx| {
4054 this.respond_to_join_request(user_id, true, cx)
4055 });
4056 } else {
4057 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4058 let user = user_store
4059 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
4060 .await?;
4061 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
4062 }
4063 Ok(())
4064 }
4065
4066 async fn handle_unregister_project(
4067 this: ModelHandle<Self>,
4068 _: TypedEnvelope<proto::UnregisterProject>,
4069 _: Arc<Client>,
4070 mut cx: AsyncAppContext,
4071 ) -> Result<()> {
4072 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
4073 Ok(())
4074 }
4075
4076 async fn handle_project_unshared(
4077 this: ModelHandle<Self>,
4078 _: TypedEnvelope<proto::ProjectUnshared>,
4079 _: Arc<Client>,
4080 mut cx: AsyncAppContext,
4081 ) -> Result<()> {
4082 this.update(&mut cx, |this, cx| this.unshared(cx));
4083 Ok(())
4084 }
4085
4086 async fn handle_add_collaborator(
4087 this: ModelHandle<Self>,
4088 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4089 _: Arc<Client>,
4090 mut cx: AsyncAppContext,
4091 ) -> Result<()> {
4092 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4093 let collaborator = envelope
4094 .payload
4095 .collaborator
4096 .take()
4097 .ok_or_else(|| anyhow!("empty collaborator"))?;
4098
4099 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4100 this.update(&mut cx, |this, cx| {
4101 this.collaborators
4102 .insert(collaborator.peer_id, collaborator);
4103 cx.notify();
4104 });
4105
4106 Ok(())
4107 }
4108
4109 async fn handle_remove_collaborator(
4110 this: ModelHandle<Self>,
4111 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4112 _: Arc<Client>,
4113 mut cx: AsyncAppContext,
4114 ) -> Result<()> {
4115 this.update(&mut cx, |this, cx| {
4116 let peer_id = PeerId(envelope.payload.peer_id);
4117 let replica_id = this
4118 .collaborators
4119 .remove(&peer_id)
4120 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4121 .replica_id;
4122 for (_, buffer) in &this.opened_buffers {
4123 if let Some(buffer) = buffer.upgrade(cx) {
4124 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4125 }
4126 }
4127
4128 cx.emit(Event::CollaboratorLeft(peer_id));
4129 cx.notify();
4130 Ok(())
4131 })
4132 }
4133
4134 async fn handle_join_project_request_cancelled(
4135 this: ModelHandle<Self>,
4136 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4137 _: Arc<Client>,
4138 mut cx: AsyncAppContext,
4139 ) -> Result<()> {
4140 let user = this
4141 .update(&mut cx, |this, cx| {
4142 this.user_store.update(cx, |user_store, cx| {
4143 user_store.fetch_user(envelope.payload.requester_id, cx)
4144 })
4145 })
4146 .await?;
4147
4148 this.update(&mut cx, |_, cx| {
4149 cx.emit(Event::ContactCancelledJoinRequest(user));
4150 });
4151
4152 Ok(())
4153 }
4154
4155 async fn handle_update_project(
4156 this: ModelHandle<Self>,
4157 envelope: TypedEnvelope<proto::UpdateProject>,
4158 client: Arc<Client>,
4159 mut cx: AsyncAppContext,
4160 ) -> Result<()> {
4161 this.update(&mut cx, |this, cx| {
4162 let replica_id = this.replica_id();
4163 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4164
4165 let mut old_worktrees_by_id = this
4166 .worktrees
4167 .drain(..)
4168 .filter_map(|worktree| {
4169 let worktree = worktree.upgrade(cx)?;
4170 Some((worktree.read(cx).id(), worktree))
4171 })
4172 .collect::<HashMap<_, _>>();
4173
4174 for worktree in envelope.payload.worktrees {
4175 if let Some(old_worktree) =
4176 old_worktrees_by_id.remove(&WorktreeId::from_proto(worktree.id))
4177 {
4178 this.worktrees.push(WorktreeHandle::Strong(old_worktree));
4179 } else {
4180 let worktree = proto::Worktree {
4181 id: worktree.id,
4182 root_name: worktree.root_name,
4183 entries: Default::default(),
4184 diagnostic_summaries: Default::default(),
4185 visible: worktree.visible,
4186 scan_id: 0,
4187 };
4188 let (worktree, load_task) =
4189 Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx);
4190 this.add_worktree(&worktree, cx);
4191 load_task.detach();
4192 }
4193 }
4194
4195 this.metadata_changed(true, cx);
4196 for (id, _) in old_worktrees_by_id {
4197 cx.emit(Event::WorktreeRemoved(id));
4198 }
4199
4200 Ok(())
4201 })
4202 }
4203
4204 async fn handle_update_worktree(
4205 this: ModelHandle<Self>,
4206 envelope: TypedEnvelope<proto::UpdateWorktree>,
4207 _: Arc<Client>,
4208 mut cx: AsyncAppContext,
4209 ) -> Result<()> {
4210 this.update(&mut cx, |this, cx| {
4211 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4212 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4213 worktree.update(cx, |worktree, _| {
4214 let worktree = worktree.as_remote_mut().unwrap();
4215 worktree.update_from_remote(envelope)
4216 })?;
4217 }
4218 Ok(())
4219 })
4220 }
4221
4222 async fn handle_create_project_entry(
4223 this: ModelHandle<Self>,
4224 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4225 _: Arc<Client>,
4226 mut cx: AsyncAppContext,
4227 ) -> Result<proto::ProjectEntryResponse> {
4228 let worktree = this.update(&mut cx, |this, cx| {
4229 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4230 this.worktree_for_id(worktree_id, cx)
4231 .ok_or_else(|| anyhow!("worktree not found"))
4232 })?;
4233 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4234 let entry = worktree
4235 .update(&mut cx, |worktree, cx| {
4236 let worktree = worktree.as_local_mut().unwrap();
4237 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4238 worktree.create_entry(path, envelope.payload.is_directory, cx)
4239 })
4240 .await?;
4241 Ok(proto::ProjectEntryResponse {
4242 entry: Some((&entry).into()),
4243 worktree_scan_id: worktree_scan_id as u64,
4244 })
4245 }
4246
4247 async fn handle_rename_project_entry(
4248 this: ModelHandle<Self>,
4249 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4250 _: Arc<Client>,
4251 mut cx: AsyncAppContext,
4252 ) -> Result<proto::ProjectEntryResponse> {
4253 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4254 let worktree = this.read_with(&cx, |this, cx| {
4255 this.worktree_for_entry(entry_id, cx)
4256 .ok_or_else(|| anyhow!("worktree not found"))
4257 })?;
4258 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4259 let entry = worktree
4260 .update(&mut cx, |worktree, cx| {
4261 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4262 worktree
4263 .as_local_mut()
4264 .unwrap()
4265 .rename_entry(entry_id, new_path, cx)
4266 .ok_or_else(|| anyhow!("invalid entry"))
4267 })?
4268 .await?;
4269 Ok(proto::ProjectEntryResponse {
4270 entry: Some((&entry).into()),
4271 worktree_scan_id: worktree_scan_id as u64,
4272 })
4273 }
4274
4275 async fn handle_copy_project_entry(
4276 this: ModelHandle<Self>,
4277 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4278 _: Arc<Client>,
4279 mut cx: AsyncAppContext,
4280 ) -> Result<proto::ProjectEntryResponse> {
4281 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4282 let worktree = this.read_with(&cx, |this, cx| {
4283 this.worktree_for_entry(entry_id, cx)
4284 .ok_or_else(|| anyhow!("worktree not found"))
4285 })?;
4286 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4287 let entry = worktree
4288 .update(&mut cx, |worktree, cx| {
4289 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4290 worktree
4291 .as_local_mut()
4292 .unwrap()
4293 .copy_entry(entry_id, new_path, cx)
4294 .ok_or_else(|| anyhow!("invalid entry"))
4295 })?
4296 .await?;
4297 Ok(proto::ProjectEntryResponse {
4298 entry: Some((&entry).into()),
4299 worktree_scan_id: worktree_scan_id as u64,
4300 })
4301 }
4302
4303 async fn handle_delete_project_entry(
4304 this: ModelHandle<Self>,
4305 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4306 _: Arc<Client>,
4307 mut cx: AsyncAppContext,
4308 ) -> Result<proto::ProjectEntryResponse> {
4309 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4310 let worktree = this.read_with(&cx, |this, cx| {
4311 this.worktree_for_entry(entry_id, cx)
4312 .ok_or_else(|| anyhow!("worktree not found"))
4313 })?;
4314 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4315 worktree
4316 .update(&mut cx, |worktree, cx| {
4317 worktree
4318 .as_local_mut()
4319 .unwrap()
4320 .delete_entry(entry_id, cx)
4321 .ok_or_else(|| anyhow!("invalid entry"))
4322 })?
4323 .await?;
4324 Ok(proto::ProjectEntryResponse {
4325 entry: None,
4326 worktree_scan_id: worktree_scan_id as u64,
4327 })
4328 }
4329
4330 async fn handle_update_diagnostic_summary(
4331 this: ModelHandle<Self>,
4332 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4333 _: Arc<Client>,
4334 mut cx: AsyncAppContext,
4335 ) -> Result<()> {
4336 this.update(&mut cx, |this, cx| {
4337 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4338 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4339 if let Some(summary) = envelope.payload.summary {
4340 let project_path = ProjectPath {
4341 worktree_id,
4342 path: Path::new(&summary.path).into(),
4343 };
4344 worktree.update(cx, |worktree, _| {
4345 worktree
4346 .as_remote_mut()
4347 .unwrap()
4348 .update_diagnostic_summary(project_path.path.clone(), &summary);
4349 });
4350 cx.emit(Event::DiagnosticsUpdated(project_path));
4351 }
4352 }
4353 Ok(())
4354 })
4355 }
4356
4357 async fn handle_start_language_server(
4358 this: ModelHandle<Self>,
4359 envelope: TypedEnvelope<proto::StartLanguageServer>,
4360 _: Arc<Client>,
4361 mut cx: AsyncAppContext,
4362 ) -> Result<()> {
4363 let server = envelope
4364 .payload
4365 .server
4366 .ok_or_else(|| anyhow!("invalid server"))?;
4367 this.update(&mut cx, |this, cx| {
4368 this.language_server_statuses.insert(
4369 server.id as usize,
4370 LanguageServerStatus {
4371 name: server.name,
4372 pending_work: Default::default(),
4373 pending_diagnostic_updates: 0,
4374 },
4375 );
4376 cx.notify();
4377 });
4378 Ok(())
4379 }
4380
4381 async fn handle_update_language_server(
4382 this: ModelHandle<Self>,
4383 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4384 _: Arc<Client>,
4385 mut cx: AsyncAppContext,
4386 ) -> Result<()> {
4387 let language_server_id = envelope.payload.language_server_id as usize;
4388 match envelope
4389 .payload
4390 .variant
4391 .ok_or_else(|| anyhow!("invalid variant"))?
4392 {
4393 proto::update_language_server::Variant::WorkStart(payload) => {
4394 this.update(&mut cx, |this, cx| {
4395 this.on_lsp_work_start(language_server_id, payload.token, cx);
4396 })
4397 }
4398 proto::update_language_server::Variant::WorkProgress(payload) => {
4399 this.update(&mut cx, |this, cx| {
4400 this.on_lsp_work_progress(
4401 language_server_id,
4402 payload.token,
4403 LanguageServerProgress {
4404 message: payload.message,
4405 percentage: payload.percentage.map(|p| p as usize),
4406 last_update_at: Instant::now(),
4407 },
4408 cx,
4409 );
4410 })
4411 }
4412 proto::update_language_server::Variant::WorkEnd(payload) => {
4413 this.update(&mut cx, |this, cx| {
4414 this.on_lsp_work_end(language_server_id, payload.token, cx);
4415 })
4416 }
4417 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4418 this.update(&mut cx, |this, cx| {
4419 this.disk_based_diagnostics_started(cx);
4420 })
4421 }
4422 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4423 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4424 }
4425 }
4426
4427 Ok(())
4428 }
4429
4430 async fn handle_update_buffer(
4431 this: ModelHandle<Self>,
4432 envelope: TypedEnvelope<proto::UpdateBuffer>,
4433 _: Arc<Client>,
4434 mut cx: AsyncAppContext,
4435 ) -> Result<()> {
4436 this.update(&mut cx, |this, cx| {
4437 let payload = envelope.payload.clone();
4438 let buffer_id = payload.buffer_id;
4439 let ops = payload
4440 .operations
4441 .into_iter()
4442                 .map(language::proto::deserialize_operation)
4443 .collect::<Result<Vec<_>, _>>()?;
4444 let is_remote = this.is_remote();
4445 match this.opened_buffers.entry(buffer_id) {
4446 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4447 OpenBuffer::Strong(buffer) => {
4448 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4449 }
4450 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4451 OpenBuffer::Weak(_) => {}
4452 },
4453 hash_map::Entry::Vacant(e) => {
4454 assert!(
4455 is_remote,
4456 "received buffer update from {:?}",
4457 envelope.original_sender_id
4458 );
4459 e.insert(OpenBuffer::Loading(ops));
4460 }
4461 }
4462 Ok(())
4463 })
4464 }
4465
4466 async fn handle_update_buffer_file(
4467 this: ModelHandle<Self>,
4468 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4469 _: Arc<Client>,
4470 mut cx: AsyncAppContext,
4471 ) -> Result<()> {
4472 this.update(&mut cx, |this, cx| {
4473 let payload = envelope.payload.clone();
4474 let buffer_id = payload.buffer_id;
4475 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4476 let worktree = this
4477 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4478 .ok_or_else(|| anyhow!("no such worktree"))?;
4479 let file = File::from_proto(file, worktree.clone(), cx)?;
4480 let buffer = this
4481 .opened_buffers
4482 .get_mut(&buffer_id)
4483 .and_then(|b| b.upgrade(cx))
4484 .ok_or_else(|| anyhow!("no such buffer"))?;
4485 buffer.update(cx, |buffer, cx| {
4486 buffer.file_updated(Box::new(file), cx).detach();
4487 });
4488 Ok(())
4489 })
4490 }
4491
4492 async fn handle_save_buffer(
4493 this: ModelHandle<Self>,
4494 envelope: TypedEnvelope<proto::SaveBuffer>,
4495 _: Arc<Client>,
4496 mut cx: AsyncAppContext,
4497 ) -> Result<proto::BufferSaved> {
4498 let buffer_id = envelope.payload.buffer_id;
4499 let requested_version = deserialize_version(envelope.payload.version);
4500
4501 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4502 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4503 let buffer = this
4504 .opened_buffers
4505 .get(&buffer_id)
4506 .and_then(|buffer| buffer.upgrade(cx))
4507 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4508 Ok::<_, anyhow::Error>((project_id, buffer))
4509 })?;
4510 buffer
4511 .update(&mut cx, |buffer, _| {
4512 buffer.wait_for_version(requested_version)
4513 })
4514 .await;
4515
4516 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4517 Ok(proto::BufferSaved {
4518 project_id,
4519 buffer_id,
4520 version: serialize_version(&saved_version),
4521 mtime: Some(mtime.into()),
4522 })
4523 }
4524
4525 async fn handle_reload_buffers(
4526 this: ModelHandle<Self>,
4527 envelope: TypedEnvelope<proto::ReloadBuffers>,
4528 _: Arc<Client>,
4529 mut cx: AsyncAppContext,
4530 ) -> Result<proto::ReloadBuffersResponse> {
4531 let sender_id = envelope.original_sender_id()?;
4532 let reload = this.update(&mut cx, |this, cx| {
4533 let mut buffers = HashSet::default();
4534 for buffer_id in &envelope.payload.buffer_ids {
4535 buffers.insert(
4536 this.opened_buffers
4537 .get(buffer_id)
4538 .and_then(|buffer| buffer.upgrade(cx))
4539 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4540 );
4541 }
4542 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4543 })?;
4544
4545 let project_transaction = reload.await?;
4546 let project_transaction = this.update(&mut cx, |this, cx| {
4547 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4548 });
4549 Ok(proto::ReloadBuffersResponse {
4550 transaction: Some(project_transaction),
4551 })
4552 }
4553
4554 async fn handle_format_buffers(
4555 this: ModelHandle<Self>,
4556 envelope: TypedEnvelope<proto::FormatBuffers>,
4557 _: Arc<Client>,
4558 mut cx: AsyncAppContext,
4559 ) -> Result<proto::FormatBuffersResponse> {
4560 let sender_id = envelope.original_sender_id()?;
4561 let format = this.update(&mut cx, |this, cx| {
4562 let mut buffers = HashSet::default();
4563 for buffer_id in &envelope.payload.buffer_ids {
4564 buffers.insert(
4565 this.opened_buffers
4566 .get(buffer_id)
4567 .and_then(|buffer| buffer.upgrade(cx))
4568 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4569 );
4570 }
4571 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4572 })?;
4573
4574 let project_transaction = format.await?;
4575 let project_transaction = this.update(&mut cx, |this, cx| {
4576 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4577 });
4578 Ok(proto::FormatBuffersResponse {
4579 transaction: Some(project_transaction),
4580 })
4581 }
4582
4583 async fn handle_get_completions(
4584 this: ModelHandle<Self>,
4585 envelope: TypedEnvelope<proto::GetCompletions>,
4586 _: Arc<Client>,
4587 mut cx: AsyncAppContext,
4588 ) -> Result<proto::GetCompletionsResponse> {
4589 let position = envelope
4590 .payload
4591 .position
4592 .and_then(language::proto::deserialize_anchor)
4593 .ok_or_else(|| anyhow!("invalid position"))?;
4594 let version = deserialize_version(envelope.payload.version);
4595 let buffer = this.read_with(&cx, |this, cx| {
4596 this.opened_buffers
4597 .get(&envelope.payload.buffer_id)
4598 .and_then(|buffer| buffer.upgrade(cx))
4599 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4600 })?;
4601 buffer
4602 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4603 .await;
4604 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4605 let completions = this
4606 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4607 .await?;
4608
4609 Ok(proto::GetCompletionsResponse {
4610 completions: completions
4611 .iter()
4612 .map(language::proto::serialize_completion)
4613 .collect(),
4614 version: serialize_version(&version),
4615 })
4616 }
4617
4618 async fn handle_apply_additional_edits_for_completion(
4619 this: ModelHandle<Self>,
4620 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4621 _: Arc<Client>,
4622 mut cx: AsyncAppContext,
4623 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4624 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4625 let buffer = this
4626 .opened_buffers
4627 .get(&envelope.payload.buffer_id)
4628 .and_then(|buffer| buffer.upgrade(cx))
4629 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4630 let language = buffer.read(cx).language();
4631 let completion = language::proto::deserialize_completion(
4632 envelope
4633 .payload
4634 .completion
4635 .ok_or_else(|| anyhow!("invalid completion"))?,
4636 language,
4637 )?;
4638 Ok::<_, anyhow::Error>(
4639 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4640 )
4641 })?;
4642
4643 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4644 transaction: apply_additional_edits
4645 .await?
4646 .as_ref()
4647 .map(language::proto::serialize_transaction),
4648 })
4649 }
4650
4651 async fn handle_get_code_actions(
4652 this: ModelHandle<Self>,
4653 envelope: TypedEnvelope<proto::GetCodeActions>,
4654 _: Arc<Client>,
4655 mut cx: AsyncAppContext,
4656 ) -> Result<proto::GetCodeActionsResponse> {
4657 let start = envelope
4658 .payload
4659 .start
4660 .and_then(language::proto::deserialize_anchor)
4661 .ok_or_else(|| anyhow!("invalid start"))?;
4662 let end = envelope
4663 .payload
4664 .end
4665 .and_then(language::proto::deserialize_anchor)
4666 .ok_or_else(|| anyhow!("invalid end"))?;
4667 let buffer = this.update(&mut cx, |this, cx| {
4668 this.opened_buffers
4669 .get(&envelope.payload.buffer_id)
4670 .and_then(|buffer| buffer.upgrade(cx))
4671 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4672 })?;
4673 buffer
4674 .update(&mut cx, |buffer, _| {
4675 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4676 })
4677 .await;
4678
4679 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4680 let code_actions = this.update(&mut cx, |this, cx| {
4681 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4682 })?;
4683
4684 Ok(proto::GetCodeActionsResponse {
4685 actions: code_actions
4686 .await?
4687 .iter()
4688 .map(language::proto::serialize_code_action)
4689 .collect(),
4690 version: serialize_version(&version),
4691 })
4692 }
4693
4694 async fn handle_apply_code_action(
4695 this: ModelHandle<Self>,
4696 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4697 _: Arc<Client>,
4698 mut cx: AsyncAppContext,
4699 ) -> Result<proto::ApplyCodeActionResponse> {
4700 let sender_id = envelope.original_sender_id()?;
4701 let action = language::proto::deserialize_code_action(
4702 envelope
4703 .payload
4704 .action
4705 .ok_or_else(|| anyhow!("invalid action"))?,
4706 )?;
4707 let apply_code_action = this.update(&mut cx, |this, cx| {
4708 let buffer = this
4709 .opened_buffers
4710 .get(&envelope.payload.buffer_id)
4711 .and_then(|buffer| buffer.upgrade(cx))
4712 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4713 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4714 })?;
4715
4716 let project_transaction = apply_code_action.await?;
4717 let project_transaction = this.update(&mut cx, |this, cx| {
4718 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4719 });
4720 Ok(proto::ApplyCodeActionResponse {
4721 transaction: Some(project_transaction),
4722 })
4723 }
4724
4725 async fn handle_lsp_command<T: LspCommand>(
4726 this: ModelHandle<Self>,
4727 envelope: TypedEnvelope<T::ProtoRequest>,
4728 _: Arc<Client>,
4729 mut cx: AsyncAppContext,
4730 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4731 where
4732 <T::LspRequest as lsp::request::Request>::Result: Send,
4733 {
4734 let sender_id = envelope.original_sender_id()?;
4735 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4736 let buffer_handle = this.read_with(&cx, |this, _| {
4737 this.opened_buffers
4738 .get(&buffer_id)
4739 .and_then(|buffer| buffer.upgrade(&cx))
4740 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4741 })?;
4742 let request = T::from_proto(
4743 envelope.payload,
4744 this.clone(),
4745 buffer_handle.clone(),
4746 cx.clone(),
4747 )
4748 .await?;
4749 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4750 let response = this
4751 .update(&mut cx, |this, cx| {
4752 this.request_lsp(buffer_handle, request, cx)
4753 })
4754 .await?;
4755 this.update(&mut cx, |this, cx| {
4756 Ok(T::response_to_proto(
4757 response,
4758 this,
4759 sender_id,
4760 &buffer_version,
4761 cx,
4762 ))
4763 })
4764 }
4765
4766 async fn handle_get_project_symbols(
4767 this: ModelHandle<Self>,
4768 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4769 _: Arc<Client>,
4770 mut cx: AsyncAppContext,
4771 ) -> Result<proto::GetProjectSymbolsResponse> {
4772 let symbols = this
4773 .update(&mut cx, |this, cx| {
4774 this.symbols(&envelope.payload.query, cx)
4775 })
4776 .await?;
4777
4778 Ok(proto::GetProjectSymbolsResponse {
4779 symbols: symbols.iter().map(serialize_symbol).collect(),
4780 })
4781 }
4782
4783 async fn handle_search_project(
4784 this: ModelHandle<Self>,
4785 envelope: TypedEnvelope<proto::SearchProject>,
4786 _: Arc<Client>,
4787 mut cx: AsyncAppContext,
4788 ) -> Result<proto::SearchProjectResponse> {
4789 let peer_id = envelope.original_sender_id()?;
4790 let query = SearchQuery::from_proto(envelope.payload)?;
4791 let result = this
4792 .update(&mut cx, |this, cx| this.search(query, cx))
4793 .await?;
4794
4795 this.update(&mut cx, |this, cx| {
4796 let mut locations = Vec::new();
4797 for (buffer, ranges) in result {
4798 for range in ranges {
4799 let start = serialize_anchor(&range.start);
4800 let end = serialize_anchor(&range.end);
4801 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4802 locations.push(proto::Location {
4803 buffer: Some(buffer),
4804 start: Some(start),
4805 end: Some(end),
4806 });
4807 }
4808 }
4809 Ok(proto::SearchProjectResponse { locations })
4810 })
4811 }
4812
4813 async fn handle_open_buffer_for_symbol(
4814 this: ModelHandle<Self>,
4815 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4816 _: Arc<Client>,
4817 mut cx: AsyncAppContext,
4818 ) -> Result<proto::OpenBufferForSymbolResponse> {
4819 let peer_id = envelope.original_sender_id()?;
4820 let symbol = envelope
4821 .payload
4822 .symbol
4823 .ok_or_else(|| anyhow!("invalid symbol"))?;
4824 let symbol = this.read_with(&cx, |this, _| {
4825 let symbol = this.deserialize_symbol(symbol)?;
4826 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4827 if signature == symbol.signature {
4828 Ok(symbol)
4829 } else {
4830 Err(anyhow!("invalid symbol signature"))
4831 }
4832 })?;
4833 let buffer = this
4834 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4835 .await?;
4836
4837 Ok(proto::OpenBufferForSymbolResponse {
4838 buffer: Some(this.update(&mut cx, |this, cx| {
4839 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4840 })),
4841 })
4842 }
4843
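    /// Computes a SHA-256 digest of the worktree id, path, and this project's nonce,
    /// used to check that a symbol received over RPC was previously produced by this
    /// project (see `handle_open_buffer_for_symbol`).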
4844 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4845 let mut hasher = Sha256::new();
4846 hasher.update(worktree_id.to_proto().to_be_bytes());
4847 hasher.update(path.to_string_lossy().as_bytes());
4848 hasher.update(self.nonce.to_be_bytes());
4849 hasher.finalize().as_slice().try_into().unwrap()
4850 }
4851
4852 async fn handle_open_buffer_by_id(
4853 this: ModelHandle<Self>,
4854 envelope: TypedEnvelope<proto::OpenBufferById>,
4855 _: Arc<Client>,
4856 mut cx: AsyncAppContext,
4857 ) -> Result<proto::OpenBufferResponse> {
4858 let peer_id = envelope.original_sender_id()?;
4859 let buffer = this
4860 .update(&mut cx, |this, cx| {
4861 this.open_buffer_by_id(envelope.payload.id, cx)
4862 })
4863 .await?;
4864 this.update(&mut cx, |this, cx| {
4865 Ok(proto::OpenBufferResponse {
4866 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4867 })
4868 })
4869 }
4870
4871 async fn handle_open_buffer_by_path(
4872 this: ModelHandle<Self>,
4873 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4874 _: Arc<Client>,
4875 mut cx: AsyncAppContext,
4876 ) -> Result<proto::OpenBufferResponse> {
4877 let peer_id = envelope.original_sender_id()?;
4878 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4879 let open_buffer = this.update(&mut cx, |this, cx| {
4880 this.open_buffer(
4881 ProjectPath {
4882 worktree_id,
4883 path: PathBuf::from(envelope.payload.path).into(),
4884 },
4885 cx,
4886 )
4887 });
4888
4889 let buffer = open_buffer.await?;
4890 this.update(&mut cx, |this, cx| {
4891 Ok(proto::OpenBufferResponse {
4892 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4893 })
4894 })
4895 }
4896
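    /// Converts a `ProjectTransaction` into its protobuf representation, serializing
    /// each affected buffer for the given peer.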
4897 fn serialize_project_transaction_for_peer(
4898 &mut self,
4899 project_transaction: ProjectTransaction,
4900 peer_id: PeerId,
4901 cx: &AppContext,
4902 ) -> proto::ProjectTransaction {
4903 let mut serialized_transaction = proto::ProjectTransaction {
4904 buffers: Default::default(),
4905 transactions: Default::default(),
4906 };
4907 for (buffer, transaction) in project_transaction.0 {
4908 serialized_transaction
4909 .buffers
4910 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4911 serialized_transaction
4912 .transactions
4913 .push(language::proto::serialize_transaction(&transaction));
4914 }
4915 serialized_transaction
4916 }
4917
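    /// Rebuilds a `ProjectTransaction` from its protobuf representation: resolves
    /// each buffer, waits for the transaction's edits to be applied, and optionally
    /// pushes the transactions onto the buffers' undo histories.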
4918 fn deserialize_project_transaction(
4919 &mut self,
4920 message: proto::ProjectTransaction,
4921 push_to_history: bool,
4922 cx: &mut ModelContext<Self>,
4923 ) -> Task<Result<ProjectTransaction>> {
4924 cx.spawn(|this, mut cx| async move {
4925 let mut project_transaction = ProjectTransaction::default();
4926 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4927 let buffer = this
4928 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4929 .await?;
4930 let transaction = language::proto::deserialize_transaction(transaction)?;
4931 project_transaction.0.insert(buffer, transaction);
4932 }
4933
4934 for (buffer, transaction) in &project_transaction.0 {
4935 buffer
4936 .update(&mut cx, |buffer, _| {
4937 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4938 })
4939 .await;
4940
4941 if push_to_history {
4942 buffer.update(&mut cx, |buffer, _| {
4943 buffer.push_transaction(transaction.clone(), Instant::now());
4944 });
4945 }
4946 }
4947
4948 Ok(project_transaction)
4949 })
4950 }
4951
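    /// Serializes a buffer for a peer. The full buffer state is sent only the first
    /// time a given buffer is shared with that peer; subsequent references send just
    /// the buffer id.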
4952 fn serialize_buffer_for_peer(
4953 &mut self,
4954 buffer: &ModelHandle<Buffer>,
4955 peer_id: PeerId,
4956 cx: &AppContext,
4957 ) -> proto::Buffer {
4958 let buffer_id = buffer.read(cx).remote_id();
4959 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4960 if shared_buffers.insert(buffer_id) {
4961 proto::Buffer {
4962 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4963 }
4964 } else {
4965 proto::Buffer {
4966 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4967 }
4968 }
4969 }
4970
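    /// Resolves a protobuf buffer into a live buffer handle: either waits until a
    /// buffer with the given id has been opened locally, or constructs the buffer
    /// from the transmitted state and registers it with the project.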
4971 fn deserialize_buffer(
4972 &mut self,
4973 buffer: proto::Buffer,
4974 cx: &mut ModelContext<Self>,
4975 ) -> Task<Result<ModelHandle<Buffer>>> {
4976 let replica_id = self.replica_id();
4977
4978 let opened_buffer_tx = self.opened_buffer.0.clone();
4979 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4980 cx.spawn(|this, mut cx| async move {
4981 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4982 proto::buffer::Variant::Id(id) => {
4983 let buffer = loop {
4984 let buffer = this.read_with(&cx, |this, cx| {
4985 this.opened_buffers
4986 .get(&id)
4987 .and_then(|buffer| buffer.upgrade(cx))
4988 });
4989 if let Some(buffer) = buffer {
4990 break buffer;
4991 }
4992 opened_buffer_rx
4993 .next()
4994 .await
4995 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4996 };
4997 Ok(buffer)
4998 }
4999 proto::buffer::Variant::State(mut buffer) => {
5000 let mut buffer_worktree = None;
5001 let mut buffer_file = None;
5002 if let Some(file) = buffer.file.take() {
5003 this.read_with(&cx, |this, cx| {
5004 let worktree_id = WorktreeId::from_proto(file.worktree_id);
5005 let worktree =
5006 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
5007 anyhow!("no worktree found for id {}", file.worktree_id)
5008 })?;
5009 buffer_file =
5010 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
5011 as Box<dyn language::File>);
5012 buffer_worktree = Some(worktree);
5013 Ok::<_, anyhow::Error>(())
5014 })?;
5015 }
5016
5017 let buffer = cx.add_model(|cx| {
5018 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
5019 });
5020
5021 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
5022
5023 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
5024 Ok(buffer)
5025 }
5026 }
5027 })
5028 }
5029
5030 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
5031 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
5032 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
5033 let start = serialized_symbol
5034 .start
5035 .ok_or_else(|| anyhow!("invalid start"))?;
5036 let end = serialized_symbol
5037 .end
5038 .ok_or_else(|| anyhow!("invalid end"))?;
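        // Assumption: `kind` round-trips through the integer produced by
        // `serialize_symbol`, which transmutes an `lsp::SymbolKind`, so
        // transmuting it back yields a valid value.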
5039 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
5040 let path = PathBuf::from(serialized_symbol.path);
5041 let language = self.languages.select_language(&path);
5042 Ok(Symbol {
5043 source_worktree_id,
5044 worktree_id,
5045 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
5046 label: language
5047 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
5048 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
5049 name: serialized_symbol.name,
5050 path,
5051 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
5052 kind,
5053 signature: serialized_symbol
5054 .signature
5055 .try_into()
5056 .map_err(|_| anyhow!("invalid signature"))?,
5057 })
5058 }
5059
5060 async fn handle_buffer_saved(
5061 this: ModelHandle<Self>,
5062 envelope: TypedEnvelope<proto::BufferSaved>,
5063 _: Arc<Client>,
5064 mut cx: AsyncAppContext,
5065 ) -> Result<()> {
5066 let version = deserialize_version(envelope.payload.version);
5067 let mtime = envelope
5068 .payload
5069 .mtime
5070 .ok_or_else(|| anyhow!("missing mtime"))?
5071 .into();
5072
5073 this.update(&mut cx, |this, cx| {
5074 let buffer = this
5075 .opened_buffers
5076 .get(&envelope.payload.buffer_id)
5077 .and_then(|buffer| buffer.upgrade(cx));
5078 if let Some(buffer) = buffer {
5079 buffer.update(cx, |buffer, cx| {
5080 buffer.did_save(version, mtime, None, cx);
5081 });
5082 }
5083 Ok(())
5084 })
5085 }
5086
5087 async fn handle_buffer_reloaded(
5088 this: ModelHandle<Self>,
5089 envelope: TypedEnvelope<proto::BufferReloaded>,
5090 _: Arc<Client>,
5091 mut cx: AsyncAppContext,
5092 ) -> Result<()> {
5093 let payload = envelope.payload.clone();
5094 let version = deserialize_version(payload.version);
5095 let mtime = payload
5096 .mtime
5097 .ok_or_else(|| anyhow!("missing mtime"))?
5098 .into();
5099 this.update(&mut cx, |this, cx| {
5100 let buffer = this
5101 .opened_buffers
5102 .get(&payload.buffer_id)
5103 .and_then(|buffer| buffer.upgrade(cx));
5104 if let Some(buffer) = buffer {
5105 buffer.update(cx, |buffer, cx| {
5106 buffer.did_reload(version, mtime, cx);
5107 });
5108 }
5109 Ok(())
5110 })
5111 }
5112
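    /// Fuzzy-matches `query` against the paths of all visible worktrees on the
    /// background executor, returning up to `max_results` matches.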
5113 pub fn match_paths<'a>(
5114 &self,
5115 query: &'a str,
5116 include_ignored: bool,
5117 smart_case: bool,
5118 max_results: usize,
5119 cancel_flag: &'a AtomicBool,
5120 cx: &AppContext,
5121 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5122 let worktrees = self
5123 .worktrees(cx)
5124 .filter(|worktree| worktree.read(cx).is_visible())
5125 .collect::<Vec<_>>();
5126 let include_root_name = worktrees.len() > 1;
5127 let candidate_sets = worktrees
5128 .into_iter()
5129 .map(|worktree| CandidateSet {
5130 snapshot: worktree.read(cx).snapshot(),
5131 include_ignored,
5132 include_root_name,
5133 })
5134 .collect::<Vec<_>>();
5135
5136 let background = cx.background().clone();
5137 async move {
5138 fuzzy::match_paths(
5139 candidate_sets.as_slice(),
5140 query,
5141 smart_case,
5142 max_results,
5143 cancel_flag,
5144 background,
5145 )
5146 .await
5147 }
5148 }
5149
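    /// Converts LSP text edits into anchor-ranged edits against the buffer snapshot
    /// that was sent to the server, coalescing adjacent edits and diffing multi-line
    /// replacements so that anchors in unchanged regions are preserved.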
5150 fn edits_from_lsp(
5151 &mut self,
5152 buffer: &ModelHandle<Buffer>,
5153 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5154 version: Option<i32>,
5155 cx: &mut ModelContext<Self>,
5156 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5157 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5158 cx.background().spawn(async move {
5159 let snapshot = snapshot?;
5160 let mut lsp_edits = lsp_edits
5161 .into_iter()
5162 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5163 .peekable();
5164
5165 let mut edits = Vec::new();
5166 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5167 // Combine any LSP edits that are adjacent.
5168 //
5169 // Also, combine LSP edits that are separated from each other by only
5170 // a newline. This is important because for some code actions,
5171                 // rust-analyzer rewrites the entire buffer via a series of edits that
5172 // are separated by unchanged newline characters.
5173 //
5174 // In order for the diffing logic below to work properly, any edits that
5175 // cancel each other out must be combined into one.
5176 while let Some((next_range, next_text)) = lsp_edits.peek() {
5177 if next_range.start > range.end {
5178 if next_range.start.row > range.end.row + 1
5179 || next_range.start.column > 0
5180 || snapshot.clip_point_utf16(
5181 PointUtf16::new(range.end.row, u32::MAX),
5182 Bias::Left,
5183 ) > range.end
5184 {
5185 break;
5186 }
5187 new_text.push('\n');
5188 }
5189 range.end = next_range.end;
5190 new_text.push_str(&next_text);
5191 lsp_edits.next();
5192 }
5193
5194 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5195 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5196 {
5197 return Err(anyhow!("invalid edits received from language server"));
5198 }
5199
5200 // For multiline edits, perform a diff of the old and new text so that
5201 // we can identify the changes more precisely, preserving the locations
5202 // of any anchors positioned in the unchanged regions.
5203 if range.end.row > range.start.row {
5204 let mut offset = range.start.to_offset(&snapshot);
5205 let old_text = snapshot.text_for_range(range).collect::<String>();
5206
5207 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5208 let mut moved_since_edit = true;
5209 for change in diff.iter_all_changes() {
5210 let tag = change.tag();
5211 let value = change.value();
5212 match tag {
5213 ChangeTag::Equal => {
5214 offset += value.len();
5215 moved_since_edit = true;
5216 }
5217 ChangeTag::Delete => {
5218 let start = snapshot.anchor_after(offset);
5219 let end = snapshot.anchor_before(offset + value.len());
5220 if moved_since_edit {
5221 edits.push((start..end, String::new()));
5222 } else {
5223 edits.last_mut().unwrap().0.end = end;
5224 }
5225 offset += value.len();
5226 moved_since_edit = false;
5227 }
5228 ChangeTag::Insert => {
5229 if moved_since_edit {
5230 let anchor = snapshot.anchor_after(offset);
5231 edits.push((anchor.clone()..anchor, value.to_string()));
5232 } else {
5233 edits.last_mut().unwrap().1.push_str(value);
5234 }
5235 moved_since_edit = false;
5236 }
5237 }
5238 }
5239 } else if range.end == range.start {
5240 let anchor = snapshot.anchor_after(range.start);
5241 edits.push((anchor.clone()..anchor, new_text));
5242 } else {
5243 let edit_start = snapshot.anchor_after(range.start);
5244 let edit_end = snapshot.anchor_before(range.end);
5245 edits.push((edit_start..edit_end, new_text));
5246 }
5247 }
5248
5249 Ok(edits)
5250 })
5251 }
5252
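    /// Returns the snapshot of the buffer corresponding to the given LSP document
    /// version, pruning snapshots that are more than `OLD_VERSIONS_TO_RETAIN`
    /// versions older than the requested one. With no version, the buffer's current
    /// text snapshot is returned.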
5253 fn buffer_snapshot_for_lsp_version(
5254 &mut self,
5255 buffer: &ModelHandle<Buffer>,
5256 version: Option<i32>,
5257 cx: &AppContext,
5258 ) -> Result<TextBufferSnapshot> {
5259 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5260
5261 if let Some(version) = version {
5262 let buffer_id = buffer.read(cx).remote_id();
5263 let snapshots = self
5264 .buffer_snapshots
5265 .get_mut(&buffer_id)
5266 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5267 let mut found_snapshot = None;
5268 snapshots.retain(|(snapshot_version, snapshot)| {
5269 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5270 false
5271 } else {
5272 if *snapshot_version == version {
5273 found_snapshot = Some(snapshot.clone());
5274 }
5275 true
5276 }
5277 });
5278
5279 found_snapshot.ok_or_else(|| {
5280 anyhow!(
5281 "snapshot not found for buffer {} at version {}",
5282 buffer_id,
5283 version
5284 )
5285 })
5286 } else {
5287             Ok(buffer.read(cx).text_snapshot())
5288 }
5289 }
5290
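    /// Returns the language server (and its adapter) that is running for the
    /// buffer's worktree and language, if any.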
5291 fn language_server_for_buffer(
5292 &self,
5293 buffer: &Buffer,
5294 cx: &AppContext,
5295 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5296 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5297 let worktree_id = file.worktree_id(cx);
5298 self.language_servers
5299 .get(&(worktree_id, language.lsp_adapter()?.name()))
5300 } else {
5301 None
5302 }
5303 }
5304}
5305
5306impl ProjectStore {
5307 pub fn new(db: Arc<Db>) -> Self {
5308 Self {
5309 db,
5310 projects: Default::default(),
5311 }
5312 }
5313
5314 pub fn projects<'a>(
5315 &'a self,
5316 cx: &'a AppContext,
5317 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5318 self.projects
5319 .iter()
5320 .filter_map(|project| project.upgrade(cx))
5321 }
5322
5323 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5324 if let Err(ix) = self
5325 .projects
5326 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5327 {
5328 self.projects.insert(ix, project);
5329 }
5330 cx.notify();
5331 }
5332
5333 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5334 let mut did_change = false;
5335 self.projects.retain(|project| {
5336 if project.is_upgradable(cx) {
5337 true
5338 } else {
5339 did_change = true;
5340 false
5341 }
5342 });
5343 if did_change {
5344 cx.notify();
5345 }
5346 }
5347}
5348
5349impl WorktreeHandle {
5350 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5351 match self {
5352 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5353 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5354 }
5355 }
5356}
5357
5358impl OpenBuffer {
5359 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5360 match self {
5361 OpenBuffer::Strong(handle) => Some(handle.clone()),
5362 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5363 OpenBuffer::Loading(_) => None,
5364 }
5365 }
5366}
5367
5368struct CandidateSet {
5369 snapshot: Snapshot,
5370 include_ignored: bool,
5371 include_root_name: bool,
5372}
5373
5374impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5375 type Candidates = CandidateSetIter<'a>;
5376
5377 fn id(&self) -> usize {
5378 self.snapshot.id().to_usize()
5379 }
5380
5381 fn len(&self) -> usize {
5382 if self.include_ignored {
5383 self.snapshot.file_count()
5384 } else {
5385 self.snapshot.visible_file_count()
5386 }
5387 }
5388
5389 fn prefix(&self) -> Arc<str> {
5390 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5391 self.snapshot.root_name().into()
5392 } else if self.include_root_name {
5393 format!("{}/", self.snapshot.root_name()).into()
5394 } else {
5395 "".into()
5396 }
5397 }
5398
5399 fn candidates(&'a self, start: usize) -> Self::Candidates {
5400 CandidateSetIter {
5401 traversal: self.snapshot.files(self.include_ignored, start),
5402 }
5403 }
5404}
5405
5406struct CandidateSetIter<'a> {
5407 traversal: Traversal<'a>,
5408}
5409
5410impl<'a> Iterator for CandidateSetIter<'a> {
5411 type Item = PathMatchCandidate<'a>;
5412
5413 fn next(&mut self) -> Option<Self::Item> {
5414 self.traversal.next().map(|entry| {
5415 if let EntryKind::File(char_bag) = entry.kind {
5416 PathMatchCandidate {
5417 path: &entry.path,
5418 char_bag,
5419 }
5420 } else {
5421 unreachable!()
5422 }
5423 })
5424 }
5425}
5426
5427impl Entity for ProjectStore {
5428 type Event = ();
5429}
5430
5431impl Entity for Project {
5432 type Event = Event;
5433
5434 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5435 self.project_store.update(cx, ProjectStore::prune_projects);
5436
5437 match &self.client_state {
5438 ProjectClientState::Local { remote_id_rx, .. } => {
5439 if let Some(project_id) = *remote_id_rx.borrow() {
5440 self.client
5441 .send(proto::UnregisterProject { project_id })
5442 .log_err();
5443 }
5444 }
5445 ProjectClientState::Remote { remote_id, .. } => {
5446 self.client
5447 .send(proto::LeaveProject {
5448 project_id: *remote_id,
5449 })
5450 .log_err();
5451 }
5452 }
5453 }
5454
5455 fn app_will_quit(
5456 &mut self,
5457 _: &mut MutableAppContext,
5458 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5459 let shutdown_futures = self
5460 .language_servers
5461 .drain()
5462 .filter_map(|(_, (_, server))| server.shutdown())
5463 .collect::<Vec<_>>();
5464 Some(
5465 async move {
5466 futures::future::join_all(shutdown_futures).await;
5467 }
5468 .boxed(),
5469 )
5470 }
5471}
5472
5473impl Collaborator {
5474 fn from_proto(
5475 message: proto::Collaborator,
5476 user_store: &ModelHandle<UserStore>,
5477 cx: &mut AsyncAppContext,
5478 ) -> impl Future<Output = Result<Self>> {
5479 let user = user_store.update(cx, |user_store, cx| {
5480 user_store.fetch_user(message.user_id, cx)
5481 });
5482
5483 async move {
5484 Ok(Self {
5485 peer_id: PeerId(message.peer_id),
5486 user: user.await?,
5487 replica_id: message.replica_id as ReplicaId,
5488 })
5489 }
5490 }
5491}
5492
5493impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5494 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5495 Self {
5496 worktree_id,
5497 path: path.as_ref().into(),
5498 }
5499 }
5500}
5501
5502impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5503 fn from(options: lsp::CreateFileOptions) -> Self {
5504 Self {
5505 overwrite: options.overwrite.unwrap_or(false),
5506 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5507 }
5508 }
5509}
5510
5511impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5512 fn from(options: lsp::RenameFileOptions) -> Self {
5513 Self {
5514 overwrite: options.overwrite.unwrap_or(false),
5515 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5516 }
5517 }
5518}
5519
5520impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5521 fn from(options: lsp::DeleteFileOptions) -> Self {
5522 Self {
5523 recursive: options.recursive.unwrap_or(false),
5524 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5525 }
5526 }
5527}
5528
5529fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5530 proto::Symbol {
5531 source_worktree_id: symbol.source_worktree_id.to_proto(),
5532 worktree_id: symbol.worktree_id.to_proto(),
5533 language_server_name: symbol.language_server_name.0.to_string(),
5534 name: symbol.name.clone(),
5535 kind: unsafe { mem::transmute(symbol.kind) },
5536 path: symbol.path.to_string_lossy().to_string(),
5537 start: Some(proto::Point {
5538 row: symbol.range.start.row,
5539 column: symbol.range.start.column,
5540 }),
5541 end: Some(proto::Point {
5542 row: symbol.range.end.row,
5543 column: symbol.range.end.column,
5544 }),
5545 signature: symbol.signature.to_vec(),
5546 }
5547}
5548
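/// Computes the path of `path` relative to `base` purely lexically, inserting `..`
/// components where the two paths diverge.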
5549fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5550 let mut path_components = path.components();
5551 let mut base_components = base.components();
5552 let mut components: Vec<Component> = Vec::new();
5553 loop {
5554 match (path_components.next(), base_components.next()) {
5555 (None, None) => break,
5556 (Some(a), None) => {
5557 components.push(a);
5558 components.extend(path_components.by_ref());
5559 break;
5560 }
5561 (None, _) => components.push(Component::ParentDir),
5562 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5563 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5564 (Some(a), Some(_)) => {
5565 components.push(Component::ParentDir);
5566 for _ in base_components {
5567 components.push(Component::ParentDir);
5568 }
5569 components.push(a);
5570 components.extend(path_components.by_ref());
5571 break;
5572 }
5573 }
5574 }
5575 components.iter().map(|c| c.as_os_str()).collect()
5576}
5577
5578impl Item for Buffer {
5579 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5580 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5581 }
5582}
5583
5584#[cfg(test)]
5585mod tests {
5586 use crate::worktree::WorktreeHandle;
5587
5588 use super::{Event, *};
5589 use fs::RealFs;
5590 use futures::{future, StreamExt};
5591 use gpui::test::subscribe;
5592 use language::{
5593 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5594 OffsetRangeExt, Point, ToPoint,
5595 };
5596 use lsp::Url;
5597 use serde_json::json;
5598 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5599 use unindent::Unindent as _;
5600 use util::{assert_set_eq, test::temp_tree};
5601
5602 #[gpui::test]
5603 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5604 let dir = temp_tree(json!({
5605 "root": {
5606 "apple": "",
5607 "banana": {
5608 "carrot": {
5609 "date": "",
5610 "endive": "",
5611 }
5612 },
5613 "fennel": {
5614 "grape": "",
5615 }
5616 }
5617 }));
5618
5619 let root_link_path = dir.path().join("root_link");
5620 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5621 unix::fs::symlink(
5622 &dir.path().join("root/fennel"),
5623 &dir.path().join("root/finnochio"),
5624 )
5625 .unwrap();
5626
5627 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5628
5629 project.read_with(cx, |project, cx| {
5630 let tree = project.worktrees(cx).next().unwrap().read(cx);
5631 assert_eq!(tree.file_count(), 5);
5632 assert_eq!(
5633 tree.inode_for_path("fennel/grape"),
5634 tree.inode_for_path("finnochio/grape")
5635 );
5636 });
5637
5638 let cancel_flag = Default::default();
5639 let results = project
5640 .read_with(cx, |project, cx| {
5641 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5642 })
5643 .await;
5644 assert_eq!(
5645 results
5646 .into_iter()
5647 .map(|result| result.path)
5648 .collect::<Vec<Arc<Path>>>(),
5649 vec![
5650 PathBuf::from("banana/carrot/date").into(),
5651 PathBuf::from("banana/carrot/endive").into(),
5652 ]
5653 );
5654 }
5655
5656 #[gpui::test]
5657 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5658 cx.foreground().forbid_parking();
5659
5660 let mut rust_language = Language::new(
5661 LanguageConfig {
5662 name: "Rust".into(),
5663 path_suffixes: vec!["rs".to_string()],
5664 ..Default::default()
5665 },
5666 Some(tree_sitter_rust::language()),
5667 );
5668 let mut json_language = Language::new(
5669 LanguageConfig {
5670 name: "JSON".into(),
5671 path_suffixes: vec!["json".to_string()],
5672 ..Default::default()
5673 },
5674 None,
5675 );
5676 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5677 name: "the-rust-language-server",
5678 capabilities: lsp::ServerCapabilities {
5679 completion_provider: Some(lsp::CompletionOptions {
5680 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5681 ..Default::default()
5682 }),
5683 ..Default::default()
5684 },
5685 ..Default::default()
5686 });
5687 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5688 name: "the-json-language-server",
5689 capabilities: lsp::ServerCapabilities {
5690 completion_provider: Some(lsp::CompletionOptions {
5691 trigger_characters: Some(vec![":".to_string()]),
5692 ..Default::default()
5693 }),
5694 ..Default::default()
5695 },
5696 ..Default::default()
5697 });
5698
5699 let fs = FakeFs::new(cx.background());
5700 fs.insert_tree(
5701 "/the-root",
5702 json!({
5703 "test.rs": "const A: i32 = 1;",
5704 "test2.rs": "",
5705 "Cargo.toml": "a = 1",
5706 "package.json": "{\"a\": 1}",
5707 }),
5708 )
5709 .await;
5710
5711 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5712 project.update(cx, |project, _| {
5713 project.languages.add(Arc::new(rust_language));
5714 project.languages.add(Arc::new(json_language));
5715 });
5716
5717 // Open a buffer without an associated language server.
5718 let toml_buffer = project
5719 .update(cx, |project, cx| {
5720 project.open_local_buffer("/the-root/Cargo.toml", cx)
5721 })
5722 .await
5723 .unwrap();
5724
5725 // Open a buffer with an associated language server.
5726 let rust_buffer = project
5727 .update(cx, |project, cx| {
5728 project.open_local_buffer("/the-root/test.rs", cx)
5729 })
5730 .await
5731 .unwrap();
5732
        // A Rust language server is started up, and it is notified about the open Rust file.
5734 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5735 assert_eq!(
5736 fake_rust_server
5737 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5738 .await
5739 .text_document,
5740 lsp::TextDocumentItem {
5741 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5742 version: 0,
5743 text: "const A: i32 = 1;".to_string(),
5744 language_id: Default::default()
5745 }
5746 );
5747
5748 // The buffer is configured based on the language server's capabilities.
5749 rust_buffer.read_with(cx, |buffer, _| {
5750 assert_eq!(
5751 buffer.completion_triggers(),
5752 &[".".to_string(), "::".to_string()]
5753 );
5754 });
5755 toml_buffer.read_with(cx, |buffer, _| {
5756 assert!(buffer.completion_triggers().is_empty());
5757 });
5758
5759 // Edit a buffer. The changes are reported to the language server.
5760 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5761 assert_eq!(
5762 fake_rust_server
5763 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5764 .await
5765 .text_document,
5766 lsp::VersionedTextDocumentIdentifier::new(
5767 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5768 1
5769 )
5770 );
5771
5772 // Open a third buffer with a different associated language server.
5773 let json_buffer = project
5774 .update(cx, |project, cx| {
5775 project.open_local_buffer("/the-root/package.json", cx)
5776 })
5777 .await
5778 .unwrap();
5779
        // A JSON language server is started up and is only notified about the JSON buffer.
5781 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5782 assert_eq!(
5783 fake_json_server
5784 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5785 .await
5786 .text_document,
5787 lsp::TextDocumentItem {
5788 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5789 version: 0,
5790 text: "{\"a\": 1}".to_string(),
5791 language_id: Default::default()
5792 }
5793 );
5794
5795 // This buffer is configured based on the second language server's
5796 // capabilities.
5797 json_buffer.read_with(cx, |buffer, _| {
5798 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5799 });
5800
5801 // When opening another buffer whose language server is already running,
5802 // it is also configured based on the existing language server's capabilities.
5803 let rust_buffer2 = project
5804 .update(cx, |project, cx| {
5805 project.open_local_buffer("/the-root/test2.rs", cx)
5806 })
5807 .await
5808 .unwrap();
5809 rust_buffer2.read_with(cx, |buffer, _| {
5810 assert_eq!(
5811 buffer.completion_triggers(),
5812 &[".".to_string(), "::".to_string()]
5813 );
5814 });
5815
5816 // Changes are reported only to servers matching the buffer's language.
5817 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5818 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5819 assert_eq!(
5820 fake_rust_server
5821 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5822 .await
5823 .text_document,
5824 lsp::VersionedTextDocumentIdentifier::new(
5825 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5826 1
5827 )
5828 );
5829
5830 // Save notifications are reported to all servers.
5831 toml_buffer
5832 .update(cx, |buffer, cx| buffer.save(cx))
5833 .await
5834 .unwrap();
5835 assert_eq!(
5836 fake_rust_server
5837 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5838 .await
5839 .text_document,
5840 lsp::TextDocumentIdentifier::new(
5841 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5842 )
5843 );
5844 assert_eq!(
5845 fake_json_server
5846 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5847 .await
5848 .text_document,
5849 lsp::TextDocumentIdentifier::new(
5850 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5851 )
5852 );
5853
5854 // Renames are reported only to servers matching the buffer's language.
5855 fs.rename(
5856 Path::new("/the-root/test2.rs"),
5857 Path::new("/the-root/test3.rs"),
5858 Default::default(),
5859 )
5860 .await
5861 .unwrap();
5862 assert_eq!(
5863 fake_rust_server
5864 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5865 .await
5866 .text_document,
5867 lsp::TextDocumentIdentifier::new(
5868 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5869 ),
5870 );
5871 assert_eq!(
5872 fake_rust_server
5873 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5874 .await
5875 .text_document,
5876 lsp::TextDocumentItem {
5877 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5878 version: 0,
5879 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5880 language_id: Default::default()
5881 },
5882 );
5883
5884 rust_buffer2.update(cx, |buffer, cx| {
5885 buffer.update_diagnostics(
5886 DiagnosticSet::from_sorted_entries(
5887 vec![DiagnosticEntry {
5888 diagnostic: Default::default(),
5889 range: Anchor::MIN..Anchor::MAX,
5890 }],
5891 &buffer.snapshot(),
5892 ),
5893 cx,
5894 );
5895 assert_eq!(
5896 buffer
5897 .snapshot()
5898 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5899 .count(),
5900 1
5901 );
5902 });
5903
5904 // When the rename changes the extension of the file, the buffer gets closed on the old
5905 // language server and gets opened on the new one.
5906 fs.rename(
5907 Path::new("/the-root/test3.rs"),
5908 Path::new("/the-root/test3.json"),
5909 Default::default(),
5910 )
5911 .await
5912 .unwrap();
5913 assert_eq!(
5914 fake_rust_server
5915 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5916 .await
5917 .text_document,
5918 lsp::TextDocumentIdentifier::new(
5919 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5920 ),
5921 );
5922 assert_eq!(
5923 fake_json_server
5924 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5925 .await
5926 .text_document,
5927 lsp::TextDocumentItem {
5928 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5929 version: 0,
5930 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5931 language_id: Default::default()
5932 },
5933 );
5934
5935 // We clear the diagnostics, since the language has changed.
5936 rust_buffer2.read_with(cx, |buffer, _| {
5937 assert_eq!(
5938 buffer
5939 .snapshot()
5940 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5941 .count(),
5942 0
5943 );
5944 });
5945
        // The renamed file's version resets after changing language servers.
5947 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5948 assert_eq!(
5949 fake_json_server
5950 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5951 .await
5952 .text_document,
5953 lsp::VersionedTextDocumentIdentifier::new(
5954 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5955 1
5956 )
5957 );
5958
5959 // Restart language servers
5960 project.update(cx, |project, cx| {
5961 project.restart_language_servers_for_buffers(
5962 vec![rust_buffer.clone(), json_buffer.clone()],
5963 cx,
5964 );
5965 });
5966
5967 let mut rust_shutdown_requests = fake_rust_server
5968 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5969 let mut json_shutdown_requests = fake_json_server
5970 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5971 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5972
5973 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5974 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5975
        // Ensure the Rust document is reopened in the new Rust language server.
5977 assert_eq!(
5978 fake_rust_server
5979 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5980 .await
5981 .text_document,
5982 lsp::TextDocumentItem {
5983 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5984 version: 1,
5985 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5986 language_id: Default::default()
5987 }
5988 );
5989
        // Ensure the JSON documents are reopened in the new JSON language server.
5991 assert_set_eq!(
5992 [
5993 fake_json_server
5994 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5995 .await
5996 .text_document,
5997 fake_json_server
5998 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5999 .await
6000 .text_document,
6001 ],
6002 [
6003 lsp::TextDocumentItem {
6004 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6005 version: 0,
6006 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
6007 language_id: Default::default()
6008 },
6009 lsp::TextDocumentItem {
6010 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
6011 version: 1,
6012 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
6013 language_id: Default::default()
6014 }
6015 ]
6016 );
6017
6018 // Close notifications are reported only to servers matching the buffer's language.
6019 cx.update(|_| drop(json_buffer));
6020 let close_message = lsp::DidCloseTextDocumentParams {
6021 text_document: lsp::TextDocumentIdentifier::new(
6022 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
6023 ),
6024 };
6025 assert_eq!(
6026 fake_json_server
6027 .receive_notification::<lsp::notification::DidCloseTextDocument>()
6028 .await,
6029 close_message,
6030 );
6031 }
6032
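    // Diagnostics published for absolute paths should reach the right buffer
    // when each file is opened as its own single-file worktree.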
6033 #[gpui::test]
6034 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
6035 cx.foreground().forbid_parking();
6036
6037 let fs = FakeFs::new(cx.background());
6038 fs.insert_tree(
6039 "/dir",
6040 json!({
6041 "a.rs": "let a = 1;",
6042 "b.rs": "let b = 2;"
6043 }),
6044 )
6045 .await;
6046
6047 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
6048
6049 let buffer_a = project
6050 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6051 .await
6052 .unwrap();
6053 let buffer_b = project
6054 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6055 .await
6056 .unwrap();
6057
6058 project.update(cx, |project, cx| {
6059 project
6060 .update_diagnostics(
6061 lsp::PublishDiagnosticsParams {
6062 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6063 version: None,
6064 diagnostics: vec![lsp::Diagnostic {
6065 range: lsp::Range::new(
6066 lsp::Position::new(0, 4),
6067 lsp::Position::new(0, 5),
6068 ),
6069 severity: Some(lsp::DiagnosticSeverity::ERROR),
6070 message: "error 1".to_string(),
6071 ..Default::default()
6072 }],
6073 },
6074 &[],
6075 cx,
6076 )
6077 .unwrap();
6078 project
6079 .update_diagnostics(
6080 lsp::PublishDiagnosticsParams {
6081 uri: Url::from_file_path("/dir/b.rs").unwrap(),
6082 version: None,
6083 diagnostics: vec![lsp::Diagnostic {
6084 range: lsp::Range::new(
6085 lsp::Position::new(0, 4),
6086 lsp::Position::new(0, 5),
6087 ),
6088 severity: Some(lsp::DiagnosticSeverity::WARNING),
6089 message: "error 2".to_string(),
6090 ..Default::default()
6091 }],
6092 },
6093 &[],
6094 cx,
6095 )
6096 .unwrap();
6097 });
6098
6099 buffer_a.read_with(cx, |buffer, _| {
6100 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6101 assert_eq!(
6102 chunks
6103 .iter()
6104 .map(|(s, d)| (s.as_str(), *d))
6105 .collect::<Vec<_>>(),
6106 &[
6107 ("let ", None),
6108 ("a", Some(DiagnosticSeverity::ERROR)),
6109 (" = 1;", None),
6110 ]
6111 );
6112 });
6113 buffer_b.read_with(cx, |buffer, _| {
6114 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6115 assert_eq!(
6116 chunks
6117 .iter()
6118 .map(|(s, d)| (s.as_str(), *d))
6119 .collect::<Vec<_>>(),
6120 &[
6121 ("let ", None),
6122 ("b", Some(DiagnosticSeverity::WARNING)),
6123 (" = 2;", None),
6124 ]
6125 );
6126 });
6127 }
6128
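    // Overlapping progress notifications that use the disk-based diagnostics
    // token should collapse into one started/updated/finished event cycle, and
    // re-publishing unchanged empty diagnostics should not emit extra events.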
6129 #[gpui::test]
6130 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6131 cx.foreground().forbid_parking();
6132
6133 let progress_token = "the-progress-token";
6134 let mut language = Language::new(
6135 LanguageConfig {
6136 name: "Rust".into(),
6137 path_suffixes: vec!["rs".to_string()],
6138 ..Default::default()
6139 },
6140 Some(tree_sitter_rust::language()),
6141 );
6142 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6143 disk_based_diagnostics_progress_token: Some(progress_token),
6144 disk_based_diagnostics_sources: &["disk"],
6145 ..Default::default()
6146 });
6147
6148 let fs = FakeFs::new(cx.background());
6149 fs.insert_tree(
6150 "/dir",
6151 json!({
6152 "a.rs": "fn a() { A }",
6153 "b.rs": "const y: i32 = 1",
6154 }),
6155 )
6156 .await;
6157
6158 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6159 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6160 let worktree_id =
6161 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6162
        // Cause the worktree to start the fake language server.
6164 let _buffer = project
6165 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6166 .await
6167 .unwrap();
6168
6169 let mut events = subscribe(&project, cx);
6170
6171 let mut fake_server = fake_servers.next().await.unwrap();
6172 fake_server.start_progress(progress_token).await;
6173 assert_eq!(
6174 events.next().await.unwrap(),
6175 Event::DiskBasedDiagnosticsStarted
6176 );
6177
6178 fake_server.start_progress(progress_token).await;
6179 fake_server.end_progress(progress_token).await;
6180 fake_server.start_progress(progress_token).await;
6181
6182 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6183 lsp::PublishDiagnosticsParams {
6184 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6185 version: None,
6186 diagnostics: vec![lsp::Diagnostic {
6187 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6188 severity: Some(lsp::DiagnosticSeverity::ERROR),
6189 message: "undefined variable 'A'".to_string(),
6190 ..Default::default()
6191 }],
6192 },
6193 );
6194 assert_eq!(
6195 events.next().await.unwrap(),
6196 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6197 );
6198
6199 fake_server.end_progress(progress_token).await;
6200 fake_server.end_progress(progress_token).await;
6201 assert_eq!(
6202 events.next().await.unwrap(),
6203 Event::DiskBasedDiagnosticsUpdated
6204 );
6205 assert_eq!(
6206 events.next().await.unwrap(),
6207 Event::DiskBasedDiagnosticsFinished
6208 );
6209
6210 let buffer = project
6211 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6212 .await
6213 .unwrap();
6214
6215 buffer.read_with(cx, |buffer, _| {
6216 let snapshot = buffer.snapshot();
6217 let diagnostics = snapshot
6218 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6219 .collect::<Vec<_>>();
6220 assert_eq!(
6221 diagnostics,
6222 &[DiagnosticEntry {
6223 range: Point::new(0, 9)..Point::new(0, 10),
6224 diagnostic: Diagnostic {
6225 severity: lsp::DiagnosticSeverity::ERROR,
6226 message: "undefined variable 'A'".to_string(),
6227 group_id: 0,
6228 is_primary: true,
6229 ..Default::default()
6230 }
6231 }]
6232 )
6233 });
6234
6235 // Ensure publishing empty diagnostics twice only results in one update event.
6236 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6237 lsp::PublishDiagnosticsParams {
6238 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6239 version: None,
6240 diagnostics: Default::default(),
6241 },
6242 );
6243 assert_eq!(
6244 events.next().await.unwrap(),
6245 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6246 );
6247
6248 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6249 lsp::PublishDiagnosticsParams {
6250 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6251 version: None,
6252 diagnostics: Default::default(),
6253 },
6254 );
6255 cx.foreground().run_until_parked();
6256 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6257 }
6258
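    // Restarting a language server while its disk-based diagnostics are still
    // in progress should not leave the project reporting diagnostics as
    // running once the new server finishes.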
6259 #[gpui::test]
6260 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6261 cx.foreground().forbid_parking();
6262
6263 let progress_token = "the-progress-token";
6264 let mut language = Language::new(
6265 LanguageConfig {
6266 path_suffixes: vec!["rs".to_string()],
6267 ..Default::default()
6268 },
6269 None,
6270 );
6271 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6272 disk_based_diagnostics_sources: &["disk"],
6273 disk_based_diagnostics_progress_token: Some(progress_token),
6274 ..Default::default()
6275 });
6276
6277 let fs = FakeFs::new(cx.background());
6278 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6279
6280 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6281 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6282
6283 let buffer = project
6284 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6285 .await
6286 .unwrap();
6287
6288 // Simulate diagnostics starting to update.
6289 let mut fake_server = fake_servers.next().await.unwrap();
6290 fake_server.start_progress(progress_token).await;
6291
6292 // Restart the server before the diagnostics finish updating.
6293 project.update(cx, |project, cx| {
6294 project.restart_language_servers_for_buffers([buffer], cx);
6295 });
6296 let mut events = subscribe(&project, cx);
6297
6298 // Simulate the newly started server sending more diagnostics.
6299 let mut fake_server = fake_servers.next().await.unwrap();
6300 fake_server.start_progress(progress_token).await;
6301 assert_eq!(
6302 events.next().await.unwrap(),
6303 Event::DiskBasedDiagnosticsStarted
6304 );
6305
6306 // All diagnostics are considered done, despite the old server's diagnostic
6307 // task never completing.
6308 fake_server.end_progress(progress_token).await;
6309 assert_eq!(
6310 events.next().await.unwrap(),
6311 Event::DiskBasedDiagnosticsUpdated
6312 );
6313 assert_eq!(
6314 events.next().await.unwrap(),
6315 Event::DiskBasedDiagnosticsFinished
6316 );
6317 project.read_with(cx, |project, _| {
6318 assert!(!project.is_running_disk_based_diagnostics());
6319 });
6320 }
6321
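    // Diagnostics published against an older buffer version should be
    // translated through later edits, and overlapping or out-of-order ranges
    // should still be grouped and highlighted correctly.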
6322 #[gpui::test]
6323 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6324 cx.foreground().forbid_parking();
6325
6326 let mut language = Language::new(
6327 LanguageConfig {
6328 name: "Rust".into(),
6329 path_suffixes: vec!["rs".to_string()],
6330 ..Default::default()
6331 },
6332 Some(tree_sitter_rust::language()),
6333 );
6334 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6335 disk_based_diagnostics_sources: &["disk"],
6336 ..Default::default()
6337 });
6338
6339 let text = "
6340 fn a() { A }
6341 fn b() { BB }
6342 fn c() { CCC }
6343 "
6344 .unindent();
6345
6346 let fs = FakeFs::new(cx.background());
6347 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6348
6349 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6350 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6351
6352 let buffer = project
6353 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6354 .await
6355 .unwrap();
6356
6357 let mut fake_server = fake_servers.next().await.unwrap();
6358 let open_notification = fake_server
6359 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6360 .await;
6361
6362 // Edit the buffer, moving the content down
6363 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6364 let change_notification_1 = fake_server
6365 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6366 .await;
6367 assert!(
6368 change_notification_1.text_document.version > open_notification.text_document.version
6369 );
6370
6371 // Report some diagnostics for the initial version of the buffer
6372 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6373 lsp::PublishDiagnosticsParams {
6374 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6375 version: Some(open_notification.text_document.version),
6376 diagnostics: vec![
6377 lsp::Diagnostic {
6378 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6379 severity: Some(DiagnosticSeverity::ERROR),
6380 message: "undefined variable 'A'".to_string(),
6381 source: Some("disk".to_string()),
6382 ..Default::default()
6383 },
6384 lsp::Diagnostic {
6385 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6386 severity: Some(DiagnosticSeverity::ERROR),
6387 message: "undefined variable 'BB'".to_string(),
6388 source: Some("disk".to_string()),
6389 ..Default::default()
6390 },
6391 lsp::Diagnostic {
6392 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6393 severity: Some(DiagnosticSeverity::ERROR),
6394 source: Some("disk".to_string()),
6395 message: "undefined variable 'CCC'".to_string(),
6396 ..Default::default()
6397 },
6398 ],
6399 },
6400 );
6401
6402 // The diagnostics have moved down since they were created.
6403 buffer.next_notification(cx).await;
6404 buffer.read_with(cx, |buffer, _| {
6405 assert_eq!(
6406 buffer
6407 .snapshot()
6408 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6409 .collect::<Vec<_>>(),
6410 &[
6411 DiagnosticEntry {
6412 range: Point::new(3, 9)..Point::new(3, 11),
6413 diagnostic: Diagnostic {
6414 severity: DiagnosticSeverity::ERROR,
6415 message: "undefined variable 'BB'".to_string(),
6416 is_disk_based: true,
6417 group_id: 1,
6418 is_primary: true,
6419 ..Default::default()
6420 },
6421 },
6422 DiagnosticEntry {
6423 range: Point::new(4, 9)..Point::new(4, 12),
6424 diagnostic: Diagnostic {
6425 severity: DiagnosticSeverity::ERROR,
6426 message: "undefined variable 'CCC'".to_string(),
6427 is_disk_based: true,
6428 group_id: 2,
6429 is_primary: true,
6430 ..Default::default()
6431 }
6432 }
6433 ]
6434 );
6435 assert_eq!(
6436 chunks_with_diagnostics(buffer, 0..buffer.len()),
6437 [
6438 ("\n\nfn a() { ".to_string(), None),
6439 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6440 (" }\nfn b() { ".to_string(), None),
6441 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6442 (" }\nfn c() { ".to_string(), None),
6443 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6444 (" }\n".to_string(), None),
6445 ]
6446 );
6447 assert_eq!(
6448 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6449 [
6450 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6451 (" }\nfn c() { ".to_string(), None),
6452 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6453 ]
6454 );
6455 });
6456
6457 // Ensure overlapping diagnostics are highlighted correctly.
6458 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6459 lsp::PublishDiagnosticsParams {
6460 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6461 version: Some(open_notification.text_document.version),
6462 diagnostics: vec![
6463 lsp::Diagnostic {
6464 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6465 severity: Some(DiagnosticSeverity::ERROR),
6466 message: "undefined variable 'A'".to_string(),
6467 source: Some("disk".to_string()),
6468 ..Default::default()
6469 },
6470 lsp::Diagnostic {
6471 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6472 severity: Some(DiagnosticSeverity::WARNING),
6473 message: "unreachable statement".to_string(),
6474 source: Some("disk".to_string()),
6475 ..Default::default()
6476 },
6477 ],
6478 },
6479 );
6480
6481 buffer.next_notification(cx).await;
6482 buffer.read_with(cx, |buffer, _| {
6483 assert_eq!(
6484 buffer
6485 .snapshot()
6486 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6487 .collect::<Vec<_>>(),
6488 &[
6489 DiagnosticEntry {
6490 range: Point::new(2, 9)..Point::new(2, 12),
6491 diagnostic: Diagnostic {
6492 severity: DiagnosticSeverity::WARNING,
6493 message: "unreachable statement".to_string(),
6494 is_disk_based: true,
6495 group_id: 4,
6496 is_primary: true,
6497 ..Default::default()
6498 }
6499 },
6500 DiagnosticEntry {
6501 range: Point::new(2, 9)..Point::new(2, 10),
6502 diagnostic: Diagnostic {
6503 severity: DiagnosticSeverity::ERROR,
6504 message: "undefined variable 'A'".to_string(),
6505 is_disk_based: true,
6506 group_id: 3,
6507 is_primary: true,
6508 ..Default::default()
6509 },
6510 }
6511 ]
6512 );
6513 assert_eq!(
6514 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6515 [
6516 ("fn a() { ".to_string(), None),
6517 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6518 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6519 ("\n".to_string(), None),
6520 ]
6521 );
6522 assert_eq!(
6523 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6524 [
6525 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6526 ("\n".to_string(), None),
6527 ]
6528 );
6529 });
6530
6531 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6532 // changes since the last save.
6533 buffer.update(cx, |buffer, cx| {
6534 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6535 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6536 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6537 });
6538 let change_notification_2 = fake_server
6539 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6540 .await;
6541 assert!(
6542 change_notification_2.text_document.version
6543 > change_notification_1.text_document.version
6544 );
6545
6546 // Handle out-of-order diagnostics
6547 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6548 lsp::PublishDiagnosticsParams {
6549 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6550 version: Some(change_notification_2.text_document.version),
6551 diagnostics: vec![
6552 lsp::Diagnostic {
6553 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6554 severity: Some(DiagnosticSeverity::ERROR),
6555 message: "undefined variable 'BB'".to_string(),
6556 source: Some("disk".to_string()),
6557 ..Default::default()
6558 },
6559 lsp::Diagnostic {
6560 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6561 severity: Some(DiagnosticSeverity::WARNING),
6562 message: "undefined variable 'A'".to_string(),
6563 source: Some("disk".to_string()),
6564 ..Default::default()
6565 },
6566 ],
6567 },
6568 );
6569
6570 buffer.next_notification(cx).await;
6571 buffer.read_with(cx, |buffer, _| {
6572 assert_eq!(
6573 buffer
6574 .snapshot()
6575 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6576 .collect::<Vec<_>>(),
6577 &[
6578 DiagnosticEntry {
6579 range: Point::new(2, 21)..Point::new(2, 22),
6580 diagnostic: Diagnostic {
6581 severity: DiagnosticSeverity::WARNING,
6582 message: "undefined variable 'A'".to_string(),
6583 is_disk_based: true,
6584 group_id: 6,
6585 is_primary: true,
6586 ..Default::default()
6587 }
6588 },
6589 DiagnosticEntry {
6590 range: Point::new(3, 9)..Point::new(3, 14),
6591 diagnostic: Diagnostic {
6592 severity: DiagnosticSeverity::ERROR,
6593 message: "undefined variable 'BB'".to_string(),
6594 is_disk_based: true,
6595 group_id: 5,
6596 is_primary: true,
6597 ..Default::default()
6598 },
6599 }
6600 ]
6601 );
6602 });
6603 }
6604
6605 #[gpui::test]
6606 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6607 cx.foreground().forbid_parking();
6608
6609 let text = concat!(
6610 "let one = ;\n", //
6611 "let two = \n",
6612 "let three = 3;\n",
6613 );
6614
6615 let fs = FakeFs::new(cx.background());
6616 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6617
6618 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6619 let buffer = project
6620 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6621 .await
6622 .unwrap();
6623
6624 project.update(cx, |project, cx| {
6625 project
6626 .update_buffer_diagnostics(
6627 &buffer,
6628 vec![
6629 DiagnosticEntry {
6630 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6631 diagnostic: Diagnostic {
6632 severity: DiagnosticSeverity::ERROR,
6633 message: "syntax error 1".to_string(),
6634 ..Default::default()
6635 },
6636 },
6637 DiagnosticEntry {
6638 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6639 diagnostic: Diagnostic {
6640 severity: DiagnosticSeverity::ERROR,
6641 message: "syntax error 2".to_string(),
6642 ..Default::default()
6643 },
6644 },
6645 ],
6646 None,
6647 cx,
6648 )
6649 .unwrap();
6650 });
6651
6652 // An empty range is extended forward to include the following character.
6653 // At the end of a line, an empty range is extended backward to include
6654 // the preceding character.
6655 buffer.read_with(cx, |buffer, _| {
6656 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6657 assert_eq!(
6658 chunks
6659 .iter()
6660 .map(|(s, d)| (s.as_str(), *d))
6661 .collect::<Vec<_>>(),
6662 &[
6663 ("let one = ", None),
6664 (";", Some(DiagnosticSeverity::ERROR)),
6665 ("\nlet two =", None),
6666 (" ", Some(DiagnosticSeverity::ERROR)),
6667 ("\nlet three = 3;\n", None)
6668 ]
6669 );
6670 });
6671 }
6672
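    // Edits computed by the language server against a stale document version
    // should be interpreted relative to that version and mapped through the
    // user's subsequent edits.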
6673 #[gpui::test]
6674 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6675 cx.foreground().forbid_parking();
6676
6677 let mut language = Language::new(
6678 LanguageConfig {
6679 name: "Rust".into(),
6680 path_suffixes: vec!["rs".to_string()],
6681 ..Default::default()
6682 },
6683 Some(tree_sitter_rust::language()),
6684 );
6685 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6686
6687 let text = "
6688 fn a() {
6689 f1();
6690 }
6691 fn b() {
6692 f2();
6693 }
6694 fn c() {
6695 f3();
6696 }
6697 "
6698 .unindent();
6699
6700 let fs = FakeFs::new(cx.background());
6701 fs.insert_tree(
6702 "/dir",
6703 json!({
6704 "a.rs": text.clone(),
6705 }),
6706 )
6707 .await;
6708
6709 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6710 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6711 let buffer = project
6712 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6713 .await
6714 .unwrap();
6715
6716 let mut fake_server = fake_servers.next().await.unwrap();
6717 let lsp_document_version = fake_server
6718 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6719 .await
6720 .text_document
6721 .version;
6722
6723 // Simulate editing the buffer after the language server computes some edits.
6724 buffer.update(cx, |buffer, cx| {
6725 buffer.edit(
6726 [(
6727 Point::new(0, 0)..Point::new(0, 0),
6728 "// above first function\n",
6729 )],
6730 cx,
6731 );
6732 buffer.edit(
6733 [(
6734 Point::new(2, 0)..Point::new(2, 0),
6735 " // inside first function\n",
6736 )],
6737 cx,
6738 );
6739 buffer.edit(
6740 [(
6741 Point::new(6, 4)..Point::new(6, 4),
6742 "// inside second function ",
6743 )],
6744 cx,
6745 );
6746
6747 assert_eq!(
6748 buffer.text(),
6749 "
6750 // above first function
6751 fn a() {
6752 // inside first function
6753 f1();
6754 }
6755 fn b() {
6756 // inside second function f2();
6757 }
6758 fn c() {
6759 f3();
6760 }
6761 "
6762 .unindent()
6763 );
6764 });
6765
6766 let edits = project
6767 .update(cx, |project, cx| {
6768 project.edits_from_lsp(
6769 &buffer,
6770 vec![
6771 // replace body of first function
6772 lsp::TextEdit {
6773 range: lsp::Range::new(
6774 lsp::Position::new(0, 0),
6775 lsp::Position::new(3, 0),
6776 ),
6777 new_text: "
6778 fn a() {
6779 f10();
6780 }
6781 "
6782 .unindent(),
6783 },
6784 // edit inside second function
6785 lsp::TextEdit {
6786 range: lsp::Range::new(
6787 lsp::Position::new(4, 6),
6788 lsp::Position::new(4, 6),
6789 ),
6790 new_text: "00".into(),
6791 },
6792 // edit inside third function via two distinct edits
6793 lsp::TextEdit {
6794 range: lsp::Range::new(
6795 lsp::Position::new(7, 5),
6796 lsp::Position::new(7, 5),
6797 ),
6798 new_text: "4000".into(),
6799 },
6800 lsp::TextEdit {
6801 range: lsp::Range::new(
6802 lsp::Position::new(7, 5),
6803 lsp::Position::new(7, 6),
6804 ),
6805 new_text: "".into(),
6806 },
6807 ],
6808 Some(lsp_document_version),
6809 cx,
6810 )
6811 })
6812 .await
6813 .unwrap();
6814
6815 buffer.update(cx, |buffer, cx| {
6816 for (range, new_text) in edits {
6817 buffer.edit([(range, new_text)], cx);
6818 }
6819 assert_eq!(
6820 buffer.text(),
6821 "
6822 // above first function
6823 fn a() {
6824 // inside first function
6825 f10();
6826 }
6827 fn b() {
6828 // inside second function f200();
6829 }
6830 fn c() {
6831 f4000();
6832 }
6833 "
6834 .unindent()
6835 );
6836 });
6837 }
6838
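    // A large LSP diff that rewrites most of the file (as rust-analyzer does
    // for merge-imports) should be reduced to a minimal set of buffer edits.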
6839 #[gpui::test]
6840 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6841 cx.foreground().forbid_parking();
6842
6843 let text = "
6844 use a::b;
6845 use a::c;
6846
6847 fn f() {
6848 b();
6849 c();
6850 }
6851 "
6852 .unindent();
6853
6854 let fs = FakeFs::new(cx.background());
6855 fs.insert_tree(
6856 "/dir",
6857 json!({
6858 "a.rs": text.clone(),
6859 }),
6860 )
6861 .await;
6862
6863 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6864 let buffer = project
6865 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6866 .await
6867 .unwrap();
6868
6869 // Simulate the language server sending us a small edit in the form of a very large diff.
6870 // Rust-analyzer does this when performing a merge-imports code action.
6871 let edits = project
6872 .update(cx, |project, cx| {
6873 project.edits_from_lsp(
6874 &buffer,
6875 [
6876 // Replace the first use statement without editing the semicolon.
6877 lsp::TextEdit {
6878 range: lsp::Range::new(
6879 lsp::Position::new(0, 4),
6880 lsp::Position::new(0, 8),
6881 ),
6882 new_text: "a::{b, c}".into(),
6883 },
6884 // Reinsert the remainder of the file between the semicolon and the final
6885 // newline of the file.
6886 lsp::TextEdit {
6887 range: lsp::Range::new(
6888 lsp::Position::new(0, 9),
6889 lsp::Position::new(0, 9),
6890 ),
6891 new_text: "\n\n".into(),
6892 },
6893 lsp::TextEdit {
6894 range: lsp::Range::new(
6895 lsp::Position::new(0, 9),
6896 lsp::Position::new(0, 9),
6897 ),
6898 new_text: "
6899 fn f() {
6900 b();
6901 c();
6902 }"
6903 .unindent(),
6904 },
6905 // Delete everything after the first newline of the file.
6906 lsp::TextEdit {
6907 range: lsp::Range::new(
6908 lsp::Position::new(1, 0),
6909 lsp::Position::new(7, 0),
6910 ),
6911 new_text: "".into(),
6912 },
6913 ],
6914 None,
6915 cx,
6916 )
6917 })
6918 .await
6919 .unwrap();
6920
6921 buffer.update(cx, |buffer, cx| {
6922 let edits = edits
6923 .into_iter()
6924 .map(|(range, text)| {
6925 (
6926 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6927 text,
6928 )
6929 })
6930 .collect::<Vec<_>>();
6931
6932 assert_eq!(
6933 edits,
6934 [
6935 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6936 (Point::new(1, 0)..Point::new(2, 0), "".into())
6937 ]
6938 );
6939
6940 for (range, new_text) in edits {
6941 buffer.edit([(range, new_text)], cx);
6942 }
6943 assert_eq!(
6944 buffer.text(),
6945 "
6946 use a::{b, c};
6947
6948 fn f() {
6949 b();
6950 c();
6951 }
6952 "
6953 .unindent()
6954 );
6955 });
6956 }
6957
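    // Collects the buffer's text in `range` along with the diagnostic severity
    // of each chunk, merging adjacent chunks that share the same severity.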
6958 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6959 buffer: &Buffer,
6960 range: Range<T>,
6961 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6962 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6963 for chunk in buffer.snapshot().chunks(range, true) {
6964 if chunks.last().map_or(false, |prev_chunk| {
6965 prev_chunk.1 == chunk.diagnostic_severity
6966 }) {
6967 chunks.last_mut().unwrap().0.push_str(chunk.text);
6968 } else {
6969 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6970 }
6971 }
6972 chunks
6973 }
6974
6975 #[gpui::test]
6976 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6977 let dir = temp_tree(json!({
6978 "root": {
6979 "dir1": {},
6980 "dir2": {
6981 "dir3": {}
6982 }
6983 }
6984 }));
6985
6986 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6987 let cancel_flag = Default::default();
6988 let results = project
6989 .read_with(cx, |project, cx| {
6990 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6991 })
6992 .await;
6993
6994 assert!(results.is_empty());
6995 }
6996
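    // Go-to-definition can land in a file outside the project's visible
    // worktrees; an invisible worktree is created for it and released when the
    // last handle to the target buffer is dropped.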
6997 #[gpui::test(iterations = 10)]
6998 async fn test_definition(cx: &mut gpui::TestAppContext) {
6999 let mut language = Language::new(
7000 LanguageConfig {
7001 name: "Rust".into(),
7002 path_suffixes: vec!["rs".to_string()],
7003 ..Default::default()
7004 },
7005 Some(tree_sitter_rust::language()),
7006 );
7007 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
7008
7009 let fs = FakeFs::new(cx.background());
7010 fs.insert_tree(
7011 "/dir",
7012 json!({
7013 "a.rs": "const fn a() { A }",
7014 "b.rs": "const y: i32 = crate::a()",
7015 }),
7016 )
7017 .await;
7018
7019 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
7020 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7021
7022 let buffer = project
7023 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
7024 .await
7025 .unwrap();
7026
7027 let fake_server = fake_servers.next().await.unwrap();
7028 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
7029 let params = params.text_document_position_params;
7030 assert_eq!(
7031 params.text_document.uri.to_file_path().unwrap(),
7032 Path::new("/dir/b.rs"),
7033 );
7034 assert_eq!(params.position, lsp::Position::new(0, 22));
7035
7036 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
7037 lsp::Location::new(
7038 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
7039 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
7040 ),
7041 )))
7042 });
7043
7044 let mut definitions = project
7045 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
7046 .await
7047 .unwrap();
7048
7049 assert_eq!(definitions.len(), 1);
7050 let definition = definitions.pop().unwrap();
7051 cx.update(|cx| {
7052 let target_buffer = definition.buffer.read(cx);
7053 assert_eq!(
7054 target_buffer
7055 .file()
7056 .unwrap()
7057 .as_local()
7058 .unwrap()
7059 .abs_path(cx),
7060 Path::new("/dir/a.rs"),
7061 );
7062 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
7063 assert_eq!(
7064 list_worktrees(&project, cx),
7065 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
7066 );
7067
7068 drop(definition);
7069 });
7070 cx.read(|cx| {
7071 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
7072 });
7073
7074 fn list_worktrees<'a>(
7075 project: &'a ModelHandle<Project>,
7076 cx: &'a AppContext,
7077 ) -> Vec<(&'a Path, bool)> {
7078 project
7079 .read(cx)
7080 .worktrees(cx)
7081 .map(|worktree| {
7082 let worktree = worktree.read(cx);
7083 (
7084 worktree.as_local().unwrap().abs_path().as_ref(),
7085 worktree.is_visible(),
7086 )
7087 })
7088 .collect::<Vec<_>>()
7089 }
7090 }
7091
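    // Completion items without an explicit edit range should replace the word
    // preceding the cursor with their `insert_text`.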
7092 #[gpui::test]
7093 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
7094 let mut language = Language::new(
7095 LanguageConfig {
7096 name: "TypeScript".into(),
7097 path_suffixes: vec!["ts".to_string()],
7098 ..Default::default()
7099 },
7100 Some(tree_sitter_typescript::language_typescript()),
7101 );
7102 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7103
7104 let fs = FakeFs::new(cx.background());
7105 fs.insert_tree(
7106 "/dir",
7107 json!({
7108 "a.ts": "",
7109 }),
7110 )
7111 .await;
7112
7113 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7114 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7115 let buffer = project
7116 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7117 .await
7118 .unwrap();
7119
7120 let fake_server = fake_language_servers.next().await.unwrap();
7121
7122 let text = "let a = b.fqn";
7123 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7124 let completions = project.update(cx, |project, cx| {
7125 project.completions(&buffer, text.len(), cx)
7126 });
7127
7128 fake_server
7129 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7130 Ok(Some(lsp::CompletionResponse::Array(vec![
7131 lsp::CompletionItem {
7132 label: "fullyQualifiedName?".into(),
7133 insert_text: Some("fullyQualifiedName".into()),
7134 ..Default::default()
7135 },
7136 ])))
7137 })
7138 .next()
7139 .await;
7140 let completions = completions.await.unwrap();
7141 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7142 assert_eq!(completions.len(), 1);
7143 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7144 assert_eq!(
7145 completions[0].old_range.to_offset(&snapshot),
7146 text.len() - 3..text.len()
7147 );
7148 }
7149
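    // When a resolved code action has no edits, applying it should execute the
    // action's command and capture the server's resulting workspace edit as a
    // project transaction.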
7150 #[gpui::test(iterations = 10)]
7151 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7152 let mut language = Language::new(
7153 LanguageConfig {
7154 name: "TypeScript".into(),
7155 path_suffixes: vec!["ts".to_string()],
7156 ..Default::default()
7157 },
7158 None,
7159 );
7160 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7161
7162 let fs = FakeFs::new(cx.background());
7163 fs.insert_tree(
7164 "/dir",
7165 json!({
7166 "a.ts": "a",
7167 }),
7168 )
7169 .await;
7170
7171 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7172 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7173 let buffer = project
7174 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7175 .await
7176 .unwrap();
7177
7178 let fake_server = fake_language_servers.next().await.unwrap();
7179
        // The language server returns code actions that contain commands, not edits.
7181 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7182 fake_server
7183 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7184 Ok(Some(vec![
7185 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7186 title: "The code action".into(),
7187 command: Some(lsp::Command {
7188 title: "The command".into(),
7189 command: "_the/command".into(),
7190 arguments: Some(vec![json!("the-argument")]),
7191 }),
7192 ..Default::default()
7193 }),
7194 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7195 title: "two".into(),
7196 ..Default::default()
7197 }),
7198 ]))
7199 })
7200 .next()
7201 .await;
7202
7203 let action = actions.await.unwrap()[0].clone();
7204 let apply = project.update(cx, |project, cx| {
7205 project.apply_code_action(buffer.clone(), action, true, cx)
7206 });
7207
        // Resolving the code action does not populate its edits. In the
        // absence of edits, we must execute the given command.
7210 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7211 |action, _| async move { Ok(action) },
7212 );
7213
        // While executing the command, the language server sends the editor
        // a `workspace/applyEdit` request.
7216 fake_server
7217 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7218 let fake = fake_server.clone();
7219 move |params, _| {
7220 assert_eq!(params.command, "_the/command");
7221 let fake = fake.clone();
7222 async move {
7223 fake.server
7224 .request::<lsp::request::ApplyWorkspaceEdit>(
7225 lsp::ApplyWorkspaceEditParams {
7226 label: None,
7227 edit: lsp::WorkspaceEdit {
7228 changes: Some(
7229 [(
7230 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7231 vec![lsp::TextEdit {
7232 range: lsp::Range::new(
7233 lsp::Position::new(0, 0),
7234 lsp::Position::new(0, 0),
7235 ),
7236 new_text: "X".into(),
7237 }],
7238 )]
7239 .into_iter()
7240 .collect(),
7241 ),
7242 ..Default::default()
7243 },
7244 },
7245 )
7246 .await
7247 .unwrap();
7248 Ok(Some(json!(null)))
7249 }
7250 }
7251 })
7252 .next()
7253 .await;
7254
        // Applying the code action returns a project transaction containing the edits
        // sent by the language server in its `workspace/applyEdit` request.
7257 let transaction = apply.await.unwrap();
7258 assert!(transaction.0.contains_key(&buffer));
7259 buffer.update(cx, |buffer, cx| {
7260 assert_eq!(buffer.text(), "Xa");
7261 buffer.undo(cx);
7262 assert_eq!(buffer.text(), "a");
7263 });
7264 }
7265
7266 #[gpui::test]
7267 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7268 let fs = FakeFs::new(cx.background());
7269 fs.insert_tree(
7270 "/dir",
7271 json!({
7272 "file1": "the old contents",
7273 }),
7274 )
7275 .await;
7276
7277 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7278 let buffer = project
7279 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7280 .await
7281 .unwrap();
7282 buffer
7283 .update(cx, |buffer, cx| {
7284 assert_eq!(buffer.text(), "the old contents");
7285 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7286 buffer.save(cx)
7287 })
7288 .await
7289 .unwrap();
7290
7291 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7292 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7293 }
7294
7295 #[gpui::test]
7296 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7297 let fs = FakeFs::new(cx.background());
7298 fs.insert_tree(
7299 "/dir",
7300 json!({
7301 "file1": "the old contents",
7302 }),
7303 )
7304 .await;
7305
7306 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7307 let buffer = project
7308 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7309 .await
7310 .unwrap();
7311 buffer
7312 .update(cx, |buffer, cx| {
7313 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7314 buffer.save(cx)
7315 })
7316 .await
7317 .unwrap();
7318
7319 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7320 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7321 }
7322
7323 #[gpui::test]
7324 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7325 let fs = FakeFs::new(cx.background());
7326 fs.insert_tree("/dir", json!({})).await;
7327
7328 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7329 let buffer = project.update(cx, |project, cx| {
7330 project.create_buffer("", None, cx).unwrap()
7331 });
7332 buffer.update(cx, |buffer, cx| {
7333 buffer.edit([(0..0, "abc")], cx);
7334 assert!(buffer.is_dirty());
7335 assert!(!buffer.has_conflict());
7336 });
7337 project
7338 .update(cx, |project, cx| {
7339 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7340 })
7341 .await
7342 .unwrap();
7343 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7344 buffer.read_with(cx, |buffer, cx| {
7345 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7346 assert!(!buffer.is_dirty());
7347 assert!(!buffer.has_conflict());
7348 });
7349
7350 let opened_buffer = project
7351 .update(cx, |project, cx| {
7352 project.open_local_buffer("/dir/file1", cx)
7353 })
7354 .await
7355 .unwrap();
7356 assert_eq!(opened_buffer, buffer);
7357 }
7358
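    // Renames and deletions on disk should preserve entry ids and keep open
    // buffers pointing at the right paths, and a remote worktree should become
    // consistent after applying the local worktree's update.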
7359 #[gpui::test(retries = 5)]
7360 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7361 let dir = temp_tree(json!({
7362 "a": {
7363 "file1": "",
7364 "file2": "",
7365 "file3": "",
7366 },
7367 "b": {
7368 "c": {
7369 "file4": "",
7370 "file5": "",
7371 }
7372 }
7373 }));
7374
7375 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7376 let rpc = project.read_with(cx, |p, _| p.client.clone());
7377
7378 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7379 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7380 async move { buffer.await.unwrap() }
7381 };
7382 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7383 project.read_with(cx, |project, cx| {
7384 let tree = project.worktrees(cx).next().unwrap();
7385 tree.read(cx)
7386 .entry_for_path(path)
7387 .expect(&format!("no entry for path {}", path))
7388 .id
7389 })
7390 };
7391
7392 let buffer2 = buffer_for_path("a/file2", cx).await;
7393 let buffer3 = buffer_for_path("a/file3", cx).await;
7394 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7395 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7396
7397 let file2_id = id_for_path("a/file2", &cx);
7398 let file3_id = id_for_path("a/file3", &cx);
7399 let file4_id = id_for_path("b/c/file4", &cx);
7400
7401 // Create a remote copy of this worktree.
7402 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7403 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7404 let (remote, load_task) = cx.update(|cx| {
7405 Worktree::remote(
7406 1,
7407 1,
7408 initial_snapshot.to_proto(&Default::default(), true),
7409 rpc.clone(),
7410 cx,
7411 )
7412 });
7414 load_task.await;
7415
7416 cx.read(|cx| {
7417 assert!(!buffer2.read(cx).is_dirty());
7418 assert!(!buffer3.read(cx).is_dirty());
7419 assert!(!buffer4.read(cx).is_dirty());
7420 assert!(!buffer5.read(cx).is_dirty());
7421 });
7422
7423 // Rename and delete files and directories.
7424 tree.flush_fs_events(&cx).await;
7425 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7426 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7427 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7428 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7429 tree.flush_fs_events(&cx).await;
7430
7431 let expected_paths = vec![
7432 "a",
7433 "a/file1",
7434 "a/file2.new",
7435 "b",
7436 "d",
7437 "d/file3",
7438 "d/file4",
7439 ];
7440
7441 cx.read(|app| {
7442 assert_eq!(
7443 tree.read(app)
7444 .paths()
7445 .map(|p| p.to_str().unwrap())
7446 .collect::<Vec<_>>(),
7447 expected_paths
7448 );
7449
7450 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7451 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7452 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7453
7454 assert_eq!(
7455 buffer2.read(app).file().unwrap().path().as_ref(),
7456 Path::new("a/file2.new")
7457 );
7458 assert_eq!(
7459 buffer3.read(app).file().unwrap().path().as_ref(),
7460 Path::new("d/file3")
7461 );
7462 assert_eq!(
7463 buffer4.read(app).file().unwrap().path().as_ref(),
7464 Path::new("d/file4")
7465 );
7466 assert_eq!(
7467 buffer5.read(app).file().unwrap().path().as_ref(),
7468 Path::new("b/c/file5")
7469 );
7470
7471 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7472 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7473 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7474 assert!(buffer5.read(app).file().unwrap().is_deleted());
7475 });
7476
7477 // Update the remote worktree. Check that it becomes consistent with the
7478 // local worktree.
7479 remote.update(cx, |remote, cx| {
7480 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7481 &initial_snapshot,
7482 1,
7483 1,
7484 true,
7485 );
7486 remote
7487 .as_remote_mut()
7488 .unwrap()
7489 .snapshot
7490 .apply_remote_update(update_message)
7491 .unwrap();
7492
7493 assert_eq!(
7494 remote
7495 .paths()
7496 .map(|p| p.to_str().unwrap())
7497 .collect::<Vec<_>>(),
7498 expected_paths
7499 );
7500 });
7501 }
7502
7503 #[gpui::test]
7504 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7505 let fs = FakeFs::new(cx.background());
7506 fs.insert_tree(
7507 "/dir",
7508 json!({
7509 "a.txt": "a-contents",
7510 "b.txt": "b-contents",
7511 }),
7512 )
7513 .await;
7514
7515 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7516
7517 // Spawn multiple tasks to open paths, repeating some paths.
7518 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7519 (
7520 p.open_local_buffer("/dir/a.txt", cx),
7521 p.open_local_buffer("/dir/b.txt", cx),
7522 p.open_local_buffer("/dir/a.txt", cx),
7523 )
7524 });
7525
7526 let buffer_a_1 = buffer_a_1.await.unwrap();
7527 let buffer_a_2 = buffer_a_2.await.unwrap();
7528 let buffer_b = buffer_b.await.unwrap();
7529 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7530 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7531
7532 // There is only one buffer per path.
7533 let buffer_a_id = buffer_a_1.id();
7534 assert_eq!(buffer_a_2.id(), buffer_a_id);
7535
7536 // Open the same path again while it is still open.
7537 drop(buffer_a_1);
7538 let buffer_a_3 = project
7539 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7540 .await
7541 .unwrap();
7542
7543 // There's still only one buffer per path.
7544 assert_eq!(buffer_a_3.id(), buffer_a_id);
7545 }
7546
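    // Exercises the buffer's dirty state and the events it emits as it is
    // edited, saved, and as its file is deleted on disk.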
7547 #[gpui::test]
7548 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7549 let fs = FakeFs::new(cx.background());
7550 fs.insert_tree(
7551 "/dir",
7552 json!({
7553 "file1": "abc",
7554 "file2": "def",
7555 "file3": "ghi",
7556 }),
7557 )
7558 .await;
7559
7560 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7561
7562 let buffer1 = project
7563 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7564 .await
7565 .unwrap();
7566 let events = Rc::new(RefCell::new(Vec::new()));
7567
        // Initially, the buffer isn't dirty.
7569 buffer1.update(cx, |buffer, cx| {
7570 cx.subscribe(&buffer1, {
7571 let events = events.clone();
7572 move |_, _, event, _| match event {
7573 BufferEvent::Operation(_) => {}
7574 _ => events.borrow_mut().push(event.clone()),
7575 }
7576 })
7577 .detach();
7578
7579 assert!(!buffer.is_dirty());
7580 assert!(events.borrow().is_empty());
7581
7582 buffer.edit([(1..2, "")], cx);
7583 });
7584
        // After the first edit, the buffer is dirty and emits a dirtied event.
7586 buffer1.update(cx, |buffer, cx| {
7587 assert!(buffer.text() == "ac");
7588 assert!(buffer.is_dirty());
7589 assert_eq!(
7590 *events.borrow(),
7591 &[language::Event::Edited, language::Event::Dirtied]
7592 );
7593 events.borrow_mut().clear();
7594 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7595 });
7596
        // After saving, the buffer is not dirty and emits a saved event.
7598 buffer1.update(cx, |buffer, cx| {
7599 assert!(!buffer.is_dirty());
7600 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7601 events.borrow_mut().clear();
7602
7603 buffer.edit([(1..1, "B")], cx);
7604 buffer.edit([(2..2, "D")], cx);
7605 });
7606
        // After editing again, the buffer is dirty and emits another dirtied event.
7608 buffer1.update(cx, |buffer, cx| {
7609 assert!(buffer.text() == "aBDc");
7610 assert!(buffer.is_dirty());
7611 assert_eq!(
7612 *events.borrow(),
7613 &[
7614 language::Event::Edited,
7615 language::Event::Dirtied,
7616 language::Event::Edited,
7617 ],
7618 );
7619 events.borrow_mut().clear();
7620
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
7623 buffer.edit([(1..3, "")], cx);
7624 assert!(buffer.text() == "ac");
7625 assert!(buffer.is_dirty());
7626 });
7627
7628 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7629
7630 // When a file is deleted, the buffer is considered dirty.
7631 let events = Rc::new(RefCell::new(Vec::new()));
7632 let buffer2 = project
7633 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7634 .await
7635 .unwrap();
7636 buffer2.update(cx, |_, cx| {
7637 cx.subscribe(&buffer2, {
7638 let events = events.clone();
7639 move |_, _, event, _| events.borrow_mut().push(event.clone())
7640 })
7641 .detach();
7642 });
7643
7644 fs.remove_file("/dir/file2".as_ref(), Default::default())
7645 .await
7646 .unwrap();
7647 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7648 assert_eq!(
7649 *events.borrow(),
7650 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7651 );
7652
        // When a buffer is already dirty and its file is deleted, we don't emit another Dirtied event.
7654 let events = Rc::new(RefCell::new(Vec::new()));
7655 let buffer3 = project
7656 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7657 .await
7658 .unwrap();
7659 buffer3.update(cx, |_, cx| {
7660 cx.subscribe(&buffer3, {
7661 let events = events.clone();
7662 move |_, _, event, _| events.borrow_mut().push(event.clone())
7663 })
7664 .detach();
7665 });
7666
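        // Dirty the buffer and clear the recorded events before deleting the file on disk.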
7667 buffer3.update(cx, |buffer, cx| {
7668 buffer.edit([(0..0, "x")], cx);
7669 });
7670 events.borrow_mut().clear();
7671 fs.remove_file("/dir/file3".as_ref(), Default::default())
7672 .await
7673 .unwrap();
7674 buffer3
7675 .condition(&cx, |_, _| !events.borrow().is_empty())
7676 .await;
7677 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7678 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7679 }
7680
7681 #[gpui::test]
7682 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7683 let initial_contents = "aaa\nbbbbb\nc\n";
7684 let fs = FakeFs::new(cx.background());
7685 fs.insert_tree(
7686 "/dir",
7687 json!({
7688 "the-file": initial_contents,
7689 }),
7690 )
7691 .await;
7692 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7693 let buffer = project
7694 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7695 .await
7696 .unwrap();
7697
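        // Place an anchor at column 1 of each of the first three rows, so we can verify below that anchors keep tracking their text across the diff-based reload.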
7698 let anchors = (0..3)
7699 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7700 .collect::<Vec<_>>();
7701
7702 // Change the file on disk, adding two new lines of text, and removing
7703 // one line.
7704 buffer.read_with(cx, |buffer, _| {
7705 assert!(!buffer.is_dirty());
7706 assert!(!buffer.has_conflict());
7707 });
7708 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7709 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7710 .await
7711 .unwrap();
7712
7713 // Because the buffer was not modified, it is reloaded from disk. Its
7714 // contents are edited according to the diff between the old and new
7715 // file contents.
7716 buffer
7717 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7718 .await;
7719
7720 buffer.update(cx, |buffer, _| {
7721 assert_eq!(buffer.text(), new_contents);
7722 assert!(!buffer.is_dirty());
7723 assert!(!buffer.has_conflict());
7724
7725 let anchor_positions = anchors
7726 .iter()
7727 .map(|anchor| anchor.to_point(&*buffer))
7728 .collect::<Vec<_>>();
7729 assert_eq!(
7730 anchor_positions,
7731 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7732 );
7733 });
7734
7735        // Modify the buffer, making it dirty but not conflicted.
7736 buffer.update(cx, |buffer, cx| {
7737 buffer.edit([(0..0, " ")], cx);
7738 assert!(buffer.is_dirty());
7739 assert!(!buffer.has_conflict());
7740 });
7741
7742 // Change the file on disk again, adding blank lines to the beginning.
7743 fs.save(
7744 "/dir/the-file".as_ref(),
7745 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
7746 )
7747 .await
7748 .unwrap();
7749
7750 // Because the buffer is modified, it doesn't reload from disk, but is
7751 // marked as having a conflict.
7752 buffer
7753 .condition(&cx, |buffer, _| buffer.has_conflict())
7754 .await;
7755 }
7756
7757 #[gpui::test]
7758 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
7759 cx.foreground().forbid_parking();
7760
7761 let fs = FakeFs::new(cx.background());
7762 fs.insert_tree(
7763 "/the-dir",
7764 json!({
7765 "a.rs": "
7766 fn foo(mut v: Vec<usize>) {
7767 for x in &v {
7768 v.push(1);
7769 }
7770 }
7771 "
7772 .unindent(),
7773 }),
7774 )
7775 .await;
7776
7777 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
7778 let buffer = project
7779 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
7780 .await
7781 .unwrap();
7782
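        // Build a diagnostics message in which the hint entries and their primary warning/error point at each other via related_information, so they should be grouped together.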
7783 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
7784 let message = lsp::PublishDiagnosticsParams {
7785 uri: buffer_uri.clone(),
7786 diagnostics: vec![
7787 lsp::Diagnostic {
7788 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7789 severity: Some(DiagnosticSeverity::WARNING),
7790 message: "error 1".to_string(),
7791 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7792 location: lsp::Location {
7793 uri: buffer_uri.clone(),
7794 range: lsp::Range::new(
7795 lsp::Position::new(1, 8),
7796 lsp::Position::new(1, 9),
7797 ),
7798 },
7799 message: "error 1 hint 1".to_string(),
7800 }]),
7801 ..Default::default()
7802 },
7803 lsp::Diagnostic {
7804 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
7805 severity: Some(DiagnosticSeverity::HINT),
7806 message: "error 1 hint 1".to_string(),
7807 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7808 location: lsp::Location {
7809 uri: buffer_uri.clone(),
7810 range: lsp::Range::new(
7811 lsp::Position::new(1, 8),
7812 lsp::Position::new(1, 9),
7813 ),
7814 },
7815 message: "original diagnostic".to_string(),
7816 }]),
7817 ..Default::default()
7818 },
7819 lsp::Diagnostic {
7820 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
7821 severity: Some(DiagnosticSeverity::ERROR),
7822 message: "error 2".to_string(),
7823 related_information: Some(vec![
7824 lsp::DiagnosticRelatedInformation {
7825 location: lsp::Location {
7826 uri: buffer_uri.clone(),
7827 range: lsp::Range::new(
7828 lsp::Position::new(1, 13),
7829 lsp::Position::new(1, 15),
7830 ),
7831 },
7832 message: "error 2 hint 1".to_string(),
7833 },
7834 lsp::DiagnosticRelatedInformation {
7835 location: lsp::Location {
7836 uri: buffer_uri.clone(),
7837 range: lsp::Range::new(
7838 lsp::Position::new(1, 13),
7839 lsp::Position::new(1, 15),
7840 ),
7841 },
7842 message: "error 2 hint 2".to_string(),
7843 },
7844 ]),
7845 ..Default::default()
7846 },
7847 lsp::Diagnostic {
7848 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7849 severity: Some(DiagnosticSeverity::HINT),
7850 message: "error 2 hint 1".to_string(),
7851 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7852 location: lsp::Location {
7853 uri: buffer_uri.clone(),
7854 range: lsp::Range::new(
7855 lsp::Position::new(2, 8),
7856 lsp::Position::new(2, 17),
7857 ),
7858 },
7859 message: "original diagnostic".to_string(),
7860 }]),
7861 ..Default::default()
7862 },
7863 lsp::Diagnostic {
7864 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
7865 severity: Some(DiagnosticSeverity::HINT),
7866 message: "error 2 hint 2".to_string(),
7867 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
7868 location: lsp::Location {
7869 uri: buffer_uri.clone(),
7870 range: lsp::Range::new(
7871 lsp::Position::new(2, 8),
7872 lsp::Position::new(2, 17),
7873 ),
7874 },
7875 message: "original diagnostic".to_string(),
7876 }]),
7877 ..Default::default()
7878 },
7879 ],
7880 version: None,
7881 };
7882
7883 project
7884 .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
7885 .unwrap();
7886 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7887
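        // Diagnostics are returned in position order, and each hint shares a group_id with the primary diagnostic it belongs to.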
7888 assert_eq!(
7889 buffer
7890 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
7891 .collect::<Vec<_>>(),
7892 &[
7893 DiagnosticEntry {
7894 range: Point::new(1, 8)..Point::new(1, 9),
7895 diagnostic: Diagnostic {
7896 severity: DiagnosticSeverity::WARNING,
7897 message: "error 1".to_string(),
7898 group_id: 0,
7899 is_primary: true,
7900 ..Default::default()
7901 }
7902 },
7903 DiagnosticEntry {
7904 range: Point::new(1, 8)..Point::new(1, 9),
7905 diagnostic: Diagnostic {
7906 severity: DiagnosticSeverity::HINT,
7907 message: "error 1 hint 1".to_string(),
7908 group_id: 0,
7909 is_primary: false,
7910 ..Default::default()
7911 }
7912 },
7913 DiagnosticEntry {
7914 range: Point::new(1, 13)..Point::new(1, 15),
7915 diagnostic: Diagnostic {
7916 severity: DiagnosticSeverity::HINT,
7917 message: "error 2 hint 1".to_string(),
7918 group_id: 1,
7919 is_primary: false,
7920 ..Default::default()
7921 }
7922 },
7923 DiagnosticEntry {
7924 range: Point::new(1, 13)..Point::new(1, 15),
7925 diagnostic: Diagnostic {
7926 severity: DiagnosticSeverity::HINT,
7927 message: "error 2 hint 2".to_string(),
7928 group_id: 1,
7929 is_primary: false,
7930 ..Default::default()
7931 }
7932 },
7933 DiagnosticEntry {
7934 range: Point::new(2, 8)..Point::new(2, 17),
7935 diagnostic: Diagnostic {
7936 severity: DiagnosticSeverity::ERROR,
7937 message: "error 2".to_string(),
7938 group_id: 1,
7939 is_primary: true,
7940 ..Default::default()
7941 }
7942 }
7943 ]
7944 );
7945
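        // Each group can also be queried individually, yielding the primary diagnostic together with its hints.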
7946 assert_eq!(
7947 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
7948 &[
7949 DiagnosticEntry {
7950 range: Point::new(1, 8)..Point::new(1, 9),
7951 diagnostic: Diagnostic {
7952 severity: DiagnosticSeverity::WARNING,
7953 message: "error 1".to_string(),
7954 group_id: 0,
7955 is_primary: true,
7956 ..Default::default()
7957 }
7958 },
7959 DiagnosticEntry {
7960 range: Point::new(1, 8)..Point::new(1, 9),
7961 diagnostic: Diagnostic {
7962 severity: DiagnosticSeverity::HINT,
7963 message: "error 1 hint 1".to_string(),
7964 group_id: 0,
7965 is_primary: false,
7966 ..Default::default()
7967 }
7968 },
7969 ]
7970 );
7971 assert_eq!(
7972 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
7973 &[
7974 DiagnosticEntry {
7975 range: Point::new(1, 13)..Point::new(1, 15),
7976 diagnostic: Diagnostic {
7977 severity: DiagnosticSeverity::HINT,
7978 message: "error 2 hint 1".to_string(),
7979 group_id: 1,
7980 is_primary: false,
7981 ..Default::default()
7982 }
7983 },
7984 DiagnosticEntry {
7985 range: Point::new(1, 13)..Point::new(1, 15),
7986 diagnostic: Diagnostic {
7987 severity: DiagnosticSeverity::HINT,
7988 message: "error 2 hint 2".to_string(),
7989 group_id: 1,
7990 is_primary: false,
7991 ..Default::default()
7992 }
7993 },
7994 DiagnosticEntry {
7995 range: Point::new(2, 8)..Point::new(2, 17),
7996 diagnostic: Diagnostic {
7997 severity: DiagnosticSeverity::ERROR,
7998 message: "error 2".to_string(),
7999 group_id: 1,
8000 is_primary: true,
8001 ..Default::default()
8002 }
8003 }
8004 ]
8005 );
8006 }
8007
8008 #[gpui::test]
8009 async fn test_rename(cx: &mut gpui::TestAppContext) {
8010 cx.foreground().forbid_parking();
8011
8012 let mut language = Language::new(
8013 LanguageConfig {
8014 name: "Rust".into(),
8015 path_suffixes: vec!["rs".to_string()],
8016 ..Default::default()
8017 },
8018 Some(tree_sitter_rust::language()),
8019 );
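        // Register a fake language server that advertises rename support, including prepareRename.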
8020 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
8021 capabilities: lsp::ServerCapabilities {
8022 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
8023 prepare_provider: Some(true),
8024 work_done_progress_options: Default::default(),
8025 })),
8026 ..Default::default()
8027 },
8028 ..Default::default()
8029 });
8030
8031 let fs = FakeFs::new(cx.background());
8032 fs.insert_tree(
8033 "/dir",
8034 json!({
8035 "one.rs": "const ONE: usize = 1;",
8036 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
8037 }),
8038 )
8039 .await;
8040
8041 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
8042 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
8043 let buffer = project
8044 .update(cx, |project, cx| {
8045 project.open_local_buffer("/dir/one.rs", cx)
8046 })
8047 .await
8048 .unwrap();
8049
8050 let fake_server = fake_servers.next().await.unwrap();
8051
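        // Prepare a rename at offset 7, which falls inside `ONE`; the fake server responds with the full range of the symbol.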
8052 let response = project.update(cx, |project, cx| {
8053 project.prepare_rename(buffer.clone(), 7, cx)
8054 });
8055 fake_server
8056 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
8057 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
8058 assert_eq!(params.position, lsp::Position::new(0, 7));
8059 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
8060 lsp::Position::new(0, 6),
8061 lsp::Position::new(0, 9),
8062 ))))
8063 })
8064 .next()
8065 .await
8066 .unwrap();
8067 let range = response.await.unwrap().unwrap();
8068 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
8069 assert_eq!(range, 6..9);
8070
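        // Perform the rename. The fake server responds with a WorkspaceEdit that touches both files, and the project applies it to the affected buffers.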
8071 let response = project.update(cx, |project, cx| {
8072 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
8073 });
8074 fake_server
8075 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
8076 assert_eq!(
8077 params.text_document_position.text_document.uri.as_str(),
8078 "file:///dir/one.rs"
8079 );
8080 assert_eq!(
8081 params.text_document_position.position,
8082 lsp::Position::new(0, 7)
8083 );
8084 assert_eq!(params.new_name, "THREE");
8085 Ok(Some(lsp::WorkspaceEdit {
8086 changes: Some(
8087 [
8088 (
8089 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
8090 vec![lsp::TextEdit::new(
8091 lsp::Range::new(
8092 lsp::Position::new(0, 6),
8093 lsp::Position::new(0, 9),
8094 ),
8095 "THREE".to_string(),
8096 )],
8097 ),
8098 (
8099 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
8100 vec![
8101 lsp::TextEdit::new(
8102 lsp::Range::new(
8103 lsp::Position::new(0, 24),
8104 lsp::Position::new(0, 27),
8105 ),
8106 "THREE".to_string(),
8107 ),
8108 lsp::TextEdit::new(
8109 lsp::Range::new(
8110 lsp::Position::new(0, 35),
8111 lsp::Position::new(0, 38),
8112 ),
8113 "THREE".to_string(),
8114 ),
8115 ],
8116 ),
8117 ]
8118 .into_iter()
8119 .collect(),
8120 ),
8121 ..Default::default()
8122 }))
8123 })
8124 .next()
8125 .await
8126 .unwrap();
8127 let mut transaction = response.await.unwrap().0;
8128 assert_eq!(transaction.len(), 2);
8129 assert_eq!(
8130 transaction
8131 .remove_entry(&buffer)
8132 .unwrap()
8133 .0
8134 .read_with(cx, |buffer, _| buffer.text()),
8135 "const THREE: usize = 1;"
8136 );
8137 assert_eq!(
8138 transaction
8139 .into_keys()
8140 .next()
8141 .unwrap()
8142 .read_with(cx, |buffer, _| buffer.text()),
8143 "const TWO: usize = one::THREE + one::THREE;"
8144 );
8145 }
8146
8147 #[gpui::test]
8148 async fn test_search(cx: &mut gpui::TestAppContext) {
8149 let fs = FakeFs::new(cx.background());
8150 fs.insert_tree(
8151 "/dir",
8152 json!({
8153 "one.rs": "const ONE: usize = 1;",
8154 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
8155 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
8156 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
8157 }),
8158 )
8159 .await;
8160 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
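        // Initially, searching for "TWO" matches its definition in two.rs and its use in three.rs.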
8161 assert_eq!(
8162 search(&project, SearchQuery::text("TWO", false, true), cx)
8163 .await
8164 .unwrap(),
8165 HashMap::from_iter([
8166 ("two.rs".to_string(), vec![6..9]),
8167 ("three.rs".to_string(), vec![37..40])
8168 ])
8169 );
8170
8171 let buffer_4 = project
8172 .update(cx, |project, cx| {
8173 project.open_local_buffer("/dir/four.rs", cx)
8174 })
8175 .await
8176 .unwrap();
8177 buffer_4.update(cx, |buffer, cx| {
8178 let text = "two::TWO";
8179 buffer.edit([(20..28, text), (31..43, text)], cx);
8180 });
8181
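        // The search reflects the unsaved, in-memory contents of open buffers, so the edited four.rs now matches as well.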
8182 assert_eq!(
8183 search(&project, SearchQuery::text("TWO", false, true), cx)
8184 .await
8185 .unwrap(),
8186 HashMap::from_iter([
8187 ("two.rs".to_string(), vec![6..9]),
8188 ("three.rs".to_string(), vec![37..40]),
8189 ("four.rs".to_string(), vec![25..28, 36..39])
8190 ])
8191 );
8192
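        // Runs a project-wide search and flattens the results into a map from file path to match offsets.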
8193 async fn search(
8194 project: &ModelHandle<Project>,
8195 query: SearchQuery,
8196 cx: &mut gpui::TestAppContext,
8197 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
8198 let results = project
8199 .update(cx, |project, cx| project.search(query, cx))
8200 .await?;
8201
8202 Ok(results
8203 .into_iter()
8204 .map(|(buffer, ranges)| {
8205 buffer.read_with(cx, |buffer, _| {
8206 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
8207 let ranges = ranges
8208 .into_iter()
8209 .map(|range| range.to_offset(buffer))
8210 .collect::<Vec<_>>();
8211 (path, ranges)
8212 })
8213 })
8214 .collect())
8215 }
8216 }
8217}