1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, BTreeMap, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
15 MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 point_to_lsp,
19 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
20 range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion,
21 Diagnostic, DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language,
22 LanguageRegistry, LanguageServerName, LocalFile, LspAdapter, OffsetRangeExt, Operation, Patch,
23 PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
24};
25use lsp::{DiagnosticSeverity, DiagnosticTag, DocumentHighlightKind, LanguageServer};
26use lsp_command::*;
27use parking_lot::Mutex;
28use postage::stream::Stream;
29use postage::watch;
30use rand::prelude::*;
31use search::SearchQuery;
32use serde::Serialize;
33use settings::Settings;
34use sha2::{Digest, Sha256};
35use similar::{ChangeTag, TextDiff};
36use std::{
37 cell::RefCell,
38 cmp::{self, Ordering},
39 convert::TryInto,
40 ffi::OsString,
41 hash::Hash,
42 mem,
43 ops::Range,
44 os::unix::{ffi::OsStrExt, prelude::OsStringExt},
45 path::{Component, Path, PathBuf},
46 rc::Rc,
47 sync::{
48 atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
49 Arc,
50 },
51 time::Instant,
52};
53use thiserror::Error;
54use util::{post_inc, ResultExt, TryFutureExt as _};
55
56pub use fs::*;
57pub use worktree::*;
58
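/// A model that may correspond to an entry (a file or directory) in one of the
/// project's worktrees.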
59pub trait Item: Entity {
60 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId>;
61}
62
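/// A registry of every project in the application, held via weak handles so a
/// dropped project can be released.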
63#[derive(Default)]
64pub struct ProjectStore {
65 projects: Vec<WeakModelHandle<Project>>,
66}
67
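/// The state backing a single project: its worktrees, open buffers, language
/// servers, collaborators, and its local or remote client state.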
68pub struct Project {
69 worktrees: Vec<WorktreeHandle>,
70 active_entry: Option<ProjectEntryId>,
71 languages: Arc<LanguageRegistry>,
72 language_servers:
73 HashMap<(WorktreeId, LanguageServerName), (Arc<dyn LspAdapter>, Arc<LanguageServer>)>,
74 started_language_servers:
75 HashMap<(WorktreeId, LanguageServerName), Task<Option<Arc<LanguageServer>>>>,
76 language_server_statuses: BTreeMap<usize, LanguageServerStatus>,
77 language_server_settings: Arc<Mutex<serde_json::Value>>,
78 last_workspace_edits_by_language_server: HashMap<usize, ProjectTransaction>,
79 next_language_server_id: usize,
80 client: Arc<client::Client>,
81 next_entry_id: Arc<AtomicUsize>,
82 next_diagnostic_group_id: usize,
83 user_store: ModelHandle<UserStore>,
84 project_store: ModelHandle<ProjectStore>,
85 fs: Arc<dyn Fs>,
86 client_state: ProjectClientState,
87 collaborators: HashMap<PeerId, Collaborator>,
88 subscriptions: Vec<client::Subscription>,
89 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
90 shared_buffers: HashMap<PeerId, HashSet<u64>>,
91 loading_buffers: HashMap<
92 ProjectPath,
93 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
94 >,
95 loading_local_worktrees:
96 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
97 opened_buffers: HashMap<u64, OpenBuffer>,
98 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
99 nonce: u128,
100}
101
102#[derive(Error, Debug)]
103pub enum JoinProjectError {
104 #[error("host declined join request")]
105 HostDeclined,
106 #[error("host closed the project")]
107 HostClosedProject,
108 #[error("host went offline")]
109 HostWentOffline,
110 #[error("{0}")]
111 Other(#[from] anyhow::Error),
112}
113
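/// How the project holds on to an open buffer:
/// * `Strong` keeps the buffer alive (used while the project is shared or remote),
/// * `Weak` lets the buffer drop once nothing else references it,
/// * `Loading` accumulates operations that arrive before the buffer finishes opening.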
114enum OpenBuffer {
115 Strong(ModelHandle<Buffer>),
116 Weak(WeakModelHandle<Buffer>),
117 Loading(Vec<Operation>),
118}
119
120enum WorktreeHandle {
121 Strong(ModelHandle<Worktree>),
122 Weak(WeakModelHandle<Worktree>),
123}
124
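/// Whether this project is running locally (and may be shared with guests) or is
/// a remote project that this client has joined as a guest.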
125enum ProjectClientState {
126 Local {
127 is_shared: bool,
128 remote_id_tx: watch::Sender<Option<u64>>,
129 remote_id_rx: watch::Receiver<Option<u64>>,
130 public_tx: watch::Sender<bool>,
131 public_rx: watch::Receiver<bool>,
132 _maintain_remote_id_task: Task<Option<()>>,
133 },
134 Remote {
135 sharing_has_stopped: bool,
136 remote_id: u64,
137 replica_id: ReplicaId,
138 _detect_unshare_task: Task<Option<()>>,
139 },
140}
141
142#[derive(Clone, Debug)]
143pub struct Collaborator {
144 pub user: Arc<User>,
145 pub peer_id: PeerId,
146 pub replica_id: ReplicaId,
147}
148
149#[derive(Clone, Debug, PartialEq, Eq)]
150pub enum Event {
151 ActiveEntryChanged(Option<ProjectEntryId>),
152 WorktreeAdded,
153 WorktreeRemoved(WorktreeId),
154 DiskBasedDiagnosticsStarted,
155 DiskBasedDiagnosticsUpdated,
156 DiskBasedDiagnosticsFinished,
157 DiagnosticsUpdated(ProjectPath),
158 RemoteIdChanged(Option<u64>),
159 CollaboratorLeft(PeerId),
160 ContactRequestedJoin(Arc<User>),
161 ContactCancelledJoinRequest(Arc<User>),
162}
163
164#[derive(Serialize)]
165pub struct LanguageServerStatus {
166 pub name: String,
167 pub pending_work: BTreeMap<String, LanguageServerProgress>,
168 pub pending_diagnostic_updates: isize,
169}
170
171#[derive(Clone, Debug, Serialize)]
172pub struct LanguageServerProgress {
173 pub message: Option<String>,
174 pub percentage: Option<usize>,
175 #[serde(skip_serializing)]
176 pub last_update_at: Instant,
177}
178
179#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
180pub struct ProjectPath {
181 pub worktree_id: WorktreeId,
182 pub path: Arc<Path>,
183}
184
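/// Counts of the primary error and warning diagnostics reported for a single
/// worktree path.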
185#[derive(Copy, Clone, Debug, Default, PartialEq, Serialize)]
186pub struct DiagnosticSummary {
187 pub error_count: usize,
188 pub warning_count: usize,
189}
190
191#[derive(Debug)]
192pub struct Location {
193 pub buffer: ModelHandle<Buffer>,
194 pub range: Range<language::Anchor>,
195}
196
197#[derive(Debug)]
198pub struct DocumentHighlight {
199 pub range: Range<language::Anchor>,
200 pub kind: DocumentHighlightKind,
201}
202
203#[derive(Clone, Debug)]
204pub struct Symbol {
205 pub source_worktree_id: WorktreeId,
206 pub worktree_id: WorktreeId,
207 pub language_server_name: LanguageServerName,
208 pub path: PathBuf,
209 pub label: CodeLabel,
210 pub name: String,
211 pub kind: lsp::SymbolKind,
212 pub range: Range<PointUtf16>,
213 pub signature: [u8; 32],
214}
215
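/// The per-buffer transactions produced by a multi-buffer operation, such as a
/// workspace edit applied by a language server, keyed by the affected buffer.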
216#[derive(Default)]
217pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
218
219impl DiagnosticSummary {
220 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
221 let mut this = Self {
222 error_count: 0,
223 warning_count: 0,
224 };
225
226 for entry in diagnostics {
227 if entry.diagnostic.is_primary {
228 match entry.diagnostic.severity {
229 DiagnosticSeverity::ERROR => this.error_count += 1,
230 DiagnosticSeverity::WARNING => this.warning_count += 1,
231 _ => {}
232 }
233 }
234 }
235
236 this
237 }
238
239 pub fn is_empty(&self) -> bool {
240 self.error_count == 0 && self.warning_count == 0
241 }
242
243 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
244 proto::DiagnosticSummary {
245 path: path.to_string_lossy().to_string(),
246 error_count: self.error_count as u32,
247 warning_count: self.warning_count as u32,
248 }
249 }
250}
251
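/// Identifies a file or directory within a worktree. Ids are allocated from a
/// shared atomic counter, so they remain unique across all of a project's
/// worktrees. A rough sketch of how ids are minted:
///
/// ```ignore
/// let counter = AtomicUsize::new(0);
/// let first = ProjectEntryId::new(&counter);
/// let second = ProjectEntryId::new(&counter);
/// assert_ne!(first, second);
/// ```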
252#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
253pub struct ProjectEntryId(usize);
254
255impl ProjectEntryId {
256 pub const MAX: Self = Self(usize::MAX);
257
258 pub fn new(counter: &AtomicUsize) -> Self {
259 Self(counter.fetch_add(1, SeqCst))
260 }
261
262 pub fn from_proto(id: u64) -> Self {
263 Self(id as usize)
264 }
265
266 pub fn to_proto(&self) -> u64 {
267 self.0 as u64
268 }
269
270 pub fn to_usize(&self) -> usize {
271 self.0
272 }
273}
274
275impl Project {
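    /// Registers this model's handlers for the project-related RPC messages and
    /// requests delivered by the collaboration client.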
276 pub fn init(client: &Arc<Client>) {
277 client.add_model_message_handler(Self::handle_request_join_project);
278 client.add_model_message_handler(Self::handle_add_collaborator);
279 client.add_model_message_handler(Self::handle_buffer_reloaded);
280 client.add_model_message_handler(Self::handle_buffer_saved);
281 client.add_model_message_handler(Self::handle_start_language_server);
282 client.add_model_message_handler(Self::handle_update_language_server);
283 client.add_model_message_handler(Self::handle_remove_collaborator);
284 client.add_model_message_handler(Self::handle_join_project_request_cancelled);
285 client.add_model_message_handler(Self::handle_register_worktree);
286 client.add_model_message_handler(Self::handle_unregister_worktree);
287 client.add_model_message_handler(Self::handle_unregister_project);
288 client.add_model_message_handler(Self::handle_project_unshared);
289 client.add_model_message_handler(Self::handle_update_buffer_file);
290 client.add_model_message_handler(Self::handle_update_buffer);
291 client.add_model_message_handler(Self::handle_update_diagnostic_summary);
292 client.add_model_message_handler(Self::handle_update_worktree);
293 client.add_model_request_handler(Self::handle_create_project_entry);
294 client.add_model_request_handler(Self::handle_rename_project_entry);
295 client.add_model_request_handler(Self::handle_copy_project_entry);
296 client.add_model_request_handler(Self::handle_delete_project_entry);
297 client.add_model_request_handler(Self::handle_apply_additional_edits_for_completion);
298 client.add_model_request_handler(Self::handle_apply_code_action);
299 client.add_model_request_handler(Self::handle_reload_buffers);
300 client.add_model_request_handler(Self::handle_format_buffers);
301 client.add_model_request_handler(Self::handle_get_code_actions);
302 client.add_model_request_handler(Self::handle_get_completions);
303 client.add_model_request_handler(Self::handle_lsp_command::<GetDefinition>);
304 client.add_model_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
305 client.add_model_request_handler(Self::handle_lsp_command::<GetReferences>);
306 client.add_model_request_handler(Self::handle_lsp_command::<PrepareRename>);
307 client.add_model_request_handler(Self::handle_lsp_command::<PerformRename>);
308 client.add_model_request_handler(Self::handle_search_project);
309 client.add_model_request_handler(Self::handle_get_project_symbols);
310 client.add_model_request_handler(Self::handle_open_buffer_for_symbol);
311 client.add_model_request_handler(Self::handle_open_buffer_by_id);
312 client.add_model_request_handler(Self::handle_open_buffer_by_path);
313 client.add_model_request_handler(Self::handle_save_buffer);
314 }
315
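    /// Creates a project backed by the local filesystem. While the project is
    /// public and the client is connected, a background task keeps it registered
    /// with the collaboration server.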
316 pub fn local(
317 public: bool,
318 client: Arc<Client>,
319 user_store: ModelHandle<UserStore>,
320 project_store: ModelHandle<ProjectStore>,
321 languages: Arc<LanguageRegistry>,
322 fs: Arc<dyn Fs>,
323 cx: &mut MutableAppContext,
324 ) -> ModelHandle<Self> {
325 cx.add_model(|cx: &mut ModelContext<Self>| {
326 let (public_tx, public_rx) = watch::channel_with(public);
327 let (remote_id_tx, remote_id_rx) = watch::channel();
328 let _maintain_remote_id_task = cx.spawn_weak({
329 let status_rx = client.clone().status();
330 let public_rx = public_rx.clone();
331 move |this, mut cx| async move {
332 let mut stream = Stream::map(status_rx.clone(), drop)
333 .merge(Stream::map(public_rx.clone(), drop));
334 while stream.recv().await.is_some() {
335 let this = this.upgrade(&cx)?;
336 if status_rx.borrow().is_connected() && *public_rx.borrow() {
337 this.update(&mut cx, |this, cx| this.register(cx))
338 .await
339 .log_err()?;
340 } else {
341 this.update(&mut cx, |this, cx| this.unregister(cx));
342 }
343 }
344 None
345 }
346 });
347
348 let handle = cx.weak_handle();
349 project_store.update(cx, |store, cx| store.add_project(handle, cx));
350
351 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
352 Self {
353 worktrees: Default::default(),
354 collaborators: Default::default(),
355 opened_buffers: Default::default(),
356 shared_buffers: Default::default(),
357 loading_buffers: Default::default(),
358 loading_local_worktrees: Default::default(),
359 buffer_snapshots: Default::default(),
360 client_state: ProjectClientState::Local {
361 is_shared: false,
362 remote_id_tx,
363 remote_id_rx,
364 public_tx,
365 public_rx,
366 _maintain_remote_id_task,
367 },
368 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
369 subscriptions: Vec::new(),
370 active_entry: None,
371 languages,
372 client,
373 user_store,
374 project_store,
375 fs,
376 next_entry_id: Default::default(),
377 next_diagnostic_group_id: Default::default(),
378 language_servers: Default::default(),
379 started_language_servers: Default::default(),
380 language_server_statuses: Default::default(),
381 last_workspace_edits_by_language_server: Default::default(),
382 language_server_settings: Default::default(),
383 next_language_server_id: 0,
384 nonce: StdRng::from_entropy().gen(),
385 }
386 })
387 }
388
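    /// Joins a project hosted by another user, returning a `JoinProjectError` if
    /// the host declines the request, closes the project, or goes offline.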
389 pub async fn remote(
390 remote_id: u64,
391 client: Arc<Client>,
392 user_store: ModelHandle<UserStore>,
393 project_store: ModelHandle<ProjectStore>,
394 languages: Arc<LanguageRegistry>,
395 fs: Arc<dyn Fs>,
396 mut cx: AsyncAppContext,
397 ) -> Result<ModelHandle<Self>, JoinProjectError> {
398 client.authenticate_and_connect(true, &cx).await?;
399
400 let response = client
401 .request(proto::JoinProject {
402 project_id: remote_id,
403 })
404 .await?;
405
406 let response = match response.variant.ok_or_else(|| anyhow!("missing variant"))? {
407 proto::join_project_response::Variant::Accept(response) => response,
408 proto::join_project_response::Variant::Decline(decline) => {
409 match proto::join_project_response::decline::Reason::from_i32(decline.reason) {
410 Some(proto::join_project_response::decline::Reason::Declined) => {
411 Err(JoinProjectError::HostDeclined)?
412 }
413 Some(proto::join_project_response::decline::Reason::Closed) => {
414 Err(JoinProjectError::HostClosedProject)?
415 }
416 Some(proto::join_project_response::decline::Reason::WentOffline) => {
417 Err(JoinProjectError::HostWentOffline)?
418 }
419 None => Err(anyhow!("missing decline reason"))?,
420 }
421 }
422 };
423
424 let replica_id = response.replica_id as ReplicaId;
425
426 let mut worktrees = Vec::new();
427 for worktree in response.worktrees {
428 let (worktree, load_task) = cx
429 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
430 worktrees.push(worktree);
431 load_task.detach();
432 }
433
434 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
435 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
436 let handle = cx.weak_handle();
437 project_store.update(cx, |store, cx| store.add_project(handle, cx));
438
439 let mut this = Self {
440 worktrees: Vec::new(),
441 loading_buffers: Default::default(),
442 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
443 shared_buffers: Default::default(),
444 loading_local_worktrees: Default::default(),
445 active_entry: None,
446 collaborators: Default::default(),
447 languages,
448 user_store: user_store.clone(),
449 project_store,
450 fs,
451 next_entry_id: Default::default(),
452 next_diagnostic_group_id: Default::default(),
453 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
454 client: client.clone(),
455 client_state: ProjectClientState::Remote {
456 sharing_has_stopped: false,
457 remote_id,
458 replica_id,
459 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
460 async move {
461 let mut status = client.status();
462 let is_connected =
463 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we're initially connected, any subsequent status change
                            // means we were momentarily disconnected.
465 if !is_connected || status.next().await.is_some() {
466 if let Some(this) = this.upgrade(&cx) {
467 this.update(&mut cx, |this, cx| this.removed_from_project(cx))
468 }
469 }
470 Ok(())
471 }
472 .log_err()
473 }),
474 },
475 language_servers: Default::default(),
476 started_language_servers: Default::default(),
477 language_server_settings: Default::default(),
478 language_server_statuses: response
479 .language_servers
480 .into_iter()
481 .map(|server| {
482 (
483 server.id as usize,
484 LanguageServerStatus {
485 name: server.name,
486 pending_work: Default::default(),
487 pending_diagnostic_updates: 0,
488 },
489 )
490 })
491 .collect(),
492 last_workspace_edits_by_language_server: Default::default(),
493 next_language_server_id: 0,
494 opened_buffers: Default::default(),
495 buffer_snapshots: Default::default(),
496 nonce: StdRng::from_entropy().gen(),
497 };
498 for worktree in worktrees {
499 this.add_worktree(&worktree, cx);
500 }
501 this
502 });
503
504 let user_ids = response
505 .collaborators
506 .iter()
507 .map(|peer| peer.user_id)
508 .collect();
509 user_store
510 .update(&mut cx, |user_store, cx| user_store.get_users(user_ids, cx))
511 .await?;
512 let mut collaborators = HashMap::default();
513 for message in response.collaborators {
514 let collaborator = Collaborator::from_proto(message, &user_store, &mut cx).await?;
515 collaborators.insert(collaborator.peer_id, collaborator);
516 }
517
518 this.update(&mut cx, |this, _| {
519 this.collaborators = collaborators;
520 });
521
522 Ok(this)
523 }
524
525 #[cfg(any(test, feature = "test-support"))]
526 pub async fn test(
527 fs: Arc<dyn Fs>,
528 root_paths: impl IntoIterator<Item = &Path>,
529 cx: &mut gpui::TestAppContext,
530 ) -> ModelHandle<Project> {
531 let languages = Arc::new(LanguageRegistry::test());
532 let http_client = client::test::FakeHttpClient::with_404_response();
533 let client = client::Client::new(http_client.clone());
534 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
535 let project_store = cx.add_model(|_| ProjectStore::default());
536 let project = cx.update(|cx| {
537 Project::local(true, client, user_store, project_store, languages, fs, cx)
538 });
539 for path in root_paths {
540 let (tree, _) = project
541 .update(cx, |project, cx| {
542 project.find_or_create_local_worktree(path, true, cx)
543 })
544 .await
545 .unwrap();
546 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
547 .await;
548 }
549 project
550 }
551
552 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
553 self.opened_buffers
554 .get(&remote_id)
555 .and_then(|buffer| buffer.upgrade(cx))
556 }
557
558 pub fn languages(&self) -> &Arc<LanguageRegistry> {
559 &self.languages
560 }
561
562 pub fn client(&self) -> Arc<Client> {
563 self.client.clone()
564 }
565
566 pub fn user_store(&self) -> ModelHandle<UserStore> {
567 self.user_store.clone()
568 }
569
570 #[cfg(any(test, feature = "test-support"))]
571 pub fn check_invariants(&self, cx: &AppContext) {
572 if self.is_local() {
573 let mut worktree_root_paths = HashMap::default();
574 for worktree in self.worktrees(cx) {
575 let worktree = worktree.read(cx);
576 let abs_path = worktree.as_local().unwrap().abs_path().clone();
577 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
578 assert_eq!(
579 prev_worktree_id,
580 None,
581 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
582 abs_path,
583 worktree.id(),
584 prev_worktree_id
585 )
586 }
587 } else {
588 let replica_id = self.replica_id();
589 for buffer in self.opened_buffers.values() {
590 if let Some(buffer) = buffer.upgrade(cx) {
591 let buffer = buffer.read(cx);
592 assert_eq!(
593 buffer.deferred_ops_len(),
594 0,
595 "replica {}, buffer {} has deferred operations",
596 replica_id,
597 buffer.remote_id()
598 );
599 }
600 }
601 }
602 }
603
604 #[cfg(any(test, feature = "test-support"))]
605 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
606 let path = path.into();
607 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
608 self.opened_buffers.iter().any(|(_, buffer)| {
609 if let Some(buffer) = buffer.upgrade(cx) {
610 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
611 if file.worktree == worktree && file.path() == &path.path {
612 return true;
613 }
614 }
615 }
616 false
617 })
618 } else {
619 false
620 }
621 }
622
623 pub fn fs(&self) -> &Arc<dyn Fs> {
624 &self.fs
625 }
626
627 pub fn set_public(&mut self, is_public: bool, cx: &mut ModelContext<Self>) {
628 if let ProjectClientState::Local { public_tx, .. } = &mut self.client_state {
629 *public_tx.borrow_mut() = is_public;
630 self.metadata_changed(cx);
631 }
632 }
633
634 pub fn is_public(&self) -> bool {
635 match &self.client_state {
636 ProjectClientState::Local { public_rx, .. } => *public_rx.borrow(),
637 ProjectClientState::Remote { .. } => true,
638 }
639 }
640
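    /// Takes the project offline: unshares it, unregisters each local worktree,
    /// and releases the project's remote id with the server.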
641 fn unregister(&mut self, cx: &mut ModelContext<Self>) {
642 self.unshared(cx);
643 for worktree in &self.worktrees {
644 if let Some(worktree) = worktree.upgrade(cx) {
645 worktree.update(cx, |worktree, _| {
646 worktree.as_local_mut().unwrap().unregister();
647 });
648 }
649 }
650
651 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
652 let mut remote_id = remote_id_tx.borrow_mut();
653 if let Some(remote_id) = *remote_id {
654 self.client
655 .send(proto::UnregisterProject {
656 project_id: remote_id,
657 })
658 .log_err();
659 }
660 *remote_id = None;
661 }
662
663 self.subscriptions.clear();
664 self.metadata_changed(cx);
665 }
666
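    /// Requests a remote id from the server (unless one has already been
    /// assigned) and registers each local worktree under it.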
667 fn register(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
668 if let ProjectClientState::Local { remote_id_rx, .. } = &self.client_state {
669 if remote_id_rx.borrow().is_some() {
670 return Task::ready(Ok(()));
671 }
672 }
673
674 let response = self.client.request(proto::RegisterProject {});
675 cx.spawn(|this, mut cx| async move {
676 let remote_id = response.await?.project_id;
677
678 let mut registrations = Vec::new();
679 this.update(&mut cx, |this, cx| {
680 if let ProjectClientState::Local { remote_id_tx, .. } = &mut this.client_state {
681 *remote_id_tx.borrow_mut() = Some(remote_id);
682 }
683
684 this.metadata_changed(cx);
685 cx.emit(Event::RemoteIdChanged(Some(remote_id)));
686
687 this.subscriptions
688 .push(this.client.add_model_for_remote_entity(remote_id, cx));
689
690 for worktree in &this.worktrees {
691 if let Some(worktree) = worktree.upgrade(cx) {
692 registrations.push(worktree.update(cx, |worktree, cx| {
693 let worktree = worktree.as_local_mut().unwrap();
694 worktree.register(remote_id, cx)
695 }));
696 }
697 }
698 });
699
700 futures::future::try_join_all(registrations).await?;
701 Ok(())
702 })
703 }
704
705 pub fn remote_id(&self) -> Option<u64> {
706 match &self.client_state {
707 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
708 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
709 }
710 }
711
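    /// Resolves to the project's remote id, waiting for one to be assigned if a
    /// local project hasn't been registered yet.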
712 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
713 let mut id = None;
714 let mut watch = None;
715 match &self.client_state {
716 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
717 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
718 }
719
720 async move {
721 if let Some(id) = id {
722 return id;
723 }
724 let mut watch = watch.unwrap();
725 loop {
726 let id = *watch.borrow();
727 if let Some(id) = id {
728 return id;
729 }
730 watch.next().await;
731 }
732 }
733 }
734
735 pub fn shared_remote_id(&self) -> Option<u64> {
736 match &self.client_state {
737 ProjectClientState::Local {
738 remote_id_rx,
739 is_shared,
740 ..
741 } => {
742 if *is_shared {
743 *remote_id_rx.borrow()
744 } else {
745 None
746 }
747 }
748 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
749 }
750 }
751
752 pub fn replica_id(&self) -> ReplicaId {
753 match &self.client_state {
754 ProjectClientState::Local { .. } => 0,
755 ProjectClientState::Remote { replica_id, .. } => *replica_id,
756 }
757 }
758
759 fn metadata_changed(&mut self, cx: &mut ModelContext<Self>) {
760 self.project_store.update(cx, |_, cx| cx.notify());
761 }
762
763 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
764 &self.collaborators
765 }
766
767 pub fn worktrees<'a>(
768 &'a self,
769 cx: &'a AppContext,
770 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
771 self.worktrees
772 .iter()
773 .filter_map(move |worktree| worktree.upgrade(cx))
774 }
775
776 pub fn visible_worktrees<'a>(
777 &'a self,
778 cx: &'a AppContext,
779 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
780 self.worktrees.iter().filter_map(|worktree| {
781 worktree.upgrade(cx).and_then(|worktree| {
782 if worktree.read(cx).is_visible() {
783 Some(worktree)
784 } else {
785 None
786 }
787 })
788 })
789 }
790
791 pub fn worktree_root_names<'a>(&'a self, cx: &'a AppContext) -> impl Iterator<Item = &'a str> {
792 self.visible_worktrees(cx)
793 .map(|tree| tree.read(cx).root_name())
794 }
795
796 pub fn worktree_for_id(
797 &self,
798 id: WorktreeId,
799 cx: &AppContext,
800 ) -> Option<ModelHandle<Worktree>> {
801 self.worktrees(cx)
802 .find(|worktree| worktree.read(cx).id() == id)
803 }
804
805 pub fn worktree_for_entry(
806 &self,
807 entry_id: ProjectEntryId,
808 cx: &AppContext,
809 ) -> Option<ModelHandle<Worktree>> {
810 self.worktrees(cx)
811 .find(|worktree| worktree.read(cx).contains_entry(entry_id))
812 }
813
814 pub fn worktree_id_for_entry(
815 &self,
816 entry_id: ProjectEntryId,
817 cx: &AppContext,
818 ) -> Option<WorktreeId> {
819 self.worktree_for_entry(entry_id, cx)
820 .map(|worktree| worktree.read(cx).id())
821 }
822
823 pub fn contains_paths(&self, paths: &[PathBuf], cx: &AppContext) -> bool {
824 paths.iter().all(|path| self.contains_path(&path, cx))
825 }
826
827 pub fn contains_path(&self, path: &Path, cx: &AppContext) -> bool {
828 for worktree in self.worktrees(cx) {
829 let worktree = worktree.read(cx).as_local();
830 if worktree.map_or(false, |w| w.contains_abs_path(path)) {
831 return true;
832 }
833 }
834 false
835 }
836
837 pub fn create_entry(
838 &mut self,
839 project_path: impl Into<ProjectPath>,
840 is_directory: bool,
841 cx: &mut ModelContext<Self>,
842 ) -> Option<Task<Result<Entry>>> {
843 let project_path = project_path.into();
844 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
845 if self.is_local() {
846 Some(worktree.update(cx, |worktree, cx| {
847 worktree
848 .as_local_mut()
849 .unwrap()
850 .create_entry(project_path.path, is_directory, cx)
851 }))
852 } else {
853 let client = self.client.clone();
854 let project_id = self.remote_id().unwrap();
855 Some(cx.spawn_weak(|_, mut cx| async move {
856 let response = client
857 .request(proto::CreateProjectEntry {
858 worktree_id: project_path.worktree_id.to_proto(),
859 project_id,
860 path: project_path.path.as_os_str().as_bytes().to_vec(),
861 is_directory,
862 })
863 .await?;
864 let entry = response
865 .entry
866 .ok_or_else(|| anyhow!("missing entry in response"))?;
867 worktree
868 .update(&mut cx, |worktree, cx| {
869 worktree.as_remote().unwrap().insert_entry(
870 entry,
871 response.worktree_scan_id as usize,
872 cx,
873 )
874 })
875 .await
876 }))
877 }
878 }
879
880 pub fn copy_entry(
881 &mut self,
882 entry_id: ProjectEntryId,
883 new_path: impl Into<Arc<Path>>,
884 cx: &mut ModelContext<Self>,
885 ) -> Option<Task<Result<Entry>>> {
886 let worktree = self.worktree_for_entry(entry_id, cx)?;
887 let new_path = new_path.into();
888 if self.is_local() {
889 worktree.update(cx, |worktree, cx| {
890 worktree
891 .as_local_mut()
892 .unwrap()
893 .copy_entry(entry_id, new_path, cx)
894 })
895 } else {
896 let client = self.client.clone();
897 let project_id = self.remote_id().unwrap();
898
899 Some(cx.spawn_weak(|_, mut cx| async move {
900 let response = client
901 .request(proto::CopyProjectEntry {
902 project_id,
903 entry_id: entry_id.to_proto(),
904 new_path: new_path.as_os_str().as_bytes().to_vec(),
905 })
906 .await?;
907 let entry = response
908 .entry
909 .ok_or_else(|| anyhow!("missing entry in response"))?;
910 worktree
911 .update(&mut cx, |worktree, cx| {
912 worktree.as_remote().unwrap().insert_entry(
913 entry,
914 response.worktree_scan_id as usize,
915 cx,
916 )
917 })
918 .await
919 }))
920 }
921 }
922
923 pub fn rename_entry(
924 &mut self,
925 entry_id: ProjectEntryId,
926 new_path: impl Into<Arc<Path>>,
927 cx: &mut ModelContext<Self>,
928 ) -> Option<Task<Result<Entry>>> {
929 let worktree = self.worktree_for_entry(entry_id, cx)?;
930 let new_path = new_path.into();
931 if self.is_local() {
932 worktree.update(cx, |worktree, cx| {
933 worktree
934 .as_local_mut()
935 .unwrap()
936 .rename_entry(entry_id, new_path, cx)
937 })
938 } else {
939 let client = self.client.clone();
940 let project_id = self.remote_id().unwrap();
941
942 Some(cx.spawn_weak(|_, mut cx| async move {
943 let response = client
944 .request(proto::RenameProjectEntry {
945 project_id,
946 entry_id: entry_id.to_proto(),
947 new_path: new_path.as_os_str().as_bytes().to_vec(),
948 })
949 .await?;
950 let entry = response
951 .entry
952 .ok_or_else(|| anyhow!("missing entry in response"))?;
953 worktree
954 .update(&mut cx, |worktree, cx| {
955 worktree.as_remote().unwrap().insert_entry(
956 entry,
957 response.worktree_scan_id as usize,
958 cx,
959 )
960 })
961 .await
962 }))
963 }
964 }
965
966 pub fn delete_entry(
967 &mut self,
968 entry_id: ProjectEntryId,
969 cx: &mut ModelContext<Self>,
970 ) -> Option<Task<Result<()>>> {
971 let worktree = self.worktree_for_entry(entry_id, cx)?;
972 if self.is_local() {
973 worktree.update(cx, |worktree, cx| {
974 worktree.as_local_mut().unwrap().delete_entry(entry_id, cx)
975 })
976 } else {
977 let client = self.client.clone();
978 let project_id = self.remote_id().unwrap();
979 Some(cx.spawn_weak(|_, mut cx| async move {
980 let response = client
981 .request(proto::DeleteProjectEntry {
982 project_id,
983 entry_id: entry_id.to_proto(),
984 })
985 .await?;
986 worktree
987 .update(&mut cx, move |worktree, cx| {
988 worktree.as_remote().unwrap().delete_entry(
989 entry_id,
990 response.worktree_scan_id as usize,
991 cx,
992 )
993 })
994 .await
995 }))
996 }
997 }
998
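    /// Shares the registered project with guests: open buffers and worktrees are
    /// upgraded to strong handles so they stay alive, and each worktree is shared
    /// under the project's remote id.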
999 fn share(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
1000 let project_id;
1001 if let ProjectClientState::Local {
1002 remote_id_rx,
1003 is_shared,
1004 ..
1005 } = &mut self.client_state
1006 {
1007 if *is_shared {
1008 return Task::ready(Ok(()));
1009 }
1010 *is_shared = true;
1011 if let Some(id) = *remote_id_rx.borrow() {
1012 project_id = id;
1013 } else {
1014 return Task::ready(Err(anyhow!("project hasn't been registered")));
1015 }
1016 } else {
1017 return Task::ready(Err(anyhow!("can't share a remote project")));
1018 };
1019
1020 for open_buffer in self.opened_buffers.values_mut() {
1021 match open_buffer {
1022 OpenBuffer::Strong(_) => {}
1023 OpenBuffer::Weak(buffer) => {
1024 if let Some(buffer) = buffer.upgrade(cx) {
1025 *open_buffer = OpenBuffer::Strong(buffer);
1026 }
1027 }
1028 OpenBuffer::Loading(_) => unreachable!(),
1029 }
1030 }
1031
1032 for worktree_handle in self.worktrees.iter_mut() {
1033 match worktree_handle {
1034 WorktreeHandle::Strong(_) => {}
1035 WorktreeHandle::Weak(worktree) => {
1036 if let Some(worktree) = worktree.upgrade(cx) {
1037 *worktree_handle = WorktreeHandle::Strong(worktree);
1038 }
1039 }
1040 }
1041 }
1042
1043 let mut tasks = Vec::new();
1044 for worktree in self.worktrees(cx).collect::<Vec<_>>() {
1045 worktree.update(cx, |worktree, cx| {
1046 let worktree = worktree.as_local_mut().unwrap();
1047 tasks.push(worktree.share(project_id, cx));
1048 });
1049 }
1050
1051 cx.spawn(|this, mut cx| async move {
1052 for task in tasks {
1053 task.await?;
1054 }
1055 this.update(&mut cx, |_, cx| cx.notify());
1056 Ok(())
1057 })
1058 }
1059
1060 fn unshared(&mut self, cx: &mut ModelContext<Self>) {
1061 if let ProjectClientState::Local { is_shared, .. } = &mut self.client_state {
1062 if !*is_shared {
1063 return;
1064 }
1065
1066 *is_shared = false;
1067 self.collaborators.clear();
1068 self.shared_buffers.clear();
1069 for worktree_handle in self.worktrees.iter_mut() {
1070 if let WorktreeHandle::Strong(worktree) = worktree_handle {
1071 let is_visible = worktree.update(cx, |worktree, _| {
1072 worktree.as_local_mut().unwrap().unshare();
1073 worktree.is_visible()
1074 });
1075 if !is_visible {
1076 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
1077 }
1078 }
1079 }
1080
            for open_buffer in self.opened_buffers.values_mut() {
                if let OpenBuffer::Strong(buffer) = open_buffer {
                    *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                }
            }
1089
1090 cx.notify();
1091 } else {
1092 log::error!("attempted to unshare a remote project");
1093 }
1094 }
1095
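    /// Responds to a guest's request to join this project, ensuring the project
    /// is shared before the response is sent.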
1096 pub fn respond_to_join_request(
1097 &mut self,
1098 requester_id: u64,
1099 allow: bool,
1100 cx: &mut ModelContext<Self>,
1101 ) {
1102 if let Some(project_id) = self.remote_id() {
1103 let share = self.share(cx);
1104 let client = self.client.clone();
1105 cx.foreground()
1106 .spawn(async move {
1107 share.await?;
1108 client.send(proto::RespondToJoinProjectRequest {
1109 requester_id,
1110 project_id,
1111 allow,
1112 })
1113 })
1114 .detach_and_log_err(cx);
1115 }
1116 }
1117
1118 fn removed_from_project(&mut self, cx: &mut ModelContext<Self>) {
1119 if let ProjectClientState::Remote {
1120 sharing_has_stopped,
1121 ..
1122 } = &mut self.client_state
1123 {
1124 *sharing_has_stopped = true;
1125 self.collaborators.clear();
1126 cx.notify();
1127 }
1128 }
1129
1130 pub fn is_read_only(&self) -> bool {
1131 match &self.client_state {
1132 ProjectClientState::Local { .. } => false,
1133 ProjectClientState::Remote {
1134 sharing_has_stopped,
1135 ..
1136 } => *sharing_has_stopped,
1137 }
1138 }
1139
1140 pub fn is_local(&self) -> bool {
1141 match &self.client_state {
1142 ProjectClientState::Local { .. } => true,
1143 ProjectClientState::Remote { .. } => false,
1144 }
1145 }
1146
1147 pub fn is_remote(&self) -> bool {
1148 !self.is_local()
1149 }
1150
1151 pub fn create_buffer(
1152 &mut self,
1153 text: &str,
1154 language: Option<Arc<Language>>,
1155 cx: &mut ModelContext<Self>,
1156 ) -> Result<ModelHandle<Buffer>> {
1157 if self.is_remote() {
1158 return Err(anyhow!("creating buffers as a guest is not supported yet"));
1159 }
1160
1161 let buffer = cx.add_model(|cx| {
1162 Buffer::new(self.replica_id(), text, cx)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1164 });
1165 self.register_buffer(&buffer, cx)?;
1166 Ok(buffer)
1167 }
1168
1169 pub fn open_path(
1170 &mut self,
1171 path: impl Into<ProjectPath>,
1172 cx: &mut ModelContext<Self>,
1173 ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
1174 let task = self.open_buffer(path, cx);
1175 cx.spawn_weak(|_, cx| async move {
1176 let buffer = task.await?;
1177 let project_entry_id = buffer
1178 .read_with(&cx, |buffer, cx| {
1179 File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
1180 })
1181 .ok_or_else(|| anyhow!("no project entry"))?;
1182 Ok((project_entry_id, buffer.into()))
1183 })
1184 }
1185
1186 pub fn open_local_buffer(
1187 &mut self,
1188 abs_path: impl AsRef<Path>,
1189 cx: &mut ModelContext<Self>,
1190 ) -> Task<Result<ModelHandle<Buffer>>> {
1191 if let Some((worktree, relative_path)) = self.find_local_worktree(abs_path.as_ref(), cx) {
1192 self.open_buffer((worktree.read(cx).id(), relative_path), cx)
1193 } else {
1194 Task::ready(Err(anyhow!("no such path")))
1195 }
1196 }
1197
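    /// Opens the buffer for the given project path, reusing an already-open
    /// buffer or joining an in-flight load for the same path when possible.
    /// A rough usage sketch, assuming the `(WorktreeId, impl AsRef<Path>)`
    /// conversion into `ProjectPath` and a `worktree_id` already in scope:
    ///
    /// ```ignore
    /// let buffer = project
    ///     .update(cx, |project, cx| project.open_buffer((worktree_id, "src/lib.rs"), cx))
    ///     .await?;
    /// ```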
1198 pub fn open_buffer(
1199 &mut self,
1200 path: impl Into<ProjectPath>,
1201 cx: &mut ModelContext<Self>,
1202 ) -> Task<Result<ModelHandle<Buffer>>> {
1203 let project_path = path.into();
1204 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
1205 worktree
1206 } else {
1207 return Task::ready(Err(anyhow!("no such worktree")));
1208 };
1209
1210 // If there is already a buffer for the given path, then return it.
1211 let existing_buffer = self.get_open_buffer(&project_path, cx);
1212 if let Some(existing_buffer) = existing_buffer {
1213 return Task::ready(Ok(existing_buffer));
1214 }
1215
1216 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
1217 // If the given path is already being loaded, then wait for that existing
1218 // task to complete and return the same buffer.
1219 hash_map::Entry::Occupied(e) => e.get().clone(),
1220
1221 // Otherwise, record the fact that this path is now being loaded.
1222 hash_map::Entry::Vacant(entry) => {
1223 let (mut tx, rx) = postage::watch::channel();
1224 entry.insert(rx.clone());
1225
1226 let load_buffer = if worktree.read(cx).is_local() {
1227 self.open_local_buffer_internal(&project_path.path, &worktree, cx)
1228 } else {
1229 self.open_remote_buffer_internal(&project_path.path, &worktree, cx)
1230 };
1231
1232 cx.spawn(move |this, mut cx| async move {
1233 let load_result = load_buffer.await;
1234 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
1235 // Record the fact that the buffer is no longer loading.
1236 this.loading_buffers.remove(&project_path);
1237 let buffer = load_result.map_err(Arc::new)?;
1238 Ok(buffer)
1239 }));
1240 })
1241 .detach();
1242 rx
1243 }
1244 };
1245
1246 cx.foreground().spawn(async move {
1247 loop {
1248 if let Some(result) = loading_watch.borrow().as_ref() {
1249 match result {
1250 Ok(buffer) => return Ok(buffer.clone()),
1251 Err(error) => return Err(anyhow!("{}", error)),
1252 }
1253 }
1254 loading_watch.next().await;
1255 }
1256 })
1257 }
1258
1259 fn open_local_buffer_internal(
1260 &mut self,
1261 path: &Arc<Path>,
1262 worktree: &ModelHandle<Worktree>,
1263 cx: &mut ModelContext<Self>,
1264 ) -> Task<Result<ModelHandle<Buffer>>> {
1265 let load_buffer = worktree.update(cx, |worktree, cx| {
1266 let worktree = worktree.as_local_mut().unwrap();
1267 worktree.load_buffer(path, cx)
1268 });
1269 cx.spawn(|this, mut cx| async move {
1270 let buffer = load_buffer.await?;
1271 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
1272 Ok(buffer)
1273 })
1274 }
1275
1276 fn open_remote_buffer_internal(
1277 &mut self,
1278 path: &Arc<Path>,
1279 worktree: &ModelHandle<Worktree>,
1280 cx: &mut ModelContext<Self>,
1281 ) -> Task<Result<ModelHandle<Buffer>>> {
1282 let rpc = self.client.clone();
1283 let project_id = self.remote_id().unwrap();
1284 let remote_worktree_id = worktree.read(cx).id();
1285 let path = path.clone();
1286 let path_string = path.to_string_lossy().to_string();
1287 cx.spawn(|this, mut cx| async move {
1288 let response = rpc
1289 .request(proto::OpenBufferByPath {
1290 project_id,
1291 worktree_id: remote_worktree_id.to_proto(),
1292 path: path_string,
1293 })
1294 .await?;
1295 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
1296 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1297 .await
1298 })
1299 }
1300
1301 fn open_local_buffer_via_lsp(
1302 &mut self,
1303 abs_path: lsp::Url,
1304 lsp_adapter: Arc<dyn LspAdapter>,
1305 lsp_server: Arc<LanguageServer>,
1306 cx: &mut ModelContext<Self>,
1307 ) -> Task<Result<ModelHandle<Buffer>>> {
1308 cx.spawn(|this, mut cx| async move {
1309 let abs_path = abs_path
1310 .to_file_path()
1311 .map_err(|_| anyhow!("can't convert URI to path"))?;
1312 let (worktree, relative_path) = if let Some(result) =
1313 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
1314 {
1315 result
1316 } else {
1317 let worktree = this
1318 .update(&mut cx, |this, cx| {
1319 this.create_local_worktree(&abs_path, false, cx)
1320 })
1321 .await?;
1322 this.update(&mut cx, |this, cx| {
1323 this.language_servers.insert(
1324 (worktree.read(cx).id(), lsp_adapter.name()),
1325 (lsp_adapter, lsp_server),
1326 );
1327 });
1328 (worktree, PathBuf::new())
1329 };
1330
1331 let project_path = ProjectPath {
1332 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
1333 path: relative_path.into(),
1334 };
1335 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
1336 .await
1337 })
1338 }
1339
1340 pub fn open_buffer_by_id(
1341 &mut self,
1342 id: u64,
1343 cx: &mut ModelContext<Self>,
1344 ) -> Task<Result<ModelHandle<Buffer>>> {
1345 if let Some(buffer) = self.buffer_for_id(id, cx) {
1346 Task::ready(Ok(buffer))
1347 } else if self.is_local() {
1348 Task::ready(Err(anyhow!("buffer {} does not exist", id)))
1349 } else if let Some(project_id) = self.remote_id() {
1350 let request = self
1351 .client
1352 .request(proto::OpenBufferById { project_id, id });
1353 cx.spawn(|this, mut cx| async move {
1354 let buffer = request
1355 .await?
1356 .buffer
1357 .ok_or_else(|| anyhow!("invalid buffer"))?;
1358 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1359 .await
1360 })
1361 } else {
1362 Task::ready(Err(anyhow!("cannot open buffer while disconnected")))
1363 }
1364 }
1365
1366 pub fn save_buffer_as(
1367 &mut self,
1368 buffer: ModelHandle<Buffer>,
1369 abs_path: PathBuf,
1370 cx: &mut ModelContext<Project>,
1371 ) -> Task<Result<()>> {
1372 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
1373 let old_path =
1374 File::from_dyn(buffer.read(cx).file()).and_then(|f| Some(f.as_local()?.abs_path(cx)));
1375 cx.spawn(|this, mut cx| async move {
1376 if let Some(old_path) = old_path {
1377 this.update(&mut cx, |this, cx| {
1378 this.unregister_buffer_from_language_server(&buffer, old_path, cx);
1379 });
1380 }
1381 let (worktree, path) = worktree_task.await?;
1382 worktree
1383 .update(&mut cx, |worktree, cx| {
1384 worktree
1385 .as_local_mut()
1386 .unwrap()
1387 .save_buffer_as(buffer.clone(), path, cx)
1388 })
1389 .await?;
1390 this.update(&mut cx, |this, cx| {
1391 this.assign_language_to_buffer(&buffer, cx);
1392 this.register_buffer_with_language_server(&buffer, cx);
1393 });
1394 Ok(())
1395 })
1396 }
1397
1398 pub fn get_open_buffer(
1399 &mut self,
1400 path: &ProjectPath,
1401 cx: &mut ModelContext<Self>,
1402 ) -> Option<ModelHandle<Buffer>> {
1403 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
1404 self.opened_buffers.values().find_map(|buffer| {
1405 let buffer = buffer.upgrade(cx)?;
1406 let file = File::from_dyn(buffer.read(cx).file())?;
1407 if file.worktree == worktree && file.path() == &path.path {
1408 Some(buffer)
1409 } else {
1410 None
1411 }
1412 })
1413 }
1414
1415 fn register_buffer(
1416 &mut self,
1417 buffer: &ModelHandle<Buffer>,
1418 cx: &mut ModelContext<Self>,
1419 ) -> Result<()> {
1420 let remote_id = buffer.read(cx).remote_id();
1421 let open_buffer = if self.is_remote() || self.is_shared() {
1422 OpenBuffer::Strong(buffer.clone())
1423 } else {
1424 OpenBuffer::Weak(buffer.downgrade())
1425 };
1426
1427 match self.opened_buffers.insert(remote_id, open_buffer) {
1428 None => {}
1429 Some(OpenBuffer::Loading(operations)) => {
1430 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
1431 }
1432 Some(OpenBuffer::Weak(existing_handle)) => {
1433 if existing_handle.upgrade(cx).is_some() {
1434 Err(anyhow!(
1435 "already registered buffer with remote id {}",
1436 remote_id
1437 ))?
1438 }
1439 }
1440 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
1441 "already registered buffer with remote id {}",
1442 remote_id
1443 ))?,
1444 }
1445 cx.subscribe(buffer, |this, buffer, event, cx| {
1446 this.on_buffer_event(buffer, event, cx);
1447 })
1448 .detach();
1449
1450 self.assign_language_to_buffer(buffer, cx);
1451 self.register_buffer_with_language_server(buffer, cx);
1452 cx.observe_release(buffer, |this, buffer, cx| {
1453 if let Some(file) = File::from_dyn(buffer.file()) {
1454 if file.is_local() {
1455 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1456 if let Some((_, server)) = this.language_server_for_buffer(buffer, cx) {
1457 server
1458 .notify::<lsp::notification::DidCloseTextDocument>(
1459 lsp::DidCloseTextDocumentParams {
1460 text_document: lsp::TextDocumentIdentifier::new(uri.clone()),
1461 },
1462 )
1463 .log_err();
1464 }
1465 }
1466 }
1467 })
1468 .detach();
1469
1470 Ok(())
1471 }
1472
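    /// If the buffer is a local file with a running language server, sends
    /// `textDocument/didOpen` for it and records the snapshot that later
    /// incremental `didChange` notifications will be diffed against.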
1473 fn register_buffer_with_language_server(
1474 &mut self,
1475 buffer_handle: &ModelHandle<Buffer>,
1476 cx: &mut ModelContext<Self>,
1477 ) {
1478 let buffer = buffer_handle.read(cx);
1479 let buffer_id = buffer.remote_id();
1480 if let Some(file) = File::from_dyn(buffer.file()) {
1481 if file.is_local() {
1482 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1483 let initial_snapshot = buffer.text_snapshot();
1484
1485 let mut language_server = None;
1486 let mut language_id = None;
1487 if let Some(language) = buffer.language() {
1488 let worktree_id = file.worktree_id(cx);
1489 if let Some(adapter) = language.lsp_adapter() {
1490 language_id = adapter.id_for_language(language.name().as_ref());
1491 language_server = self
1492 .language_servers
1493 .get(&(worktree_id, adapter.name()))
1494 .cloned();
1495 }
1496 }
1497
1498 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
1499 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
1500 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
1501 .log_err();
1502 }
1503 }
1504
1505 if let Some((_, server)) = language_server {
1506 server
1507 .notify::<lsp::notification::DidOpenTextDocument>(
1508 lsp::DidOpenTextDocumentParams {
1509 text_document: lsp::TextDocumentItem::new(
1510 uri,
1511 language_id.unwrap_or_default(),
1512 0,
1513 initial_snapshot.text(),
1514 ),
1515 }
1516 .clone(),
1517 )
1518 .log_err();
1519 buffer_handle.update(cx, |buffer, cx| {
1520 buffer.set_completion_triggers(
1521 server
1522 .capabilities()
1523 .completion_provider
1524 .as_ref()
1525 .and_then(|provider| provider.trigger_characters.clone())
                            .unwrap_or_default(),
1527 cx,
1528 )
1529 });
1530 self.buffer_snapshots
1531 .insert(buffer_id, vec![(0, initial_snapshot)]);
1532 }
1533 }
1534 }
1535 }
1536
1537 fn unregister_buffer_from_language_server(
1538 &mut self,
1539 buffer: &ModelHandle<Buffer>,
1540 old_path: PathBuf,
1541 cx: &mut ModelContext<Self>,
1542 ) {
1543 buffer.update(cx, |buffer, cx| {
1544 buffer.update_diagnostics(Default::default(), cx);
1545 self.buffer_snapshots.remove(&buffer.remote_id());
1546 if let Some((_, language_server)) = self.language_server_for_buffer(buffer, cx) {
1547 language_server
1548 .notify::<lsp::notification::DidCloseTextDocument>(
1549 lsp::DidCloseTextDocumentParams {
1550 text_document: lsp::TextDocumentIdentifier::new(
1551 lsp::Url::from_file_path(old_path).unwrap(),
1552 ),
1553 },
1554 )
1555 .log_err();
1556 }
1557 });
1558 }
1559
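    /// Reacts to buffer events: sends edit operations to the server while the
    /// project is shared, and forwards edits and saves to the buffer's language
    /// server as `didChange` and `didSave` notifications.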
1560 fn on_buffer_event(
1561 &mut self,
1562 buffer: ModelHandle<Buffer>,
1563 event: &BufferEvent,
1564 cx: &mut ModelContext<Self>,
1565 ) -> Option<()> {
1566 match event {
1567 BufferEvent::Operation(operation) => {
1568 if let Some(project_id) = self.shared_remote_id() {
1569 let request = self.client.request(proto::UpdateBuffer {
1570 project_id,
1571 buffer_id: buffer.read(cx).remote_id(),
1572 operations: vec![language::proto::serialize_operation(&operation)],
1573 });
1574 cx.background().spawn(request).detach_and_log_err(cx);
1575 }
1576 }
1577 BufferEvent::Edited { .. } => {
1578 let (_, language_server) = self
1579 .language_server_for_buffer(buffer.read(cx), cx)?
1580 .clone();
1581 let buffer = buffer.read(cx);
1582 let file = File::from_dyn(buffer.file())?;
1583 let abs_path = file.as_local()?.abs_path(cx);
1584 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1585 let buffer_snapshots = self.buffer_snapshots.get_mut(&buffer.remote_id())?;
1586 let (version, prev_snapshot) = buffer_snapshots.last()?;
1587 let next_snapshot = buffer.text_snapshot();
1588 let next_version = version + 1;
1589
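                // Convert the edits made since the last snapshot we sent into LSP
                // incremental content changes. Edit starts are expressed in the new
                // snapshot's coordinates, so the old end is recovered by offsetting
                // the start by the old range's extent.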
1590 let content_changes = buffer
1591 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1592 .map(|edit| {
1593 let edit_start = edit.new.start.0;
1594 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1595 let new_text = next_snapshot
1596 .text_for_range(edit.new.start.1..edit.new.end.1)
1597 .collect();
1598 lsp::TextDocumentContentChangeEvent {
1599 range: Some(lsp::Range::new(
1600 point_to_lsp(edit_start),
1601 point_to_lsp(edit_end),
1602 )),
1603 range_length: None,
1604 text: new_text,
1605 }
1606 })
1607 .collect();
1608
1609 buffer_snapshots.push((next_version, next_snapshot));
1610
1611 language_server
1612 .notify::<lsp::notification::DidChangeTextDocument>(
1613 lsp::DidChangeTextDocumentParams {
1614 text_document: lsp::VersionedTextDocumentIdentifier::new(
1615 uri,
1616 next_version,
1617 ),
1618 content_changes,
1619 },
1620 )
1621 .log_err();
1622 }
1623 BufferEvent::Saved => {
1624 let file = File::from_dyn(buffer.read(cx).file())?;
1625 let worktree_id = file.worktree_id(cx);
1626 let abs_path = file.as_local()?.abs_path(cx);
1627 let text_document = lsp::TextDocumentIdentifier {
1628 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1629 };
1630
1631 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1632 server
1633 .notify::<lsp::notification::DidSaveTextDocument>(
1634 lsp::DidSaveTextDocumentParams {
1635 text_document: text_document.clone(),
1636 text: None,
1637 },
1638 )
1639 .log_err();
1640 }
1641 }
1642 _ => {}
1643 }
1644
1645 None
1646 }
1647
1648 fn language_servers_for_worktree(
1649 &self,
1650 worktree_id: WorktreeId,
1651 ) -> impl Iterator<Item = &(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
1652 self.language_servers.iter().filter_map(
1653 move |((language_server_worktree_id, _), server)| {
1654 if *language_server_worktree_id == worktree_id {
1655 Some(server)
1656 } else {
1657 None
1658 }
1659 },
1660 )
1661 }
1662
1663 fn assign_language_to_buffer(
1664 &mut self,
1665 buffer: &ModelHandle<Buffer>,
1666 cx: &mut ModelContext<Self>,
1667 ) -> Option<()> {
1668 // If the buffer has a language, set it and start the language server if we haven't already.
1669 let full_path = buffer.read(cx).file()?.full_path(cx);
1670 let language = self.languages.select_language(&full_path)?;
1671 buffer.update(cx, |buffer, cx| {
1672 buffer.set_language(Some(language.clone()), cx);
1673 });
1674
1675 let file = File::from_dyn(buffer.read(cx).file())?;
1676 let worktree = file.worktree.read(cx).as_local()?;
1677 let worktree_id = worktree.id();
1678 let worktree_abs_path = worktree.abs_path().clone();
1679 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1680
1681 None
1682 }
1683
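    /// Starts a language server for the given language in the given worktree,
    /// unless one has already been started for that (worktree, server) pair.
    /// Once initialized, the server is wired up to diagnostics, progress,
    /// configuration, and workspace-edit callbacks, and every matching buffer
    /// that is already open is reported to it via `textDocument/didOpen`.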
1684 fn start_language_server(
1685 &mut self,
1686 worktree_id: WorktreeId,
1687 worktree_path: Arc<Path>,
1688 language: Arc<Language>,
1689 cx: &mut ModelContext<Self>,
1690 ) {
1691 let adapter = if let Some(adapter) = language.lsp_adapter() {
1692 adapter
1693 } else {
1694 return;
1695 };
1696 let key = (worktree_id, adapter.name());
1697 self.started_language_servers
1698 .entry(key.clone())
1699 .or_insert_with(|| {
1700 let server_id = post_inc(&mut self.next_language_server_id);
1701 let language_server = self.languages.start_language_server(
1702 server_id,
1703 language.clone(),
1704 worktree_path,
1705 self.client.http_client(),
1706 cx,
1707 );
1708 cx.spawn_weak(|this, mut cx| async move {
1709 let language_server = language_server?.await.log_err()?;
1710 let language_server = language_server
1711 .initialize(adapter.initialization_options())
1712 .await
1713 .log_err()?;
1714 let this = this.upgrade(&cx)?;
1715 let disk_based_diagnostics_progress_token =
1716 adapter.disk_based_diagnostics_progress_token();
1717
1718 language_server
1719 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1720 let this = this.downgrade();
1721 let adapter = adapter.clone();
1722 move |params, mut cx| {
1723 if let Some(this) = this.upgrade(&cx) {
1724 this.update(&mut cx, |this, cx| {
1725 this.on_lsp_diagnostics_published(
1726 server_id,
1727 params,
1728 &adapter,
1729 disk_based_diagnostics_progress_token,
1730 cx,
1731 );
1732 });
1733 }
1734 }
1735 })
1736 .detach();
1737
1738 language_server
1739 .on_request::<lsp::request::WorkspaceConfiguration, _, _>({
1740 let settings = this
1741 .read_with(&cx, |this, _| this.language_server_settings.clone());
1742 move |params, _| {
1743 let settings = settings.lock().clone();
1744 async move {
1745 Ok(params
1746 .items
1747 .into_iter()
1748 .map(|item| {
1749 if let Some(section) = &item.section {
1750 settings
1751 .get(section)
1752 .cloned()
1753 .unwrap_or(serde_json::Value::Null)
1754 } else {
1755 settings.clone()
1756 }
1757 })
1758 .collect())
1759 }
1760 }
1761 })
1762 .detach();
1763
1764 language_server
1765 .on_request::<lsp::request::ApplyWorkspaceEdit, _, _>({
1766 let this = this.downgrade();
1767 let adapter = adapter.clone();
1768 let language_server = language_server.clone();
1769 move |params, cx| {
1770 Self::on_lsp_workspace_edit(
1771 this,
1772 params,
1773 server_id,
1774 adapter.clone(),
1775 language_server.clone(),
1776 cx,
1777 )
1778 }
1779 })
1780 .detach();
1781
1782 language_server
1783 .on_notification::<lsp::notification::Progress, _>({
1784 let this = this.downgrade();
1785 move |params, mut cx| {
1786 if let Some(this) = this.upgrade(&cx) {
1787 this.update(&mut cx, |this, cx| {
1788 this.on_lsp_progress(
1789 params,
1790 server_id,
1791 disk_based_diagnostics_progress_token,
1792 cx,
1793 );
1794 });
1795 }
1796 }
1797 })
1798 .detach();
1799
1800 this.update(&mut cx, |this, cx| {
1801 this.language_servers
1802 .insert(key.clone(), (adapter.clone(), language_server.clone()));
1803 this.language_server_statuses.insert(
1804 server_id,
1805 LanguageServerStatus {
1806 name: language_server.name().to_string(),
1807 pending_work: Default::default(),
1808 pending_diagnostic_updates: 0,
1809 },
1810 );
1811 language_server
1812 .notify::<lsp::notification::DidChangeConfiguration>(
1813 lsp::DidChangeConfigurationParams {
1814 settings: this.language_server_settings.lock().clone(),
1815 },
1816 )
1817 .ok();
1818
1819 if let Some(project_id) = this.shared_remote_id() {
1820 this.client
1821 .send(proto::StartLanguageServer {
1822 project_id,
1823 server: Some(proto::LanguageServer {
1824 id: server_id as u64,
1825 name: language_server.name().to_string(),
1826 }),
1827 })
1828 .log_err();
1829 }
1830
1831 // Tell the language server about every open buffer in the worktree that matches the language.
1832 for buffer in this.opened_buffers.values() {
1833 if let Some(buffer_handle) = buffer.upgrade(cx) {
1834 let buffer = buffer_handle.read(cx);
1835 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1836 file
1837 } else {
1838 continue;
1839 };
1840 let language = if let Some(language) = buffer.language() {
1841 language
1842 } else {
1843 continue;
1844 };
1845 if file.worktree.read(cx).id() != key.0
1846 || language.lsp_adapter().map(|a| a.name())
1847 != Some(key.1.clone())
1848 {
1849 continue;
1850 }
1851
1852 let file = file.as_local()?;
1853 let versions = this
1854 .buffer_snapshots
1855 .entry(buffer.remote_id())
1856 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1857 let (version, initial_snapshot) = versions.last().unwrap();
1858 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1859 let language_id = adapter.id_for_language(language.name().as_ref());
1860 language_server
1861 .notify::<lsp::notification::DidOpenTextDocument>(
1862 lsp::DidOpenTextDocumentParams {
1863 text_document: lsp::TextDocumentItem::new(
1864 uri,
1865 language_id.unwrap_or_default(),
1866 *version,
1867 initial_snapshot.text(),
1868 ),
1869 },
1870 )
1871 .log_err()?;
1872 buffer_handle.update(cx, |buffer, cx| {
1873 buffer.set_completion_triggers(
1874 language_server
1875 .capabilities()
1876 .completion_provider
1877 .as_ref()
1878 .and_then(|provider| {
1879 provider.trigger_characters.clone()
1880 })
                                        .unwrap_or_default(),
1882 cx,
1883 )
1884 });
1885 }
1886 }
1887
1888 cx.notify();
1889 Some(())
1890 });
1891
1892 Some(language_server)
1893 })
1894 });
1895 }
1896
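    /// Restarts the language servers used by the given buffers, grouped by
    /// worktree and language.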
1897 pub fn restart_language_servers_for_buffers(
1898 &mut self,
1899 buffers: impl IntoIterator<Item = ModelHandle<Buffer>>,
1900 cx: &mut ModelContext<Self>,
1901 ) -> Option<()> {
1902 let language_server_lookup_info: HashSet<(WorktreeId, Arc<Path>, PathBuf)> = buffers
1903 .into_iter()
1904 .filter_map(|buffer| {
1905 let file = File::from_dyn(buffer.read(cx).file())?;
1906 let worktree = file.worktree.read(cx).as_local()?;
1907 let worktree_id = worktree.id();
1908 let worktree_abs_path = worktree.abs_path().clone();
1909 let full_path = file.full_path(cx);
1910 Some((worktree_id, worktree_abs_path, full_path))
1911 })
1912 .collect();
1913 for (worktree_id, worktree_abs_path, full_path) in language_server_lookup_info {
1914 let language = self.languages.select_language(&full_path)?;
1915 self.restart_language_server(worktree_id, worktree_abs_path, language, cx);
1916 }
1917
1918 None
1919 }
1920
1921 fn restart_language_server(
1922 &mut self,
1923 worktree_id: WorktreeId,
1924 worktree_path: Arc<Path>,
1925 language: Arc<Language>,
1926 cx: &mut ModelContext<Self>,
1927 ) {
1928 let adapter = if let Some(adapter) = language.lsp_adapter() {
1929 adapter
1930 } else {
1931 return;
1932 };
1933 let key = (worktree_id, adapter.name());
1934 let server_to_shutdown = self.language_servers.remove(&key);
1935 self.started_language_servers.remove(&key);
1936 if let Some((_, server)) = server_to_shutdown.as_ref() {
1937 self.language_server_statuses.remove(&server.server_id());
1938 }
1939 cx.spawn_weak(|this, mut cx| async move {
1940 if let Some(this) = this.upgrade(&cx) {
1941 if let Some((_, server_to_shutdown)) = server_to_shutdown {
1942 if let Some(shutdown_task) = server_to_shutdown.shutdown() {
1943 shutdown_task.await;
1944 }
1945 }
1946
1947 this.update(&mut cx, |this, cx| {
1948 this.start_language_server(worktree_id, worktree_path, language, cx);
1949 });
1950 }
1951 })
1952 .detach();
1953 }
1954
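/// Handles a `textDocument/publishDiagnostics` notification from a language server.
/// The adapter gets a chance to post-process the params first. When the server does not
/// report disk-based diagnostics through a progress token, each publish is treated as a
/// complete disk-based pass, so the started/finished events are emitted (and broadcast
/// to collaborators) around the update.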
1955 fn on_lsp_diagnostics_published(
1956 &mut self,
1957 server_id: usize,
1958 mut params: lsp::PublishDiagnosticsParams,
1959 adapter: &Arc<dyn LspAdapter>,
1960 disk_based_diagnostics_progress_token: Option<&str>,
1961 cx: &mut ModelContext<Self>,
1962 ) {
1963 adapter.process_diagnostics(&mut params);
1964 if disk_based_diagnostics_progress_token.is_none() {
1965 self.disk_based_diagnostics_started(cx);
1966 self.broadcast_language_server_update(
1967 server_id,
1968 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
1969 proto::LspDiskBasedDiagnosticsUpdating {},
1970 ),
1971 );
1972 }
1973 self.update_diagnostics(params, adapter.disk_based_diagnostic_sources(), cx)
1974 .log_err();
1975 if disk_based_diagnostics_progress_token.is_none() {
1976 self.disk_based_diagnostics_finished(cx);
1977 self.broadcast_language_server_update(
1978 server_id,
1979 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
1980 proto::LspDiskBasedDiagnosticsUpdated {},
1981 ),
1982 );
1983 }
1984 }
1985
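/// Routes `$/progress` notifications. Work tagged with the server's disk-based
/// diagnostics token is counted in `pending_diagnostic_updates`, so nested begin/end
/// pairs emit only one started/finished event; all other tokens are tracked as
/// `pending_work` on the server's status for display in the UI. Numeric tokens are
/// currently skipped. Every transition is also broadcast to collaborators.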
1986 fn on_lsp_progress(
1987 &mut self,
1988 progress: lsp::ProgressParams,
1989 server_id: usize,
1990 disk_based_diagnostics_progress_token: Option<&str>,
1991 cx: &mut ModelContext<Self>,
1992 ) {
1993 let token = match progress.token {
1994 lsp::NumberOrString::String(token) => token,
1995 lsp::NumberOrString::Number(token) => {
1996 log::info!("skipping numeric progress token {}", token);
1997 return;
1998 }
1999 };
2000 let progress = match progress.value {
2001 lsp::ProgressParamsValue::WorkDone(value) => value,
2002 };
2003 let language_server_status =
2004 if let Some(status) = self.language_server_statuses.get_mut(&server_id) {
2005 status
2006 } else {
2007 return;
2008 };
2009 match progress {
2010 lsp::WorkDoneProgress::Begin(_) => {
2011 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2012 language_server_status.pending_diagnostic_updates += 1;
2013 if language_server_status.pending_diagnostic_updates == 1 {
2014 self.disk_based_diagnostics_started(cx);
2015 self.broadcast_language_server_update(
2016 server_id,
2017 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(
2018 proto::LspDiskBasedDiagnosticsUpdating {},
2019 ),
2020 );
2021 }
2022 } else {
2023 self.on_lsp_work_start(server_id, token.clone(), cx);
2024 self.broadcast_language_server_update(
2025 server_id,
2026 proto::update_language_server::Variant::WorkStart(proto::LspWorkStart {
2027 token,
2028 }),
2029 );
2030 }
2031 }
2032 lsp::WorkDoneProgress::Report(report) => {
2033 if Some(token.as_str()) != disk_based_diagnostics_progress_token {
2034 self.on_lsp_work_progress(
2035 server_id,
2036 token.clone(),
2037 LanguageServerProgress {
2038 message: report.message.clone(),
2039 percentage: report.percentage.map(|p| p as usize),
2040 last_update_at: Instant::now(),
2041 },
2042 cx,
2043 );
2044 self.broadcast_language_server_update(
2045 server_id,
2046 proto::update_language_server::Variant::WorkProgress(
2047 proto::LspWorkProgress {
2048 token,
2049 message: report.message,
2050 percentage: report.percentage.map(|p| p as u32),
2051 },
2052 ),
2053 );
2054 }
2055 }
2056 lsp::WorkDoneProgress::End(_) => {
2057 if Some(token.as_str()) == disk_based_diagnostics_progress_token {
2058 language_server_status.pending_diagnostic_updates -= 1;
2059 if language_server_status.pending_diagnostic_updates == 0 {
2060 self.disk_based_diagnostics_finished(cx);
2061 self.broadcast_language_server_update(
2062 server_id,
2063 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
2064 proto::LspDiskBasedDiagnosticsUpdated {},
2065 ),
2066 );
2067 }
2068 } else {
2069 self.on_lsp_work_end(server_id, token.clone(), cx);
2070 self.broadcast_language_server_update(
2071 server_id,
2072 proto::update_language_server::Variant::WorkEnd(proto::LspWorkEnd {
2073 token,
2074 }),
2075 );
2076 }
2077 }
2078 }
2079 }
2080
2081 fn on_lsp_work_start(
2082 &mut self,
2083 language_server_id: usize,
2084 token: String,
2085 cx: &mut ModelContext<Self>,
2086 ) {
2087 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2088 status.pending_work.insert(
2089 token,
2090 LanguageServerProgress {
2091 message: None,
2092 percentage: None,
2093 last_update_at: Instant::now(),
2094 },
2095 );
2096 cx.notify();
2097 }
2098 }
2099
2100 fn on_lsp_work_progress(
2101 &mut self,
2102 language_server_id: usize,
2103 token: String,
2104 progress: LanguageServerProgress,
2105 cx: &mut ModelContext<Self>,
2106 ) {
2107 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2108 status.pending_work.insert(token, progress);
2109 cx.notify();
2110 }
2111 }
2112
2113 fn on_lsp_work_end(
2114 &mut self,
2115 language_server_id: usize,
2116 token: String,
2117 cx: &mut ModelContext<Self>,
2118 ) {
2119 if let Some(status) = self.language_server_statuses.get_mut(&language_server_id) {
2120 status.pending_work.remove(&token);
2121 cx.notify();
2122 }
2123 }
2124
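/// Handles a `workspace/applyEdit` request sent by a language server. The edit is
/// applied through `deserialize_workspace_edit`, and the resulting `ProjectTransaction`
/// is stashed in `last_workspace_edits_by_language_server` so that a caller awaiting a
/// `workspace/executeCommand` round trip (see `apply_code_action`) can retrieve it. The
/// response always reports the edit as applied.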
2125 async fn on_lsp_workspace_edit(
2126 this: WeakModelHandle<Self>,
2127 params: lsp::ApplyWorkspaceEditParams,
2128 server_id: usize,
2129 adapter: Arc<dyn LspAdapter>,
2130 language_server: Arc<LanguageServer>,
2131 mut cx: AsyncAppContext,
2132 ) -> Result<lsp::ApplyWorkspaceEditResponse> {
2133 let this = this
2134 .upgrade(&cx)
2135 .ok_or_else(|| anyhow!("project closed"))?;
2136 let transaction = Self::deserialize_workspace_edit(
2137 this.clone(),
2138 params.edit,
2139 true,
2140 adapter.clone(),
2141 language_server.clone(),
2142 &mut cx,
2143 )
2144 .await
2145 .log_err();
2146 this.update(&mut cx, |this, _| {
2147 if let Some(transaction) = transaction {
2148 this.last_workspace_edits_by_language_server
2149 .insert(server_id, transaction);
2150 }
2151 });
2152 Ok(lsp::ApplyWorkspaceEditResponse {
2153 applied: true,
2154 failed_change: None,
2155 failure_reason: None,
2156 })
2157 }
2158
2159 fn broadcast_language_server_update(
2160 &self,
2161 language_server_id: usize,
2162 event: proto::update_language_server::Variant,
2163 ) {
2164 if let Some(project_id) = self.shared_remote_id() {
2165 self.client
2166 .send(proto::UpdateLanguageServer {
2167 project_id,
2168 language_server_id: language_server_id as u64,
2169 variant: Some(event),
2170 })
2171 .log_err();
2172 }
2173 }
2174
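/// Pushes new language-server settings to every running server via
/// `workspace/didChangeConfiguration` and stores them so that servers started later
/// receive the same configuration.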
2175 pub fn set_language_server_settings(&mut self, settings: serde_json::Value) {
2176 for (_, server) in self.language_servers.values() {
2177 server
2178 .notify::<lsp::notification::DidChangeConfiguration>(
2179 lsp::DidChangeConfigurationParams {
2180 settings: settings.clone(),
2181 },
2182 )
2183 .ok();
2184 }
2185 *self.language_server_settings.lock() = settings;
2186 }
2187
2188 pub fn language_server_statuses(
2189 &self,
2190 ) -> impl DoubleEndedIterator<Item = &LanguageServerStatus> {
2191 self.language_server_statuses.values()
2192 }
2193
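/// Converts an LSP `publishDiagnostics` payload into grouped diagnostic entries. A
/// diagnostic whose related information does not point back at an existing primary
/// starts a new group, and its `relatedInformation` entries in the same file are added
/// to that group as non-primary members. A diagnostic that does point back at an earlier
/// primary (with the same source and code) is treated as supporting: no new entry is
/// created, and its severity and "unnecessary" tag are merged into the matching
/// non-primary member in a second pass. For example, a compiler error plus the
/// separately-published note that references it would typically collapse into one group.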
2194 pub fn update_diagnostics(
2195 &mut self,
2196 params: lsp::PublishDiagnosticsParams,
2197 disk_based_sources: &[&str],
2198 cx: &mut ModelContext<Self>,
2199 ) -> Result<()> {
2200 let abs_path = params
2201 .uri
2202 .to_file_path()
2203 .map_err(|_| anyhow!("URI is not a file"))?;
2204 let mut diagnostics = Vec::default();
2205 let mut primary_diagnostic_group_ids = HashMap::default();
2206 let mut sources_by_group_id = HashMap::default();
2207 let mut supporting_diagnostics = HashMap::default();
2208 for diagnostic in &params.diagnostics {
2209 let source = diagnostic.source.as_ref();
2210 let code = diagnostic.code.as_ref().map(|code| match code {
2211 lsp::NumberOrString::Number(code) => code.to_string(),
2212 lsp::NumberOrString::String(code) => code.clone(),
2213 });
2214 let range = range_from_lsp(diagnostic.range);
2215 let is_supporting = diagnostic
2216 .related_information
2217 .as_ref()
2218 .map_or(false, |infos| {
2219 infos.iter().any(|info| {
2220 primary_diagnostic_group_ids.contains_key(&(
2221 source,
2222 code.clone(),
2223 range_from_lsp(info.location.range),
2224 ))
2225 })
2226 });
2227
2228 let is_unnecessary = diagnostic.tags.as_ref().map_or(false, |tags| {
2229 tags.iter().any(|tag| *tag == DiagnosticTag::UNNECESSARY)
2230 });
2231
2232 if is_supporting {
2233 supporting_diagnostics.insert(
2234 (source, code.clone(), range),
2235 (diagnostic.severity, is_unnecessary),
2236 );
2237 } else {
2238 let group_id = post_inc(&mut self.next_diagnostic_group_id);
2239 let is_disk_based = source.map_or(false, |source| {
2240 disk_based_sources.contains(&source.as_str())
2241 });
2242
2243 sources_by_group_id.insert(group_id, source);
2244 primary_diagnostic_group_ids
2245 .insert((source, code.clone(), range.clone()), group_id);
2246
2247 diagnostics.push(DiagnosticEntry {
2248 range,
2249 diagnostic: Diagnostic {
2250 code: code.clone(),
2251 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
2252 message: diagnostic.message.clone(),
2253 group_id,
2254 is_primary: true,
2255 is_valid: true,
2256 is_disk_based,
2257 is_unnecessary,
2258 },
2259 });
2260 if let Some(infos) = &diagnostic.related_information {
2261 for info in infos {
2262 if info.location.uri == params.uri && !info.message.is_empty() {
2263 let range = range_from_lsp(info.location.range);
2264 diagnostics.push(DiagnosticEntry {
2265 range,
2266 diagnostic: Diagnostic {
2267 code: code.clone(),
2268 severity: DiagnosticSeverity::INFORMATION,
2269 message: info.message.clone(),
2270 group_id,
2271 is_primary: false,
2272 is_valid: true,
2273 is_disk_based,
2274 is_unnecessary: false,
2275 },
2276 });
2277 }
2278 }
2279 }
2280 }
2281 }
2282
2283 for entry in &mut diagnostics {
2284 let diagnostic = &mut entry.diagnostic;
2285 if !diagnostic.is_primary {
2286 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
2287 if let Some(&(severity, is_unnecessary)) = supporting_diagnostics.get(&(
2288 source,
2289 diagnostic.code.clone(),
2290 entry.range.clone(),
2291 )) {
2292 if let Some(severity) = severity {
2293 diagnostic.severity = severity;
2294 }
2295 diagnostic.is_unnecessary = is_unnecessary;
2296 }
2297 }
2298 }
2299
2300 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
2301 Ok(())
2302 }
2303
2304 pub fn update_diagnostic_entries(
2305 &mut self,
2306 abs_path: PathBuf,
2307 version: Option<i32>,
2308 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2309 cx: &mut ModelContext<Project>,
2310 ) -> Result<(), anyhow::Error> {
2311 let (worktree, relative_path) = self
2312 .find_local_worktree(&abs_path, cx)
2313 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
2314 if !worktree.read(cx).is_visible() {
2315 return Ok(());
2316 }
2317
2318 let project_path = ProjectPath {
2319 worktree_id: worktree.read(cx).id(),
2320 path: relative_path.into(),
2321 };
2322 if let Some(buffer) = self.get_open_buffer(&project_path, cx) {
2323 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
2324 }
2325
2326 let updated = worktree.update(cx, |worktree, cx| {
2327 worktree
2328 .as_local_mut()
2329 .ok_or_else(|| anyhow!("not a local worktree"))?
2330 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
2331 })?;
2332 if updated {
2333 cx.emit(Event::DiagnosticsUpdated(project_path));
2334 }
2335 Ok(())
2336 }
2337
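/// Applies freshly published diagnostics to an open buffer. Entries are sorted, ranges
/// of disk-based diagnostics are translated through the edits made since the last save
/// (they describe the file on disk, not the buffer), everything is clipped to the
/// snapshot matching the reported LSP version, and empty ranges are widened by one
/// character.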
2338 fn update_buffer_diagnostics(
2339 &mut self,
2340 buffer: &ModelHandle<Buffer>,
2341 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
2342 version: Option<i32>,
2343 cx: &mut ModelContext<Self>,
2344 ) -> Result<()> {
2345 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
2346 Ordering::Equal
2347 .then_with(|| b.is_primary.cmp(&a.is_primary))
2348 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
2349 .then_with(|| a.severity.cmp(&b.severity))
2350 .then_with(|| a.message.cmp(&b.message))
2351 }
2352
2353 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
2354
2355 diagnostics.sort_unstable_by(|a, b| {
2356 Ordering::Equal
2357 .then_with(|| a.range.start.cmp(&b.range.start))
2358 .then_with(|| b.range.end.cmp(&a.range.end))
2359 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
2360 });
2361
2362 let mut sanitized_diagnostics = Vec::new();
2363 let edits_since_save = Patch::new(
2364 snapshot
2365 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
2366 .collect(),
2367 );
2368 for entry in diagnostics {
2369 let start;
2370 let end;
2371 if entry.diagnostic.is_disk_based {
2372 // Some diagnostics are based on files on disk instead of buffers'
2373 // current contents. Adjust these diagnostics' ranges to reflect
2374 // any unsaved edits.
2375 start = edits_since_save.old_to_new(entry.range.start);
2376 end = edits_since_save.old_to_new(entry.range.end);
2377 } else {
2378 start = entry.range.start;
2379 end = entry.range.end;
2380 }
2381
2382 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
2383 ..snapshot.clip_point_utf16(end, Bias::Right);
2384
2385 // Expand empty ranges by one character
2386 if range.start == range.end {
2387 range.end.column += 1;
2388 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
2389 if range.start == range.end && range.end.column > 0 {
2390 range.start.column -= 1;
2391 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
2392 }
2393 }
2394
2395 sanitized_diagnostics.push(DiagnosticEntry {
2396 range,
2397 diagnostic: entry.diagnostic,
2398 });
2399 }
2400 drop(edits_since_save);
2401
2402 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
2403 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
2404 Ok(())
2405 }
2406
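/// Reloads the given buffers from disk, skipping any that are not dirty. Remote buffers
/// are reloaded by the host via `proto::ReloadBuffers`, local buffers directly through
/// `Buffer::reload`; the per-buffer transactions are gathered into one
/// `ProjectTransaction` and are dropped from undo history unless `push_to_history` is
/// set.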
2407 pub fn reload_buffers(
2408 &self,
2409 buffers: HashSet<ModelHandle<Buffer>>,
2410 push_to_history: bool,
2411 cx: &mut ModelContext<Self>,
2412 ) -> Task<Result<ProjectTransaction>> {
2413 let mut local_buffers = Vec::new();
2414 let mut remote_buffers = None;
2415 for buffer_handle in buffers {
2416 let buffer = buffer_handle.read(cx);
2417 if buffer.is_dirty() {
2418 if let Some(file) = File::from_dyn(buffer.file()) {
2419 if file.is_local() {
2420 local_buffers.push(buffer_handle);
2421 } else {
2422 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2423 }
2424 }
2425 }
2426 }
2427
2428 let remote_buffers = self.remote_id().zip(remote_buffers);
2429 let client = self.client.clone();
2430
2431 cx.spawn(|this, mut cx| async move {
2432 let mut project_transaction = ProjectTransaction::default();
2433
2434 if let Some((project_id, remote_buffers)) = remote_buffers {
2435 let response = client
2436 .request(proto::ReloadBuffers {
2437 project_id,
2438 buffer_ids: remote_buffers
2439 .iter()
2440 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2441 .collect(),
2442 })
2443 .await?
2444 .transaction
2445 .ok_or_else(|| anyhow!("missing transaction"))?;
2446 project_transaction = this
2447 .update(&mut cx, |this, cx| {
2448 this.deserialize_project_transaction(response, push_to_history, cx)
2449 })
2450 .await?;
2451 }
2452
2453 for buffer in local_buffers {
2454 let transaction = buffer
2455 .update(&mut cx, |buffer, cx| buffer.reload(cx))
2456 .await?;
2457 buffer.update(&mut cx, |buffer, cx| {
2458 if let Some(transaction) = transaction {
2459 if !push_to_history {
2460 buffer.forget_transaction(transaction.id);
2461 }
2462 project_transaction.0.insert(cx.handle(), transaction);
2463 }
2464 });
2465 }
2466
2467 Ok(project_transaction)
2468 })
2469 }
2470
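/// Formats the given buffers. For local buffers the buffer's language server is asked
/// for `textDocument/formatting`, falling back to `textDocument/rangeFormatting` over
/// the whole buffer when only range formatting is advertised; buffers whose server
/// supports neither are skipped. Remote buffers are formatted by the host via
/// `proto::FormatBuffers`. The returned LSP edits are converted with `edits_from_lsp`
/// and applied as one transaction per buffer.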
2471 pub fn format(
2472 &self,
2473 buffers: HashSet<ModelHandle<Buffer>>,
2474 push_to_history: bool,
2475 cx: &mut ModelContext<Project>,
2476 ) -> Task<Result<ProjectTransaction>> {
2477 let mut local_buffers = Vec::new();
2478 let mut remote_buffers = None;
2479 for buffer_handle in buffers {
2480 let buffer = buffer_handle.read(cx);
2481 if let Some(file) = File::from_dyn(buffer.file()) {
2482 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
2483 if let Some((_, server)) = self.language_server_for_buffer(buffer, cx) {
2484 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
2485 }
2486 } else {
2487 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
2488 }
2489 } else {
2490 return Task::ready(Ok(Default::default()));
2491 }
2492 }
2493
2494 let remote_buffers = self.remote_id().zip(remote_buffers);
2495 let client = self.client.clone();
2496
2497 cx.spawn(|this, mut cx| async move {
2498 let mut project_transaction = ProjectTransaction::default();
2499
2500 if let Some((project_id, remote_buffers)) = remote_buffers {
2501 let response = client
2502 .request(proto::FormatBuffers {
2503 project_id,
2504 buffer_ids: remote_buffers
2505 .iter()
2506 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
2507 .collect(),
2508 })
2509 .await?
2510 .transaction
2511 .ok_or_else(|| anyhow!("missing transaction"))?;
2512 project_transaction = this
2513 .update(&mut cx, |this, cx| {
2514 this.deserialize_project_transaction(response, push_to_history, cx)
2515 })
2516 .await?;
2517 }
2518
2519 for (buffer, buffer_abs_path, language_server) in local_buffers {
2520 let text_document = lsp::TextDocumentIdentifier::new(
2521 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
2522 );
2523 let capabilities = &language_server.capabilities();
2524 let tab_size = cx.update(|cx| {
2525 let language_name = buffer.read(cx).language().map(|language| language.name());
2526 cx.global::<Settings>().tab_size(language_name.as_deref())
2527 });
2528 let lsp_edits = if capabilities
2529 .document_formatting_provider
2530 .as_ref()
2531 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2532 {
2533 language_server
2534 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
2535 text_document,
2536 options: lsp::FormattingOptions {
2537 tab_size,
2538 insert_spaces: true,
2539 insert_final_newline: Some(true),
2540 ..Default::default()
2541 },
2542 work_done_progress_params: Default::default(),
2543 })
2544 .await?
2545 } else if capabilities
2546 .document_range_formatting_provider
2547 .as_ref()
2548 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
2549 {
2550 let buffer_start = lsp::Position::new(0, 0);
2551 let buffer_end =
2552 buffer.read_with(&cx, |buffer, _| point_to_lsp(buffer.max_point_utf16()));
2553 language_server
2554 .request::<lsp::request::RangeFormatting>(
2555 lsp::DocumentRangeFormattingParams {
2556 text_document,
2557 range: lsp::Range::new(buffer_start, buffer_end),
2558 options: lsp::FormattingOptions {
2559 tab_size,
2560 insert_spaces: true,
2561 insert_final_newline: Some(true),
2562 ..Default::default()
2563 },
2564 work_done_progress_params: Default::default(),
2565 },
2566 )
2567 .await?
2568 } else {
2569 continue;
2570 };
2571
2572 if let Some(lsp_edits) = lsp_edits {
2573 let edits = this
2574 .update(&mut cx, |this, cx| {
2575 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
2576 })
2577 .await?;
2578 buffer.update(&mut cx, |buffer, cx| {
2579 buffer.finalize_last_transaction();
2580 buffer.start_transaction();
2581 for (range, text) in edits {
2582 buffer.edit([(range, text)], cx);
2583 }
2584 if buffer.end_transaction(cx).is_some() {
2585 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2586 if !push_to_history {
2587 buffer.forget_transaction(transaction.id);
2588 }
2589 project_transaction.0.insert(cx.handle(), transaction);
2590 }
2591 });
2592 }
2593 }
2594
2595 Ok(project_transaction)
2596 })
2597 }
2598
2599 pub fn definition<T: ToPointUtf16>(
2600 &self,
2601 buffer: &ModelHandle<Buffer>,
2602 position: T,
2603 cx: &mut ModelContext<Self>,
2604 ) -> Task<Result<Vec<Location>>> {
2605 let position = position.to_point_utf16(buffer.read(cx));
2606 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
2607 }
2608
2609 pub fn references<T: ToPointUtf16>(
2610 &self,
2611 buffer: &ModelHandle<Buffer>,
2612 position: T,
2613 cx: &mut ModelContext<Self>,
2614 ) -> Task<Result<Vec<Location>>> {
2615 let position = position.to_point_utf16(buffer.read(cx));
2616 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
2617 }
2618
2619 pub fn document_highlights<T: ToPointUtf16>(
2620 &self,
2621 buffer: &ModelHandle<Buffer>,
2622 position: T,
2623 cx: &mut ModelContext<Self>,
2624 ) -> Task<Result<Vec<DocumentHighlight>>> {
2625 let position = position.to_point_utf16(buffer.read(cx));
2626
2627 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
2628 }
2629
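/// Fans a `workspace/symbol` query out to every local language server and merges the
/// results. Each symbol's URI is mapped back to a worktree-relative path when it lies
/// inside an open worktree; otherwise the path is relativized against the originating
/// worktree's root. On remote projects the query is simply proxied to the host via
/// `proto::GetProjectSymbols`.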
2630 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
2631 if self.is_local() {
2632 let mut requests = Vec::new();
2633 for ((worktree_id, _), (lsp_adapter, language_server)) in self.language_servers.iter() {
2634 let worktree_id = *worktree_id;
2635 if let Some(worktree) = self
2636 .worktree_for_id(worktree_id, cx)
2637 .and_then(|worktree| worktree.read(cx).as_local())
2638 {
2639 let lsp_adapter = lsp_adapter.clone();
2640 let worktree_abs_path = worktree.abs_path().clone();
2641 requests.push(
2642 language_server
2643 .request::<lsp::request::WorkspaceSymbol>(lsp::WorkspaceSymbolParams {
2644 query: query.to_string(),
2645 ..Default::default()
2646 })
2647 .log_err()
2648 .map(move |response| {
2649 (
2650 lsp_adapter,
2651 worktree_id,
2652 worktree_abs_path,
2653 response.unwrap_or_default(),
2654 )
2655 }),
2656 );
2657 }
2658 }
2659
2660 cx.spawn_weak(|this, cx| async move {
2661 let responses = futures::future::join_all(requests).await;
2662 let this = if let Some(this) = this.upgrade(&cx) {
2663 this
2664 } else {
2665 return Ok(Default::default());
2666 };
2667 this.read_with(&cx, |this, cx| {
2668 let mut symbols = Vec::new();
2669 for (adapter, source_worktree_id, worktree_abs_path, response) in responses {
2670 symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
2671 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
2672 let mut worktree_id = source_worktree_id;
2673 let path;
2674 if let Some((worktree, rel_path)) =
2675 this.find_local_worktree(&abs_path, cx)
2676 {
2677 worktree_id = worktree.read(cx).id();
2678 path = rel_path;
2679 } else {
2680 path = relativize_path(&worktree_abs_path, &abs_path);
2681 }
2682
2683 let label = this
2684 .languages
2685 .select_language(&path)
2686 .and_then(|language| {
2687 language.label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
2688 })
2689 .unwrap_or_else(|| CodeLabel::plain(lsp_symbol.name.clone(), None));
2690 let signature = this.symbol_signature(worktree_id, &path);
2691
2692 Some(Symbol {
2693 source_worktree_id,
2694 worktree_id,
2695 language_server_name: adapter.name(),
2696 name: lsp_symbol.name,
2697 kind: lsp_symbol.kind,
2698 label,
2699 path,
2700 range: range_from_lsp(lsp_symbol.location.range),
2701 signature,
2702 })
2703 }));
2704 }
2705 Ok(symbols)
2706 })
2707 })
2708 } else if let Some(project_id) = self.remote_id() {
2709 let request = self.client.request(proto::GetProjectSymbols {
2710 project_id,
2711 query: query.to_string(),
2712 });
2713 cx.spawn_weak(|this, cx| async move {
2714 let response = request.await?;
2715 let mut symbols = Vec::new();
2716 if let Some(this) = this.upgrade(&cx) {
2717 this.read_with(&cx, |this, _| {
2718 symbols.extend(
2719 response
2720 .symbols
2721 .into_iter()
2722 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
2723 );
2724 })
2725 }
2726 Ok(symbols)
2727 })
2728 } else {
2729 Task::ready(Ok(Default::default()))
2730 }
2731 }
2732
2733 pub fn open_buffer_for_symbol(
2734 &mut self,
2735 symbol: &Symbol,
2736 cx: &mut ModelContext<Self>,
2737 ) -> Task<Result<ModelHandle<Buffer>>> {
2738 if self.is_local() {
2739 let (lsp_adapter, language_server) = if let Some(server) = self.language_servers.get(&(
2740 symbol.source_worktree_id,
2741 symbol.language_server_name.clone(),
2742 )) {
2743 server.clone()
2744 } else {
2745 return Task::ready(Err(anyhow!(
2746 "language server for worktree and language not found"
2747 )));
2748 };
2749
2750 let worktree_abs_path = if let Some(worktree_abs_path) = self
2751 .worktree_for_id(symbol.worktree_id, cx)
2752 .and_then(|worktree| worktree.read(cx).as_local())
2753 .map(|local_worktree| local_worktree.abs_path())
2754 {
2755 worktree_abs_path
2756 } else {
2757 return Task::ready(Err(anyhow!("worktree not found for symbol")));
2758 };
2759 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
2760 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
2761 uri
2762 } else {
2763 return Task::ready(Err(anyhow!("invalid symbol path")));
2764 };
2765
2766 self.open_local_buffer_via_lsp(symbol_uri, lsp_adapter, language_server, cx)
2767 } else if let Some(project_id) = self.remote_id() {
2768 let request = self.client.request(proto::OpenBufferForSymbol {
2769 project_id,
2770 symbol: Some(serialize_symbol(symbol)),
2771 });
2772 cx.spawn(|this, mut cx| async move {
2773 let response = request.await?;
2774 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
2775 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2776 .await
2777 })
2778 } else {
2779 Task::ready(Err(anyhow!("project does not have a remote id")))
2780 }
2781 }
2782
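/// Requests completions at the given position. For each LSP completion item the
/// replaced range is taken from the item's text edit when present (and the item is
/// dropped if that range does not clip cleanly to the current snapshot); items without
/// a text edit fall back to the word token surrounding the cursor. `InsertAndReplace`
/// edits are not supported and are filtered out. Remote projects go through
/// `proto::GetCompletions` and wait for the reported buffer version before
/// deserializing.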
2783 pub fn completions<T: ToPointUtf16>(
2784 &self,
2785 source_buffer_handle: &ModelHandle<Buffer>,
2786 position: T,
2787 cx: &mut ModelContext<Self>,
2788 ) -> Task<Result<Vec<Completion>>> {
2789 let source_buffer_handle = source_buffer_handle.clone();
2790 let source_buffer = source_buffer_handle.read(cx);
2791 let buffer_id = source_buffer.remote_id();
2792 let language = source_buffer.language().cloned();
2793 let worktree;
2794 let buffer_abs_path;
2795 if let Some(file) = File::from_dyn(source_buffer.file()) {
2796 worktree = file.worktree.clone();
2797 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2798 } else {
2799 return Task::ready(Ok(Default::default()));
2800 };
2801
2802 let position = position.to_point_utf16(source_buffer);
2803 let anchor = source_buffer.anchor_after(position);
2804
2805 if worktree.read(cx).as_local().is_some() {
2806 let buffer_abs_path = buffer_abs_path.unwrap();
2807 let (_, lang_server) =
2808 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
2809 server.clone()
2810 } else {
2811 return Task::ready(Ok(Default::default()));
2812 };
2813
2814 cx.spawn(|_, cx| async move {
2815 let completions = lang_server
2816 .request::<lsp::request::Completion>(lsp::CompletionParams {
2817 text_document_position: lsp::TextDocumentPositionParams::new(
2818 lsp::TextDocumentIdentifier::new(
2819 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2820 ),
2821 point_to_lsp(position),
2822 ),
2823 context: Default::default(),
2824 work_done_progress_params: Default::default(),
2825 partial_result_params: Default::default(),
2826 })
2827 .await
2828 .context("lsp completion request failed")?;
2829
2830 let completions = if let Some(completions) = completions {
2831 match completions {
2832 lsp::CompletionResponse::Array(completions) => completions,
2833 lsp::CompletionResponse::List(list) => list.items,
2834 }
2835 } else {
2836 Default::default()
2837 };
2838
2839 source_buffer_handle.read_with(&cx, |this, _| {
2840 let snapshot = this.snapshot();
2841 let clipped_position = this.clip_point_utf16(position, Bias::Left);
2842 let mut range_for_token = None;
2843 Ok(completions
2844 .into_iter()
2845 .filter_map(|lsp_completion| {
2846 let (old_range, new_text) = match lsp_completion.text_edit.as_ref() {
2847 // If the language server provides a range to overwrite, then
2848 // check that the range is valid.
2849 Some(lsp::CompletionTextEdit::Edit(edit)) => {
2850 let range = range_from_lsp(edit.range);
2851 let start = snapshot.clip_point_utf16(range.start, Bias::Left);
2852 let end = snapshot.clip_point_utf16(range.end, Bias::Left);
2853 if start != range.start || end != range.end {
2854 log::info!("completion out of expected range");
2855 return None;
2856 }
2857 (
2858 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2859 edit.new_text.clone(),
2860 )
2861 }
2862 // If the language server does not provide a range, then infer
2863 // the range based on the syntax tree.
2864 None => {
2865 if position != clipped_position {
2866 log::info!("completion out of expected range");
2867 return None;
2868 }
2869 let Range { start, end } = range_for_token
2870 .get_or_insert_with(|| {
2871 let offset = position.to_offset(&snapshot);
2872 snapshot
2873 .range_for_word_token_at(offset)
2874 .unwrap_or_else(|| offset..offset)
2875 })
2876 .clone();
2877 let text = lsp_completion
2878 .insert_text
2879 .as_ref()
2880 .unwrap_or(&lsp_completion.label)
2881 .clone();
2882 (
2883 snapshot.anchor_before(start)..snapshot.anchor_after(end),
2884 text,
2885 )
2886 }
2887 Some(lsp::CompletionTextEdit::InsertAndReplace(_)) => {
2888 log::info!("unsupported insert/replace completion");
2889 return None;
2890 }
2891 };
2892
2893 Some(Completion {
2894 old_range,
2895 new_text,
2896 label: language
2897 .as_ref()
2898 .and_then(|l| l.label_for_completion(&lsp_completion))
2899 .unwrap_or_else(|| {
2900 CodeLabel::plain(
2901 lsp_completion.label.clone(),
2902 lsp_completion.filter_text.as_deref(),
2903 )
2904 }),
2905 lsp_completion,
2906 })
2907 })
2908 .collect())
2909 })
2910 })
2911 } else if let Some(project_id) = self.remote_id() {
2912 let rpc = self.client.clone();
2913 let message = proto::GetCompletions {
2914 project_id,
2915 buffer_id,
2916 position: Some(language::proto::serialize_anchor(&anchor)),
2917 version: serialize_version(&source_buffer.version()),
2918 };
2919 cx.spawn_weak(|_, mut cx| async move {
2920 let response = rpc.request(message).await?;
2921
2922 source_buffer_handle
2923 .update(&mut cx, |buffer, _| {
2924 buffer.wait_for_version(deserialize_version(response.version))
2925 })
2926 .await;
2927
2928 response
2929 .completions
2930 .into_iter()
2931 .map(|completion| {
2932 language::proto::deserialize_completion(completion, language.as_ref())
2933 })
2934 .collect()
2935 })
2936 } else {
2937 Task::ready(Ok(Default::default()))
2938 }
2939 }
2940
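/// Applies any `additionalTextEdits` attached to an accepted completion (typically
/// auto-imports). The item is first resolved via `completionItem/resolve`; the extra
/// edits are applied as their own transaction, which is kept out of undo history unless
/// `push_to_history` is set. Remote projects delegate to the host.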
2941 pub fn apply_additional_edits_for_completion(
2942 &self,
2943 buffer_handle: ModelHandle<Buffer>,
2944 completion: Completion,
2945 push_to_history: bool,
2946 cx: &mut ModelContext<Self>,
2947 ) -> Task<Result<Option<Transaction>>> {
2948 let buffer = buffer_handle.read(cx);
2949 let buffer_id = buffer.remote_id();
2950
2951 if self.is_local() {
2952 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
2953 {
2954 server.clone()
2955 } else {
2956 return Task::ready(Ok(Default::default()));
2957 };
2958
2959 cx.spawn(|this, mut cx| async move {
2960 let resolved_completion = lang_server
2961 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2962 .await?;
2963 if let Some(edits) = resolved_completion.additional_text_edits {
2964 let edits = this
2965 .update(&mut cx, |this, cx| {
2966 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2967 })
2968 .await?;
2969 buffer_handle.update(&mut cx, |buffer, cx| {
2970 buffer.finalize_last_transaction();
2971 buffer.start_transaction();
2972 for (range, text) in edits {
2973 buffer.edit([(range, text)], cx);
2974 }
2975 let transaction = if buffer.end_transaction(cx).is_some() {
2976 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2977 if !push_to_history {
2978 buffer.forget_transaction(transaction.id);
2979 }
2980 Some(transaction)
2981 } else {
2982 None
2983 };
2984 Ok(transaction)
2985 })
2986 } else {
2987 Ok(None)
2988 }
2989 })
2990 } else if let Some(project_id) = self.remote_id() {
2991 let client = self.client.clone();
2992 cx.spawn(|_, mut cx| async move {
2993 let response = client
2994 .request(proto::ApplyCompletionAdditionalEdits {
2995 project_id,
2996 buffer_id,
2997 completion: Some(language::proto::serialize_completion(&completion)),
2998 })
2999 .await?;
3000
3001 if let Some(transaction) = response.transaction {
3002 let transaction = language::proto::deserialize_transaction(transaction)?;
3003 buffer_handle
3004 .update(&mut cx, |buffer, _| {
3005 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3006 })
3007 .await;
3008 if push_to_history {
3009 buffer_handle.update(&mut cx, |buffer, _| {
3010 buffer.push_transaction(transaction.clone(), Instant::now());
3011 });
3012 }
3013 Ok(Some(transaction))
3014 } else {
3015 Ok(None)
3016 }
3017 })
3018 } else {
3019 Task::ready(Err(anyhow!("project does not have a remote id")))
3020 }
3021 }
3022
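/// Requests code actions for the given range. Diagnostics overlapping the range are
/// passed along as context, and only the quickfix, refactor, refactor.extract, and
/// source kinds are requested. Plain commands in the response are ignored; only
/// `CodeAction` entries are returned, each paired with the anchored range so it can be
/// re-resolved later.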
3023 pub fn code_actions<T: Clone + ToOffset>(
3024 &self,
3025 buffer_handle: &ModelHandle<Buffer>,
3026 range: Range<T>,
3027 cx: &mut ModelContext<Self>,
3028 ) -> Task<Result<Vec<CodeAction>>> {
3029 let buffer_handle = buffer_handle.clone();
3030 let buffer = buffer_handle.read(cx);
3031 let snapshot = buffer.snapshot();
3032 let relevant_diagnostics = snapshot
3033 .diagnostics_in_range::<usize, usize>(range.to_offset(&snapshot), false)
3034 .map(|entry| entry.to_lsp_diagnostic_stub())
3035 .collect();
3036 let buffer_id = buffer.remote_id();
3037 let worktree;
3038 let buffer_abs_path;
3039 if let Some(file) = File::from_dyn(buffer.file()) {
3040 worktree = file.worktree.clone();
3041 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
3042 } else {
3043 return Task::ready(Ok(Default::default()));
3044 };
3045 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
3046
3047 if worktree.read(cx).as_local().is_some() {
3048 let buffer_abs_path = buffer_abs_path.unwrap();
3049 let (_, lang_server) = if let Some(server) = self.language_server_for_buffer(buffer, cx)
3050 {
3051 server.clone()
3052 } else {
3053 return Task::ready(Ok(Default::default()));
3054 };
3055
3056 let lsp_range = range_to_lsp(range.to_point_utf16(buffer));
3057 cx.foreground().spawn(async move {
3058 if lang_server.capabilities().code_action_provider.is_none() {
3059 return Ok(Default::default());
3060 }
3061
3062 Ok(lang_server
3063 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
3064 text_document: lsp::TextDocumentIdentifier::new(
3065 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
3066 ),
3067 range: lsp_range,
3068 work_done_progress_params: Default::default(),
3069 partial_result_params: Default::default(),
3070 context: lsp::CodeActionContext {
3071 diagnostics: relevant_diagnostics,
3072 only: Some(vec![
3073 lsp::CodeActionKind::QUICKFIX,
3074 lsp::CodeActionKind::REFACTOR,
3075 lsp::CodeActionKind::REFACTOR_EXTRACT,
3076 lsp::CodeActionKind::SOURCE,
3077 ]),
3078 },
3079 })
3080 .await?
3081 .unwrap_or_default()
3082 .into_iter()
3083 .filter_map(|entry| {
3084 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
3085 Some(CodeAction {
3086 range: range.clone(),
3087 lsp_action,
3088 })
3089 } else {
3090 None
3091 }
3092 })
3093 .collect())
3094 })
3095 } else if let Some(project_id) = self.remote_id() {
3096 let rpc = self.client.clone();
3097 let version = buffer.version();
3098 cx.spawn_weak(|_, mut cx| async move {
3099 let response = rpc
3100 .request(proto::GetCodeActions {
3101 project_id,
3102 buffer_id,
3103 start: Some(language::proto::serialize_anchor(&range.start)),
3104 end: Some(language::proto::serialize_anchor(&range.end)),
3105 version: serialize_version(&version),
3106 })
3107 .await?;
3108
3109 buffer_handle
3110 .update(&mut cx, |buffer, _| {
3111 buffer.wait_for_version(deserialize_version(response.version))
3112 })
3113 .await;
3114
3115 response
3116 .actions
3117 .into_iter()
3118 .map(language::proto::deserialize_code_action)
3119 .collect()
3120 })
3121 } else {
3122 Task::ready(Ok(Default::default()))
3123 }
3124 }
3125
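/// Applies a previously returned code action. If the action carries resolve data, its
/// range is refreshed and it is resolved via `codeAction/resolve`; otherwise the actions
/// for the range are re-requested and matched by title. An action with a workspace edit
/// is applied directly, while an action with a command is executed via
/// `workspace/executeCommand`, and any edit the server pushes back during execution is
/// recovered from `last_workspace_edits_by_language_server`.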
3126 pub fn apply_code_action(
3127 &self,
3128 buffer_handle: ModelHandle<Buffer>,
3129 mut action: CodeAction,
3130 push_to_history: bool,
3131 cx: &mut ModelContext<Self>,
3132 ) -> Task<Result<ProjectTransaction>> {
3133 if self.is_local() {
3134 let buffer = buffer_handle.read(cx);
3135 let (lsp_adapter, lang_server) =
3136 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
3137 server.clone()
3138 } else {
3139 return Task::ready(Ok(Default::default()));
3140 };
3141 let range = action.range.to_point_utf16(buffer);
3142
3143 cx.spawn(|this, mut cx| async move {
3144 if let Some(lsp_range) = action
3145 .lsp_action
3146 .data
3147 .as_mut()
3148 .and_then(|d| d.get_mut("codeActionParams"))
3149 .and_then(|d| d.get_mut("range"))
3150 {
3151 *lsp_range = serde_json::to_value(&range_to_lsp(range)).unwrap();
3152 action.lsp_action = lang_server
3153 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
3154 .await?;
3155 } else {
3156 let actions = this
3157 .update(&mut cx, |this, cx| {
3158 this.code_actions(&buffer_handle, action.range, cx)
3159 })
3160 .await?;
3161 action.lsp_action = actions
3162 .into_iter()
3163 .find(|a| a.lsp_action.title == action.lsp_action.title)
3164 .ok_or_else(|| anyhow!("code action is outdated"))?
3165 .lsp_action;
3166 }
3167
3168 if let Some(edit) = action.lsp_action.edit {
3169 Self::deserialize_workspace_edit(
3170 this,
3171 edit,
3172 push_to_history,
3173 lsp_adapter,
3174 lang_server,
3175 &mut cx,
3176 )
3177 .await
3178 } else if let Some(command) = action.lsp_action.command {
3179 this.update(&mut cx, |this, _| {
3180 this.last_workspace_edits_by_language_server
3181 .remove(&lang_server.server_id());
3182 });
3183 lang_server
3184 .request::<lsp::request::ExecuteCommand>(lsp::ExecuteCommandParams {
3185 command: command.command,
3186 arguments: command.arguments.unwrap_or_default(),
3187 ..Default::default()
3188 })
3189 .await?;
3190 Ok(this.update(&mut cx, |this, _| {
3191 this.last_workspace_edits_by_language_server
3192 .remove(&lang_server.server_id())
3193 .unwrap_or_default()
3194 }))
3195 } else {
3196 Ok(ProjectTransaction::default())
3197 }
3198 })
3199 } else if let Some(project_id) = self.remote_id() {
3200 let client = self.client.clone();
3201 let request = proto::ApplyCodeAction {
3202 project_id,
3203 buffer_id: buffer_handle.read(cx).remote_id(),
3204 action: Some(language::proto::serialize_code_action(&action)),
3205 };
3206 cx.spawn(|this, mut cx| async move {
3207 let response = client
3208 .request(request)
3209 .await?
3210 .transaction
3211 .ok_or_else(|| anyhow!("missing transaction"))?;
3212 this.update(&mut cx, |this, cx| {
3213 this.deserialize_project_transaction(response, push_to_history, cx)
3214 })
3215 .await
3216 })
3217 } else {
3218 Task::ready(Err(anyhow!("project does not have a remote id")))
3219 }
3220 }
3221
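/// Applies an LSP `WorkspaceEdit` to the project. The legacy `changes` map and the newer
/// `documentChanges` form are normalized into a single list of operations; resource
/// operations (create/rename/delete) go through the project's `Fs`, and each text
/// document edit is opened via the language server, converted with `edits_from_lsp`, and
/// applied as one transaction per buffer. Returns the combined `ProjectTransaction`.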
3222 async fn deserialize_workspace_edit(
3223 this: ModelHandle<Self>,
3224 edit: lsp::WorkspaceEdit,
3225 push_to_history: bool,
3226 lsp_adapter: Arc<dyn LspAdapter>,
3227 language_server: Arc<LanguageServer>,
3228 cx: &mut AsyncAppContext,
3229 ) -> Result<ProjectTransaction> {
3230 let fs = this.read_with(cx, |this, _| this.fs.clone());
3231 let mut operations = Vec::new();
3232 if let Some(document_changes) = edit.document_changes {
3233 match document_changes {
3234 lsp::DocumentChanges::Edits(edits) => {
3235 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
3236 }
3237 lsp::DocumentChanges::Operations(ops) => operations = ops,
3238 }
3239 } else if let Some(changes) = edit.changes {
3240 operations.extend(changes.into_iter().map(|(uri, edits)| {
3241 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
3242 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
3243 uri,
3244 version: None,
3245 },
3246 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
3247 })
3248 }));
3249 }
3250
3251 let mut project_transaction = ProjectTransaction::default();
3252 for operation in operations {
3253 match operation {
3254 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
3255 let abs_path = op
3256 .uri
3257 .to_file_path()
3258 .map_err(|_| anyhow!("can't convert URI to path"))?;
3259
3260 if let Some(parent_path) = abs_path.parent() {
3261 fs.create_dir(parent_path).await?;
3262 }
3263 if abs_path.ends_with("/") {
3264 fs.create_dir(&abs_path).await?;
3265 } else {
3266 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
3267 .await?;
3268 }
3269 }
3270 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
3271 let source_abs_path = op
3272 .old_uri
3273 .to_file_path()
3274 .map_err(|_| anyhow!("can't convert URI to path"))?;
3275 let target_abs_path = op
3276 .new_uri
3277 .to_file_path()
3278 .map_err(|_| anyhow!("can't convert URI to path"))?;
3279 fs.rename(
3280 &source_abs_path,
3281 &target_abs_path,
3282 op.options.map(Into::into).unwrap_or_default(),
3283 )
3284 .await?;
3285 }
3286 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
3287 let abs_path = op
3288 .uri
3289 .to_file_path()
3290 .map_err(|_| anyhow!("can't convert URI to path"))?;
3291 let options = op.options.map(Into::into).unwrap_or_default();
3292 if abs_path.ends_with("/") {
3293 fs.remove_dir(&abs_path, options).await?;
3294 } else {
3295 fs.remove_file(&abs_path, options).await?;
3296 }
3297 }
3298 lsp::DocumentChangeOperation::Edit(op) => {
3299 let buffer_to_edit = this
3300 .update(cx, |this, cx| {
3301 this.open_local_buffer_via_lsp(
3302 op.text_document.uri,
3303 lsp_adapter.clone(),
3304 language_server.clone(),
3305 cx,
3306 )
3307 })
3308 .await?;
3309
3310 let edits = this
3311 .update(cx, |this, cx| {
3312 let edits = op.edits.into_iter().map(|edit| match edit {
3313 lsp::OneOf::Left(edit) => edit,
3314 lsp::OneOf::Right(edit) => edit.text_edit,
3315 });
3316 this.edits_from_lsp(
3317 &buffer_to_edit,
3318 edits,
3319 op.text_document.version,
3320 cx,
3321 )
3322 })
3323 .await?;
3324
3325 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
3326 buffer.finalize_last_transaction();
3327 buffer.start_transaction();
3328 for (range, text) in edits {
3329 buffer.edit([(range, text)], cx);
3330 }
3331 let transaction = if buffer.end_transaction(cx).is_some() {
3332 let transaction = buffer.finalize_last_transaction().unwrap().clone();
3333 if !push_to_history {
3334 buffer.forget_transaction(transaction.id);
3335 }
3336 Some(transaction)
3337 } else {
3338 None
3339 };
3340
3341 transaction
3342 });
3343 if let Some(transaction) = transaction {
3344 project_transaction.0.insert(buffer_to_edit, transaction);
3345 }
3346 }
3347 }
3348 }
3349
3350 Ok(project_transaction)
3351 }
3352
3353 pub fn prepare_rename<T: ToPointUtf16>(
3354 &self,
3355 buffer: ModelHandle<Buffer>,
3356 position: T,
3357 cx: &mut ModelContext<Self>,
3358 ) -> Task<Result<Option<Range<Anchor>>>> {
3359 let position = position.to_point_utf16(buffer.read(cx));
3360 self.request_lsp(buffer, PrepareRename { position }, cx)
3361 }
3362
3363 pub fn perform_rename<T: ToPointUtf16>(
3364 &self,
3365 buffer: ModelHandle<Buffer>,
3366 position: T,
3367 new_name: String,
3368 push_to_history: bool,
3369 cx: &mut ModelContext<Self>,
3370 ) -> Task<Result<ProjectTransaction>> {
3371 let position = position.to_point_utf16(buffer.read(cx));
3372 self.request_lsp(
3373 buffer,
3374 PerformRename {
3375 position,
3376 new_name,
3377 push_to_history,
3378 },
3379 cx,
3380 )
3381 }
3382
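/// Searches all visible worktrees for the given query. Candidate paths are found by a
/// pool of background workers that split the visible files roughly evenly and scan them
/// on disk; matching paths are opened as buffers (already-open buffers are searched as
/// well) and streamed to another set of workers that run the query against each buffer
/// snapshot and collect anchor ranges per buffer. Remote projects forward the query to
/// the host.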
3383 pub fn search(
3384 &self,
3385 query: SearchQuery,
3386 cx: &mut ModelContext<Self>,
3387 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
3388 if self.is_local() {
3389 let snapshots = self
3390 .visible_worktrees(cx)
3391 .filter_map(|tree| {
3392 let tree = tree.read(cx).as_local()?;
3393 Some(tree.snapshot())
3394 })
3395 .collect::<Vec<_>>();
3396
3397 let background = cx.background().clone();
3398 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
3399 if path_count == 0 {
3400 return Task::ready(Ok(Default::default()));
3401 }
3402 let workers = background.num_cpus().min(path_count);
3403 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
3404 cx.background()
3405 .spawn({
3406 let fs = self.fs.clone();
3407 let background = cx.background().clone();
3408 let query = query.clone();
3409 async move {
3410 let fs = &fs;
3411 let query = &query;
3412 let matching_paths_tx = &matching_paths_tx;
3413 let paths_per_worker = (path_count + workers - 1) / workers;
3414 let snapshots = &snapshots;
3415 background
3416 .scoped(|scope| {
3417 for worker_ix in 0..workers {
3418 let worker_start_ix = worker_ix * paths_per_worker;
3419 let worker_end_ix = worker_start_ix + paths_per_worker;
3420 scope.spawn(async move {
3421 let mut snapshot_start_ix = 0;
3422 let mut abs_path = PathBuf::new();
3423 for snapshot in snapshots {
3424 let snapshot_end_ix =
3425 snapshot_start_ix + snapshot.visible_file_count();
3426 if worker_end_ix <= snapshot_start_ix {
3427 break;
3428 } else if worker_start_ix > snapshot_end_ix {
3429 snapshot_start_ix = snapshot_end_ix;
3430 continue;
3431 } else {
3432 let start_in_snapshot = worker_start_ix
3433 .saturating_sub(snapshot_start_ix);
3434 let end_in_snapshot =
3435 cmp::min(worker_end_ix, snapshot_end_ix)
3436 - snapshot_start_ix;
3437
3438 for entry in snapshot
3439 .files(false, start_in_snapshot)
3440 .take(end_in_snapshot - start_in_snapshot)
3441 {
3442 if matching_paths_tx.is_closed() {
3443 break;
3444 }
3445
3446 abs_path.clear();
3447 abs_path.push(&snapshot.abs_path());
3448 abs_path.push(&entry.path);
3449 let matches = if let Some(file) =
3450 fs.open_sync(&abs_path).await.log_err()
3451 {
3452 query.detect(file).unwrap_or(false)
3453 } else {
3454 false
3455 };
3456
3457 if matches {
3458 let project_path =
3459 (snapshot.id(), entry.path.clone());
3460 if matching_paths_tx
3461 .send(project_path)
3462 .await
3463 .is_err()
3464 {
3465 break;
3466 }
3467 }
3468 }
3469
3470 snapshot_start_ix = snapshot_end_ix;
3471 }
3472 }
3473 });
3474 }
3475 })
3476 .await;
3477 }
3478 })
3479 .detach();
3480
3481 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
3482 let open_buffers = self
3483 .opened_buffers
3484 .values()
3485 .filter_map(|b| b.upgrade(cx))
3486 .collect::<HashSet<_>>();
3487 cx.spawn(|this, cx| async move {
3488 for buffer in &open_buffers {
3489 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3490 buffers_tx.send((buffer.clone(), snapshot)).await?;
3491 }
3492
3493 let open_buffers = Rc::new(RefCell::new(open_buffers));
3494 while let Some(project_path) = matching_paths_rx.next().await {
3495 if buffers_tx.is_closed() {
3496 break;
3497 }
3498
3499 let this = this.clone();
3500 let open_buffers = open_buffers.clone();
3501 let buffers_tx = buffers_tx.clone();
3502 cx.spawn(|mut cx| async move {
3503 if let Some(buffer) = this
3504 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
3505 .await
3506 .log_err()
3507 {
3508 if open_buffers.borrow_mut().insert(buffer.clone()) {
3509 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
3510 buffers_tx.send((buffer, snapshot)).await?;
3511 }
3512 }
3513
3514 Ok::<_, anyhow::Error>(())
3515 })
3516 .detach();
3517 }
3518
3519 Ok::<_, anyhow::Error>(())
3520 })
3521 .detach_and_log_err(cx);
3522
3523 let background = cx.background().clone();
3524 cx.background().spawn(async move {
3525 let query = &query;
3526 let mut matched_buffers = Vec::new();
3527 for _ in 0..workers {
3528 matched_buffers.push(HashMap::default());
3529 }
3530 background
3531 .scoped(|scope| {
3532 for worker_matched_buffers in matched_buffers.iter_mut() {
3533 let mut buffers_rx = buffers_rx.clone();
3534 scope.spawn(async move {
3535 while let Some((buffer, snapshot)) = buffers_rx.next().await {
3536 let buffer_matches = query
3537 .search(snapshot.as_rope())
3538 .await
3539 .iter()
3540 .map(|range| {
3541 snapshot.anchor_before(range.start)
3542 ..snapshot.anchor_after(range.end)
3543 })
3544 .collect::<Vec<_>>();
3545 if !buffer_matches.is_empty() {
3546 worker_matched_buffers
3547 .insert(buffer.clone(), buffer_matches);
3548 }
3549 }
3550 });
3551 }
3552 })
3553 .await;
3554 Ok(matched_buffers.into_iter().flatten().collect())
3555 })
3556 } else if let Some(project_id) = self.remote_id() {
3557 let request = self.client.request(query.to_proto(project_id));
3558 cx.spawn(|this, mut cx| async move {
3559 let response = request.await?;
3560 let mut result = HashMap::default();
3561 for location in response.locations {
3562 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
3563 let target_buffer = this
3564 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3565 .await?;
3566 let start = location
3567 .start
3568 .and_then(deserialize_anchor)
3569 .ok_or_else(|| anyhow!("missing target start"))?;
3570 let end = location
3571 .end
3572 .and_then(deserialize_anchor)
3573 .ok_or_else(|| anyhow!("missing target end"))?;
3574 result
3575 .entry(target_buffer)
3576 .or_insert_with(Vec::new)
3577 .push(start..end)
3578 }
3579 Ok(result)
3580 })
3581 } else {
3582 Task::ready(Ok(Default::default()))
3583 }
3584 }
3585
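/// Dispatches a typed LSP command for a buffer. Locally, the request is sent to the
/// buffer's language server once its capabilities have been checked; on remote projects
/// the command is serialized and proxied to the host over RPC. If neither applies, the
/// response type's default value is returned.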
3586 fn request_lsp<R: LspCommand>(
3587 &self,
3588 buffer_handle: ModelHandle<Buffer>,
3589 request: R,
3590 cx: &mut ModelContext<Self>,
3591 ) -> Task<Result<R::Response>>
3592 where
3593 <R::LspRequest as lsp::request::Request>::Result: Send,
3594 {
3595 let buffer = buffer_handle.read(cx);
3596 if self.is_local() {
3597 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
3598 if let Some((file, (_, language_server))) =
3599 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
3600 {
3601 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
3602 return cx.spawn(|this, cx| async move {
3603 if !request.check_capabilities(&language_server.capabilities()) {
3604 return Ok(Default::default());
3605 }
3606
3607 let response = language_server
3608 .request::<R::LspRequest>(lsp_params)
3609 .await
3610 .context("lsp request failed")?;
3611 request
3612 .response_from_lsp(response, this, buffer_handle, cx)
3613 .await
3614 });
3615 }
3616 } else if let Some(project_id) = self.remote_id() {
3617 let rpc = self.client.clone();
3618 let message = request.to_proto(project_id, buffer);
3619 return cx.spawn(|this, cx| async move {
3620 let response = rpc.request(message).await?;
3621 request
3622 .response_from_proto(response, this, buffer_handle, cx)
3623 .await
3624 });
3625 }
3626 Task::ready(Ok(Default::default()))
3627 }
3628
3629 pub fn find_or_create_local_worktree(
3630 &mut self,
3631 abs_path: impl AsRef<Path>,
3632 visible: bool,
3633 cx: &mut ModelContext<Self>,
3634 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
3635 let abs_path = abs_path.as_ref();
3636 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
3637 Task::ready(Ok((tree.clone(), relative_path.into())))
3638 } else {
3639 let worktree = self.create_local_worktree(abs_path, visible, cx);
3640 cx.foreground()
3641 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
3642 }
3643 }
3644
3645 pub fn find_local_worktree(
3646 &self,
3647 abs_path: &Path,
3648 cx: &AppContext,
3649 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
3650 for tree in self.worktrees(cx) {
3651 if let Some(relative_path) = tree
3652 .read(cx)
3653 .as_local()
3654 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
3655 {
3656 return Some((tree.clone(), relative_path.into()));
3657 }
3658 }
3659 None
3660 }
3661
3662 pub fn is_shared(&self) -> bool {
3663 match &self.client_state {
3664 ProjectClientState::Local { is_shared, .. } => *is_shared,
3665 ProjectClientState::Remote { .. } => false,
3666 }
3667 }
3668
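/// Creates (or reuses an in-flight request for) a local worktree rooted at `abs_path`.
/// Concurrent calls for the same path share one future through
/// `loading_local_worktrees`. Once the worktree exists it is added to the project and,
/// if the project has a remote id, either shared or registered with the server depending
/// on the current sharing state (see the retry loop below).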
3669 fn create_local_worktree(
3670 &mut self,
3671 abs_path: impl AsRef<Path>,
3672 visible: bool,
3673 cx: &mut ModelContext<Self>,
3674 ) -> Task<Result<ModelHandle<Worktree>>> {
3675 let fs = self.fs.clone();
3676 let client = self.client.clone();
3677 let next_entry_id = self.next_entry_id.clone();
3678 let path: Arc<Path> = abs_path.as_ref().into();
3679 let task = self
3680 .loading_local_worktrees
3681 .entry(path.clone())
3682 .or_insert_with(|| {
3683 cx.spawn(|project, mut cx| {
3684 async move {
3685 let worktree = Worktree::local(
3686 client.clone(),
3687 path.clone(),
3688 visible,
3689 fs,
3690 next_entry_id,
3691 &mut cx,
3692 )
3693 .await;
3694 project.update(&mut cx, |project, _| {
3695 project.loading_local_worktrees.remove(&path);
3696 });
3697 let worktree = worktree?;
3698
3699 let remote_project_id = project.update(&mut cx, |project, cx| {
3700 project.add_worktree(&worktree, cx);
3701 project.remote_id()
3702 });
3703
3704 if let Some(project_id) = remote_project_id {
3705 // Because sharing is async, we may have *unshared* the project by the time it completes,
3706 // in which case we need to register the worktree instead.
3707 loop {
3708 if project.read_with(&cx, |project, _| project.is_shared()) {
3709 if worktree
3710 .update(&mut cx, |worktree, cx| {
3711 worktree.as_local_mut().unwrap().share(project_id, cx)
3712 })
3713 .await
3714 .is_ok()
3715 {
3716 break;
3717 }
3718 } else {
3719 worktree
3720 .update(&mut cx, |worktree, cx| {
3721 worktree
3722 .as_local_mut()
3723 .unwrap()
3724 .register(project_id, cx)
3725 })
3726 .await?;
3727 break;
3728 }
3729 }
3730 }
3731
3732 Ok(worktree)
3733 }
3734 .map_err(Arc::new)
3735 })
3736 .shared()
3737 })
3738 .clone();
3739 cx.foreground().spawn(async move {
3740 match task.await {
3741 Ok(worktree) => Ok(worktree),
3742 Err(err) => Err(anyhow!("{}", err)),
3743 }
3744 })
3745 }
3746
3747 pub fn remove_worktree(&mut self, id_to_remove: WorktreeId, cx: &mut ModelContext<Self>) {
3748 self.worktrees.retain(|worktree| {
3749 if let Some(worktree) = worktree.upgrade(cx) {
3750 let id = worktree.read(cx).id();
3751 if id == id_to_remove {
3752 cx.emit(Event::WorktreeRemoved(id));
3753 false
3754 } else {
3755 true
3756 }
3757 } else {
3758 false
3759 }
3760 });
3761 self.metadata_changed(cx);
3762 cx.notify();
3763 }
3764
3765 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
3766 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
3767 if worktree.read(cx).is_local() {
3768 cx.subscribe(&worktree, |this, worktree, _, cx| {
3769 this.update_local_worktree_buffers(worktree, cx);
3770 })
3771 .detach();
3772 }
3773
3774 let push_strong_handle = {
3775 let worktree = worktree.read(cx);
3776 self.is_shared() || worktree.is_visible() || worktree.is_remote()
3777 };
3778 if push_strong_handle {
3779 self.worktrees
3780 .push(WorktreeHandle::Strong(worktree.clone()));
3781 } else {
3782 cx.observe_release(&worktree, |this, _, cx| {
3783 this.worktrees
3784 .retain(|worktree| worktree.upgrade(cx).is_some());
3785 cx.notify();
3786 })
3787 .detach();
3788 self.worktrees
3789 .push(WorktreeHandle::Weak(worktree.downgrade()));
3790 }
3791 self.metadata_changed(cx);
3792 cx.emit(Event::WorktreeAdded);
3793 cx.notify();
3794 }
3795
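/// Reconciles open buffers with a local worktree after its snapshot changes. Each
/// buffer's `File` is refreshed: by entry id when the entry still exists, by path as a
/// fallback, and otherwise with `entry_id: None` to mark it as no longer present in the
/// worktree. File updates are forwarded to collaborators when shared, and buffers whose
/// absolute path changed are re-registered with their language servers under the new
/// path.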
3796 fn update_local_worktree_buffers(
3797 &mut self,
3798 worktree_handle: ModelHandle<Worktree>,
3799 cx: &mut ModelContext<Self>,
3800 ) {
3801 let snapshot = worktree_handle.read(cx).snapshot();
3802 let mut buffers_to_delete = Vec::new();
3803 let mut renamed_buffers = Vec::new();
3804 for (buffer_id, buffer) in &self.opened_buffers {
3805 if let Some(buffer) = buffer.upgrade(cx) {
3806 buffer.update(cx, |buffer, cx| {
3807 if let Some(old_file) = File::from_dyn(buffer.file()) {
3808 if old_file.worktree != worktree_handle {
3809 return;
3810 }
3811
3812 let new_file = if let Some(entry) = old_file
3813 .entry_id
3814 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
3815 {
3816 File {
3817 is_local: true,
3818 entry_id: Some(entry.id),
3819 mtime: entry.mtime,
3820 path: entry.path.clone(),
3821 worktree: worktree_handle.clone(),
3822 }
3823 } else if let Some(entry) =
3824 snapshot.entry_for_path(old_file.path().as_ref())
3825 {
3826 File {
3827 is_local: true,
3828 entry_id: Some(entry.id),
3829 mtime: entry.mtime,
3830 path: entry.path.clone(),
3831 worktree: worktree_handle.clone(),
3832 }
3833 } else {
3834 File {
3835 is_local: true,
3836 entry_id: None,
3837 path: old_file.path().clone(),
3838 mtime: old_file.mtime(),
3839 worktree: worktree_handle.clone(),
3840 }
3841 };
3842
3843 let old_path = old_file.abs_path(cx);
3844 if new_file.abs_path(cx) != old_path {
3845 renamed_buffers.push((cx.handle(), old_path));
3846 }
3847
3848 if let Some(project_id) = self.shared_remote_id() {
3849 self.client
3850 .send(proto::UpdateBufferFile {
3851 project_id,
3852 buffer_id: *buffer_id as u64,
3853 file: Some(new_file.to_proto()),
3854 })
3855 .log_err();
3856 }
3857 buffer.file_updated(Box::new(new_file), cx).detach();
3858 }
3859 });
3860 } else {
3861 buffers_to_delete.push(*buffer_id);
3862 }
3863 }
3864
3865 for buffer_id in buffers_to_delete {
3866 self.opened_buffers.remove(&buffer_id);
3867 }
3868
3869 for (buffer, old_path) in renamed_buffers {
3870 self.unregister_buffer_from_language_server(&buffer, old_path, cx);
3871 self.assign_language_to_buffer(&buffer, cx);
3872 self.register_buffer_with_language_server(&buffer, cx);
3873 }
3874 }
3875
3876 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
3877 let new_active_entry = entry.and_then(|project_path| {
3878 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
3879 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
3880 Some(entry.id)
3881 });
3882 if new_active_entry != self.active_entry {
3883 self.active_entry = new_active_entry;
3884 cx.emit(Event::ActiveEntryChanged(new_active_entry));
3885 }
3886 }
3887
3888 pub fn is_running_disk_based_diagnostics(&self) -> bool {
3889 self.language_server_statuses
3890 .values()
3891 .any(|status| status.pending_diagnostic_updates > 0)
3892 }
3893
3894 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
3895 let mut summary = DiagnosticSummary::default();
3896 for (_, path_summary) in self.diagnostic_summaries(cx) {
3897 summary.error_count += path_summary.error_count;
3898 summary.warning_count += path_summary.warning_count;
3899 }
3900 summary
3901 }
3902
3903 pub fn diagnostic_summaries<'a>(
3904 &'a self,
3905 cx: &'a AppContext,
3906 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
3907 self.worktrees(cx).flat_map(move |worktree| {
3908 let worktree = worktree.read(cx);
3909 let worktree_id = worktree.id();
3910 worktree
3911 .diagnostic_summaries()
3912 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
3913 })
3914 }
3915
3916 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
3917 if self
3918 .language_server_statuses
3919 .values()
3920 .map(|status| status.pending_diagnostic_updates)
3921 .sum::<isize>()
3922 == 1
3923 {
3924 cx.emit(Event::DiskBasedDiagnosticsStarted);
3925 }
3926 }
3927
3928 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
3929 cx.emit(Event::DiskBasedDiagnosticsUpdated);
3930 if self
3931 .language_server_statuses
3932 .values()
3933 .map(|status| status.pending_diagnostic_updates)
3934 .sum::<isize>()
3935 == 0
3936 {
3937 cx.emit(Event::DiskBasedDiagnosticsFinished);
3938 }
3939 }
3940
3941 pub fn active_entry(&self) -> Option<ProjectEntryId> {
3942 self.active_entry
3943 }
3944
3945 pub fn entry_for_path(&self, path: &ProjectPath, cx: &AppContext) -> Option<ProjectEntryId> {
3946 self.worktree_for_id(path.worktree_id, cx)?
3947 .read(cx)
3948 .entry_for_path(&path.path)
3949 .map(|entry| entry.id)
3950 }
3951
3952 pub fn path_for_entry(&self, entry_id: ProjectEntryId, cx: &AppContext) -> Option<ProjectPath> {
3953 let worktree = self.worktree_for_entry(entry_id, cx)?;
3954 let worktree = worktree.read(cx);
3955 let worktree_id = worktree.id();
3956 let path = worktree.entry_for_id(entry_id)?.path.clone();
3957 Some(ProjectPath { worktree_id, path })
3958 }
3959
3960 // RPC message handlers
3961
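    /// Handles a request from another user to join this project. Users who are
    /// already collaborators are accepted automatically; otherwise the
    /// requester is fetched from the `UserStore` and surfaced to the UI via
    /// `Event::ContactRequestedJoin`.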
3962 async fn handle_request_join_project(
3963 this: ModelHandle<Self>,
3964 message: TypedEnvelope<proto::RequestJoinProject>,
3965 _: Arc<Client>,
3966 mut cx: AsyncAppContext,
3967 ) -> Result<()> {
3968 let user_id = message.payload.requester_id;
3969 if this.read_with(&cx, |project, _| {
3970 project.collaborators.values().any(|c| c.user.id == user_id)
3971 }) {
3972 this.update(&mut cx, |this, cx| {
3973 this.respond_to_join_request(user_id, true, cx)
3974 });
3975 } else {
3976 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
3977 let user = user_store
3978 .update(&mut cx, |store, cx| store.fetch_user(user_id, cx))
3979 .await?;
3980 this.update(&mut cx, |_, cx| cx.emit(Event::ContactRequestedJoin(user)));
3981 }
3982 Ok(())
3983 }
3984
3985 async fn handle_unregister_project(
3986 this: ModelHandle<Self>,
3987 _: TypedEnvelope<proto::UnregisterProject>,
3988 _: Arc<Client>,
3989 mut cx: AsyncAppContext,
3990 ) -> Result<()> {
3991 this.update(&mut cx, |this, cx| this.removed_from_project(cx));
3992 Ok(())
3993 }
3994
3995 async fn handle_project_unshared(
3996 this: ModelHandle<Self>,
3997 _: TypedEnvelope<proto::ProjectUnshared>,
3998 _: Arc<Client>,
3999 mut cx: AsyncAppContext,
4000 ) -> Result<()> {
4001 this.update(&mut cx, |this, cx| this.unshared(cx));
4002 Ok(())
4003 }
4004
4005 async fn handle_add_collaborator(
4006 this: ModelHandle<Self>,
4007 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
4008 _: Arc<Client>,
4009 mut cx: AsyncAppContext,
4010 ) -> Result<()> {
4011 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
4012 let collaborator = envelope
4013 .payload
4014 .collaborator
4015 .take()
4016 .ok_or_else(|| anyhow!("empty collaborator"))?;
4017
4018 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
4019 this.update(&mut cx, |this, cx| {
4020 this.collaborators
4021 .insert(collaborator.peer_id, collaborator);
4022 cx.notify();
4023 });
4024
4025 Ok(())
4026 }
4027
4028 async fn handle_remove_collaborator(
4029 this: ModelHandle<Self>,
4030 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
4031 _: Arc<Client>,
4032 mut cx: AsyncAppContext,
4033 ) -> Result<()> {
4034 this.update(&mut cx, |this, cx| {
4035 let peer_id = PeerId(envelope.payload.peer_id);
4036 let replica_id = this
4037 .collaborators
4038 .remove(&peer_id)
4039 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
4040 .replica_id;
            for buffer in this.opened_buffers.values() {
4042 if let Some(buffer) = buffer.upgrade(cx) {
4043 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
4044 }
4045 }
4046
4047 cx.emit(Event::CollaboratorLeft(peer_id));
4048 cx.notify();
4049 Ok(())
4050 })
4051 }
4052
4053 async fn handle_join_project_request_cancelled(
4054 this: ModelHandle<Self>,
4055 envelope: TypedEnvelope<proto::JoinProjectRequestCancelled>,
4056 _: Arc<Client>,
4057 mut cx: AsyncAppContext,
4058 ) -> Result<()> {
4059 let user = this
4060 .update(&mut cx, |this, cx| {
4061 this.user_store.update(cx, |user_store, cx| {
4062 user_store.fetch_user(envelope.payload.requester_id, cx)
4063 })
4064 })
4065 .await?;
4066
4067 this.update(&mut cx, |_, cx| {
4068 cx.emit(Event::ContactCancelledJoinRequest(user));
4069 });
4070
4071 Ok(())
4072 }
4073
4074 async fn handle_register_worktree(
4075 this: ModelHandle<Self>,
4076 envelope: TypedEnvelope<proto::RegisterWorktree>,
4077 client: Arc<Client>,
4078 mut cx: AsyncAppContext,
4079 ) -> Result<()> {
4080 this.update(&mut cx, |this, cx| {
4081 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
4082 let replica_id = this.replica_id();
4083 let worktree = proto::Worktree {
4084 id: envelope.payload.worktree_id,
4085 root_name: envelope.payload.root_name,
4086 entries: Default::default(),
4087 diagnostic_summaries: Default::default(),
4088 visible: envelope.payload.visible,
4089 scan_id: 0,
4090 };
4091 let (worktree, load_task) =
4092 Worktree::remote(remote_id, replica_id, worktree, client, cx);
4093 this.add_worktree(&worktree, cx);
4094 load_task.detach();
4095 Ok(())
4096 })
4097 }
4098
4099 async fn handle_unregister_worktree(
4100 this: ModelHandle<Self>,
4101 envelope: TypedEnvelope<proto::UnregisterWorktree>,
4102 _: Arc<Client>,
4103 mut cx: AsyncAppContext,
4104 ) -> Result<()> {
4105 this.update(&mut cx, |this, cx| {
4106 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4107 this.remove_worktree(worktree_id, cx);
4108 Ok(())
4109 })
4110 }
4111
4112 async fn handle_update_worktree(
4113 this: ModelHandle<Self>,
4114 envelope: TypedEnvelope<proto::UpdateWorktree>,
4115 _: Arc<Client>,
4116 mut cx: AsyncAppContext,
4117 ) -> Result<()> {
4118 this.update(&mut cx, |this, cx| {
4119 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4120 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4121 worktree.update(cx, |worktree, _| {
4122 let worktree = worktree.as_remote_mut().unwrap();
4123 worktree.update_from_remote(envelope)
4124 })?;
4125 }
4126 Ok(())
4127 })
4128 }
4129
4130 async fn handle_create_project_entry(
4131 this: ModelHandle<Self>,
4132 envelope: TypedEnvelope<proto::CreateProjectEntry>,
4133 _: Arc<Client>,
4134 mut cx: AsyncAppContext,
4135 ) -> Result<proto::ProjectEntryResponse> {
4136 let worktree = this.update(&mut cx, |this, cx| {
4137 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4138 this.worktree_for_id(worktree_id, cx)
4139 .ok_or_else(|| anyhow!("worktree not found"))
4140 })?;
4141 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4142 let entry = worktree
4143 .update(&mut cx, |worktree, cx| {
4144 let worktree = worktree.as_local_mut().unwrap();
4145 let path = PathBuf::from(OsString::from_vec(envelope.payload.path));
4146 worktree.create_entry(path, envelope.payload.is_directory, cx)
4147 })
4148 .await?;
4149 Ok(proto::ProjectEntryResponse {
4150 entry: Some((&entry).into()),
4151 worktree_scan_id: worktree_scan_id as u64,
4152 })
4153 }
4154
4155 async fn handle_rename_project_entry(
4156 this: ModelHandle<Self>,
4157 envelope: TypedEnvelope<proto::RenameProjectEntry>,
4158 _: Arc<Client>,
4159 mut cx: AsyncAppContext,
4160 ) -> Result<proto::ProjectEntryResponse> {
4161 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4162 let worktree = this.read_with(&cx, |this, cx| {
4163 this.worktree_for_entry(entry_id, cx)
4164 .ok_or_else(|| anyhow!("worktree not found"))
4165 })?;
4166 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4167 let entry = worktree
4168 .update(&mut cx, |worktree, cx| {
4169 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4170 worktree
4171 .as_local_mut()
4172 .unwrap()
4173 .rename_entry(entry_id, new_path, cx)
4174 .ok_or_else(|| anyhow!("invalid entry"))
4175 })?
4176 .await?;
4177 Ok(proto::ProjectEntryResponse {
4178 entry: Some((&entry).into()),
4179 worktree_scan_id: worktree_scan_id as u64,
4180 })
4181 }
4182
4183 async fn handle_copy_project_entry(
4184 this: ModelHandle<Self>,
4185 envelope: TypedEnvelope<proto::CopyProjectEntry>,
4186 _: Arc<Client>,
4187 mut cx: AsyncAppContext,
4188 ) -> Result<proto::ProjectEntryResponse> {
4189 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4190 let worktree = this.read_with(&cx, |this, cx| {
4191 this.worktree_for_entry(entry_id, cx)
4192 .ok_or_else(|| anyhow!("worktree not found"))
4193 })?;
4194 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4195 let entry = worktree
4196 .update(&mut cx, |worktree, cx| {
4197 let new_path = PathBuf::from(OsString::from_vec(envelope.payload.new_path));
4198 worktree
4199 .as_local_mut()
4200 .unwrap()
4201 .copy_entry(entry_id, new_path, cx)
4202 .ok_or_else(|| anyhow!("invalid entry"))
4203 })?
4204 .await?;
4205 Ok(proto::ProjectEntryResponse {
4206 entry: Some((&entry).into()),
4207 worktree_scan_id: worktree_scan_id as u64,
4208 })
4209 }
4210
4211 async fn handle_delete_project_entry(
4212 this: ModelHandle<Self>,
4213 envelope: TypedEnvelope<proto::DeleteProjectEntry>,
4214 _: Arc<Client>,
4215 mut cx: AsyncAppContext,
4216 ) -> Result<proto::ProjectEntryResponse> {
4217 let entry_id = ProjectEntryId::from_proto(envelope.payload.entry_id);
4218 let worktree = this.read_with(&cx, |this, cx| {
4219 this.worktree_for_entry(entry_id, cx)
4220 .ok_or_else(|| anyhow!("worktree not found"))
4221 })?;
4222 let worktree_scan_id = worktree.read_with(&cx, |worktree, _| worktree.scan_id());
4223 worktree
4224 .update(&mut cx, |worktree, cx| {
4225 worktree
4226 .as_local_mut()
4227 .unwrap()
4228 .delete_entry(entry_id, cx)
4229 .ok_or_else(|| anyhow!("invalid entry"))
4230 })?
4231 .await?;
4232 Ok(proto::ProjectEntryResponse {
4233 entry: None,
4234 worktree_scan_id: worktree_scan_id as u64,
4235 })
4236 }
4237
4238 async fn handle_update_diagnostic_summary(
4239 this: ModelHandle<Self>,
4240 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
4241 _: Arc<Client>,
4242 mut cx: AsyncAppContext,
4243 ) -> Result<()> {
4244 this.update(&mut cx, |this, cx| {
4245 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4246 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
4247 if let Some(summary) = envelope.payload.summary {
4248 let project_path = ProjectPath {
4249 worktree_id,
4250 path: Path::new(&summary.path).into(),
4251 };
4252 worktree.update(cx, |worktree, _| {
4253 worktree
4254 .as_remote_mut()
4255 .unwrap()
4256 .update_diagnostic_summary(project_path.path.clone(), &summary);
4257 });
4258 cx.emit(Event::DiagnosticsUpdated(project_path));
4259 }
4260 }
4261 Ok(())
4262 })
4263 }
4264
4265 async fn handle_start_language_server(
4266 this: ModelHandle<Self>,
4267 envelope: TypedEnvelope<proto::StartLanguageServer>,
4268 _: Arc<Client>,
4269 mut cx: AsyncAppContext,
4270 ) -> Result<()> {
4271 let server = envelope
4272 .payload
4273 .server
4274 .ok_or_else(|| anyhow!("invalid server"))?;
4275 this.update(&mut cx, |this, cx| {
4276 this.language_server_statuses.insert(
4277 server.id as usize,
4278 LanguageServerStatus {
4279 name: server.name,
4280 pending_work: Default::default(),
4281 pending_diagnostic_updates: 0,
4282 },
4283 );
4284 cx.notify();
4285 });
4286 Ok(())
4287 }
4288
4289 async fn handle_update_language_server(
4290 this: ModelHandle<Self>,
4291 envelope: TypedEnvelope<proto::UpdateLanguageServer>,
4292 _: Arc<Client>,
4293 mut cx: AsyncAppContext,
4294 ) -> Result<()> {
4295 let language_server_id = envelope.payload.language_server_id as usize;
4296 match envelope
4297 .payload
4298 .variant
4299 .ok_or_else(|| anyhow!("invalid variant"))?
4300 {
4301 proto::update_language_server::Variant::WorkStart(payload) => {
4302 this.update(&mut cx, |this, cx| {
4303 this.on_lsp_work_start(language_server_id, payload.token, cx);
4304 })
4305 }
4306 proto::update_language_server::Variant::WorkProgress(payload) => {
4307 this.update(&mut cx, |this, cx| {
4308 this.on_lsp_work_progress(
4309 language_server_id,
4310 payload.token,
4311 LanguageServerProgress {
4312 message: payload.message,
4313 percentage: payload.percentage.map(|p| p as usize),
4314 last_update_at: Instant::now(),
4315 },
4316 cx,
4317 );
4318 })
4319 }
4320 proto::update_language_server::Variant::WorkEnd(payload) => {
4321 this.update(&mut cx, |this, cx| {
4322 this.on_lsp_work_end(language_server_id, payload.token, cx);
4323 })
4324 }
4325 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdating(_) => {
4326 this.update(&mut cx, |this, cx| {
4327 this.disk_based_diagnostics_started(cx);
4328 })
4329 }
4330 proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(_) => {
4331 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
4332 }
4333 }
4334
4335 Ok(())
4336 }
4337
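    /// Applies buffer operations received from a peer. Operations for a buffer
    /// that is still loading are queued up, and operations for a buffer that
    /// isn't open yet (which can only happen on remote projects) are stored in
    /// a `Loading` placeholder until the buffer arrives.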
4338 async fn handle_update_buffer(
4339 this: ModelHandle<Self>,
4340 envelope: TypedEnvelope<proto::UpdateBuffer>,
4341 _: Arc<Client>,
4342 mut cx: AsyncAppContext,
4343 ) -> Result<()> {
4344 this.update(&mut cx, |this, cx| {
4345 let payload = envelope.payload.clone();
4346 let buffer_id = payload.buffer_id;
4347 let ops = payload
4348 .operations
4349 .into_iter()
                .map(language::proto::deserialize_operation)
4351 .collect::<Result<Vec<_>, _>>()?;
4352 let is_remote = this.is_remote();
4353 match this.opened_buffers.entry(buffer_id) {
4354 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
4355 OpenBuffer::Strong(buffer) => {
4356 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
4357 }
4358 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
4359 OpenBuffer::Weak(_) => {}
4360 },
4361 hash_map::Entry::Vacant(e) => {
4362 assert!(
4363 is_remote,
4364 "received buffer update from {:?}",
4365 envelope.original_sender_id
4366 );
4367 e.insert(OpenBuffer::Loading(ops));
4368 }
4369 }
4370 Ok(())
4371 })
4372 }
4373
4374 async fn handle_update_buffer_file(
4375 this: ModelHandle<Self>,
4376 envelope: TypedEnvelope<proto::UpdateBufferFile>,
4377 _: Arc<Client>,
4378 mut cx: AsyncAppContext,
4379 ) -> Result<()> {
4380 this.update(&mut cx, |this, cx| {
4381 let payload = envelope.payload.clone();
4382 let buffer_id = payload.buffer_id;
4383 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
4384 let worktree = this
4385 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
4386 .ok_or_else(|| anyhow!("no such worktree"))?;
4387 let file = File::from_proto(file, worktree.clone(), cx)?;
4388 let buffer = this
4389 .opened_buffers
4390 .get_mut(&buffer_id)
4391 .and_then(|b| b.upgrade(cx))
4392 .ok_or_else(|| anyhow!("no such buffer"))?;
4393 buffer.update(cx, |buffer, cx| {
4394 buffer.file_updated(Box::new(file), cx).detach();
4395 });
4396 Ok(())
4397 })
4398 }
4399
4400 async fn handle_save_buffer(
4401 this: ModelHandle<Self>,
4402 envelope: TypedEnvelope<proto::SaveBuffer>,
4403 _: Arc<Client>,
4404 mut cx: AsyncAppContext,
4405 ) -> Result<proto::BufferSaved> {
4406 let buffer_id = envelope.payload.buffer_id;
4407 let requested_version = deserialize_version(envelope.payload.version);
4408
4409 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
4410 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
4411 let buffer = this
4412 .opened_buffers
4413 .get(&buffer_id)
4414 .and_then(|buffer| buffer.upgrade(cx))
4415 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
4416 Ok::<_, anyhow::Error>((project_id, buffer))
4417 })?;
4418 buffer
4419 .update(&mut cx, |buffer, _| {
4420 buffer.wait_for_version(requested_version)
4421 })
4422 .await;
4423
4424 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
4425 Ok(proto::BufferSaved {
4426 project_id,
4427 buffer_id,
4428 version: serialize_version(&saved_version),
4429 mtime: Some(mtime.into()),
4430 })
4431 }
4432
4433 async fn handle_reload_buffers(
4434 this: ModelHandle<Self>,
4435 envelope: TypedEnvelope<proto::ReloadBuffers>,
4436 _: Arc<Client>,
4437 mut cx: AsyncAppContext,
4438 ) -> Result<proto::ReloadBuffersResponse> {
4439 let sender_id = envelope.original_sender_id()?;
4440 let reload = this.update(&mut cx, |this, cx| {
4441 let mut buffers = HashSet::default();
4442 for buffer_id in &envelope.payload.buffer_ids {
4443 buffers.insert(
4444 this.opened_buffers
4445 .get(buffer_id)
4446 .and_then(|buffer| buffer.upgrade(cx))
4447 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4448 );
4449 }
4450 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
4451 })?;
4452
4453 let project_transaction = reload.await?;
4454 let project_transaction = this.update(&mut cx, |this, cx| {
4455 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4456 });
4457 Ok(proto::ReloadBuffersResponse {
4458 transaction: Some(project_transaction),
4459 })
4460 }
4461
4462 async fn handle_format_buffers(
4463 this: ModelHandle<Self>,
4464 envelope: TypedEnvelope<proto::FormatBuffers>,
4465 _: Arc<Client>,
4466 mut cx: AsyncAppContext,
4467 ) -> Result<proto::FormatBuffersResponse> {
4468 let sender_id = envelope.original_sender_id()?;
4469 let format = this.update(&mut cx, |this, cx| {
4470 let mut buffers = HashSet::default();
4471 for buffer_id in &envelope.payload.buffer_ids {
4472 buffers.insert(
4473 this.opened_buffers
4474 .get(buffer_id)
4475 .and_then(|buffer| buffer.upgrade(cx))
4476 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
4477 );
4478 }
4479 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
4480 })?;
4481
4482 let project_transaction = format.await?;
4483 let project_transaction = this.update(&mut cx, |this, cx| {
4484 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4485 });
4486 Ok(proto::FormatBuffersResponse {
4487 transaction: Some(project_transaction),
4488 })
4489 }
4490
4491 async fn handle_get_completions(
4492 this: ModelHandle<Self>,
4493 envelope: TypedEnvelope<proto::GetCompletions>,
4494 _: Arc<Client>,
4495 mut cx: AsyncAppContext,
4496 ) -> Result<proto::GetCompletionsResponse> {
4497 let position = envelope
4498 .payload
4499 .position
4500 .and_then(language::proto::deserialize_anchor)
4501 .ok_or_else(|| anyhow!("invalid position"))?;
4502 let version = deserialize_version(envelope.payload.version);
4503 let buffer = this.read_with(&cx, |this, cx| {
4504 this.opened_buffers
4505 .get(&envelope.payload.buffer_id)
4506 .and_then(|buffer| buffer.upgrade(cx))
4507 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4508 })?;
4509 buffer
4510 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
4511 .await;
4512 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4513 let completions = this
4514 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
4515 .await?;
4516
4517 Ok(proto::GetCompletionsResponse {
4518 completions: completions
4519 .iter()
4520 .map(language::proto::serialize_completion)
4521 .collect(),
4522 version: serialize_version(&version),
4523 })
4524 }
4525
4526 async fn handle_apply_additional_edits_for_completion(
4527 this: ModelHandle<Self>,
4528 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
4529 _: Arc<Client>,
4530 mut cx: AsyncAppContext,
4531 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
4532 let apply_additional_edits = this.update(&mut cx, |this, cx| {
4533 let buffer = this
4534 .opened_buffers
4535 .get(&envelope.payload.buffer_id)
4536 .and_then(|buffer| buffer.upgrade(cx))
4537 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4538 let language = buffer.read(cx).language();
4539 let completion = language::proto::deserialize_completion(
4540 envelope
4541 .payload
4542 .completion
4543 .ok_or_else(|| anyhow!("invalid completion"))?,
4544 language,
4545 )?;
4546 Ok::<_, anyhow::Error>(
4547 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
4548 )
4549 })?;
4550
4551 Ok(proto::ApplyCompletionAdditionalEditsResponse {
4552 transaction: apply_additional_edits
4553 .await?
4554 .as_ref()
4555 .map(language::proto::serialize_transaction),
4556 })
4557 }
4558
4559 async fn handle_get_code_actions(
4560 this: ModelHandle<Self>,
4561 envelope: TypedEnvelope<proto::GetCodeActions>,
4562 _: Arc<Client>,
4563 mut cx: AsyncAppContext,
4564 ) -> Result<proto::GetCodeActionsResponse> {
4565 let start = envelope
4566 .payload
4567 .start
4568 .and_then(language::proto::deserialize_anchor)
4569 .ok_or_else(|| anyhow!("invalid start"))?;
4570 let end = envelope
4571 .payload
4572 .end
4573 .and_then(language::proto::deserialize_anchor)
4574 .ok_or_else(|| anyhow!("invalid end"))?;
4575 let buffer = this.update(&mut cx, |this, cx| {
4576 this.opened_buffers
4577 .get(&envelope.payload.buffer_id)
4578 .and_then(|buffer| buffer.upgrade(cx))
4579 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
4580 })?;
4581 buffer
4582 .update(&mut cx, |buffer, _| {
4583 buffer.wait_for_version(deserialize_version(envelope.payload.version))
4584 })
4585 .await;
4586
4587 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
4588 let code_actions = this.update(&mut cx, |this, cx| {
4589 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
4590 })?;
4591
4592 Ok(proto::GetCodeActionsResponse {
4593 actions: code_actions
4594 .await?
4595 .iter()
4596 .map(language::proto::serialize_code_action)
4597 .collect(),
4598 version: serialize_version(&version),
4599 })
4600 }
4601
4602 async fn handle_apply_code_action(
4603 this: ModelHandle<Self>,
4604 envelope: TypedEnvelope<proto::ApplyCodeAction>,
4605 _: Arc<Client>,
4606 mut cx: AsyncAppContext,
4607 ) -> Result<proto::ApplyCodeActionResponse> {
4608 let sender_id = envelope.original_sender_id()?;
4609 let action = language::proto::deserialize_code_action(
4610 envelope
4611 .payload
4612 .action
4613 .ok_or_else(|| anyhow!("invalid action"))?,
4614 )?;
4615 let apply_code_action = this.update(&mut cx, |this, cx| {
4616 let buffer = this
4617 .opened_buffers
4618 .get(&envelope.payload.buffer_id)
4619 .and_then(|buffer| buffer.upgrade(cx))
4620 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
4621 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
4622 })?;
4623
4624 let project_transaction = apply_code_action.await?;
4625 let project_transaction = this.update(&mut cx, |this, cx| {
4626 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
4627 });
4628 Ok(proto::ApplyCodeActionResponse {
4629 transaction: Some(project_transaction),
4630 })
4631 }
4632
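    /// Generic handler for buffer requests that are backed by an LSP command:
    /// deserializes the request for the target buffer, dispatches it via
    /// `request_lsp`, and converts the response back to its protobuf form for
    /// the requesting peer.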
4633 async fn handle_lsp_command<T: LspCommand>(
4634 this: ModelHandle<Self>,
4635 envelope: TypedEnvelope<T::ProtoRequest>,
4636 _: Arc<Client>,
4637 mut cx: AsyncAppContext,
4638 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
4639 where
4640 <T::LspRequest as lsp::request::Request>::Result: Send,
4641 {
4642 let sender_id = envelope.original_sender_id()?;
4643 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
4644 let buffer_handle = this.read_with(&cx, |this, _| {
4645 this.opened_buffers
4646 .get(&buffer_id)
4647 .and_then(|buffer| buffer.upgrade(&cx))
4648 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
4649 })?;
4650 let request = T::from_proto(
4651 envelope.payload,
4652 this.clone(),
4653 buffer_handle.clone(),
4654 cx.clone(),
4655 )
4656 .await?;
4657 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
4658 let response = this
4659 .update(&mut cx, |this, cx| {
4660 this.request_lsp(buffer_handle, request, cx)
4661 })
4662 .await?;
4663 this.update(&mut cx, |this, cx| {
4664 Ok(T::response_to_proto(
4665 response,
4666 this,
4667 sender_id,
4668 &buffer_version,
4669 cx,
4670 ))
4671 })
4672 }
4673
4674 async fn handle_get_project_symbols(
4675 this: ModelHandle<Self>,
4676 envelope: TypedEnvelope<proto::GetProjectSymbols>,
4677 _: Arc<Client>,
4678 mut cx: AsyncAppContext,
4679 ) -> Result<proto::GetProjectSymbolsResponse> {
4680 let symbols = this
4681 .update(&mut cx, |this, cx| {
4682 this.symbols(&envelope.payload.query, cx)
4683 })
4684 .await?;
4685
4686 Ok(proto::GetProjectSymbolsResponse {
4687 symbols: symbols.iter().map(serialize_symbol).collect(),
4688 })
4689 }
4690
4691 async fn handle_search_project(
4692 this: ModelHandle<Self>,
4693 envelope: TypedEnvelope<proto::SearchProject>,
4694 _: Arc<Client>,
4695 mut cx: AsyncAppContext,
4696 ) -> Result<proto::SearchProjectResponse> {
4697 let peer_id = envelope.original_sender_id()?;
4698 let query = SearchQuery::from_proto(envelope.payload)?;
4699 let result = this
4700 .update(&mut cx, |this, cx| this.search(query, cx))
4701 .await?;
4702
4703 this.update(&mut cx, |this, cx| {
4704 let mut locations = Vec::new();
4705 for (buffer, ranges) in result {
4706 for range in ranges {
4707 let start = serialize_anchor(&range.start);
4708 let end = serialize_anchor(&range.end);
4709 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
4710 locations.push(proto::Location {
4711 buffer: Some(buffer),
4712 start: Some(start),
4713 end: Some(end),
4714 });
4715 }
4716 }
4717 Ok(proto::SearchProjectResponse { locations })
4718 })
4719 }
4720
4721 async fn handle_open_buffer_for_symbol(
4722 this: ModelHandle<Self>,
4723 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
4724 _: Arc<Client>,
4725 mut cx: AsyncAppContext,
4726 ) -> Result<proto::OpenBufferForSymbolResponse> {
4727 let peer_id = envelope.original_sender_id()?;
4728 let symbol = envelope
4729 .payload
4730 .symbol
4731 .ok_or_else(|| anyhow!("invalid symbol"))?;
4732 let symbol = this.read_with(&cx, |this, _| {
4733 let symbol = this.deserialize_symbol(symbol)?;
4734 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
4735 if signature == symbol.signature {
4736 Ok(symbol)
4737 } else {
4738 Err(anyhow!("invalid symbol signature"))
4739 }
4740 })?;
4741 let buffer = this
4742 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
4743 .await?;
4744
4745 Ok(proto::OpenBufferForSymbolResponse {
4746 buffer: Some(this.update(&mut cx, |this, cx| {
4747 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
4748 })),
4749 })
4750 }
4751
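    /// Computes a SHA-256 digest over a worktree id, a path, and this
    /// project's nonce. `handle_open_buffer_for_symbol` compares this digest
    /// with the signature embedded in an incoming symbol to check that the
    /// symbol was originally produced by this project.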
4752 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
4753 let mut hasher = Sha256::new();
4754 hasher.update(worktree_id.to_proto().to_be_bytes());
4755 hasher.update(path.to_string_lossy().as_bytes());
4756 hasher.update(self.nonce.to_be_bytes());
4757 hasher.finalize().as_slice().try_into().unwrap()
4758 }
4759
4760 async fn handle_open_buffer_by_id(
4761 this: ModelHandle<Self>,
4762 envelope: TypedEnvelope<proto::OpenBufferById>,
4763 _: Arc<Client>,
4764 mut cx: AsyncAppContext,
4765 ) -> Result<proto::OpenBufferResponse> {
4766 let peer_id = envelope.original_sender_id()?;
4767 let buffer = this
4768 .update(&mut cx, |this, cx| {
4769 this.open_buffer_by_id(envelope.payload.id, cx)
4770 })
4771 .await?;
4772 this.update(&mut cx, |this, cx| {
4773 Ok(proto::OpenBufferResponse {
4774 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4775 })
4776 })
4777 }
4778
4779 async fn handle_open_buffer_by_path(
4780 this: ModelHandle<Self>,
4781 envelope: TypedEnvelope<proto::OpenBufferByPath>,
4782 _: Arc<Client>,
4783 mut cx: AsyncAppContext,
4784 ) -> Result<proto::OpenBufferResponse> {
4785 let peer_id = envelope.original_sender_id()?;
4786 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
4787 let open_buffer = this.update(&mut cx, |this, cx| {
4788 this.open_buffer(
4789 ProjectPath {
4790 worktree_id,
4791 path: PathBuf::from(envelope.payload.path).into(),
4792 },
4793 cx,
4794 )
4795 });
4796
4797 let buffer = open_buffer.await?;
4798 this.update(&mut cx, |this, cx| {
4799 Ok(proto::OpenBufferResponse {
4800 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
4801 })
4802 })
4803 }
4804
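    /// Converts a `ProjectTransaction` into its protobuf form for a particular
    /// peer, serializing each affected buffer alongside its transaction.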
4805 fn serialize_project_transaction_for_peer(
4806 &mut self,
4807 project_transaction: ProjectTransaction,
4808 peer_id: PeerId,
4809 cx: &AppContext,
4810 ) -> proto::ProjectTransaction {
4811 let mut serialized_transaction = proto::ProjectTransaction {
4812 buffers: Default::default(),
4813 transactions: Default::default(),
4814 };
4815 for (buffer, transaction) in project_transaction.0 {
4816 serialized_transaction
4817 .buffers
4818 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
4819 serialized_transaction
4820 .transactions
4821 .push(language::proto::serialize_transaction(&transaction));
4822 }
4823 serialized_transaction
4824 }
4825
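    /// Rebuilds a `ProjectTransaction` received from a peer: resolves every
    /// serialized buffer, waits for the edits referenced by each transaction
    /// to arrive, and optionally pushes the transactions onto the buffers'
    /// undo histories.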
4826 fn deserialize_project_transaction(
4827 &mut self,
4828 message: proto::ProjectTransaction,
4829 push_to_history: bool,
4830 cx: &mut ModelContext<Self>,
4831 ) -> Task<Result<ProjectTransaction>> {
4832 cx.spawn(|this, mut cx| async move {
4833 let mut project_transaction = ProjectTransaction::default();
4834 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
4835 let buffer = this
4836 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
4837 .await?;
4838 let transaction = language::proto::deserialize_transaction(transaction)?;
4839 project_transaction.0.insert(buffer, transaction);
4840 }
4841
4842 for (buffer, transaction) in &project_transaction.0 {
4843 buffer
4844 .update(&mut cx, |buffer, _| {
4845 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
4846 })
4847 .await;
4848
4849 if push_to_history {
4850 buffer.update(&mut cx, |buffer, _| {
4851 buffer.push_transaction(transaction.clone(), Instant::now());
4852 });
4853 }
4854 }
4855
4856 Ok(project_transaction)
4857 })
4858 }
4859
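    /// Serializes a buffer for a peer, sending the full buffer state the first
    /// time a given buffer is shared with that peer and only its id thereafter.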
4860 fn serialize_buffer_for_peer(
4861 &mut self,
4862 buffer: &ModelHandle<Buffer>,
4863 peer_id: PeerId,
4864 cx: &AppContext,
4865 ) -> proto::Buffer {
4866 let buffer_id = buffer.read(cx).remote_id();
4867 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
4868 if shared_buffers.insert(buffer_id) {
4869 proto::Buffer {
4870 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
4871 }
4872 } else {
4873 proto::Buffer {
4874 variant: Some(proto::buffer::Variant::Id(buffer_id)),
4875 }
4876 }
4877 }
4878
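    /// Resolves a serialized buffer received from a peer. A bare id is awaited
    /// until the corresponding buffer is opened locally, while a full state
    /// payload is instantiated as a new buffer model and registered with the
    /// project.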
4879 fn deserialize_buffer(
4880 &mut self,
4881 buffer: proto::Buffer,
4882 cx: &mut ModelContext<Self>,
4883 ) -> Task<Result<ModelHandle<Buffer>>> {
4884 let replica_id = self.replica_id();
4885
4886 let opened_buffer_tx = self.opened_buffer.0.clone();
4887 let mut opened_buffer_rx = self.opened_buffer.1.clone();
4888 cx.spawn(|this, mut cx| async move {
4889 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
4890 proto::buffer::Variant::Id(id) => {
4891 let buffer = loop {
4892 let buffer = this.read_with(&cx, |this, cx| {
4893 this.opened_buffers
4894 .get(&id)
4895 .and_then(|buffer| buffer.upgrade(cx))
4896 });
4897 if let Some(buffer) = buffer {
4898 break buffer;
4899 }
4900 opened_buffer_rx
4901 .next()
4902 .await
4903 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
4904 };
4905 Ok(buffer)
4906 }
4907 proto::buffer::Variant::State(mut buffer) => {
4908 let mut buffer_worktree = None;
4909 let mut buffer_file = None;
4910 if let Some(file) = buffer.file.take() {
4911 this.read_with(&cx, |this, cx| {
4912 let worktree_id = WorktreeId::from_proto(file.worktree_id);
4913 let worktree =
4914 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
4915 anyhow!("no worktree found for id {}", file.worktree_id)
4916 })?;
4917 buffer_file =
4918 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
4919 as Box<dyn language::File>);
4920 buffer_worktree = Some(worktree);
4921 Ok::<_, anyhow::Error>(())
4922 })?;
4923 }
4924
4925 let buffer = cx.add_model(|cx| {
4926 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
4927 });
4928
4929 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
4930
4931 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
4932 Ok(buffer)
4933 }
4934 }
4935 })
4936 }
4937
4938 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
4939 let source_worktree_id = WorktreeId::from_proto(serialized_symbol.source_worktree_id);
4940 let worktree_id = WorktreeId::from_proto(serialized_symbol.worktree_id);
4941 let start = serialized_symbol
4942 .start
4943 .ok_or_else(|| anyhow!("invalid start"))?;
4944 let end = serialized_symbol
4945 .end
4946 .ok_or_else(|| anyhow!("invalid end"))?;
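        // The `kind` field is produced by transmuting an `lsp::SymbolKind` in
        // `serialize_symbol`, so transmuting it back is assumed to round-trip.
        // This relies on the two representations staying in sync.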
4947 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
4948 let path = PathBuf::from(serialized_symbol.path);
4949 let language = self.languages.select_language(&path);
4950 Ok(Symbol {
4951 source_worktree_id,
4952 worktree_id,
4953 language_server_name: LanguageServerName(serialized_symbol.language_server_name.into()),
4954 label: language
4955 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
4956 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
4957 name: serialized_symbol.name,
4958 path,
4959 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
4960 kind,
4961 signature: serialized_symbol
4962 .signature
4963 .try_into()
4964 .map_err(|_| anyhow!("invalid signature"))?,
4965 })
4966 }
4967
4968 async fn handle_buffer_saved(
4969 this: ModelHandle<Self>,
4970 envelope: TypedEnvelope<proto::BufferSaved>,
4971 _: Arc<Client>,
4972 mut cx: AsyncAppContext,
4973 ) -> Result<()> {
4974 let version = deserialize_version(envelope.payload.version);
4975 let mtime = envelope
4976 .payload
4977 .mtime
4978 .ok_or_else(|| anyhow!("missing mtime"))?
4979 .into();
4980
4981 this.update(&mut cx, |this, cx| {
4982 let buffer = this
4983 .opened_buffers
4984 .get(&envelope.payload.buffer_id)
4985 .and_then(|buffer| buffer.upgrade(cx));
4986 if let Some(buffer) = buffer {
4987 buffer.update(cx, |buffer, cx| {
4988 buffer.did_save(version, mtime, None, cx);
4989 });
4990 }
4991 Ok(())
4992 })
4993 }
4994
4995 async fn handle_buffer_reloaded(
4996 this: ModelHandle<Self>,
4997 envelope: TypedEnvelope<proto::BufferReloaded>,
4998 _: Arc<Client>,
4999 mut cx: AsyncAppContext,
5000 ) -> Result<()> {
5001 let payload = envelope.payload.clone();
5002 let version = deserialize_version(payload.version);
5003 let mtime = payload
5004 .mtime
5005 .ok_or_else(|| anyhow!("missing mtime"))?
5006 .into();
5007 this.update(&mut cx, |this, cx| {
5008 let buffer = this
5009 .opened_buffers
5010 .get(&payload.buffer_id)
5011 .and_then(|buffer| buffer.upgrade(cx));
5012 if let Some(buffer) = buffer {
5013 buffer.update(cx, |buffer, cx| {
5014 buffer.did_reload(version, mtime, cx);
5015 });
5016 }
5017 Ok(())
5018 })
5019 }
5020
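    /// Fuzzy-matches `query` against the paths of all visible worktrees on the
    /// background executor, returning at most `max_results` matches. Worktree
    /// root names are included in the candidates only when more than one
    /// worktree is visible.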
5021 pub fn match_paths<'a>(
5022 &self,
5023 query: &'a str,
5024 include_ignored: bool,
5025 smart_case: bool,
5026 max_results: usize,
5027 cancel_flag: &'a AtomicBool,
5028 cx: &AppContext,
5029 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
5030 let worktrees = self
5031 .worktrees(cx)
5032 .filter(|worktree| worktree.read(cx).is_visible())
5033 .collect::<Vec<_>>();
5034 let include_root_name = worktrees.len() > 1;
5035 let candidate_sets = worktrees
5036 .into_iter()
5037 .map(|worktree| CandidateSet {
5038 snapshot: worktree.read(cx).snapshot(),
5039 include_ignored,
5040 include_root_name,
5041 })
5042 .collect::<Vec<_>>();
5043
5044 let background = cx.background().clone();
5045 async move {
5046 fuzzy::match_paths(
5047 candidate_sets.as_slice(),
5048 query,
5049 smart_case,
5050 max_results,
5051 cancel_flag,
5052 background,
5053 )
5054 .await
5055 }
5056 }
5057
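    /// Converts LSP text edits into anchor ranges against the buffer snapshot
    /// recorded for `version` (or the current snapshot when no version is
    /// given), merging adjacent edits and diffing multi-line replacements so
    /// that anchors in unchanged regions keep their positions.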
5058 fn edits_from_lsp(
5059 &mut self,
5060 buffer: &ModelHandle<Buffer>,
5061 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
5062 version: Option<i32>,
5063 cx: &mut ModelContext<Self>,
5064 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
5065 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
5066 cx.background().spawn(async move {
5067 let snapshot = snapshot?;
5068 let mut lsp_edits = lsp_edits
5069 .into_iter()
5070 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
5071 .peekable();
5072
5073 let mut edits = Vec::new();
5074 while let Some((mut range, mut new_text)) = lsp_edits.next() {
5075 // Combine any LSP edits that are adjacent.
5076 //
5077 // Also, combine LSP edits that are separated from each other by only
5078 // a newline. This is important because for some code actions,
5079 // Rust-analyzer rewrites the entire buffer via a series of edits that
5080 // are separated by unchanged newline characters.
5081 //
5082 // In order for the diffing logic below to work properly, any edits that
5083 // cancel each other out must be combined into one.
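                // For example, an edit that ends at the end of one line and a
                // second edit that starts at column zero of the very next line
                // are merged into a single edit, with the intervening newline
                // re-inserted into the combined replacement text.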
5084 while let Some((next_range, next_text)) = lsp_edits.peek() {
5085 if next_range.start > range.end {
5086 if next_range.start.row > range.end.row + 1
5087 || next_range.start.column > 0
5088 || snapshot.clip_point_utf16(
5089 PointUtf16::new(range.end.row, u32::MAX),
5090 Bias::Left,
5091 ) > range.end
5092 {
5093 break;
5094 }
5095 new_text.push('\n');
5096 }
5097 range.end = next_range.end;
5098 new_text.push_str(&next_text);
5099 lsp_edits.next();
5100 }
5101
5102 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
5103 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
5104 {
5105 return Err(anyhow!("invalid edits received from language server"));
5106 }
5107
5108 // For multiline edits, perform a diff of the old and new text so that
5109 // we can identify the changes more precisely, preserving the locations
5110 // of any anchors positioned in the unchanged regions.
5111 if range.end.row > range.start.row {
5112 let mut offset = range.start.to_offset(&snapshot);
5113 let old_text = snapshot.text_for_range(range).collect::<String>();
5114
5115 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
5116 let mut moved_since_edit = true;
5117 for change in diff.iter_all_changes() {
5118 let tag = change.tag();
5119 let value = change.value();
5120 match tag {
5121 ChangeTag::Equal => {
5122 offset += value.len();
5123 moved_since_edit = true;
5124 }
5125 ChangeTag::Delete => {
5126 let start = snapshot.anchor_after(offset);
5127 let end = snapshot.anchor_before(offset + value.len());
5128 if moved_since_edit {
5129 edits.push((start..end, String::new()));
5130 } else {
5131 edits.last_mut().unwrap().0.end = end;
5132 }
5133 offset += value.len();
5134 moved_since_edit = false;
5135 }
5136 ChangeTag::Insert => {
5137 if moved_since_edit {
5138 let anchor = snapshot.anchor_after(offset);
5139 edits.push((anchor.clone()..anchor, value.to_string()));
5140 } else {
5141 edits.last_mut().unwrap().1.push_str(value);
5142 }
5143 moved_since_edit = false;
5144 }
5145 }
5146 }
5147 } else if range.end == range.start {
5148 let anchor = snapshot.anchor_after(range.start);
5149 edits.push((anchor.clone()..anchor, new_text));
5150 } else {
5151 let edit_start = snapshot.anchor_after(range.start);
5152 let edit_end = snapshot.anchor_before(range.end);
5153 edits.push((edit_start..edit_end, new_text));
5154 }
5155 }
5156
5157 Ok(edits)
5158 })
5159 }
5160
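    /// Returns the stored text snapshot for the given LSP document version,
    /// discarding snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older
    /// than the one requested. With no version, the buffer's current snapshot
    /// is returned.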
5161 fn buffer_snapshot_for_lsp_version(
5162 &mut self,
5163 buffer: &ModelHandle<Buffer>,
5164 version: Option<i32>,
5165 cx: &AppContext,
5166 ) -> Result<TextBufferSnapshot> {
5167 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
5168
5169 if let Some(version) = version {
5170 let buffer_id = buffer.read(cx).remote_id();
5171 let snapshots = self
5172 .buffer_snapshots
5173 .get_mut(&buffer_id)
5174 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
5175 let mut found_snapshot = None;
5176 snapshots.retain(|(snapshot_version, snapshot)| {
5177 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
5178 false
5179 } else {
5180 if *snapshot_version == version {
5181 found_snapshot = Some(snapshot.clone());
5182 }
5183 true
5184 }
5185 });
5186
5187 found_snapshot.ok_or_else(|| {
5188 anyhow!(
5189 "snapshot not found for buffer {} at version {}",
5190 buffer_id,
5191 version
5192 )
5193 })
5194 } else {
            Ok(buffer.read(cx).text_snapshot())
5196 }
5197 }
5198
5199 fn language_server_for_buffer(
5200 &self,
5201 buffer: &Buffer,
5202 cx: &AppContext,
5203 ) -> Option<&(Arc<dyn LspAdapter>, Arc<LanguageServer>)> {
5204 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
5205 let worktree_id = file.worktree_id(cx);
5206 self.language_servers
5207 .get(&(worktree_id, language.lsp_adapter()?.name()))
5208 } else {
5209 None
5210 }
5211 }
5212}
5213
5214impl ProjectStore {
5215 pub fn projects<'a>(
5216 &'a self,
5217 cx: &'a AppContext,
5218 ) -> impl 'a + Iterator<Item = ModelHandle<Project>> {
5219 self.projects
5220 .iter()
5221 .filter_map(|project| project.upgrade(cx))
5222 }
5223
5224 fn add_project(&mut self, project: WeakModelHandle<Project>, cx: &mut ModelContext<Self>) {
5225 if let Err(ix) = self
5226 .projects
5227 .binary_search_by_key(&project.id(), WeakModelHandle::id)
5228 {
5229 self.projects.insert(ix, project);
5230 }
5231 cx.notify();
5232 }
5233
5234 fn prune_projects(&mut self, cx: &mut ModelContext<Self>) {
5235 let mut did_change = false;
5236 self.projects.retain(|project| {
5237 if project.is_upgradable(cx) {
5238 true
5239 } else {
5240 did_change = true;
5241 false
5242 }
5243 });
5244 if did_change {
5245 cx.notify();
5246 }
5247 }
5248}
5249
5250impl WorktreeHandle {
5251 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
5252 match self {
5253 WorktreeHandle::Strong(handle) => Some(handle.clone()),
5254 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
5255 }
5256 }
5257}
5258
5259impl OpenBuffer {
5260 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
5261 match self {
5262 OpenBuffer::Strong(handle) => Some(handle.clone()),
5263 OpenBuffer::Weak(handle) => handle.upgrade(cx),
5264 OpenBuffer::Loading(_) => None,
5265 }
5266 }
5267}
5268
5269struct CandidateSet {
5270 snapshot: Snapshot,
5271 include_ignored: bool,
5272 include_root_name: bool,
5273}
5274
5275impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
5276 type Candidates = CandidateSetIter<'a>;
5277
5278 fn id(&self) -> usize {
5279 self.snapshot.id().to_usize()
5280 }
5281
5282 fn len(&self) -> usize {
5283 if self.include_ignored {
5284 self.snapshot.file_count()
5285 } else {
5286 self.snapshot.visible_file_count()
5287 }
5288 }
5289
5290 fn prefix(&self) -> Arc<str> {
5291 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
5292 self.snapshot.root_name().into()
5293 } else if self.include_root_name {
5294 format!("{}/", self.snapshot.root_name()).into()
5295 } else {
5296 "".into()
5297 }
5298 }
5299
5300 fn candidates(&'a self, start: usize) -> Self::Candidates {
5301 CandidateSetIter {
5302 traversal: self.snapshot.files(self.include_ignored, start),
5303 }
5304 }
5305}
5306
5307struct CandidateSetIter<'a> {
5308 traversal: Traversal<'a>,
5309}
5310
5311impl<'a> Iterator for CandidateSetIter<'a> {
5312 type Item = PathMatchCandidate<'a>;
5313
5314 fn next(&mut self) -> Option<Self::Item> {
5315 self.traversal.next().map(|entry| {
5316 if let EntryKind::File(char_bag) = entry.kind {
5317 PathMatchCandidate {
5318 path: &entry.path,
5319 char_bag,
5320 }
5321 } else {
5322 unreachable!()
5323 }
5324 })
5325 }
5326}
5327
5328impl Entity for ProjectStore {
5329 type Event = ();
5330}
5331
5332impl Entity for Project {
5333 type Event = Event;
5334
5335 fn release(&mut self, cx: &mut gpui::MutableAppContext) {
5336 self.project_store.update(cx, ProjectStore::prune_projects);
5337
5338 match &self.client_state {
5339 ProjectClientState::Local { remote_id_rx, .. } => {
5340 if let Some(project_id) = *remote_id_rx.borrow() {
5341 self.client
5342 .send(proto::UnregisterProject { project_id })
5343 .log_err();
5344 }
5345 }
5346 ProjectClientState::Remote { remote_id, .. } => {
5347 self.client
5348 .send(proto::LeaveProject {
5349 project_id: *remote_id,
5350 })
5351 .log_err();
5352 }
5353 }
5354 }
5355
5356 fn app_will_quit(
5357 &mut self,
5358 _: &mut MutableAppContext,
5359 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
5360 let shutdown_futures = self
5361 .language_servers
5362 .drain()
5363 .filter_map(|(_, (_, server))| server.shutdown())
5364 .collect::<Vec<_>>();
5365 Some(
5366 async move {
5367 futures::future::join_all(shutdown_futures).await;
5368 }
5369 .boxed(),
5370 )
5371 }
5372}
5373
5374impl Collaborator {
5375 fn from_proto(
5376 message: proto::Collaborator,
5377 user_store: &ModelHandle<UserStore>,
5378 cx: &mut AsyncAppContext,
5379 ) -> impl Future<Output = Result<Self>> {
5380 let user = user_store.update(cx, |user_store, cx| {
5381 user_store.fetch_user(message.user_id, cx)
5382 });
5383
5384 async move {
5385 Ok(Self {
5386 peer_id: PeerId(message.peer_id),
5387 user: user.await?,
5388 replica_id: message.replica_id as ReplicaId,
5389 })
5390 }
5391 }
5392}
5393
5394impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
5395 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
5396 Self {
5397 worktree_id,
5398 path: path.as_ref().into(),
5399 }
5400 }
5401}
5402
5403impl From<lsp::CreateFileOptions> for fs::CreateOptions {
5404 fn from(options: lsp::CreateFileOptions) -> Self {
5405 Self {
5406 overwrite: options.overwrite.unwrap_or(false),
5407 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5408 }
5409 }
5410}
5411
5412impl From<lsp::RenameFileOptions> for fs::RenameOptions {
5413 fn from(options: lsp::RenameFileOptions) -> Self {
5414 Self {
5415 overwrite: options.overwrite.unwrap_or(false),
5416 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
5417 }
5418 }
5419}
5420
5421impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
5422 fn from(options: lsp::DeleteFileOptions) -> Self {
5423 Self {
5424 recursive: options.recursive.unwrap_or(false),
5425 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
5426 }
5427 }
5428}
5429
5430fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
5431 proto::Symbol {
5432 source_worktree_id: symbol.source_worktree_id.to_proto(),
5433 worktree_id: symbol.worktree_id.to_proto(),
5434 language_server_name: symbol.language_server_name.0.to_string(),
5435 name: symbol.name.clone(),
5436 kind: unsafe { mem::transmute(symbol.kind) },
5437 path: symbol.path.to_string_lossy().to_string(),
5438 start: Some(proto::Point {
5439 row: symbol.range.start.row,
5440 column: symbol.range.start.column,
5441 }),
5442 end: Some(proto::Point {
5443 row: symbol.range.end.row,
5444 column: symbol.range.end.column,
5445 }),
5446 signature: symbol.signature.to_vec(),
5447 }
5448}
5449
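/// Computes `path` relative to `base`, emitting a `..` component for every
/// trailing component of `base` that `path` does not share.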
5450fn relativize_path(base: &Path, path: &Path) -> PathBuf {
5451 let mut path_components = path.components();
5452 let mut base_components = base.components();
5453 let mut components: Vec<Component> = Vec::new();
5454 loop {
5455 match (path_components.next(), base_components.next()) {
5456 (None, None) => break,
5457 (Some(a), None) => {
5458 components.push(a);
5459 components.extend(path_components.by_ref());
5460 break;
5461 }
5462 (None, _) => components.push(Component::ParentDir),
5463 (Some(a), Some(b)) if components.is_empty() && a == b => (),
5464 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
5465 (Some(a), Some(_)) => {
5466 components.push(Component::ParentDir);
5467 for _ in base_components {
5468 components.push(Component::ParentDir);
5469 }
5470 components.push(a);
5471 components.extend(path_components.by_ref());
5472 break;
5473 }
5474 }
5475 }
5476 components.iter().map(|c| c.as_os_str()).collect()
5477}
5478
5479impl Item for Buffer {
5480 fn entry_id(&self, cx: &AppContext) -> Option<ProjectEntryId> {
5481 File::from_dyn(self.file()).and_then(|file| file.project_entry_id(cx))
5482 }
5483}
5484
5485#[cfg(test)]
5486mod tests {
5487 use crate::worktree::WorktreeHandle;
5488
5489 use super::{Event, *};
5490 use fs::RealFs;
5491 use futures::{future, StreamExt};
5492 use gpui::test::subscribe;
5493 use language::{
5494 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
5495 OffsetRangeExt, Point, ToPoint,
5496 };
5497 use lsp::Url;
5498 use serde_json::json;
5499 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc, task::Poll};
5500 use unindent::Unindent as _;
5501 use util::{assert_set_eq, test::temp_tree};
5502
5503 #[gpui::test]
5504 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
5505 let dir = temp_tree(json!({
5506 "root": {
5507 "apple": "",
5508 "banana": {
5509 "carrot": {
5510 "date": "",
5511 "endive": "",
5512 }
5513 },
5514 "fennel": {
5515 "grape": "",
5516 }
5517 }
5518 }));
5519
5520 let root_link_path = dir.path().join("root_link");
5521 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
5522 unix::fs::symlink(
5523 &dir.path().join("root/fennel"),
5524 &dir.path().join("root/finnochio"),
5525 )
5526 .unwrap();
5527
5528 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
5529
5530 project.read_with(cx, |project, cx| {
5531 let tree = project.worktrees(cx).next().unwrap().read(cx);
5532 assert_eq!(tree.file_count(), 5);
5533 assert_eq!(
5534 tree.inode_for_path("fennel/grape"),
5535 tree.inode_for_path("finnochio/grape")
5536 );
5537 });
5538
5539 let cancel_flag = Default::default();
5540 let results = project
5541 .read_with(cx, |project, cx| {
5542 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
5543 })
5544 .await;
5545 assert_eq!(
5546 results
5547 .into_iter()
5548 .map(|result| result.path)
5549 .collect::<Vec<Arc<Path>>>(),
5550 vec![
5551 PathBuf::from("banana/carrot/date").into(),
5552 PathBuf::from("banana/carrot/endive").into(),
5553 ]
5554 );
5555 }
5556
5557 #[gpui::test]
5558 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
5559 cx.foreground().forbid_parking();
5560
5561 let mut rust_language = Language::new(
5562 LanguageConfig {
5563 name: "Rust".into(),
5564 path_suffixes: vec!["rs".to_string()],
5565 ..Default::default()
5566 },
5567 Some(tree_sitter_rust::language()),
5568 );
5569 let mut json_language = Language::new(
5570 LanguageConfig {
5571 name: "JSON".into(),
5572 path_suffixes: vec!["json".to_string()],
5573 ..Default::default()
5574 },
5575 None,
5576 );
5577 let mut fake_rust_servers = rust_language.set_fake_lsp_adapter(FakeLspAdapter {
5578 name: "the-rust-language-server",
5579 capabilities: lsp::ServerCapabilities {
5580 completion_provider: Some(lsp::CompletionOptions {
5581 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
5582 ..Default::default()
5583 }),
5584 ..Default::default()
5585 },
5586 ..Default::default()
5587 });
5588 let mut fake_json_servers = json_language.set_fake_lsp_adapter(FakeLspAdapter {
5589 name: "the-json-language-server",
5590 capabilities: lsp::ServerCapabilities {
5591 completion_provider: Some(lsp::CompletionOptions {
5592 trigger_characters: Some(vec![":".to_string()]),
5593 ..Default::default()
5594 }),
5595 ..Default::default()
5596 },
5597 ..Default::default()
5598 });
5599
5600 let fs = FakeFs::new(cx.background());
5601 fs.insert_tree(
5602 "/the-root",
5603 json!({
5604 "test.rs": "const A: i32 = 1;",
5605 "test2.rs": "",
5606 "Cargo.toml": "a = 1",
5607 "package.json": "{\"a\": 1}",
5608 }),
5609 )
5610 .await;
5611
5612 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
5613 project.update(cx, |project, _| {
5614 project.languages.add(Arc::new(rust_language));
5615 project.languages.add(Arc::new(json_language));
5616 });
5617
5618 // Open a buffer without an associated language server.
5619 let toml_buffer = project
5620 .update(cx, |project, cx| {
5621 project.open_local_buffer("/the-root/Cargo.toml", cx)
5622 })
5623 .await
5624 .unwrap();
5625
5626 // Open a buffer with an associated language server.
5627 let rust_buffer = project
5628 .update(cx, |project, cx| {
5629 project.open_local_buffer("/the-root/test.rs", cx)
5630 })
5631 .await
5632 .unwrap();
5633
5634 // A server is started up, and it is notified about Rust files.
5635 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5636 assert_eq!(
5637 fake_rust_server
5638 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5639 .await
5640 .text_document,
5641 lsp::TextDocumentItem {
5642 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5643 version: 0,
5644 text: "const A: i32 = 1;".to_string(),
5645 language_id: Default::default()
5646 }
5647 );
5648
5649 // The buffer is configured based on the language server's capabilities.
5650 rust_buffer.read_with(cx, |buffer, _| {
5651 assert_eq!(
5652 buffer.completion_triggers(),
5653 &[".".to_string(), "::".to_string()]
5654 );
5655 });
5656 toml_buffer.read_with(cx, |buffer, _| {
5657 assert!(buffer.completion_triggers().is_empty());
5658 });
5659
5660 // Edit a buffer. The changes are reported to the language server.
5661 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], cx));
5662 assert_eq!(
5663 fake_rust_server
5664 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5665 .await
5666 .text_document,
5667 lsp::VersionedTextDocumentIdentifier::new(
5668 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5669 1
5670 )
5671 );
5672
5673 // Open a third buffer with a different associated language server.
5674 let json_buffer = project
5675 .update(cx, |project, cx| {
5676 project.open_local_buffer("/the-root/package.json", cx)
5677 })
5678 .await
5679 .unwrap();
5680
        // A JSON language server is started up and is notified only about the JSON buffer.
5682 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5683 assert_eq!(
5684 fake_json_server
5685 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5686 .await
5687 .text_document,
5688 lsp::TextDocumentItem {
5689 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5690 version: 0,
5691 text: "{\"a\": 1}".to_string(),
5692 language_id: Default::default()
5693 }
5694 );
5695
5696 // This buffer is configured based on the second language server's
5697 // capabilities.
5698 json_buffer.read_with(cx, |buffer, _| {
5699 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
5700 });
5701
5702 // When opening another buffer whose language server is already running,
5703 // it is also configured based on the existing language server's capabilities.
5704 let rust_buffer2 = project
5705 .update(cx, |project, cx| {
5706 project.open_local_buffer("/the-root/test2.rs", cx)
5707 })
5708 .await
5709 .unwrap();
5710 rust_buffer2.read_with(cx, |buffer, _| {
5711 assert_eq!(
5712 buffer.completion_triggers(),
5713 &[".".to_string(), "::".to_string()]
5714 );
5715 });
5716
5717 // Changes are reported only to servers matching the buffer's language.
5718 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], cx));
5719 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "let x = 1;")], cx));
5720 assert_eq!(
5721 fake_rust_server
5722 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5723 .await
5724 .text_document,
5725 lsp::VersionedTextDocumentIdentifier::new(
5726 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
5727 1
5728 )
5729 );
5730
5731 // Save notifications are reported to all servers.
5732 toml_buffer
5733 .update(cx, |buffer, cx| buffer.save(cx))
5734 .await
5735 .unwrap();
5736 assert_eq!(
5737 fake_rust_server
5738 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5739 .await
5740 .text_document,
5741 lsp::TextDocumentIdentifier::new(
5742 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5743 )
5744 );
5745 assert_eq!(
5746 fake_json_server
5747 .receive_notification::<lsp::notification::DidSaveTextDocument>()
5748 .await
5749 .text_document,
5750 lsp::TextDocumentIdentifier::new(
5751 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
5752 )
5753 );
5754
5755 // Renames are reported only to servers matching the buffer's language.
5756 fs.rename(
5757 Path::new("/the-root/test2.rs"),
5758 Path::new("/the-root/test3.rs"),
5759 Default::default(),
5760 )
5761 .await
5762 .unwrap();
5763 assert_eq!(
5764 fake_rust_server
5765 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5766 .await
5767 .text_document,
5768 lsp::TextDocumentIdentifier::new(
5769 lsp::Url::from_file_path("/the-root/test2.rs").unwrap()
5770 ),
5771 );
5772 assert_eq!(
5773 fake_rust_server
5774 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5775 .await
5776 .text_document,
5777 lsp::TextDocumentItem {
5778 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5779 version: 0,
5780 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5781 language_id: Default::default()
5782 },
5783 );
5784
5785 rust_buffer2.update(cx, |buffer, cx| {
5786 buffer.update_diagnostics(
5787 DiagnosticSet::from_sorted_entries(
5788 vec![DiagnosticEntry {
5789 diagnostic: Default::default(),
5790 range: Anchor::MIN..Anchor::MAX,
5791 }],
5792 &buffer.snapshot(),
5793 ),
5794 cx,
5795 );
5796 assert_eq!(
5797 buffer
5798 .snapshot()
5799 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5800 .count(),
5801 1
5802 );
5803 });
5804
5805 // When the rename changes the extension of the file, the buffer gets closed on the old
5806 // language server and gets opened on the new one.
5807 fs.rename(
5808 Path::new("/the-root/test3.rs"),
5809 Path::new("/the-root/test3.json"),
5810 Default::default(),
5811 )
5812 .await
5813 .unwrap();
5814 assert_eq!(
5815 fake_rust_server
5816 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5817 .await
5818 .text_document,
5819 lsp::TextDocumentIdentifier::new(
5820 lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
5821 ),
5822 );
5823 assert_eq!(
5824 fake_json_server
5825 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5826 .await
5827 .text_document,
5828 lsp::TextDocumentItem {
5829 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5830 version: 0,
5831 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5832 language_id: Default::default()
5833 },
5834 );
5835
5836        // The diagnostics are cleared, since the buffer's language has changed.
5837 rust_buffer2.read_with(cx, |buffer, _| {
5838 assert_eq!(
5839 buffer
5840 .snapshot()
5841 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
5842 .count(),
5843 0
5844 );
5845 });
5846
5847        // The renamed file's version resets after switching to a different language server.
5848 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], cx));
5849 assert_eq!(
5850 fake_json_server
5851 .receive_notification::<lsp::notification::DidChangeTextDocument>()
5852 .await
5853 .text_document,
5854 lsp::VersionedTextDocumentIdentifier::new(
5855 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5856 1
5857 )
5858 );
5859
5860 // Restart language servers
5861 project.update(cx, |project, cx| {
5862 project.restart_language_servers_for_buffers(
5863 vec![rust_buffer.clone(), json_buffer.clone()],
5864 cx,
5865 );
5866 });
5867
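        // Both of the currently running language servers should receive a shutdown request.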
5868 let mut rust_shutdown_requests = fake_rust_server
5869 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5870 let mut json_shutdown_requests = fake_json_server
5871 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
5872 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
5873
5874 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
5875 let mut fake_json_server = fake_json_servers.next().await.unwrap();
5876
5877        // Ensure the Rust document is reopened in the new Rust language server.
5878 assert_eq!(
5879 fake_rust_server
5880 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5881 .await
5882 .text_document,
5883 lsp::TextDocumentItem {
5884 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
5885 version: 1,
5886 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
5887 language_id: Default::default()
5888 }
5889 );
5890
5891        // Ensure the JSON documents are reopened in the new JSON language server.
5892 assert_set_eq!(
5893 [
5894 fake_json_server
5895 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5896 .await
5897 .text_document,
5898 fake_json_server
5899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5900 .await
5901 .text_document,
5902 ],
5903 [
5904 lsp::TextDocumentItem {
5905 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5906 version: 0,
5907 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
5908 language_id: Default::default()
5909 },
5910 lsp::TextDocumentItem {
5911 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
5912 version: 1,
5913 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
5914 language_id: Default::default()
5915 }
5916 ]
5917 );
5918
5919 // Close notifications are reported only to servers matching the buffer's language.
5920 cx.update(|_| drop(json_buffer));
5921 let close_message = lsp::DidCloseTextDocumentParams {
5922 text_document: lsp::TextDocumentIdentifier::new(
5923 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
5924 ),
5925 };
5926 assert_eq!(
5927 fake_json_server
5928 .receive_notification::<lsp::notification::DidCloseTextDocument>()
5929 .await,
5930 close_message,
5931 );
5932 }
5933
5934 #[gpui::test]
5935 async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
5936 cx.foreground().forbid_parking();
5937
5938 let fs = FakeFs::new(cx.background());
5939 fs.insert_tree(
5940 "/dir",
5941 json!({
5942 "a.rs": "let a = 1;",
5943 "b.rs": "let b = 2;"
5944 }),
5945 )
5946 .await;
5947
5948 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
5949
5950 let buffer_a = project
5951 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
5952 .await
5953 .unwrap();
5954 let buffer_b = project
5955 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
5956 .await
5957 .unwrap();
5958
5959 project.update(cx, |project, cx| {
5960 project
5961 .update_diagnostics(
5962 lsp::PublishDiagnosticsParams {
5963 uri: Url::from_file_path("/dir/a.rs").unwrap(),
5964 version: None,
5965 diagnostics: vec![lsp::Diagnostic {
5966 range: lsp::Range::new(
5967 lsp::Position::new(0, 4),
5968 lsp::Position::new(0, 5),
5969 ),
5970 severity: Some(lsp::DiagnosticSeverity::ERROR),
5971 message: "error 1".to_string(),
5972 ..Default::default()
5973 }],
5974 },
5975 &[],
5976 cx,
5977 )
5978 .unwrap();
5979 project
5980 .update_diagnostics(
5981 lsp::PublishDiagnosticsParams {
5982 uri: Url::from_file_path("/dir/b.rs").unwrap(),
5983 version: None,
5984 diagnostics: vec![lsp::Diagnostic {
5985 range: lsp::Range::new(
5986 lsp::Position::new(0, 4),
5987 lsp::Position::new(0, 5),
5988 ),
5989 severity: Some(lsp::DiagnosticSeverity::WARNING),
5990 message: "error 2".to_string(),
5991 ..Default::default()
5992 }],
5993 },
5994 &[],
5995 cx,
5996 )
5997 .unwrap();
5998 });
5999
6000 buffer_a.read_with(cx, |buffer, _| {
6001 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6002 assert_eq!(
6003 chunks
6004 .iter()
6005 .map(|(s, d)| (s.as_str(), *d))
6006 .collect::<Vec<_>>(),
6007 &[
6008 ("let ", None),
6009 ("a", Some(DiagnosticSeverity::ERROR)),
6010 (" = 1;", None),
6011 ]
6012 );
6013 });
6014 buffer_b.read_with(cx, |buffer, _| {
6015 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6016 assert_eq!(
6017 chunks
6018 .iter()
6019 .map(|(s, d)| (s.as_str(), *d))
6020 .collect::<Vec<_>>(),
6021 &[
6022 ("let ", None),
6023 ("b", Some(DiagnosticSeverity::WARNING)),
6024 (" = 2;", None),
6025 ]
6026 );
6027 });
6028 }
6029
6030 #[gpui::test]
6031 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
6032 cx.foreground().forbid_parking();
6033
6034 let progress_token = "the-progress-token";
6035 let mut language = Language::new(
6036 LanguageConfig {
6037 name: "Rust".into(),
6038 path_suffixes: vec!["rs".to_string()],
6039 ..Default::default()
6040 },
6041 Some(tree_sitter_rust::language()),
6042 );
6043 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6044 disk_based_diagnostics_progress_token: Some(progress_token),
6045 disk_based_diagnostics_sources: &["disk"],
6046 ..Default::default()
6047 });
6048
6049 let fs = FakeFs::new(cx.background());
6050 fs.insert_tree(
6051 "/dir",
6052 json!({
6053 "a.rs": "fn a() { A }",
6054 "b.rs": "const y: i32 = 1",
6055 }),
6056 )
6057 .await;
6058
6059 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6060 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6061 let worktree_id =
6062 project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
6063
6064        // Cause the worktree to start the fake language server.
6065 let _buffer = project
6066 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6067 .await
6068 .unwrap();
6069
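        // Subscribe to project events so the disk-based diagnostics lifecycle can be observed.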
6070 let mut events = subscribe(&project, cx);
6071
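        // The first progress notification for the disk-based token emits a DiskBasedDiagnosticsStarted event.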
6072 let mut fake_server = fake_servers.next().await.unwrap();
6073 fake_server.start_progress(progress_token).await;
6074 assert_eq!(
6075 events.next().await.unwrap(),
6076 Event::DiskBasedDiagnosticsStarted
6077 );
6078
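        // Start additional nested progress with the same token; the disk-based diagnostics are only
        // reported as finished once every start has a matching end (asserted further below).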
6079 fake_server.start_progress(progress_token).await;
6080 fake_server.end_progress(progress_token).await;
6081 fake_server.start_progress(progress_token).await;
6082
6083 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6084 lsp::PublishDiagnosticsParams {
6085 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6086 version: None,
6087 diagnostics: vec![lsp::Diagnostic {
6088 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6089 severity: Some(lsp::DiagnosticSeverity::ERROR),
6090 message: "undefined variable 'A'".to_string(),
6091 ..Default::default()
6092 }],
6093 },
6094 );
6095 assert_eq!(
6096 events.next().await.unwrap(),
6097 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6098 );
6099
6100 fake_server.end_progress(progress_token).await;
6101 fake_server.end_progress(progress_token).await;
6102 assert_eq!(
6103 events.next().await.unwrap(),
6104 Event::DiskBasedDiagnosticsUpdated
6105 );
6106 assert_eq!(
6107 events.next().await.unwrap(),
6108 Event::DiskBasedDiagnosticsFinished
6109 );
6110
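        // Open the buffer whose diagnostics were published and verify that the diagnostic entry was recorded.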
6111 let buffer = project
6112 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
6113 .await
6114 .unwrap();
6115
6116 buffer.read_with(cx, |buffer, _| {
6117 let snapshot = buffer.snapshot();
6118 let diagnostics = snapshot
6119 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6120 .collect::<Vec<_>>();
6121 assert_eq!(
6122 diagnostics,
6123 &[DiagnosticEntry {
6124 range: Point::new(0, 9)..Point::new(0, 10),
6125 diagnostic: Diagnostic {
6126 severity: lsp::DiagnosticSeverity::ERROR,
6127 message: "undefined variable 'A'".to_string(),
6128 group_id: 0,
6129 is_primary: true,
6130 ..Default::default()
6131 }
6132 }]
6133 )
6134 });
6135
6136 // Ensure publishing empty diagnostics twice only results in one update event.
6137 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6138 lsp::PublishDiagnosticsParams {
6139 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6140 version: None,
6141 diagnostics: Default::default(),
6142 },
6143 );
6144 assert_eq!(
6145 events.next().await.unwrap(),
6146 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
6147 );
6148
6149 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6150 lsp::PublishDiagnosticsParams {
6151 uri: Url::from_file_path("/dir/a.rs").unwrap(),
6152 version: None,
6153 diagnostics: Default::default(),
6154 },
6155 );
6156 cx.foreground().run_until_parked();
6157 assert_eq!(futures::poll!(events.next()), Poll::Pending);
6158 }
6159
6160 #[gpui::test]
6161 async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
6162 cx.foreground().forbid_parking();
6163
6164 let progress_token = "the-progress-token";
6165 let mut language = Language::new(
6166 LanguageConfig {
6167 path_suffixes: vec!["rs".to_string()],
6168 ..Default::default()
6169 },
6170 None,
6171 );
6172 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6173 disk_based_diagnostics_sources: &["disk"],
6174 disk_based_diagnostics_progress_token: Some(progress_token),
6175 ..Default::default()
6176 });
6177
6178 let fs = FakeFs::new(cx.background());
6179 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
6180
6181 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6182 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6183
6184 let buffer = project
6185 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6186 .await
6187 .unwrap();
6188
6189 // Simulate diagnostics starting to update.
6190 let mut fake_server = fake_servers.next().await.unwrap();
6191 fake_server.start_progress(progress_token).await;
6192
6193 // Restart the server before the diagnostics finish updating.
6194 project.update(cx, |project, cx| {
6195 project.restart_language_servers_for_buffers([buffer], cx);
6196 });
6197 let mut events = subscribe(&project, cx);
6198
6199 // Simulate the newly started server sending more diagnostics.
6200 let mut fake_server = fake_servers.next().await.unwrap();
6201 fake_server.start_progress(progress_token).await;
6202 assert_eq!(
6203 events.next().await.unwrap(),
6204 Event::DiskBasedDiagnosticsStarted
6205 );
6206
6207 // All diagnostics are considered done, despite the old server's diagnostic
6208 // task never completing.
6209 fake_server.end_progress(progress_token).await;
6210 assert_eq!(
6211 events.next().await.unwrap(),
6212 Event::DiskBasedDiagnosticsUpdated
6213 );
6214 assert_eq!(
6215 events.next().await.unwrap(),
6216 Event::DiskBasedDiagnosticsFinished
6217 );
6218 project.read_with(cx, |project, _| {
6219 assert!(!project.is_running_disk_based_diagnostics());
6220 });
6221 }
6222
6223 #[gpui::test]
6224 async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
6225 cx.foreground().forbid_parking();
6226
6227 let mut language = Language::new(
6228 LanguageConfig {
6229 name: "Rust".into(),
6230 path_suffixes: vec!["rs".to_string()],
6231 ..Default::default()
6232 },
6233 Some(tree_sitter_rust::language()),
6234 );
6235 let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
6236 disk_based_diagnostics_sources: &["disk"],
6237 ..Default::default()
6238 });
6239
6240 let text = "
6241 fn a() { A }
6242 fn b() { BB }
6243 fn c() { CCC }
6244 "
6245 .unindent();
6246
6247 let fs = FakeFs::new(cx.background());
6248 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6249
6250 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6251 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6252
6253 let buffer = project
6254 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6255 .await
6256 .unwrap();
6257
6258 let mut fake_server = fake_servers.next().await.unwrap();
6259 let open_notification = fake_server
6260 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6261 .await;
6262
6263 // Edit the buffer, moving the content down
6264 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], cx));
6265 let change_notification_1 = fake_server
6266 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6267 .await;
6268 assert!(
6269 change_notification_1.text_document.version > open_notification.text_document.version
6270 );
6271
6272 // Report some diagnostics for the initial version of the buffer
6273 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6274 lsp::PublishDiagnosticsParams {
6275 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6276 version: Some(open_notification.text_document.version),
6277 diagnostics: vec![
6278 lsp::Diagnostic {
6279 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6280 severity: Some(DiagnosticSeverity::ERROR),
6281 message: "undefined variable 'A'".to_string(),
6282 source: Some("disk".to_string()),
6283 ..Default::default()
6284 },
6285 lsp::Diagnostic {
6286 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6287 severity: Some(DiagnosticSeverity::ERROR),
6288 message: "undefined variable 'BB'".to_string(),
6289 source: Some("disk".to_string()),
6290 ..Default::default()
6291 },
6292 lsp::Diagnostic {
6293 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
6294 severity: Some(DiagnosticSeverity::ERROR),
6295 source: Some("disk".to_string()),
6296 message: "undefined variable 'CCC'".to_string(),
6297 ..Default::default()
6298 },
6299 ],
6300 },
6301 );
6302
6303 // The diagnostics have moved down since they were created.
6304 buffer.next_notification(cx).await;
6305 buffer.read_with(cx, |buffer, _| {
6306 assert_eq!(
6307 buffer
6308 .snapshot()
6309 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
6310 .collect::<Vec<_>>(),
6311 &[
6312 DiagnosticEntry {
6313 range: Point::new(3, 9)..Point::new(3, 11),
6314 diagnostic: Diagnostic {
6315 severity: DiagnosticSeverity::ERROR,
6316 message: "undefined variable 'BB'".to_string(),
6317 is_disk_based: true,
6318 group_id: 1,
6319 is_primary: true,
6320 ..Default::default()
6321 },
6322 },
6323 DiagnosticEntry {
6324 range: Point::new(4, 9)..Point::new(4, 12),
6325 diagnostic: Diagnostic {
6326 severity: DiagnosticSeverity::ERROR,
6327 message: "undefined variable 'CCC'".to_string(),
6328 is_disk_based: true,
6329 group_id: 2,
6330 is_primary: true,
6331 ..Default::default()
6332 }
6333 }
6334 ]
6335 );
6336 assert_eq!(
6337 chunks_with_diagnostics(buffer, 0..buffer.len()),
6338 [
6339 ("\n\nfn a() { ".to_string(), None),
6340 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6341 (" }\nfn b() { ".to_string(), None),
6342 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
6343 (" }\nfn c() { ".to_string(), None),
6344 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
6345 (" }\n".to_string(), None),
6346 ]
6347 );
6348 assert_eq!(
6349 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
6350 [
6351 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
6352 (" }\nfn c() { ".to_string(), None),
6353 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
6354 ]
6355 );
6356 });
6357
6358 // Ensure overlapping diagnostics are highlighted correctly.
6359 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6360 lsp::PublishDiagnosticsParams {
6361 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6362 version: Some(open_notification.text_document.version),
6363 diagnostics: vec![
6364 lsp::Diagnostic {
6365 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6366 severity: Some(DiagnosticSeverity::ERROR),
6367 message: "undefined variable 'A'".to_string(),
6368 source: Some("disk".to_string()),
6369 ..Default::default()
6370 },
6371 lsp::Diagnostic {
6372 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
6373 severity: Some(DiagnosticSeverity::WARNING),
6374 message: "unreachable statement".to_string(),
6375 source: Some("disk".to_string()),
6376 ..Default::default()
6377 },
6378 ],
6379 },
6380 );
6381
6382 buffer.next_notification(cx).await;
6383 buffer.read_with(cx, |buffer, _| {
6384 assert_eq!(
6385 buffer
6386 .snapshot()
6387 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
6388 .collect::<Vec<_>>(),
6389 &[
6390 DiagnosticEntry {
6391 range: Point::new(2, 9)..Point::new(2, 12),
6392 diagnostic: Diagnostic {
6393 severity: DiagnosticSeverity::WARNING,
6394 message: "unreachable statement".to_string(),
6395 is_disk_based: true,
6396 group_id: 4,
6397 is_primary: true,
6398 ..Default::default()
6399 }
6400 },
6401 DiagnosticEntry {
6402 range: Point::new(2, 9)..Point::new(2, 10),
6403 diagnostic: Diagnostic {
6404 severity: DiagnosticSeverity::ERROR,
6405 message: "undefined variable 'A'".to_string(),
6406 is_disk_based: true,
6407 group_id: 3,
6408 is_primary: true,
6409 ..Default::default()
6410 },
6411 }
6412 ]
6413 );
6414 assert_eq!(
6415 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
6416 [
6417 ("fn a() { ".to_string(), None),
6418 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
6419 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6420 ("\n".to_string(), None),
6421 ]
6422 );
6423 assert_eq!(
6424 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
6425 [
6426 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
6427 ("\n".to_string(), None),
6428 ]
6429 );
6430 });
6431
6432 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
6433 // changes since the last save.
6434 buffer.update(cx, |buffer, cx| {
6435 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], cx);
6436 buffer.edit([(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")], cx);
6437 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], cx);
6438 });
6439 let change_notification_2 = fake_server
6440 .receive_notification::<lsp::notification::DidChangeTextDocument>()
6441 .await;
6442 assert!(
6443 change_notification_2.text_document.version
6444 > change_notification_1.text_document.version
6445 );
6446
6447 // Handle out-of-order diagnostics
6448 fake_server.notify::<lsp::notification::PublishDiagnostics>(
6449 lsp::PublishDiagnosticsParams {
6450 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6451 version: Some(change_notification_2.text_document.version),
6452 diagnostics: vec![
6453 lsp::Diagnostic {
6454 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
6455 severity: Some(DiagnosticSeverity::ERROR),
6456 message: "undefined variable 'BB'".to_string(),
6457 source: Some("disk".to_string()),
6458 ..Default::default()
6459 },
6460 lsp::Diagnostic {
6461 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6462 severity: Some(DiagnosticSeverity::WARNING),
6463 message: "undefined variable 'A'".to_string(),
6464 source: Some("disk".to_string()),
6465 ..Default::default()
6466 },
6467 ],
6468 },
6469 );
6470
6471 buffer.next_notification(cx).await;
6472 buffer.read_with(cx, |buffer, _| {
6473 assert_eq!(
6474 buffer
6475 .snapshot()
6476 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
6477 .collect::<Vec<_>>(),
6478 &[
6479 DiagnosticEntry {
6480 range: Point::new(2, 21)..Point::new(2, 22),
6481 diagnostic: Diagnostic {
6482 severity: DiagnosticSeverity::WARNING,
6483 message: "undefined variable 'A'".to_string(),
6484 is_disk_based: true,
6485 group_id: 6,
6486 is_primary: true,
6487 ..Default::default()
6488 }
6489 },
6490 DiagnosticEntry {
6491 range: Point::new(3, 9)..Point::new(3, 14),
6492 diagnostic: Diagnostic {
6493 severity: DiagnosticSeverity::ERROR,
6494 message: "undefined variable 'BB'".to_string(),
6495 is_disk_based: true,
6496 group_id: 5,
6497 is_primary: true,
6498 ..Default::default()
6499 },
6500 }
6501 ]
6502 );
6503 });
6504 }
6505
6506 #[gpui::test]
6507 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
6508 cx.foreground().forbid_parking();
6509
6510 let text = concat!(
6511 "let one = ;\n", //
6512 "let two = \n",
6513 "let three = 3;\n",
6514 );
6515
6516 let fs = FakeFs::new(cx.background());
6517 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
6518
6519 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6520 let buffer = project
6521 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6522 .await
6523 .unwrap();
6524
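        // Report two zero-length diagnostic ranges: one just before the last character of the first line,
        // and one at the very end of the second line.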
6525 project.update(cx, |project, cx| {
6526 project
6527 .update_buffer_diagnostics(
6528 &buffer,
6529 vec![
6530 DiagnosticEntry {
6531 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
6532 diagnostic: Diagnostic {
6533 severity: DiagnosticSeverity::ERROR,
6534 message: "syntax error 1".to_string(),
6535 ..Default::default()
6536 },
6537 },
6538 DiagnosticEntry {
6539 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
6540 diagnostic: Diagnostic {
6541 severity: DiagnosticSeverity::ERROR,
6542 message: "syntax error 2".to_string(),
6543 ..Default::default()
6544 },
6545 },
6546 ],
6547 None,
6548 cx,
6549 )
6550 .unwrap();
6551 });
6552
6553 // An empty range is extended forward to include the following character.
6554 // At the end of a line, an empty range is extended backward to include
6555 // the preceding character.
6556 buffer.read_with(cx, |buffer, _| {
6557 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
6558 assert_eq!(
6559 chunks
6560 .iter()
6561 .map(|(s, d)| (s.as_str(), *d))
6562 .collect::<Vec<_>>(),
6563 &[
6564 ("let one = ", None),
6565 (";", Some(DiagnosticSeverity::ERROR)),
6566 ("\nlet two =", None),
6567 (" ", Some(DiagnosticSeverity::ERROR)),
6568 ("\nlet three = 3;\n", None)
6569 ]
6570 );
6571 });
6572 }
6573
6574 #[gpui::test]
6575 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
6576 cx.foreground().forbid_parking();
6577
6578 let mut language = Language::new(
6579 LanguageConfig {
6580 name: "Rust".into(),
6581 path_suffixes: vec!["rs".to_string()],
6582 ..Default::default()
6583 },
6584 Some(tree_sitter_rust::language()),
6585 );
6586 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6587
6588 let text = "
6589 fn a() {
6590 f1();
6591 }
6592 fn b() {
6593 f2();
6594 }
6595 fn c() {
6596 f3();
6597 }
6598 "
6599 .unindent();
6600
6601 let fs = FakeFs::new(cx.background());
6602 fs.insert_tree(
6603 "/dir",
6604 json!({
6605 "a.rs": text.clone(),
6606 }),
6607 )
6608 .await;
6609
6610 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6611 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6612 let buffer = project
6613 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6614 .await
6615 .unwrap();
6616
6617 let mut fake_server = fake_servers.next().await.unwrap();
6618 let lsp_document_version = fake_server
6619 .receive_notification::<lsp::notification::DidOpenTextDocument>()
6620 .await
6621 .text_document
6622 .version;
6623
6624 // Simulate editing the buffer after the language server computes some edits.
6625 buffer.update(cx, |buffer, cx| {
6626 buffer.edit(
6627 [(
6628 Point::new(0, 0)..Point::new(0, 0),
6629 "// above first function\n",
6630 )],
6631 cx,
6632 );
6633 buffer.edit(
6634 [(
6635 Point::new(2, 0)..Point::new(2, 0),
6636 " // inside first function\n",
6637 )],
6638 cx,
6639 );
6640 buffer.edit(
6641 [(
6642 Point::new(6, 4)..Point::new(6, 4),
6643 "// inside second function ",
6644 )],
6645 cx,
6646 );
6647
6648 assert_eq!(
6649 buffer.text(),
6650 "
6651 // above first function
6652 fn a() {
6653 // inside first function
6654 f1();
6655 }
6656 fn b() {
6657 // inside second function f2();
6658 }
6659 fn c() {
6660 f3();
6661 }
6662 "
6663 .unindent()
6664 );
6665 });
6666
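        // Convert the LSP edits, which were computed against the document version captured above, into buffer edits.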
6667 let edits = project
6668 .update(cx, |project, cx| {
6669 project.edits_from_lsp(
6670 &buffer,
6671 vec![
6672 // replace body of first function
6673 lsp::TextEdit {
6674 range: lsp::Range::new(
6675 lsp::Position::new(0, 0),
6676 lsp::Position::new(3, 0),
6677 ),
6678 new_text: "
6679 fn a() {
6680 f10();
6681 }
6682 "
6683 .unindent(),
6684 },
6685 // edit inside second function
6686 lsp::TextEdit {
6687 range: lsp::Range::new(
6688 lsp::Position::new(4, 6),
6689 lsp::Position::new(4, 6),
6690 ),
6691 new_text: "00".into(),
6692 },
6693 // edit inside third function via two distinct edits
6694 lsp::TextEdit {
6695 range: lsp::Range::new(
6696 lsp::Position::new(7, 5),
6697 lsp::Position::new(7, 5),
6698 ),
6699 new_text: "4000".into(),
6700 },
6701 lsp::TextEdit {
6702 range: lsp::Range::new(
6703 lsp::Position::new(7, 5),
6704 lsp::Position::new(7, 6),
6705 ),
6706 new_text: "".into(),
6707 },
6708 ],
6709 Some(lsp_document_version),
6710 cx,
6711 )
6712 })
6713 .await
6714 .unwrap();
6715
6716 buffer.update(cx, |buffer, cx| {
6717 for (range, new_text) in edits {
6718 buffer.edit([(range, new_text)], cx);
6719 }
6720 assert_eq!(
6721 buffer.text(),
6722 "
6723 // above first function
6724 fn a() {
6725 // inside first function
6726 f10();
6727 }
6728 fn b() {
6729 // inside second function f200();
6730 }
6731 fn c() {
6732 f4000();
6733 }
6734 "
6735 .unindent()
6736 );
6737 });
6738 }
6739
6740 #[gpui::test]
6741 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
6742 cx.foreground().forbid_parking();
6743
6744 let text = "
6745 use a::b;
6746 use a::c;
6747
6748 fn f() {
6749 b();
6750 c();
6751 }
6752 "
6753 .unindent();
6754
6755 let fs = FakeFs::new(cx.background());
6756 fs.insert_tree(
6757 "/dir",
6758 json!({
6759 "a.rs": text.clone(),
6760 }),
6761 )
6762 .await;
6763
6764 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
6765 let buffer = project
6766 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
6767 .await
6768 .unwrap();
6769
6770 // Simulate the language server sending us a small edit in the form of a very large diff.
6771 // Rust-analyzer does this when performing a merge-imports code action.
6772 let edits = project
6773 .update(cx, |project, cx| {
6774 project.edits_from_lsp(
6775 &buffer,
6776 [
6777 // Replace the first use statement without editing the semicolon.
6778 lsp::TextEdit {
6779 range: lsp::Range::new(
6780 lsp::Position::new(0, 4),
6781 lsp::Position::new(0, 8),
6782 ),
6783 new_text: "a::{b, c}".into(),
6784 },
6785 // Reinsert the remainder of the file between the semicolon and the final
6786 // newline of the file.
6787 lsp::TextEdit {
6788 range: lsp::Range::new(
6789 lsp::Position::new(0, 9),
6790 lsp::Position::new(0, 9),
6791 ),
6792 new_text: "\n\n".into(),
6793 },
6794 lsp::TextEdit {
6795 range: lsp::Range::new(
6796 lsp::Position::new(0, 9),
6797 lsp::Position::new(0, 9),
6798 ),
6799 new_text: "
6800 fn f() {
6801 b();
6802 c();
6803 }"
6804 .unindent(),
6805 },
6806 // Delete everything after the first newline of the file.
6807 lsp::TextEdit {
6808 range: lsp::Range::new(
6809 lsp::Position::new(1, 0),
6810 lsp::Position::new(7, 0),
6811 ),
6812 new_text: "".into(),
6813 },
6814 ],
6815 None,
6816 cx,
6817 )
6818 })
6819 .await
6820 .unwrap();
6821
6822 buffer.update(cx, |buffer, cx| {
6823 let edits = edits
6824 .into_iter()
6825 .map(|(range, text)| {
6826 (
6827 range.start.to_point(&buffer)..range.end.to_point(&buffer),
6828 text,
6829 )
6830 })
6831 .collect::<Vec<_>>();
6832
6833 assert_eq!(
6834 edits,
6835 [
6836 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
6837 (Point::new(1, 0)..Point::new(2, 0), "".into())
6838 ]
6839 );
6840
6841 for (range, new_text) in edits {
6842 buffer.edit([(range, new_text)], cx);
6843 }
6844 assert_eq!(
6845 buffer.text(),
6846 "
6847 use a::{b, c};
6848
6849 fn f() {
6850 b();
6851 c();
6852 }
6853 "
6854 .unindent()
6855 );
6856 });
6857 }
6858
6859 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
6860 buffer: &Buffer,
6861 range: Range<T>,
6862 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
6863 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
6864 for chunk in buffer.snapshot().chunks(range, true) {
6865 if chunks.last().map_or(false, |prev_chunk| {
6866 prev_chunk.1 == chunk.diagnostic_severity
6867 }) {
6868 chunks.last_mut().unwrap().0.push_str(chunk.text);
6869 } else {
6870 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
6871 }
6872 }
6873 chunks
6874 }
6875
6876 #[gpui::test]
6877 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
6878 let dir = temp_tree(json!({
6879 "root": {
6880 "dir1": {},
6881 "dir2": {
6882 "dir3": {}
6883 }
6884 }
6885 }));
6886
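        // Searching for paths in a worktree that contains only directories should yield no matches.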
6887 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
6888 let cancel_flag = Default::default();
6889 let results = project
6890 .read_with(cx, |project, cx| {
6891 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
6892 })
6893 .await;
6894
6895 assert!(results.is_empty());
6896 }
6897
6898 #[gpui::test(iterations = 10)]
6899 async fn test_definition(cx: &mut gpui::TestAppContext) {
6900 let mut language = Language::new(
6901 LanguageConfig {
6902 name: "Rust".into(),
6903 path_suffixes: vec!["rs".to_string()],
6904 ..Default::default()
6905 },
6906 Some(tree_sitter_rust::language()),
6907 );
6908 let mut fake_servers = language.set_fake_lsp_adapter(Default::default());
6909
6910 let fs = FakeFs::new(cx.background());
6911 fs.insert_tree(
6912 "/dir",
6913 json!({
6914 "a.rs": "const fn a() { A }",
6915 "b.rs": "const y: i32 = crate::a()",
6916 }),
6917 )
6918 .await;
6919
6920 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
6921 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
6922
6923 let buffer = project
6924 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
6925 .await
6926 .unwrap();
6927
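        // Respond to the definition request with a location in a file that lies outside of the project's worktree.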
6928 let fake_server = fake_servers.next().await.unwrap();
6929 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
6930 let params = params.text_document_position_params;
6931 assert_eq!(
6932 params.text_document.uri.to_file_path().unwrap(),
6933 Path::new("/dir/b.rs"),
6934 );
6935 assert_eq!(params.position, lsp::Position::new(0, 22));
6936
6937 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
6938 lsp::Location::new(
6939 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
6940 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
6941 ),
6942 )))
6943 });
6944
6945 let mut definitions = project
6946 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
6947 .await
6948 .unwrap();
6949
6950 assert_eq!(definitions.len(), 1);
6951 let definition = definitions.pop().unwrap();
6952 cx.update(|cx| {
6953 let target_buffer = definition.buffer.read(cx);
6954 assert_eq!(
6955 target_buffer
6956 .file()
6957 .unwrap()
6958 .as_local()
6959 .unwrap()
6960 .abs_path(cx),
6961 Path::new("/dir/a.rs"),
6962 );
6963 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
6964 assert_eq!(
6965 list_worktrees(&project, cx),
6966 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
6967 );
6968
6969 drop(definition);
6970 });
6971 cx.read(|cx| {
6972 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
6973 });
6974
6975 fn list_worktrees<'a>(
6976 project: &'a ModelHandle<Project>,
6977 cx: &'a AppContext,
6978 ) -> Vec<(&'a Path, bool)> {
6979 project
6980 .read(cx)
6981 .worktrees(cx)
6982 .map(|worktree| {
6983 let worktree = worktree.read(cx);
6984 (
6985 worktree.as_local().unwrap().abs_path().as_ref(),
6986 worktree.is_visible(),
6987 )
6988 })
6989 .collect::<Vec<_>>()
6990 }
6991 }
6992
6993 #[gpui::test]
6994 async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
6995 let mut language = Language::new(
6996 LanguageConfig {
6997 name: "TypeScript".into(),
6998 path_suffixes: vec!["ts".to_string()],
6999 ..Default::default()
7000 },
7001 Some(tree_sitter_typescript::language_typescript()),
7002 );
7003 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7004
7005 let fs = FakeFs::new(cx.background());
7006 fs.insert_tree(
7007 "/dir",
7008 json!({
7009 "a.ts": "",
7010 }),
7011 )
7012 .await;
7013
7014 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7015 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7016 let buffer = project
7017 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7018 .await
7019 .unwrap();
7020
7021 let fake_server = fake_language_servers.next().await.unwrap();
7022
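        // Type out a partial identifier and request completions at the end of it.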
7023 let text = "let a = b.fqn";
7024 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
7025 let completions = project.update(cx, |project, cx| {
7026 project.completions(&buffer, text.len(), cx)
7027 });
7028
7029 fake_server
7030 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
7031 Ok(Some(lsp::CompletionResponse::Array(vec![
7032 lsp::CompletionItem {
7033 label: "fullyQualifiedName?".into(),
7034 insert_text: Some("fullyQualifiedName".into()),
7035 ..Default::default()
7036 },
7037 ])))
7038 })
7039 .next()
7040 .await;
7041 let completions = completions.await.unwrap();
7042 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7043 assert_eq!(completions.len(), 1);
7044 assert_eq!(completions[0].new_text, "fullyQualifiedName");
7045 assert_eq!(
7046 completions[0].old_range.to_offset(&snapshot),
7047 text.len() - 3..text.len()
7048 );
7049 }
7050
7051 #[gpui::test(iterations = 10)]
7052 async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
7053 let mut language = Language::new(
7054 LanguageConfig {
7055 name: "TypeScript".into(),
7056 path_suffixes: vec!["ts".to_string()],
7057 ..Default::default()
7058 },
7059 None,
7060 );
7061 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default());
7062
7063 let fs = FakeFs::new(cx.background());
7064 fs.insert_tree(
7065 "/dir",
7066 json!({
7067 "a.ts": "a",
7068 }),
7069 )
7070 .await;
7071
7072 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
7073 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
7074 let buffer = project
7075 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
7076 .await
7077 .unwrap();
7078
7079 let fake_server = fake_language_servers.next().await.unwrap();
7080
7081        // The language server returns code actions that contain commands rather than edits.
7082 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
7083 fake_server
7084 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
7085 Ok(Some(vec![
7086 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7087 title: "The code action".into(),
7088 command: Some(lsp::Command {
7089 title: "The command".into(),
7090 command: "_the/command".into(),
7091 arguments: Some(vec![json!("the-argument")]),
7092 }),
7093 ..Default::default()
7094 }),
7095 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
7096 title: "two".into(),
7097 ..Default::default()
7098 }),
7099 ]))
7100 })
7101 .next()
7102 .await;
7103
7104 let action = actions.await.unwrap()[0].clone();
7105 let apply = project.update(cx, |project, cx| {
7106 project.apply_code_action(buffer.clone(), action, true, cx)
7107 });
7108
7109        // Resolving the code action does not populate its edits. In the absence of
7110 // edits, we must execute the given command.
7111 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
7112 |action, _| async move { Ok(action) },
7113 );
7114
7115 // While executing the command, the language server sends the editor
7116 // a `workspaceEdit` request.
7117 fake_server
7118 .handle_request::<lsp::request::ExecuteCommand, _, _>({
7119 let fake = fake_server.clone();
7120 move |params, _| {
7121 assert_eq!(params.command, "_the/command");
7122 let fake = fake.clone();
7123 async move {
7124 fake.server
7125 .request::<lsp::request::ApplyWorkspaceEdit>(
7126 lsp::ApplyWorkspaceEditParams {
7127 label: None,
7128 edit: lsp::WorkspaceEdit {
7129 changes: Some(
7130 [(
7131 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
7132 vec![lsp::TextEdit {
7133 range: lsp::Range::new(
7134 lsp::Position::new(0, 0),
7135 lsp::Position::new(0, 0),
7136 ),
7137 new_text: "X".into(),
7138 }],
7139 )]
7140 .into_iter()
7141 .collect(),
7142 ),
7143 ..Default::default()
7144 },
7145 },
7146 )
7147 .await
7148 .unwrap();
7149 Ok(Some(json!(null)))
7150 }
7151 }
7152 })
7153 .next()
7154 .await;
7155
7156 // Applying the code action returns a project transaction containing the edits
7157 // sent by the language server in its `workspaceEdit` request.
7158 let transaction = apply.await.unwrap();
7159 assert!(transaction.0.contains_key(&buffer));
7160 buffer.update(cx, |buffer, cx| {
7161 assert_eq!(buffer.text(), "Xa");
7162 buffer.undo(cx);
7163 assert_eq!(buffer.text(), "a");
7164 });
7165 }
7166
7167 #[gpui::test]
7168 async fn test_save_file(cx: &mut gpui::TestAppContext) {
7169 let fs = FakeFs::new(cx.background());
7170 fs.insert_tree(
7171 "/dir",
7172 json!({
7173 "file1": "the old contents",
7174 }),
7175 )
7176 .await;
7177
7178 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7179 let buffer = project
7180 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7181 .await
7182 .unwrap();
7183 buffer
7184 .update(cx, |buffer, cx| {
7185 assert_eq!(buffer.text(), "the old contents");
7186 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7187 buffer.save(cx)
7188 })
7189 .await
7190 .unwrap();
7191
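        // The contents on disk should now match the contents of the buffer.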
7192 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7193 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7194 }
7195
7196 #[gpui::test]
7197 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
7198 let fs = FakeFs::new(cx.background());
7199 fs.insert_tree(
7200 "/dir",
7201 json!({
7202 "file1": "the old contents",
7203 }),
7204 )
7205 .await;
7206
7207 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
7208 let buffer = project
7209 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7210 .await
7211 .unwrap();
7212 buffer
7213 .update(cx, |buffer, cx| {
7214 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], cx);
7215 buffer.save(cx)
7216 })
7217 .await
7218 .unwrap();
7219
7220 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
7221 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
7222 }
7223
7224 #[gpui::test]
7225 async fn test_save_as(cx: &mut gpui::TestAppContext) {
7226 let fs = FakeFs::new(cx.background());
7227 fs.insert_tree("/dir", json!({})).await;
7228
7229 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7230 let buffer = project.update(cx, |project, cx| {
7231 project.create_buffer("", None, cx).unwrap()
7232 });
7233 buffer.update(cx, |buffer, cx| {
7234 buffer.edit([(0..0, "abc")], cx);
7235 assert!(buffer.is_dirty());
7236 assert!(!buffer.has_conflict());
7237 });
7238 project
7239 .update(cx, |project, cx| {
7240 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
7241 })
7242 .await
7243 .unwrap();
7244 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
7245 buffer.read_with(cx, |buffer, cx| {
7246 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
7247 assert!(!buffer.is_dirty());
7248 assert!(!buffer.has_conflict());
7249 });
7250
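        // Opening the path that was just saved returns the same buffer, rather than creating a new one.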
7251 let opened_buffer = project
7252 .update(cx, |project, cx| {
7253 project.open_local_buffer("/dir/file1", cx)
7254 })
7255 .await
7256 .unwrap();
7257 assert_eq!(opened_buffer, buffer);
7258 }
7259
7260 #[gpui::test(retries = 5)]
7261 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
7262 let dir = temp_tree(json!({
7263 "a": {
7264 "file1": "",
7265 "file2": "",
7266 "file3": "",
7267 },
7268 "b": {
7269 "c": {
7270 "file4": "",
7271 "file5": "",
7272 }
7273 }
7274 }));
7275
7276 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
7277 let rpc = project.read_with(cx, |p, _| p.client.clone());
7278
7279 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
7280 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
7281 async move { buffer.await.unwrap() }
7282 };
7283 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
7284 project.read_with(cx, |project, cx| {
7285 let tree = project.worktrees(cx).next().unwrap();
7286 tree.read(cx)
7287 .entry_for_path(path)
7288 .expect(&format!("no entry for path {}", path))
7289 .id
7290 })
7291 };
7292
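        // Open buffers for several of the files before mutating the file system.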
7293 let buffer2 = buffer_for_path("a/file2", cx).await;
7294 let buffer3 = buffer_for_path("a/file3", cx).await;
7295 let buffer4 = buffer_for_path("b/c/file4", cx).await;
7296 let buffer5 = buffer_for_path("b/c/file5", cx).await;
7297
7298 let file2_id = id_for_path("a/file2", &cx);
7299 let file3_id = id_for_path("a/file3", &cx);
7300 let file4_id = id_for_path("b/c/file4", &cx);
7301
7302 // Create a remote copy of this worktree.
7303 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7304 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
7305 let (remote, load_task) = cx.update(|cx| {
7306 Worktree::remote(
7307 1,
7308 1,
7309 initial_snapshot.to_proto(&Default::default(), true),
7310 rpc.clone(),
7311 cx,
7312 )
7313 });
7315 load_task.await;
7316
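        // None of the open buffers should be dirty before any file-system changes occur.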
7317 cx.read(|cx| {
7318 assert!(!buffer2.read(cx).is_dirty());
7319 assert!(!buffer3.read(cx).is_dirty());
7320 assert!(!buffer4.read(cx).is_dirty());
7321 assert!(!buffer5.read(cx).is_dirty());
7322 });
7323
7324 // Rename and delete files and directories.
7325 tree.flush_fs_events(&cx).await;
7326 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
7327 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
7328 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
7329 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
7330 tree.flush_fs_events(&cx).await;
7331
7332 let expected_paths = vec![
7333 "a",
7334 "a/file1",
7335 "a/file2.new",
7336 "b",
7337 "d",
7338 "d/file3",
7339 "d/file4",
7340 ];
7341
7342 cx.read(|app| {
7343 assert_eq!(
7344 tree.read(app)
7345 .paths()
7346 .map(|p| p.to_str().unwrap())
7347 .collect::<Vec<_>>(),
7348 expected_paths
7349 );
7350
7351 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
7352 assert_eq!(id_for_path("d/file3", &cx), file3_id);
7353 assert_eq!(id_for_path("d/file4", &cx), file4_id);
7354
7355 assert_eq!(
7356 buffer2.read(app).file().unwrap().path().as_ref(),
7357 Path::new("a/file2.new")
7358 );
7359 assert_eq!(
7360 buffer3.read(app).file().unwrap().path().as_ref(),
7361 Path::new("d/file3")
7362 );
7363 assert_eq!(
7364 buffer4.read(app).file().unwrap().path().as_ref(),
7365 Path::new("d/file4")
7366 );
7367 assert_eq!(
7368 buffer5.read(app).file().unwrap().path().as_ref(),
7369 Path::new("b/c/file5")
7370 );
7371
7372 assert!(!buffer2.read(app).file().unwrap().is_deleted());
7373 assert!(!buffer3.read(app).file().unwrap().is_deleted());
7374 assert!(!buffer4.read(app).file().unwrap().is_deleted());
7375 assert!(buffer5.read(app).file().unwrap().is_deleted());
7376 });
7377
7378 // Update the remote worktree. Check that it becomes consistent with the
7379 // local worktree.
7380 remote.update(cx, |remote, cx| {
7381 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
7382 &initial_snapshot,
7383 1,
7384 1,
7385 true,
7386 );
7387 remote
7388 .as_remote_mut()
7389 .unwrap()
7390 .snapshot
7391 .apply_remote_update(update_message)
7392 .unwrap();
7393
7394 assert_eq!(
7395 remote
7396 .paths()
7397 .map(|p| p.to_str().unwrap())
7398 .collect::<Vec<_>>(),
7399 expected_paths
7400 );
7401 });
7402 }
7403
7404 #[gpui::test]
7405 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
7406 let fs = FakeFs::new(cx.background());
7407 fs.insert_tree(
7408 "/dir",
7409 json!({
7410 "a.txt": "a-contents",
7411 "b.txt": "b-contents",
7412 }),
7413 )
7414 .await;
7415
7416 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7417
7418 // Spawn multiple tasks to open paths, repeating some paths.
7419 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
7420 (
7421 p.open_local_buffer("/dir/a.txt", cx),
7422 p.open_local_buffer("/dir/b.txt", cx),
7423 p.open_local_buffer("/dir/a.txt", cx),
7424 )
7425 });
7426
7427 let buffer_a_1 = buffer_a_1.await.unwrap();
7428 let buffer_a_2 = buffer_a_2.await.unwrap();
7429 let buffer_b = buffer_b.await.unwrap();
7430 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
7431 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
7432
7433 // There is only one buffer per path.
7434 let buffer_a_id = buffer_a_1.id();
7435 assert_eq!(buffer_a_2.id(), buffer_a_id);
7436
7437 // Open the same path again while it is still open.
7438 drop(buffer_a_1);
7439 let buffer_a_3 = project
7440 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
7441 .await
7442 .unwrap();
7443
7444 // There's still only one buffer per path.
7445 assert_eq!(buffer_a_3.id(), buffer_a_id);
7446 }
7447
7448 #[gpui::test]
7449 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
7450 let fs = FakeFs::new(cx.background());
7451 fs.insert_tree(
7452 "/dir",
7453 json!({
7454 "file1": "abc",
7455 "file2": "def",
7456 "file3": "ghi",
7457 }),
7458 )
7459 .await;
7460
7461 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7462
7463 let buffer1 = project
7464 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
7465 .await
7466 .unwrap();
7467 let events = Rc::new(RefCell::new(Vec::new()));
7468
7469 // initially, the buffer isn't dirty.
7470 buffer1.update(cx, |buffer, cx| {
7471 cx.subscribe(&buffer1, {
7472 let events = events.clone();
7473 move |_, _, event, _| match event {
7474 BufferEvent::Operation(_) => {}
7475 _ => events.borrow_mut().push(event.clone()),
7476 }
7477 })
7478 .detach();
7479
7480 assert!(!buffer.is_dirty());
7481 assert!(events.borrow().is_empty());
7482
7483 buffer.edit([(1..2, "")], cx);
7484 });
7485
7486 // after the first edit, the buffer is dirty, and emits a dirtied event.
7487 buffer1.update(cx, |buffer, cx| {
7488 assert!(buffer.text() == "ac");
7489 assert!(buffer.is_dirty());
7490 assert_eq!(
7491 *events.borrow(),
7492 &[language::Event::Edited, language::Event::Dirtied]
7493 );
7494 events.borrow_mut().clear();
7495 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
7496 });
7497
7498 // after saving, the buffer is not dirty, and emits a saved event.
7499 buffer1.update(cx, |buffer, cx| {
7500 assert!(!buffer.is_dirty());
7501 assert_eq!(*events.borrow(), &[language::Event::Saved]);
7502 events.borrow_mut().clear();
7503
7504 buffer.edit([(1..1, "B")], cx);
7505 buffer.edit([(2..2, "D")], cx);
7506 });
7507
7508 // after editing again, the buffer is dirty, and emits another dirty event.
7509 buffer1.update(cx, |buffer, cx| {
7510 assert!(buffer.text() == "aBDc");
7511 assert!(buffer.is_dirty());
7512 assert_eq!(
7513 *events.borrow(),
7514 &[
7515 language::Event::Edited,
7516 language::Event::Dirtied,
7517 language::Event::Edited,
7518 ],
7519 );
7520 events.borrow_mut().clear();
7521
7522 // TODO - currently, after restoring the buffer to its
7523            // previously-saved state, the buffer is still considered dirty.
7524 buffer.edit([(1..3, "")], cx);
7525 assert!(buffer.text() == "ac");
7526 assert!(buffer.is_dirty());
7527 });
7528
7529 assert_eq!(*events.borrow(), &[language::Event::Edited]);
7530
7531 // When a file is deleted, the buffer is considered dirty.
7532 let events = Rc::new(RefCell::new(Vec::new()));
7533 let buffer2 = project
7534 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
7535 .await
7536 .unwrap();
7537 buffer2.update(cx, |_, cx| {
7538 cx.subscribe(&buffer2, {
7539 let events = events.clone();
7540 move |_, _, event, _| events.borrow_mut().push(event.clone())
7541 })
7542 .detach();
7543 });
7544
7545 fs.remove_file("/dir/file2".as_ref(), Default::default())
7546 .await
7547 .unwrap();
7548 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
7549 assert_eq!(
7550 *events.borrow(),
7551 &[language::Event::Dirtied, language::Event::FileHandleChanged]
7552 );
7553
7554 // When a file is already dirty when deleted, we don't emit a Dirtied event.
7555 let events = Rc::new(RefCell::new(Vec::new()));
7556 let buffer3 = project
7557 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
7558 .await
7559 .unwrap();
7560 buffer3.update(cx, |_, cx| {
7561 cx.subscribe(&buffer3, {
7562 let events = events.clone();
7563 move |_, _, event, _| events.borrow_mut().push(event.clone())
7564 })
7565 .detach();
7566 });
7567
7568 buffer3.update(cx, |buffer, cx| {
7569 buffer.edit([(0..0, "x")], cx);
7570 });
7571 events.borrow_mut().clear();
7572 fs.remove_file("/dir/file3".as_ref(), Default::default())
7573 .await
7574 .unwrap();
7575 buffer3
7576 .condition(&cx, |_, _| !events.borrow().is_empty())
7577 .await;
7578 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
7579 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
7580 }
7581
7582 #[gpui::test]
7583 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
7584 let initial_contents = "aaa\nbbbbb\nc\n";
7585 let fs = FakeFs::new(cx.background());
7586 fs.insert_tree(
7587 "/dir",
7588 json!({
7589 "the-file": initial_contents,
7590 }),
7591 )
7592 .await;
7593 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7594 let buffer = project
7595 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
7596 .await
7597 .unwrap();
7598
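        // Create anchors within the first three lines so their positions can be checked after the file is reloaded from disk.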
7599 let anchors = (0..3)
7600 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
7601 .collect::<Vec<_>>();
7602
7603 // Change the file on disk, adding two new lines of text, and removing
7604 // one line.
7605 buffer.read_with(cx, |buffer, _| {
7606 assert!(!buffer.is_dirty());
7607 assert!(!buffer.has_conflict());
7608 });
7609 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
7610 fs.save("/dir/the-file".as_ref(), &new_contents.into())
7611 .await
7612 .unwrap();
7613
7614 // Because the buffer was not modified, it is reloaded from disk. Its
7615 // contents are edited according to the diff between the old and new
7616 // file contents.
7617 buffer
7618 .condition(&cx, |buffer, _| buffer.text() == new_contents)
7619 .await;
7620
7621 buffer.update(cx, |buffer, _| {
7622 assert_eq!(buffer.text(), new_contents);
7623 assert!(!buffer.is_dirty());
7624 assert!(!buffer.has_conflict());
7625
7626 let anchor_positions = anchors
7627 .iter()
7628 .map(|anchor| anchor.to_point(&*buffer))
7629 .collect::<Vec<_>>();
7630 assert_eq!(
7631 anchor_positions,
7632 [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
7633 );
7634 });
7635
7636 // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, " ")], cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs.save(
            "/dir/the-file".as_ref(),
            &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        )
        .await
        .unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }

    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
        let buffer = project
            .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
            .await
            .unwrap();

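        // Simulate an LSP diagnostics notification in which each primary
        // diagnostic lists its hints as related information, and each hint
        // points back at its primary diagnostic.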
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

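        // Process the diagnostics. Each hint should be grouped with the primary
        // diagnostic it belongs to.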
        project
            .update(cx, |p, cx| p.update_diagnostics(message, &[], cx))
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

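        // All diagnostics, ordered by position. Hints share the group id of the
        // primary diagnostic they refer to.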
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

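        // Each group can also be retrieved by id, yielding the primary diagnostic
        // along with its associated hints.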
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }

    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        cx.foreground().forbid_parking();

        let mut language = Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        );
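        // Register a fake language server that advertises rename support,
        // including prepare-rename.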
        let mut fake_servers = language.set_fake_lsp_adapter(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        });

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        project.update(cx, |project, _| project.languages.add(Arc::new(language)));
        let buffer = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/one.rs", cx)
            })
            .await
            .unwrap();

        let fake_server = fake_servers.next().await.unwrap();

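        // Prepare-rename resolves to the range of the symbol at the requested
        // position (the `ONE` identifier at offsets 6..9).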
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                ))))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

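        // Performing the rename applies the server's workspace edit to both
        // affected buffers and returns them in the resulting transaction.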
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Ok(Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                }))
            })
            .next()
            .await
            .unwrap();
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }

    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

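        // Open one of the files and rewrite its references in memory; the next
        // search should reflect the unsaved contents of the open buffer.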
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_local_buffer("/dir/four.rs", cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            let text = "two::TWO";
            buffer.edit([(20..28, text), (31..43, text)], cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

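        // Runs a project-wide search and flattens the results into a map from
        // file path to the offset ranges of each match.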
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
}