1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use postage::watch;
27use rand::prelude::*;
28use search::SearchQuery;
29use sha2::{Digest, Sha256};
30use similar::{ChangeTag, TextDiff};
31use smol::block_on;
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{atomic::AtomicBool, Arc},
42 time::Instant,
43};
44use util::{post_inc, ResultExt, TryFutureExt as _};
45
46pub use fs::*;
47pub use worktree::*;
48
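// The central model for a single project: it owns the project's worktrees,
// tracks open buffers, manages per-worktree language servers, and holds the
// collaboration state shared with other peers over RPC.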
49pub struct Project {
50 worktrees: Vec<WorktreeHandle>,
51 active_entry: Option<ProjectEntry>,
52 languages: Arc<LanguageRegistry>,
53 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
54 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
55 client: Arc<client::Client>,
56 user_store: ModelHandle<UserStore>,
57 fs: Arc<dyn Fs>,
58 client_state: ProjectClientState,
59 collaborators: HashMap<PeerId, Collaborator>,
60 subscriptions: Vec<client::Subscription>,
61 language_servers_with_diagnostics_running: isize,
62 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
63 shared_buffers: HashMap<PeerId, HashSet<u64>>,
64 loading_buffers: HashMap<
65 ProjectPath,
66 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
67 >,
68 loading_local_worktrees:
69 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
70 opened_buffers: HashMap<u64, OpenBuffer>,
71 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
72 nonce: u128,
73}
74
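// How the project currently holds an open buffer: strongly (kept alive while
// the project is shared or remote), weakly (released when the last external
// handle goes away), or still loading, with a queue of operations that arrived
// before the buffer finished opening.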
75enum OpenBuffer {
76 Strong(ModelHandle<Buffer>),
77 Weak(WeakModelHandle<Buffer>),
78 Loading(Vec<Operation>),
79}
80
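// Worktrees are held strongly while they must stay alive (for example while
// the project is shared) and weakly otherwise, so they can be released when
// nothing else references them.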
81enum WorktreeHandle {
82 Strong(ModelHandle<Worktree>),
83 Weak(WeakModelHandle<Worktree>),
84}
85
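// Distinguishes a project hosted on this machine from one joined as a guest,
// along with the sharing state and background tasks each case needs.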
86enum ProjectClientState {
87 Local {
88 is_shared: bool,
89 remote_id_tx: watch::Sender<Option<u64>>,
90 remote_id_rx: watch::Receiver<Option<u64>>,
91 _maintain_remote_id_task: Task<Option<()>>,
92 },
93 Remote {
94 sharing_has_stopped: bool,
95 remote_id: u64,
96 replica_id: ReplicaId,
97 _detect_unshare_task: Task<Option<()>>,
98 },
99}
100
101#[derive(Clone, Debug)]
102pub struct Collaborator {
103 pub user: Arc<User>,
104 pub peer_id: PeerId,
105 pub replica_id: ReplicaId,
106}
107
108#[derive(Clone, Debug, PartialEq)]
109pub enum Event {
110 ActiveEntryChanged(Option<ProjectEntry>),
111 WorktreeRemoved(WorktreeId),
112 DiskBasedDiagnosticsStarted,
113 DiskBasedDiagnosticsUpdated,
114 DiskBasedDiagnosticsFinished,
115 DiagnosticsUpdated(ProjectPath),
116}
117
118#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
119pub struct ProjectPath {
120 pub worktree_id: WorktreeId,
121 pub path: Arc<Path>,
122}
123
124#[derive(Clone, Debug, Default, PartialEq)]
125pub struct DiagnosticSummary {
126 pub error_count: usize,
127 pub warning_count: usize,
128 pub info_count: usize,
129 pub hint_count: usize,
130}
131
132#[derive(Debug)]
133pub struct Location {
134 pub buffer: ModelHandle<Buffer>,
135 pub range: Range<language::Anchor>,
136}
137
138#[derive(Debug)]
139pub struct DocumentHighlight {
140 pub range: Range<language::Anchor>,
141 pub kind: DocumentHighlightKind,
142}
143
144#[derive(Clone, Debug)]
145pub struct Symbol {
146 pub source_worktree_id: WorktreeId,
147 pub worktree_id: WorktreeId,
148 pub language_name: String,
149 pub path: PathBuf,
150 pub label: CodeLabel,
151 pub name: String,
152 pub kind: lsp::SymbolKind,
153 pub range: Range<PointUtf16>,
154 pub signature: [u8; 32],
155}
156
157#[derive(Default)]
158pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
159
160impl DiagnosticSummary {
161 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
162 let mut this = Self {
163 error_count: 0,
164 warning_count: 0,
165 info_count: 0,
166 hint_count: 0,
167 };
168
169 for entry in diagnostics {
170 if entry.diagnostic.is_primary {
171 match entry.diagnostic.severity {
172 DiagnosticSeverity::ERROR => this.error_count += 1,
173 DiagnosticSeverity::WARNING => this.warning_count += 1,
174 DiagnosticSeverity::INFORMATION => this.info_count += 1,
175 DiagnosticSeverity::HINT => this.hint_count += 1,
176 _ => {}
177 }
178 }
179 }
180
181 this
182 }
183
184 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
185 proto::DiagnosticSummary {
186 path: path.to_string_lossy().to_string(),
187 error_count: self.error_count as u32,
188 warning_count: self.warning_count as u32,
189 info_count: self.info_count as u32,
190 hint_count: self.hint_count as u32,
191 }
192 }
193}
194
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
196pub struct ProjectEntry {
197 pub worktree_id: WorktreeId,
198 pub entry_id: usize,
199}
200
201impl Project {
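    // Registers the RPC message and request handlers that let remote
    // collaborators interact with a shared project.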
202 pub fn init(client: &Arc<Client>) {
203 client.add_entity_message_handler(Self::handle_add_collaborator);
204 client.add_entity_message_handler(Self::handle_buffer_reloaded);
205 client.add_entity_message_handler(Self::handle_buffer_saved);
206 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
207 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
208 client.add_entity_message_handler(Self::handle_remove_collaborator);
209 client.add_entity_message_handler(Self::handle_register_worktree);
210 client.add_entity_message_handler(Self::handle_unregister_worktree);
211 client.add_entity_message_handler(Self::handle_unshare_project);
212 client.add_entity_message_handler(Self::handle_update_buffer_file);
213 client.add_entity_message_handler(Self::handle_update_buffer);
214 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
215 client.add_entity_message_handler(Self::handle_update_worktree);
216 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
217 client.add_entity_request_handler(Self::handle_apply_code_action);
218 client.add_entity_request_handler(Self::handle_format_buffers);
219 client.add_entity_request_handler(Self::handle_get_code_actions);
220 client.add_entity_request_handler(Self::handle_get_completions);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
224 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
225 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
226 client.add_entity_request_handler(Self::handle_search_project);
227 client.add_entity_request_handler(Self::handle_get_project_symbols);
228 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
229 client.add_entity_request_handler(Self::handle_open_buffer);
230 client.add_entity_request_handler(Self::handle_save_buffer);
231 }
232
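    // Creates a project backed by the local filesystem. A background task
    // registers the project (and its worktrees) with the server whenever the
    // client's connection status changes.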
233 pub fn local(
234 client: Arc<Client>,
235 user_store: ModelHandle<UserStore>,
236 languages: Arc<LanguageRegistry>,
237 fs: Arc<dyn Fs>,
238 cx: &mut MutableAppContext,
239 ) -> ModelHandle<Self> {
240 cx.add_model(|cx: &mut ModelContext<Self>| {
241 let (remote_id_tx, remote_id_rx) = watch::channel();
242 let _maintain_remote_id_task = cx.spawn_weak({
243 let rpc = client.clone();
244 move |this, mut cx| {
245 async move {
246 let mut status = rpc.status();
247 while let Some(status) = status.next().await {
248 if let Some(this) = this.upgrade(&cx) {
249 let remote_id = if status.is_connected() {
250 let response = rpc.request(proto::RegisterProject {}).await?;
251 Some(response.project_id)
252 } else {
253 None
254 };
255
256 if let Some(project_id) = remote_id {
257 let mut registrations = Vec::new();
258 this.update(&mut cx, |this, cx| {
259 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
260 registrations.push(worktree.update(
261 cx,
262 |worktree, cx| {
263 let worktree = worktree.as_local_mut().unwrap();
264 worktree.register(project_id, cx)
265 },
266 ));
267 }
268 });
269 for registration in registrations {
270 registration.await?;
271 }
272 }
273 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
274 }
275 }
276 Ok(())
277 }
278 .log_err()
279 }
280 });
281
282 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
283 Self {
284 worktrees: Default::default(),
285 collaborators: Default::default(),
286 opened_buffers: Default::default(),
287 shared_buffers: Default::default(),
288 loading_buffers: Default::default(),
289 loading_local_worktrees: Default::default(),
290 buffer_snapshots: Default::default(),
291 client_state: ProjectClientState::Local {
292 is_shared: false,
293 remote_id_tx,
294 remote_id_rx,
295 _maintain_remote_id_task,
296 },
297 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
298 subscriptions: Vec::new(),
299 active_entry: None,
300 languages,
301 client,
302 user_store,
303 fs,
304 language_servers_with_diagnostics_running: 0,
305 language_servers: Default::default(),
306 started_language_servers: Default::default(),
307 nonce: StdRng::from_entropy().gen(),
308 }
309 })
310 }
311
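    // Joins a project hosted by another peer: authenticates, sends a
    // `JoinProject` request, builds remote worktrees from the response, and
    // loads the current set of collaborators.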
312 pub async fn remote(
313 remote_id: u64,
314 client: Arc<Client>,
315 user_store: ModelHandle<UserStore>,
316 languages: Arc<LanguageRegistry>,
317 fs: Arc<dyn Fs>,
318 cx: &mut AsyncAppContext,
319 ) -> Result<ModelHandle<Self>> {
320 client.authenticate_and_connect(&cx).await?;
321
322 let response = client
323 .request(proto::JoinProject {
324 project_id: remote_id,
325 })
326 .await?;
327
328 let replica_id = response.replica_id as ReplicaId;
329
330 let mut worktrees = Vec::new();
331 for worktree in response.worktrees {
332 let (worktree, load_task) = cx
333 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
334 worktrees.push(worktree);
335 load_task.detach();
336 }
337
338 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
339 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
340 let mut this = Self {
341 worktrees: Vec::new(),
342 loading_buffers: Default::default(),
343 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
344 shared_buffers: Default::default(),
345 loading_local_worktrees: Default::default(),
346 active_entry: None,
347 collaborators: Default::default(),
348 languages,
349 user_store: user_store.clone(),
350 fs,
351 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
352 client: client.clone(),
353 client_state: ProjectClientState::Remote {
354 sharing_has_stopped: false,
355 remote_id,
356 replica_id,
357 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
358 async move {
359 let mut status = client.status();
360 let is_connected =
361 status.next().await.map_or(false, |s| s.is_connected());
362 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
363 if !is_connected || status.next().await.is_some() {
364 if let Some(this) = this.upgrade(&cx) {
365 this.update(&mut cx, |this, cx| this.project_unshared(cx))
366 }
367 }
368 Ok(())
369 }
370 .log_err()
371 }),
372 },
373 language_servers_with_diagnostics_running: 0,
374 language_servers: Default::default(),
375 started_language_servers: Default::default(),
376 opened_buffers: Default::default(),
377 buffer_snapshots: Default::default(),
378 nonce: StdRng::from_entropy().gen(),
379 };
380 for worktree in worktrees {
381 this.add_worktree(&worktree, cx);
382 }
383 this
384 });
385
386 let user_ids = response
387 .collaborators
388 .iter()
389 .map(|peer| peer.user_id)
390 .collect();
391 user_store
392 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
393 .await?;
394 let mut collaborators = HashMap::default();
395 for message in response.collaborators {
396 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
397 collaborators.insert(collaborator.peer_id, collaborator);
398 }
399
400 this.update(cx, |this, _| {
401 this.collaborators = collaborators;
402 });
403
404 Ok(this)
405 }
406
407 #[cfg(any(test, feature = "test-support"))]
408 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
409 let languages = Arc::new(LanguageRegistry::test());
410 let http_client = client::test::FakeHttpClient::with_404_response();
411 let client = client::Client::new(http_client.clone());
412 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
413 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
414 }
415
416 #[cfg(any(test, feature = "test-support"))]
417 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
418 self.opened_buffers
419 .get(&remote_id)
420 .and_then(|buffer| buffer.upgrade(cx))
421 }
422
423 #[cfg(any(test, feature = "test-support"))]
424 pub fn languages(&self) -> &Arc<LanguageRegistry> {
425 &self.languages
426 }
427
428 #[cfg(any(test, feature = "test-support"))]
429 pub fn check_invariants(&self, cx: &AppContext) {
430 if self.is_local() {
431 let mut worktree_root_paths = HashMap::default();
432 for worktree in self.worktrees(cx) {
433 let worktree = worktree.read(cx);
434 let abs_path = worktree.as_local().unwrap().abs_path().clone();
435 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
436 assert_eq!(
437 prev_worktree_id,
438 None,
439 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
440 abs_path,
441 worktree.id(),
442 prev_worktree_id
443 )
444 }
445 } else {
446 let replica_id = self.replica_id();
447 for buffer in self.opened_buffers.values() {
448 if let Some(buffer) = buffer.upgrade(cx) {
449 let buffer = buffer.read(cx);
450 assert_eq!(
451 buffer.deferred_ops_len(),
452 0,
453 "replica {}, buffer {} has deferred operations",
454 replica_id,
455 buffer.remote_id()
456 );
457 }
458 }
459 }
460 }
461
462 #[cfg(any(test, feature = "test-support"))]
463 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
464 let path = path.into();
465 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
466 self.opened_buffers.iter().any(|(_, buffer)| {
467 if let Some(buffer) = buffer.upgrade(cx) {
468 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
469 if file.worktree == worktree && file.path() == &path.path {
470 return true;
471 }
472 }
473 }
474 false
475 })
476 } else {
477 false
478 }
479 }
480
481 pub fn fs(&self) -> &Arc<dyn Fs> {
482 &self.fs
483 }
484
485 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
486 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
487 *remote_id_tx.borrow_mut() = remote_id;
488 }
489
490 self.subscriptions.clear();
491 if let Some(remote_id) = remote_id {
492 self.subscriptions
493 .push(self.client.add_model_for_remote_entity(remote_id, cx));
494 }
495 }
496
497 pub fn remote_id(&self) -> Option<u64> {
498 match &self.client_state {
499 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
500 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
501 }
502 }
503
504 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
505 let mut id = None;
506 let mut watch = None;
507 match &self.client_state {
508 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
509 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
510 }
511
512 async move {
513 if let Some(id) = id {
514 return id;
515 }
516 let mut watch = watch.unwrap();
517 loop {
518 let id = *watch.borrow();
519 if let Some(id) = id {
520 return id;
521 }
522 watch.next().await;
523 }
524 }
525 }
526
527 pub fn replica_id(&self) -> ReplicaId {
528 match &self.client_state {
529 ProjectClientState::Local { .. } => 0,
530 ProjectClientState::Remote { replica_id, .. } => *replica_id,
531 }
532 }
533
534 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
535 &self.collaborators
536 }
537
538 pub fn worktrees<'a>(
539 &'a self,
540 cx: &'a AppContext,
541 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
542 self.worktrees
543 .iter()
544 .filter_map(move |worktree| worktree.upgrade(cx))
545 }
546
547 pub fn visible_worktrees<'a>(
548 &'a self,
549 cx: &'a AppContext,
550 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
551 self.worktrees.iter().filter_map(|worktree| {
552 worktree.upgrade(cx).and_then(|worktree| {
553 if worktree.read(cx).is_visible() {
554 Some(worktree)
555 } else {
556 None
557 }
558 })
559 })
560 }
561
562 pub fn worktree_for_id(
563 &self,
564 id: WorktreeId,
565 cx: &AppContext,
566 ) -> Option<ModelHandle<Worktree>> {
567 self.worktrees(cx)
568 .find(|worktree| worktree.read(cx).id() == id)
569 }
570
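    // Shares this local project with collaborators: upgrades buffer and
    // worktree handles to strong references so they stay alive, then asks the
    // server to share the project and each of its worktrees.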
571 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
572 let rpc = self.client.clone();
573 cx.spawn(|this, mut cx| async move {
574 let project_id = this.update(&mut cx, |this, cx| {
575 if let ProjectClientState::Local {
576 is_shared,
577 remote_id_rx,
578 ..
579 } = &mut this.client_state
580 {
581 *is_shared = true;
582
583 for open_buffer in this.opened_buffers.values_mut() {
584 match open_buffer {
585 OpenBuffer::Strong(_) => {}
586 OpenBuffer::Weak(buffer) => {
587 if let Some(buffer) = buffer.upgrade(cx) {
588 *open_buffer = OpenBuffer::Strong(buffer);
589 }
590 }
591 OpenBuffer::Loading(_) => unreachable!(),
592 }
593 }
594
595 for worktree_handle in this.worktrees.iter_mut() {
596 match worktree_handle {
597 WorktreeHandle::Strong(_) => {}
598 WorktreeHandle::Weak(worktree) => {
599 if let Some(worktree) = worktree.upgrade(cx) {
600 *worktree_handle = WorktreeHandle::Strong(worktree);
601 }
602 }
603 }
604 }
605
606 remote_id_rx
607 .borrow()
608 .ok_or_else(|| anyhow!("no project id"))
609 } else {
610 Err(anyhow!("can't share a remote project"))
611 }
612 })?;
613
614 rpc.request(proto::ShareProject { project_id }).await?;
615
616 let mut tasks = Vec::new();
617 this.update(&mut cx, |this, cx| {
618 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
619 worktree.update(cx, |worktree, cx| {
620 let worktree = worktree.as_local_mut().unwrap();
621 tasks.push(worktree.share(project_id, cx));
622 });
623 }
624 });
625 for task in tasks {
626 task.await?;
627 }
628 this.update(&mut cx, |_, cx| cx.notify());
629 Ok(())
630 })
631 }
632
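    // Stops sharing this local project: downgrades buffer handles (and handles
    // to non-visible worktrees) to weak references, notifies the server, and
    // clears collaborator and shared-buffer state.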
633 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
634 let rpc = self.client.clone();
635 cx.spawn(|this, mut cx| async move {
636 let project_id = this.update(&mut cx, |this, cx| {
637 if let ProjectClientState::Local {
638 is_shared,
639 remote_id_rx,
640 ..
641 } = &mut this.client_state
642 {
643 *is_shared = false;
644
645 for open_buffer in this.opened_buffers.values_mut() {
646 match open_buffer {
647 OpenBuffer::Strong(buffer) => {
648 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
649 }
650 _ => {}
651 }
652 }
653
654 for worktree_handle in this.worktrees.iter_mut() {
655 match worktree_handle {
656 WorktreeHandle::Strong(worktree) => {
657 if !worktree.read(cx).is_visible() {
658 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
659 }
660 }
661 _ => {}
662 }
663 }
664
665 remote_id_rx
666 .borrow()
667 .ok_or_else(|| anyhow!("no project id"))
668 } else {
669 Err(anyhow!("can't share a remote project"))
670 }
671 })?;
672
673 rpc.send(proto::UnshareProject { project_id })?;
674 this.update(&mut cx, |this, cx| {
675 this.collaborators.clear();
676 this.shared_buffers.clear();
677 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
678 worktree.update(cx, |worktree, _| {
679 worktree.as_local_mut().unwrap().unshare();
680 });
681 }
682 cx.notify()
683 });
684 Ok(())
685 })
686 }
687
688 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
689 if let ProjectClientState::Remote {
690 sharing_has_stopped,
691 ..
692 } = &mut self.client_state
693 {
694 *sharing_has_stopped = true;
695 self.collaborators.clear();
696 cx.notify();
697 }
698 }
699
700 pub fn is_read_only(&self) -> bool {
701 match &self.client_state {
702 ProjectClientState::Local { .. } => false,
703 ProjectClientState::Remote {
704 sharing_has_stopped,
705 ..
706 } => *sharing_has_stopped,
707 }
708 }
709
710 pub fn is_local(&self) -> bool {
711 match &self.client_state {
712 ProjectClientState::Local { .. } => true,
713 ProjectClientState::Remote { .. } => false,
714 }
715 }
716
717 pub fn is_remote(&self) -> bool {
718 !self.is_local()
719 }
720
721 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
722 if self.is_remote() {
723 return Err(anyhow!("creating buffers as a guest is not supported yet"));
724 }
725
726 let buffer = cx.add_model(|cx| {
727 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
728 });
729 self.register_buffer(&buffer, cx)?;
730 Ok(buffer)
731 }
732
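    // Opens the buffer for the given project path, returning an existing
    // buffer when one is already open and de-duplicating concurrent loads of
    // the same path via `loading_buffers`.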
733 pub fn open_buffer(
734 &mut self,
735 path: impl Into<ProjectPath>,
736 cx: &mut ModelContext<Self>,
737 ) -> Task<Result<ModelHandle<Buffer>>> {
738 let project_path = path.into();
739 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
740 worktree
741 } else {
742 return Task::ready(Err(anyhow!("no such worktree")));
743 };
744
745 // If there is already a buffer for the given path, then return it.
746 let existing_buffer = self.get_open_buffer(&project_path, cx);
747 if let Some(existing_buffer) = existing_buffer {
748 return Task::ready(Ok(existing_buffer));
749 }
750
751 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
752 // If the given path is already being loaded, then wait for that existing
753 // task to complete and return the same buffer.
754 hash_map::Entry::Occupied(e) => e.get().clone(),
755
756 // Otherwise, record the fact that this path is now being loaded.
757 hash_map::Entry::Vacant(entry) => {
758 let (mut tx, rx) = postage::watch::channel();
759 entry.insert(rx.clone());
760
761 let load_buffer = if worktree.read(cx).is_local() {
762 self.open_local_buffer(&project_path.path, &worktree, cx)
763 } else {
764 self.open_remote_buffer(&project_path.path, &worktree, cx)
765 };
766
767 cx.spawn(move |this, mut cx| async move {
768 let load_result = load_buffer.await;
769 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
770 // Record the fact that the buffer is no longer loading.
771 this.loading_buffers.remove(&project_path);
772 let buffer = load_result.map_err(Arc::new)?;
773 Ok(buffer)
774 }));
775 })
776 .detach();
777 rx
778 }
779 };
780
781 cx.foreground().spawn(async move {
782 loop {
783 if let Some(result) = loading_watch.borrow().as_ref() {
784 match result {
785 Ok(buffer) => return Ok(buffer.clone()),
786 Err(error) => return Err(anyhow!("{}", error)),
787 }
788 }
789 loading_watch.next().await;
790 }
791 })
792 }
793
794 fn open_local_buffer(
795 &mut self,
796 path: &Arc<Path>,
797 worktree: &ModelHandle<Worktree>,
798 cx: &mut ModelContext<Self>,
799 ) -> Task<Result<ModelHandle<Buffer>>> {
800 let load_buffer = worktree.update(cx, |worktree, cx| {
801 let worktree = worktree.as_local_mut().unwrap();
802 worktree.load_buffer(path, cx)
803 });
804 cx.spawn(|this, mut cx| async move {
805 let buffer = load_buffer.await?;
806 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
807 Ok(buffer)
808 })
809 }
810
811 fn open_remote_buffer(
812 &mut self,
813 path: &Arc<Path>,
814 worktree: &ModelHandle<Worktree>,
815 cx: &mut ModelContext<Self>,
816 ) -> Task<Result<ModelHandle<Buffer>>> {
817 let rpc = self.client.clone();
818 let project_id = self.remote_id().unwrap();
819 let remote_worktree_id = worktree.read(cx).id();
820 let path = path.clone();
821 let path_string = path.to_string_lossy().to_string();
822 cx.spawn(|this, mut cx| async move {
823 let response = rpc
824 .request(proto::OpenBuffer {
825 project_id,
826 worktree_id: remote_worktree_id.to_proto(),
827 path: path_string,
828 })
829 .await?;
830 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
831 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
832 .await
833 })
834 }
835
836 fn open_local_buffer_via_lsp(
837 &mut self,
838 abs_path: lsp::Url,
839 lang_name: Arc<str>,
840 lang_server: Arc<LanguageServer>,
841 cx: &mut ModelContext<Self>,
842 ) -> Task<Result<ModelHandle<Buffer>>> {
843 cx.spawn(|this, mut cx| async move {
844 let abs_path = abs_path
845 .to_file_path()
846 .map_err(|_| anyhow!("can't convert URI to path"))?;
847 let (worktree, relative_path) = if let Some(result) =
848 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
849 {
850 result
851 } else {
852 let worktree = this
853 .update(&mut cx, |this, cx| {
854 this.create_local_worktree(&abs_path, false, cx)
855 })
856 .await?;
857 this.update(&mut cx, |this, cx| {
858 this.language_servers
859 .insert((worktree.read(cx).id(), lang_name), lang_server);
860 });
861 (worktree, PathBuf::new())
862 };
863
864 let project_path = ProjectPath {
865 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
866 path: relative_path.into(),
867 };
868 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
869 .await
870 })
871 }
872
873 pub fn save_buffer_as(
874 &mut self,
875 buffer: ModelHandle<Buffer>,
876 abs_path: PathBuf,
877 cx: &mut ModelContext<Project>,
878 ) -> Task<Result<()>> {
879 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
880 cx.spawn(|this, mut cx| async move {
881 let (worktree, path) = worktree_task.await?;
882 worktree
883 .update(&mut cx, |worktree, cx| {
884 worktree
885 .as_local_mut()
886 .unwrap()
887 .save_buffer_as(buffer.clone(), path, cx)
888 })
889 .await?;
890 this.update(&mut cx, |this, cx| {
891 this.assign_language_to_buffer(&buffer, cx);
892 this.register_buffer_with_language_servers(&buffer, cx);
893 });
894 Ok(())
895 })
896 }
897
898 pub fn get_open_buffer(
899 &mut self,
900 path: &ProjectPath,
901 cx: &mut ModelContext<Self>,
902 ) -> Option<ModelHandle<Buffer>> {
903 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
904 self.opened_buffers.values().find_map(|buffer| {
905 let buffer = buffer.upgrade(cx)?;
906 let file = File::from_dyn(buffer.read(cx).file())?;
907 if file.worktree == worktree && file.path() == &path.path {
908 Some(buffer)
909 } else {
910 None
911 }
912 })
913 }
914
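    // Records a newly opened buffer, applies any operations that arrived while
    // it was loading, and hooks it up to buffer-event handling and the
    // relevant language servers.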
915 fn register_buffer(
916 &mut self,
917 buffer: &ModelHandle<Buffer>,
918 cx: &mut ModelContext<Self>,
919 ) -> Result<()> {
920 let remote_id = buffer.read(cx).remote_id();
921 let open_buffer = if self.is_remote() || self.is_shared() {
922 OpenBuffer::Strong(buffer.clone())
923 } else {
924 OpenBuffer::Weak(buffer.downgrade())
925 };
926
927 match self.opened_buffers.insert(remote_id, open_buffer) {
928 None => {}
929 Some(OpenBuffer::Loading(operations)) => {
930 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
931 }
932 Some(OpenBuffer::Weak(existing_handle)) => {
933 if existing_handle.upgrade(cx).is_some() {
934 Err(anyhow!(
935 "already registered buffer with remote id {}",
936 remote_id
937 ))?
938 }
939 }
940 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
941 "already registered buffer with remote id {}",
942 remote_id
943 ))?,
944 }
        cx.subscribe(buffer, |this, buffer, event, cx| {
            this.on_buffer_event(buffer, event, cx);
        })
        .detach();
949
950 self.assign_language_to_buffer(buffer, cx);
951 self.register_buffer_with_language_servers(buffer, cx);
952
953 Ok(())
954 }
955
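    // For a local buffer: seeds the LSP snapshot history, applies any
    // diagnostics already stored on the worktree, sends `didOpen` to every
    // language server for the buffer's worktree, and arranges for `didClose`
    // to be sent when the buffer is released.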
956 fn register_buffer_with_language_servers(
957 &mut self,
958 buffer_handle: &ModelHandle<Buffer>,
959 cx: &mut ModelContext<Self>,
960 ) {
961 let buffer = buffer_handle.read(cx);
962 let buffer_language_name = buffer.language().map(|l| l.name().clone());
963 if let Some(file) = File::from_dyn(buffer.file()) {
964 let worktree_id = file.worktree_id(cx);
965 if file.is_local() {
966 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
                let initial_snapshot = buffer.text_snapshot();
968 self.buffer_snapshots
969 .insert(buffer.remote_id(), vec![(0, initial_snapshot.clone())]);
970
971 let mut notifications = Vec::new();
972 let did_open_text_document = lsp::DidOpenTextDocumentParams {
973 text_document: lsp::TextDocumentItem::new(
974 uri,
975 Default::default(),
976 0,
977 initial_snapshot.text(),
978 ),
979 };
980
981 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
982 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
983 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
984 .log_err();
985 }
986 }
987
988 for (language_name, server) in self.language_servers_for_worktree(worktree_id) {
989 notifications.push(server.notify::<lsp::notification::DidOpenTextDocument>(
990 did_open_text_document.clone(),
991 ));
992
993 if Some(language_name) == buffer_language_name.as_deref() {
994 buffer_handle.update(cx, |buffer, cx| {
995 buffer.set_completion_triggers(
996 server
997 .capabilities()
998 .completion_provider
999 .as_ref()
1000 .and_then(|provider| provider.trigger_characters.clone())
1001 .unwrap_or(Vec::new()),
1002 cx,
1003 )
1004 });
1005 }
1006 }
1007
1008 cx.observe_release(buffer_handle, |this, buffer, cx| {
1009 if let Some(file) = File::from_dyn(buffer.file()) {
1010 let worktree_id = file.worktree_id(cx);
1011 if file.is_local() {
1012 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1013 for (_, server) in this.language_servers_for_worktree(worktree_id) {
1014 server
1015 .notify::<lsp::notification::DidCloseTextDocument>(
1016 lsp::DidCloseTextDocumentParams {
1017 text_document: lsp::TextDocumentIdentifier::new(
1018 uri.clone(),
1019 ),
1020 },
1021 )
1022 .log_err();
1023 }
1024 }
1025 }
1026 })
1027 .detach();
1028 }
1029 }
1030 }
1031
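    // Reacts to buffer events: forwards operations to collaborators over RPC,
    // and translates edits and saves into `didChange` / `didSave`
    // notifications for the worktree's language servers.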
1032 fn on_buffer_event(
1033 &mut self,
1034 buffer: ModelHandle<Buffer>,
1035 event: BufferEvent,
1036 cx: &mut ModelContext<Self>,
1037 ) -> Option<()> {
1038 match event {
1039 BufferEvent::Operation(operation) => {
1040 let project_id = self.remote_id()?;
1041 let request = self.client.request(proto::UpdateBuffer {
1042 project_id,
1043 buffer_id: buffer.read(cx).remote_id(),
1044 operations: vec![language::proto::serialize_operation(&operation)],
1045 });
1046 cx.background().spawn(request).detach_and_log_err(cx);
1047 }
1048 BufferEvent::Edited => {
1049 let buffer = buffer.read(cx);
1050 let file = File::from_dyn(buffer.file())?;
1051 let worktree_id = file.worktree_id(cx);
1052 let abs_path = file.as_local()?.abs_path(cx);
1053 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1054 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1055 let (version, prev_snapshot) = buffer_snapshots.last()?;
1056 let next_snapshot = buffer.text_snapshot();
1057 let next_version = version + 1;
1058
1059 let content_changes = buffer
1060 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1061 .map(|edit| {
1062 let edit_start = edit.new.start.0;
1063 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1064 let new_text = next_snapshot
1065 .text_for_range(edit.new.start.1..edit.new.end.1)
1066 .collect();
1067 lsp::TextDocumentContentChangeEvent {
1068 range: Some(lsp::Range::new(
1069 edit_start.to_lsp_position(),
1070 edit_end.to_lsp_position(),
1071 )),
1072 range_length: None,
1073 text: new_text,
1074 }
1075 })
1076 .collect();
1077
1078 let changes = lsp::DidChangeTextDocumentParams {
1079 text_document: lsp::VersionedTextDocumentIdentifier::new(uri, next_version),
1080 content_changes,
1081 };
1082
1083 buffer_snapshots.push((next_version, next_snapshot));
1084
1085 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1086 server
1087 .notify::<lsp::notification::DidChangeTextDocument>(changes.clone())
1088 .log_err();
1089 }
1090 }
1091 BufferEvent::Saved => {
1092 let file = File::from_dyn(buffer.read(cx).file())?;
1093 let worktree_id = file.worktree_id(cx);
1094 let abs_path = file.as_local()?.abs_path(cx);
1095 let text_document = lsp::TextDocumentIdentifier {
1096 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1097 };
1098
1099 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1100 server
1101 .notify::<lsp::notification::DidSaveTextDocument>(
1102 lsp::DidSaveTextDocumentParams {
1103 text_document: text_document.clone(),
1104 text: None,
1105 },
1106 )
1107 .log_err();
1108 }
1109 }
1110 _ => {}
1111 }
1112
1113 None
1114 }
1115
1116 fn language_servers_for_worktree(
1117 &self,
1118 worktree_id: WorktreeId,
1119 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1120 self.language_servers.iter().filter_map(
1121 move |((language_server_worktree_id, language_name), server)| {
1122 if *language_server_worktree_id == worktree_id {
1123 Some((language_name.as_ref(), server))
1124 } else {
1125 None
1126 }
1127 },
1128 )
1129 }
1130
1131 fn assign_language_to_buffer(
1132 &mut self,
1133 buffer: &ModelHandle<Buffer>,
1134 cx: &mut ModelContext<Self>,
1135 ) -> Option<()> {
1136 // If the buffer has a language, set it and start the language server if we haven't already.
1137 let full_path = buffer.read(cx).file()?.full_path(cx);
1138 let language = self.languages.select_language(&full_path)?;
1139 buffer.update(cx, |buffer, cx| {
1140 buffer.set_language(Some(language.clone()), cx);
1141 });
1142
1143 let file = File::from_dyn(buffer.read(cx).file())?;
1144 let worktree = file.worktree.read(cx).as_local()?;
1145 let worktree_id = worktree.id();
1146 let worktree_abs_path = worktree.abs_path().clone();
1147 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1148
1149 None
1150 }
1151
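    // Starts the language server for the given worktree and language (at most
    // once per pair), tells it about already-open buffers, and funnels its
    // diagnostics and progress notifications into project events.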
1152 fn start_language_server(
1153 &mut self,
1154 worktree_id: WorktreeId,
1155 worktree_path: Arc<Path>,
1156 language: Arc<Language>,
1157 cx: &mut ModelContext<Self>,
1158 ) {
1159 enum LspEvent {
1160 DiagnosticsStart,
1161 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
1162 DiagnosticsFinish,
1163 }
1164
1165 let key = (worktree_id, language.name());
1166 self.started_language_servers
1167 .entry(key.clone())
1168 .or_insert_with(|| {
1169 let language_server = self.languages.start_language_server(
1170 language.clone(),
1171 worktree_path,
1172 self.client.http_client(),
1173 cx,
1174 );
1175 let rpc = self.client.clone();
1176 cx.spawn_weak(|this, mut cx| async move {
1177 let language_server = language_server?.await.log_err()?;
1178 let this = this.upgrade(&cx)?;
1179 this.update(&mut cx, |this, cx| {
1180 this.language_servers.insert(key, language_server.clone());
1181
1182 for buffer in this.opened_buffers.values() {
1183 if let Some(buffer_handle) = buffer.upgrade(cx) {
1184 let buffer = buffer_handle.read(cx);
1185 let file = File::from_dyn(buffer.file())?;
1186 if file.worktree.read(cx).id() != worktree_id {
1187 continue;
1188 }
1189
1190 // Tell the language server about every open buffer in the worktree.
1191 let file = file.as_local()?;
1192 let versions = this
1193 .buffer_snapshots
1194 .entry(buffer.remote_id())
1195 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1196 let (version, initial_snapshot) = versions.last().unwrap();
1197 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1198 language_server
1199 .notify::<lsp::notification::DidOpenTextDocument>(
1200 lsp::DidOpenTextDocumentParams {
1201 text_document: lsp::TextDocumentItem::new(
1202 uri,
1203 Default::default(),
1204 *version,
1205 initial_snapshot.text(),
1206 ),
1207 },
1208 )
1209 .log_err()?;
1210
                                // For buffers already using this language, update
                                // their completion triggers from the server's capabilities.
1212 if buffer
1213 .language()
1214 .map_or(false, |l| l.name() == language.name())
1215 {
1216 buffer_handle.update(cx, |buffer, cx| {
1217 buffer.set_completion_triggers(
1218 language_server
1219 .capabilities()
1220 .completion_provider
1221 .as_ref()
1222 .and_then(|provider| {
1223 provider.trigger_characters.clone()
1224 })
1225 .unwrap_or(Vec::new()),
1226 cx,
1227 )
1228 });
1229 }
1230 }
1231 }
1232
1233 Some(())
1234 });
1235
1236 let disk_based_sources = language
1237 .disk_based_diagnostic_sources()
1238 .cloned()
1239 .unwrap_or_default();
1240 let disk_based_diagnostics_progress_token =
1241 language.disk_based_diagnostics_progress_token().cloned();
1242 let has_disk_based_diagnostic_progress_token =
1243 disk_based_diagnostics_progress_token.is_some();
1244 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1245
1246 // Listen for `PublishDiagnostics` notifications.
1247 language_server
1248 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1249 let diagnostics_tx = diagnostics_tx.clone();
1250 move |params| {
1251 if !has_disk_based_diagnostic_progress_token {
1252 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1253 }
1254 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1255 .ok();
1256 if !has_disk_based_diagnostic_progress_token {
1257 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1258 }
1259 }
1260 })
1261 .detach();
1262
1263 // Listen for `Progress` notifications. Send an event when the language server
1264 // transitions between running jobs and not running any jobs.
1265 let mut running_jobs_for_this_server: i32 = 0;
1266 language_server
1267 .on_notification::<lsp::notification::Progress, _>(move |params| {
1268 let token = match params.token {
1269 lsp::NumberOrString::Number(_) => None,
1270 lsp::NumberOrString::String(token) => Some(token),
1271 };
1272
1273 if token == disk_based_diagnostics_progress_token {
1274 match params.value {
1275 lsp::ProgressParamsValue::WorkDone(progress) => {
1276 match progress {
1277 lsp::WorkDoneProgress::Begin(_) => {
1278 running_jobs_for_this_server += 1;
1279 if running_jobs_for_this_server == 1 {
1280 block_on(
1281 diagnostics_tx
1282 .send(LspEvent::DiagnosticsStart),
1283 )
1284 .ok();
1285 }
1286 }
1287 lsp::WorkDoneProgress::End(_) => {
1288 running_jobs_for_this_server -= 1;
1289 if running_jobs_for_this_server == 0 {
1290 block_on(
1291 diagnostics_tx
1292 .send(LspEvent::DiagnosticsFinish),
1293 )
1294 .ok();
1295 }
1296 }
1297 _ => {}
1298 }
1299 }
1300 }
1301 }
1302 })
1303 .detach();
1304
1305 // Process all the LSP events.
1306 let this = this.downgrade();
1307 cx.spawn(|mut cx| async move {
1308 while let Ok(message) = diagnostics_rx.recv().await {
1309 let this = this.upgrade(&cx)?;
1310 match message {
1311 LspEvent::DiagnosticsStart => {
1312 this.update(&mut cx, |this, cx| {
1313 this.disk_based_diagnostics_started(cx);
1314 if let Some(project_id) = this.remote_id() {
1315 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1316 project_id,
1317 })
1318 .log_err();
1319 }
1320 });
1321 }
1322 LspEvent::DiagnosticsUpdate(mut params) => {
1323 language.process_diagnostics(&mut params);
1324 this.update(&mut cx, |this, cx| {
1325 this.update_diagnostics(params, &disk_based_sources, cx)
1326 .log_err();
1327 });
1328 }
1329 LspEvent::DiagnosticsFinish => {
1330 this.update(&mut cx, |this, cx| {
1331 this.disk_based_diagnostics_finished(cx);
1332 if let Some(project_id) = this.remote_id() {
1333 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1334 project_id,
1335 })
1336 .log_err();
1337 }
1338 });
1339 }
1340 }
1341 }
1342 Some(())
1343 })
1344 .detach();
1345
1346 Some(language_server)
1347 })
1348 });
1349 }
1350
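    // Converts an LSP `publishDiagnostics` notification into diagnostic
    // entries, grouping related-information entries with their primary
    // diagnostic and marking diagnostics that come from disk-based sources.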
1351 pub fn update_diagnostics(
1352 &mut self,
1353 params: lsp::PublishDiagnosticsParams,
1354 disk_based_sources: &HashSet<String>,
1355 cx: &mut ModelContext<Self>,
1356 ) -> Result<()> {
1357 let abs_path = params
1358 .uri
1359 .to_file_path()
1360 .map_err(|_| anyhow!("URI is not a file"))?;
1361 let mut next_group_id = 0;
1362 let mut diagnostics = Vec::default();
1363 let mut primary_diagnostic_group_ids = HashMap::default();
1364 let mut sources_by_group_id = HashMap::default();
1365 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1367 let source = diagnostic.source.as_ref();
1368 let code = diagnostic.code.as_ref().map(|code| match code {
1369 lsp::NumberOrString::Number(code) => code.to_string(),
1370 lsp::NumberOrString::String(code) => code.clone(),
1371 });
1372 let range = range_from_lsp(diagnostic.range);
1373 let is_supporting = diagnostic
1374 .related_information
1375 .as_ref()
1376 .map_or(false, |infos| {
1377 infos.iter().any(|info| {
1378 primary_diagnostic_group_ids.contains_key(&(
1379 source,
1380 code.clone(),
1381 range_from_lsp(info.location.range),
1382 ))
1383 })
1384 });
1385
1386 if is_supporting {
1387 if let Some(severity) = diagnostic.severity {
1388 supporting_diagnostic_severities
1389 .insert((source, code.clone(), range), severity);
1390 }
1391 } else {
1392 let group_id = post_inc(&mut next_group_id);
1393 let is_disk_based =
1394 source.map_or(false, |source| disk_based_sources.contains(source));
1395
1396 sources_by_group_id.insert(group_id, source);
1397 primary_diagnostic_group_ids
1398 .insert((source, code.clone(), range.clone()), group_id);
1399
1400 diagnostics.push(DiagnosticEntry {
1401 range,
1402 diagnostic: Diagnostic {
1403 code: code.clone(),
1404 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1405 message: diagnostic.message.clone(),
1406 group_id,
1407 is_primary: true,
1408 is_valid: true,
1409 is_disk_based,
1410 },
1411 });
1412 if let Some(infos) = &diagnostic.related_information {
1413 for info in infos {
1414 if info.location.uri == params.uri && !info.message.is_empty() {
1415 let range = range_from_lsp(info.location.range);
1416 diagnostics.push(DiagnosticEntry {
1417 range,
1418 diagnostic: Diagnostic {
1419 code: code.clone(),
1420 severity: DiagnosticSeverity::INFORMATION,
1421 message: info.message.clone(),
1422 group_id,
1423 is_primary: false,
1424 is_valid: true,
1425 is_disk_based,
1426 },
1427 });
1428 }
1429 }
1430 }
1431 }
1432 }
1433
1434 for entry in &mut diagnostics {
1435 let diagnostic = &mut entry.diagnostic;
1436 if !diagnostic.is_primary {
1437 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1438 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1439 source,
1440 diagnostic.code.clone(),
1441 entry.range.clone(),
1442 )) {
1443 diagnostic.severity = severity;
1444 }
1445 }
1446 }
1447
1448 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1449 Ok(())
1450 }
1451
1452 pub fn update_diagnostic_entries(
1453 &mut self,
1454 abs_path: PathBuf,
1455 version: Option<i32>,
1456 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1457 cx: &mut ModelContext<Project>,
1458 ) -> Result<(), anyhow::Error> {
1459 let (worktree, relative_path) = self
1460 .find_local_worktree(&abs_path, cx)
1461 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1462 if !worktree.read(cx).is_visible() {
1463 return Ok(());
1464 }
1465
1466 let project_path = ProjectPath {
1467 worktree_id: worktree.read(cx).id(),
1468 path: relative_path.into(),
1469 };
1470
1471 for buffer in self.opened_buffers.values() {
1472 if let Some(buffer) = buffer.upgrade(cx) {
1473 if buffer
1474 .read(cx)
1475 .file()
1476 .map_or(false, |file| *file.path() == project_path.path)
1477 {
1478 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1479 break;
1480 }
1481 }
1482 }
1483 worktree.update(cx, |worktree, cx| {
1484 worktree
1485 .as_local_mut()
1486 .ok_or_else(|| anyhow!("not a local worktree"))?
1487 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1488 })?;
1489 cx.emit(Event::DiagnosticsUpdated(project_path));
1490 Ok(())
1491 }
1492
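    // Applies diagnostics to a buffer, remapping the ranges of disk-based
    // diagnostics across any unsaved edits and expanding empty ranges so they
    // remain visible.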
1493 fn update_buffer_diagnostics(
1494 &mut self,
1495 buffer: &ModelHandle<Buffer>,
1496 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1497 version: Option<i32>,
1498 cx: &mut ModelContext<Self>,
1499 ) -> Result<()> {
1500 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1501 Ordering::Equal
1502 .then_with(|| b.is_primary.cmp(&a.is_primary))
1503 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1504 .then_with(|| a.severity.cmp(&b.severity))
1505 .then_with(|| a.message.cmp(&b.message))
1506 }
1507
1508 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1509
1510 diagnostics.sort_unstable_by(|a, b| {
1511 Ordering::Equal
1512 .then_with(|| a.range.start.cmp(&b.range.start))
1513 .then_with(|| b.range.end.cmp(&a.range.end))
1514 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1515 });
1516
1517 let mut sanitized_diagnostics = Vec::new();
1518 let mut edits_since_save = snapshot
1519 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1520 .peekable();
1521 let mut last_edit_old_end = PointUtf16::zero();
1522 let mut last_edit_new_end = PointUtf16::zero();
1523 'outer: for entry in diagnostics {
1524 let mut start = entry.range.start;
1525 let mut end = entry.range.end;
1526
1527 // Some diagnostics are based on files on disk instead of buffers'
1528 // current contents. Adjust these diagnostics' ranges to reflect
1529 // any unsaved edits.
1530 if entry.diagnostic.is_disk_based {
1531 while let Some(edit) = edits_since_save.peek() {
1532 if edit.old.end <= start {
1533 last_edit_old_end = edit.old.end;
1534 last_edit_new_end = edit.new.end;
1535 edits_since_save.next();
1536 } else if edit.old.start <= end && edit.old.end >= start {
1537 continue 'outer;
1538 } else {
1539 break;
1540 }
1541 }
1542
1543 let start_overshoot = start - last_edit_old_end;
1544 start = last_edit_new_end;
1545 start += start_overshoot;
1546
1547 let end_overshoot = end - last_edit_old_end;
1548 end = last_edit_new_end;
1549 end += end_overshoot;
1550 }
1551
1552 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1553 ..snapshot.clip_point_utf16(end, Bias::Right);
1554
1555 // Expand empty ranges by one character
1556 if range.start == range.end {
1557 range.end.column += 1;
1558 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1559 if range.start == range.end && range.end.column > 0 {
1560 range.start.column -= 1;
1561 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1562 }
1563 }
1564
1565 sanitized_diagnostics.push(DiagnosticEntry {
1566 range,
1567 diagnostic: entry.diagnostic,
1568 });
1569 }
1570 drop(edits_since_save);
1571
1572 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1573 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1574 Ok(())
1575 }
1576
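    // Formats the given buffers: remote buffers are formatted via an RPC
    // request to the host, while local buffers use their language server's
    // document or range formatting support.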
1577 pub fn format(
1578 &self,
1579 buffers: HashSet<ModelHandle<Buffer>>,
1580 push_to_history: bool,
1581 cx: &mut ModelContext<Project>,
1582 ) -> Task<Result<ProjectTransaction>> {
1583 let mut local_buffers = Vec::new();
1584 let mut remote_buffers = None;
1585 for buffer_handle in buffers {
1586 let buffer = buffer_handle.read(cx);
1587 let worktree;
1588 if let Some(file) = File::from_dyn(buffer.file()) {
1589 worktree = file.worktree.clone();
1590 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1591 let lang_server;
1592 if let Some(lang) = buffer.language() {
1593 if let Some(server) = self
1594 .language_servers
1595 .get(&(worktree.read(cx).id(), lang.name()))
1596 {
1597 lang_server = server.clone();
1598 } else {
1599 return Task::ready(Ok(Default::default()));
1600 };
1601 } else {
1602 return Task::ready(Ok(Default::default()));
1603 }
1604
1605 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1606 } else {
1607 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1608 }
1609 } else {
1610 return Task::ready(Ok(Default::default()));
1611 }
1612 }
1613
1614 let remote_buffers = self.remote_id().zip(remote_buffers);
1615 let client = self.client.clone();
1616
1617 cx.spawn(|this, mut cx| async move {
1618 let mut project_transaction = ProjectTransaction::default();
1619
1620 if let Some((project_id, remote_buffers)) = remote_buffers {
1621 let response = client
1622 .request(proto::FormatBuffers {
1623 project_id,
1624 buffer_ids: remote_buffers
1625 .iter()
1626 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1627 .collect(),
1628 })
1629 .await?
1630 .transaction
1631 .ok_or_else(|| anyhow!("missing transaction"))?;
1632 project_transaction = this
1633 .update(&mut cx, |this, cx| {
1634 this.deserialize_project_transaction(response, push_to_history, cx)
1635 })
1636 .await?;
1637 }
1638
1639 for (buffer, buffer_abs_path, language_server) in local_buffers {
1640 let text_document = lsp::TextDocumentIdentifier::new(
1641 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1642 );
1643 let capabilities = &language_server.capabilities();
1644 let lsp_edits = if capabilities
1645 .document_formatting_provider
1646 .as_ref()
1647 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1648 {
1649 language_server
1650 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1651 text_document,
1652 options: Default::default(),
1653 work_done_progress_params: Default::default(),
1654 })
1655 .await?
1656 } else if capabilities
1657 .document_range_formatting_provider
1658 .as_ref()
1659 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1660 {
1661 let buffer_start = lsp::Position::new(0, 0);
1662 let buffer_end = buffer
1663 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1664 .to_lsp_position();
1665 language_server
1666 .request::<lsp::request::RangeFormatting>(
1667 lsp::DocumentRangeFormattingParams {
1668 text_document,
1669 range: lsp::Range::new(buffer_start, buffer_end),
1670 options: Default::default(),
1671 work_done_progress_params: Default::default(),
1672 },
1673 )
1674 .await?
1675 } else {
1676 continue;
1677 };
1678
1679 if let Some(lsp_edits) = lsp_edits {
1680 let edits = this
1681 .update(&mut cx, |this, cx| {
1682 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1683 })
1684 .await?;
1685 buffer.update(&mut cx, |buffer, cx| {
1686 buffer.finalize_last_transaction();
1687 buffer.start_transaction();
1688 for (range, text) in edits {
1689 buffer.edit([range], text, cx);
1690 }
1691 if buffer.end_transaction(cx).is_some() {
1692 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1693 if !push_to_history {
1694 buffer.forget_transaction(transaction.id);
1695 }
1696 project_transaction.0.insert(cx.handle(), transaction);
1697 }
1698 });
1699 }
1700 }
1701
1702 Ok(project_transaction)
1703 })
1704 }
1705
1706 pub fn definition<T: ToPointUtf16>(
1707 &self,
1708 buffer: &ModelHandle<Buffer>,
1709 position: T,
1710 cx: &mut ModelContext<Self>,
1711 ) -> Task<Result<Vec<Location>>> {
1712 let position = position.to_point_utf16(buffer.read(cx));
1713 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1714 }
1715
1716 pub fn references<T: ToPointUtf16>(
1717 &self,
1718 buffer: &ModelHandle<Buffer>,
1719 position: T,
1720 cx: &mut ModelContext<Self>,
1721 ) -> Task<Result<Vec<Location>>> {
1722 let position = position.to_point_utf16(buffer.read(cx));
1723 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1724 }
1725
1726 pub fn document_highlights<T: ToPointUtf16>(
1727 &self,
1728 buffer: &ModelHandle<Buffer>,
1729 position: T,
1730 cx: &mut ModelContext<Self>,
1731 ) -> Task<Result<Vec<DocumentHighlight>>> {
1732 let position = position.to_point_utf16(buffer.read(cx));
1733
1734 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1735 }
1736
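    // Queries workspace symbols from every running language server (or from
    // the host over RPC when this is a remote project) and resolves each
    // result to a project-relative path.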
1737 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1738 if self.is_local() {
1739 let mut language_servers = HashMap::default();
1740 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1741 if let Some((worktree, language)) = self
1742 .worktree_for_id(*worktree_id, cx)
1743 .and_then(|worktree| worktree.read(cx).as_local())
1744 .zip(self.languages.get_language(language_name))
1745 {
1746 language_servers
1747 .entry(Arc::as_ptr(language_server))
1748 .or_insert((
1749 language_server.clone(),
1750 *worktree_id,
1751 worktree.abs_path().clone(),
1752 language.clone(),
1753 ));
1754 }
1755 }
1756
1757 let mut requests = Vec::new();
1758 for (language_server, _, _, _) in language_servers.values() {
1759 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1760 lsp::WorkspaceSymbolParams {
1761 query: query.to_string(),
1762 ..Default::default()
1763 },
1764 ));
1765 }
1766
1767 cx.spawn_weak(|this, cx| async move {
1768 let responses = futures::future::try_join_all(requests).await?;
1769
1770 let mut symbols = Vec::new();
1771 if let Some(this) = this.upgrade(&cx) {
1772 this.read_with(&cx, |this, cx| {
1773 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1774 language_servers.into_values().zip(responses)
1775 {
1776 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1777 |lsp_symbol| {
1778 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1779 let mut worktree_id = source_worktree_id;
1780 let path;
1781 if let Some((worktree, rel_path)) =
1782 this.find_local_worktree(&abs_path, cx)
1783 {
1784 worktree_id = worktree.read(cx).id();
1785 path = rel_path;
1786 } else {
1787 path = relativize_path(&worktree_abs_path, &abs_path);
1788 }
1789
1790 let label = language
1791 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1792 .unwrap_or_else(|| {
1793 CodeLabel::plain(lsp_symbol.name.clone(), None)
1794 });
1795 let signature = this.symbol_signature(worktree_id, &path);
1796
1797 Some(Symbol {
1798 source_worktree_id,
1799 worktree_id,
1800 language_name: language.name().to_string(),
1801 name: lsp_symbol.name,
1802 kind: lsp_symbol.kind,
1803 label,
1804 path,
1805 range: range_from_lsp(lsp_symbol.location.range),
1806 signature,
1807 })
1808 },
1809 ));
1810 }
1811 })
1812 }
1813
1814 Ok(symbols)
1815 })
1816 } else if let Some(project_id) = self.remote_id() {
1817 let request = self.client.request(proto::GetProjectSymbols {
1818 project_id,
1819 query: query.to_string(),
1820 });
1821 cx.spawn_weak(|this, cx| async move {
1822 let response = request.await?;
1823 let mut symbols = Vec::new();
1824 if let Some(this) = this.upgrade(&cx) {
1825 this.read_with(&cx, |this, _| {
1826 symbols.extend(
1827 response
1828 .symbols
1829 .into_iter()
1830 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1831 );
1832 })
1833 }
1834 Ok(symbols)
1835 })
1836 } else {
1837 Task::ready(Ok(Default::default()))
1838 }
1839 }
1840
1841 pub fn open_buffer_for_symbol(
1842 &mut self,
1843 symbol: &Symbol,
1844 cx: &mut ModelContext<Self>,
1845 ) -> Task<Result<ModelHandle<Buffer>>> {
1846 if self.is_local() {
1847 let language_server = if let Some(server) = self.language_servers.get(&(
1848 symbol.source_worktree_id,
1849 Arc::from(symbol.language_name.as_str()),
1850 )) {
1851 server.clone()
1852 } else {
1853 return Task::ready(Err(anyhow!(
1854 "language server for worktree and language not found"
1855 )));
1856 };
1857
1858 let worktree_abs_path = if let Some(worktree_abs_path) = self
1859 .worktree_for_id(symbol.worktree_id, cx)
1860 .and_then(|worktree| worktree.read(cx).as_local())
1861 .map(|local_worktree| local_worktree.abs_path())
1862 {
1863 worktree_abs_path
1864 } else {
1865 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1866 };
1867 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1868 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1869 uri
1870 } else {
1871 return Task::ready(Err(anyhow!("invalid symbol path")));
1872 };
1873
1874 self.open_local_buffer_via_lsp(
1875 symbol_uri,
1876 Arc::from(symbol.language_name.as_str()),
1877 language_server,
1878 cx,
1879 )
1880 } else if let Some(project_id) = self.remote_id() {
1881 let request = self.client.request(proto::OpenBufferForSymbol {
1882 project_id,
1883 symbol: Some(serialize_symbol(symbol)),
1884 });
1885 cx.spawn(|this, mut cx| async move {
1886 let response = request.await?;
1887 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1888 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1889 .await
1890 })
1891 } else {
1892 Task::ready(Err(anyhow!("project does not have a remote id")))
1893 }
1894 }
1895
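    // Request completions at the given buffer position. For local worktrees the
    // buffer's language server is queried directly; for remote projects a
    // `GetCompletions` request is sent to the host.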
1896 pub fn completions<T: ToPointUtf16>(
1897 &self,
1898 source_buffer_handle: &ModelHandle<Buffer>,
1899 position: T,
1900 cx: &mut ModelContext<Self>,
1901 ) -> Task<Result<Vec<Completion>>> {
1902 let source_buffer_handle = source_buffer_handle.clone();
1903 let source_buffer = source_buffer_handle.read(cx);
1904 let buffer_id = source_buffer.remote_id();
1905 let language = source_buffer.language().cloned();
1906 let worktree;
1907 let buffer_abs_path;
1908 if let Some(file) = File::from_dyn(source_buffer.file()) {
1909 worktree = file.worktree.clone();
1910 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1911 } else {
1912 return Task::ready(Ok(Default::default()));
1913 };
1914
1915 let position = position.to_point_utf16(source_buffer);
1916 let anchor = source_buffer.anchor_after(position);
1917
1918 if worktree.read(cx).as_local().is_some() {
1919 let buffer_abs_path = buffer_abs_path.unwrap();
1920 let lang_server =
1921 if let Some(server) = self.language_server_for_buffer(&source_buffer_handle, cx) {
1922 server.clone()
1923 } else {
1924 return Task::ready(Ok(Default::default()));
1925 };
1926
1927 cx.spawn(|_, cx| async move {
1928 let completions = lang_server
1929 .request::<lsp::request::Completion>(lsp::CompletionParams {
1930 text_document_position: lsp::TextDocumentPositionParams::new(
1931 lsp::TextDocumentIdentifier::new(
1932 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1933 ),
1934 position.to_lsp_position(),
1935 ),
1936 context: Default::default(),
1937 work_done_progress_params: Default::default(),
1938 partial_result_params: Default::default(),
1939 })
1940 .await
1941 .context("lsp completion request failed")?;
1942
1943 let completions = if let Some(completions) = completions {
1944 match completions {
1945 lsp::CompletionResponse::Array(completions) => completions,
1946 lsp::CompletionResponse::List(list) => list.items,
1947 }
1948 } else {
1949 Default::default()
1950 };
1951
1952 source_buffer_handle.read_with(&cx, |this, _| {
1953 Ok(completions
1954 .into_iter()
1955 .filter_map(|lsp_completion| {
1956 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1957 lsp::CompletionTextEdit::Edit(edit) => {
1958 (range_from_lsp(edit.range), edit.new_text.clone())
1959 }
1960 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1961 log::info!("unsupported insert/replace completion");
1962 return None;
1963 }
1964 };
1965
1966 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1967 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1968 if clipped_start == old_range.start && clipped_end == old_range.end {
1969 Some(Completion {
1970 old_range: this.anchor_before(old_range.start)
1971 ..this.anchor_after(old_range.end),
1972 new_text,
1973 label: language
1974 .as_ref()
1975 .and_then(|l| l.label_for_completion(&lsp_completion))
1976 .unwrap_or_else(|| {
1977 CodeLabel::plain(
1978 lsp_completion.label.clone(),
1979 lsp_completion.filter_text.as_deref(),
1980 )
1981 }),
1982 lsp_completion,
1983 })
1984 } else {
1985 None
1986 }
1987 })
1988 .collect())
1989 })
1990 })
1991 } else if let Some(project_id) = self.remote_id() {
1992 let rpc = self.client.clone();
1993 let message = proto::GetCompletions {
1994 project_id,
1995 buffer_id,
1996 position: Some(language::proto::serialize_anchor(&anchor)),
1997 version: serialize_version(&source_buffer.version()),
1998 };
1999 cx.spawn_weak(|_, mut cx| async move {
2000 let response = rpc.request(message).await?;
2001
2002 source_buffer_handle
2003 .update(&mut cx, |buffer, _| {
2004 buffer.wait_for_version(deserialize_version(response.version))
2005 })
2006 .await;
2007
2008 response
2009 .completions
2010 .into_iter()
2011 .map(|completion| {
2012 language::proto::deserialize_completion(completion, language.as_ref())
2013 })
2014 .collect()
2015 })
2016 } else {
2017 Task::ready(Ok(Default::default()))
2018 }
2019 }
2020
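    // Resolve the given completion with the language server and apply any
    // additional text edits it returns (e.g. auto-inserted imports), grouping
    // them into a single transaction.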
2021 pub fn apply_additional_edits_for_completion(
2022 &self,
2023 buffer_handle: ModelHandle<Buffer>,
2024 completion: Completion,
2025 push_to_history: bool,
2026 cx: &mut ModelContext<Self>,
2027 ) -> Task<Result<Option<Transaction>>> {
2028 let buffer = buffer_handle.read(cx);
2029 let buffer_id = buffer.remote_id();
2030
2031 if self.is_local() {
2032 let lang_server =
2033 if let Some(server) = self.language_server_for_buffer(&buffer_handle, cx) {
2034 server.clone()
2035 } else {
2036 return Task::ready(Ok(Default::default()));
2037 };
2038
2039 cx.spawn(|this, mut cx| async move {
2040 let resolved_completion = lang_server
2041 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2042 .await?;
2043 if let Some(edits) = resolved_completion.additional_text_edits {
2044 let edits = this
2045 .update(&mut cx, |this, cx| {
2046 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2047 })
2048 .await?;
2049 buffer_handle.update(&mut cx, |buffer, cx| {
2050 buffer.finalize_last_transaction();
2051 buffer.start_transaction();
2052 for (range, text) in edits {
2053 buffer.edit([range], text, cx);
2054 }
2055 let transaction = if buffer.end_transaction(cx).is_some() {
2056 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2057 if !push_to_history {
2058 buffer.forget_transaction(transaction.id);
2059 }
2060 Some(transaction)
2061 } else {
2062 None
2063 };
2064 Ok(transaction)
2065 })
2066 } else {
2067 Ok(None)
2068 }
2069 })
2070 } else if let Some(project_id) = self.remote_id() {
2071 let client = self.client.clone();
2072 cx.spawn(|_, mut cx| async move {
2073 let response = client
2074 .request(proto::ApplyCompletionAdditionalEdits {
2075 project_id,
2076 buffer_id,
2077 completion: Some(language::proto::serialize_completion(&completion)),
2078 })
2079 .await?;
2080
2081 if let Some(transaction) = response.transaction {
2082 let transaction = language::proto::deserialize_transaction(transaction)?;
2083 buffer_handle
2084 .update(&mut cx, |buffer, _| {
2085 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2086 })
2087 .await;
2088 if push_to_history {
2089 buffer_handle.update(&mut cx, |buffer, _| {
2090 buffer.push_transaction(transaction.clone(), Instant::now());
2091 });
2092 }
2093 Ok(Some(transaction))
2094 } else {
2095 Ok(None)
2096 }
2097 })
2098 } else {
2099 Task::ready(Err(anyhow!("project does not have a remote id")))
2100 }
2101 }
2102
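    // Fetch the code actions available for the given range, either from the
    // buffer's language server or, for remote projects, from the host.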
2103 pub fn code_actions<T: ToOffset>(
2104 &self,
2105 buffer_handle: &ModelHandle<Buffer>,
2106 range: Range<T>,
2107 cx: &mut ModelContext<Self>,
2108 ) -> Task<Result<Vec<CodeAction>>> {
2109 let buffer_handle = buffer_handle.clone();
2110 let buffer = buffer_handle.read(cx);
2111 let buffer_id = buffer.remote_id();
2112 let worktree;
2113 let buffer_abs_path;
2114 if let Some(file) = File::from_dyn(buffer.file()) {
2115 worktree = file.worktree.clone();
2116 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2117 } else {
2118 return Task::ready(Ok(Default::default()));
2119 };
2120 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2121
2122 if worktree.read(cx).as_local().is_some() {
2123 let buffer_abs_path = buffer_abs_path.unwrap();
2124 let lang_name;
2125 let lang_server;
2126 if let Some(lang) = buffer.language() {
2127 lang_name = lang.name();
2128 if let Some(server) = self
2129 .language_servers
2130 .get(&(worktree.read(cx).id(), lang_name.clone()))
2131 {
2132 lang_server = server.clone();
2133 } else {
2134 return Task::ready(Ok(Default::default()));
2135 };
2136 } else {
2137 return Task::ready(Ok(Default::default()));
2138 }
2139
2140 let lsp_range = lsp::Range::new(
2141 range.start.to_point_utf16(buffer).to_lsp_position(),
2142 range.end.to_point_utf16(buffer).to_lsp_position(),
2143 );
2144 cx.foreground().spawn(async move {
                if lang_server.capabilities().code_action_provider.is_none() {
2146 return Ok(Default::default());
2147 }
2148
2149 Ok(lang_server
2150 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2151 text_document: lsp::TextDocumentIdentifier::new(
2152 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2153 ),
2154 range: lsp_range,
2155 work_done_progress_params: Default::default(),
2156 partial_result_params: Default::default(),
2157 context: lsp::CodeActionContext {
2158 diagnostics: Default::default(),
2159 only: Some(vec![
2160 lsp::CodeActionKind::QUICKFIX,
2161 lsp::CodeActionKind::REFACTOR,
2162 lsp::CodeActionKind::REFACTOR_EXTRACT,
2163 ]),
2164 },
2165 })
2166 .await?
2167 .unwrap_or_default()
2168 .into_iter()
2169 .filter_map(|entry| {
2170 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2171 Some(CodeAction {
2172 range: range.clone(),
2173 lsp_action,
2174 })
2175 } else {
2176 None
2177 }
2178 })
2179 .collect())
2180 })
2181 } else if let Some(project_id) = self.remote_id() {
2182 let rpc = self.client.clone();
2183 let version = buffer.version();
2184 cx.spawn_weak(|_, mut cx| async move {
2185 let response = rpc
2186 .request(proto::GetCodeActions {
2187 project_id,
2188 buffer_id,
2189 start: Some(language::proto::serialize_anchor(&range.start)),
2190 end: Some(language::proto::serialize_anchor(&range.end)),
2191 version: serialize_version(&version),
2192 })
2193 .await?;
2194
2195 buffer_handle
2196 .update(&mut cx, |buffer, _| {
2197 buffer.wait_for_version(deserialize_version(response.version))
2198 })
2199 .await;
2200
2201 response
2202 .actions
2203 .into_iter()
2204 .map(language::proto::deserialize_code_action)
2205 .collect()
2206 })
2207 } else {
2208 Task::ready(Ok(Default::default()))
2209 }
2210 }
2211
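    // Apply a previously fetched code action, re-resolving it with the language
    // server first, and return the per-buffer transactions produced by its
    // workspace edit.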
2212 pub fn apply_code_action(
2213 &self,
2214 buffer_handle: ModelHandle<Buffer>,
2215 mut action: CodeAction,
2216 push_to_history: bool,
2217 cx: &mut ModelContext<Self>,
2218 ) -> Task<Result<ProjectTransaction>> {
2219 if self.is_local() {
2220 let buffer = buffer_handle.read(cx);
2221 let lang_name = if let Some(lang) = buffer.language() {
2222 lang.name()
2223 } else {
2224 return Task::ready(Ok(Default::default()));
2225 };
2226 let lang_server =
2227 if let Some(server) = self.language_server_for_buffer(&buffer_handle, cx) {
2228 server.clone()
2229 } else {
2230 return Task::ready(Ok(Default::default()));
2231 };
2232 let range = action.range.to_point_utf16(buffer);
2233
2234 cx.spawn(|this, mut cx| async move {
2235 if let Some(lsp_range) = action
2236 .lsp_action
2237 .data
2238 .as_mut()
2239 .and_then(|d| d.get_mut("codeActionParams"))
2240 .and_then(|d| d.get_mut("range"))
2241 {
2242 *lsp_range = serde_json::to_value(&lsp::Range::new(
2243 range.start.to_lsp_position(),
2244 range.end.to_lsp_position(),
2245 ))
2246 .unwrap();
2247 action.lsp_action = lang_server
2248 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2249 .await?;
2250 } else {
2251 let actions = this
2252 .update(&mut cx, |this, cx| {
2253 this.code_actions(&buffer_handle, action.range, cx)
2254 })
2255 .await?;
2256 action.lsp_action = actions
2257 .into_iter()
2258 .find(|a| a.lsp_action.title == action.lsp_action.title)
2259 .ok_or_else(|| anyhow!("code action is outdated"))?
2260 .lsp_action;
2261 }
2262
2263 if let Some(edit) = action.lsp_action.edit {
2264 Self::deserialize_workspace_edit(
2265 this,
2266 edit,
2267 push_to_history,
2268 lang_name,
2269 lang_server,
2270 &mut cx,
2271 )
2272 .await
2273 } else {
2274 Ok(ProjectTransaction::default())
2275 }
2276 })
2277 } else if let Some(project_id) = self.remote_id() {
2278 let client = self.client.clone();
2279 let request = proto::ApplyCodeAction {
2280 project_id,
2281 buffer_id: buffer_handle.read(cx).remote_id(),
2282 action: Some(language::proto::serialize_code_action(&action)),
2283 };
2284 cx.spawn(|this, mut cx| async move {
2285 let response = client
2286 .request(request)
2287 .await?
2288 .transaction
2289 .ok_or_else(|| anyhow!("missing transaction"))?;
2290 this.update(&mut cx, |this, cx| {
2291 this.deserialize_project_transaction(response, push_to_history, cx)
2292 })
2293 .await
2294 })
2295 } else {
2296 Task::ready(Err(anyhow!("project does not have a remote id")))
2297 }
2298 }
2299
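    // Apply an LSP workspace edit to the project: perform any file create,
    // rename, and delete operations, then apply the per-document text edits and
    // collect the resulting transactions.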
2300 async fn deserialize_workspace_edit(
2301 this: ModelHandle<Self>,
2302 edit: lsp::WorkspaceEdit,
2303 push_to_history: bool,
2304 language_name: Arc<str>,
2305 language_server: Arc<LanguageServer>,
2306 cx: &mut AsyncAppContext,
2307 ) -> Result<ProjectTransaction> {
2308 let fs = this.read_with(cx, |this, _| this.fs.clone());
2309 let mut operations = Vec::new();
2310 if let Some(document_changes) = edit.document_changes {
2311 match document_changes {
2312 lsp::DocumentChanges::Edits(edits) => {
2313 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2314 }
2315 lsp::DocumentChanges::Operations(ops) => operations = ops,
2316 }
2317 } else if let Some(changes) = edit.changes {
2318 operations.extend(changes.into_iter().map(|(uri, edits)| {
2319 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2320 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2321 uri,
2322 version: None,
2323 },
2324 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2325 })
2326 }));
2327 }
2328
2329 let mut project_transaction = ProjectTransaction::default();
2330 for operation in operations {
2331 match operation {
2332 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2333 let abs_path = op
2334 .uri
2335 .to_file_path()
2336 .map_err(|_| anyhow!("can't convert URI to path"))?;
2337
2338 if let Some(parent_path) = abs_path.parent() {
2339 fs.create_dir(parent_path).await?;
2340 }
2341 if abs_path.ends_with("/") {
2342 fs.create_dir(&abs_path).await?;
2343 } else {
2344 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2345 .await?;
2346 }
2347 }
2348 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2349 let source_abs_path = op
2350 .old_uri
2351 .to_file_path()
2352 .map_err(|_| anyhow!("can't convert URI to path"))?;
2353 let target_abs_path = op
2354 .new_uri
2355 .to_file_path()
2356 .map_err(|_| anyhow!("can't convert URI to path"))?;
2357 fs.rename(
2358 &source_abs_path,
2359 &target_abs_path,
2360 op.options.map(Into::into).unwrap_or_default(),
2361 )
2362 .await?;
2363 }
2364 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2365 let abs_path = op
2366 .uri
2367 .to_file_path()
2368 .map_err(|_| anyhow!("can't convert URI to path"))?;
2369 let options = op.options.map(Into::into).unwrap_or_default();
2370 if abs_path.ends_with("/") {
2371 fs.remove_dir(&abs_path, options).await?;
2372 } else {
2373 fs.remove_file(&abs_path, options).await?;
2374 }
2375 }
2376 lsp::DocumentChangeOperation::Edit(op) => {
2377 let buffer_to_edit = this
2378 .update(cx, |this, cx| {
2379 this.open_local_buffer_via_lsp(
2380 op.text_document.uri,
2381 language_name.clone(),
2382 language_server.clone(),
2383 cx,
2384 )
2385 })
2386 .await?;
2387
2388 let edits = this
2389 .update(cx, |this, cx| {
2390 let edits = op.edits.into_iter().map(|edit| match edit {
2391 lsp::OneOf::Left(edit) => edit,
2392 lsp::OneOf::Right(edit) => edit.text_edit,
2393 });
2394 this.edits_from_lsp(
2395 &buffer_to_edit,
2396 edits,
2397 op.text_document.version,
2398 cx,
2399 )
2400 })
2401 .await?;
2402
2403 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2404 buffer.finalize_last_transaction();
2405 buffer.start_transaction();
2406 for (range, text) in edits {
2407 buffer.edit([range], text, cx);
2408 }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        }
2420 });
2421 if let Some(transaction) = transaction {
2422 project_transaction.0.insert(buffer_to_edit, transaction);
2423 }
2424 }
2425 }
2426 }
2427
2428 Ok(project_transaction)
2429 }
2430
2431 pub fn prepare_rename<T: ToPointUtf16>(
2432 &self,
2433 buffer: ModelHandle<Buffer>,
2434 position: T,
2435 cx: &mut ModelContext<Self>,
2436 ) -> Task<Result<Option<Range<Anchor>>>> {
2437 let position = position.to_point_utf16(buffer.read(cx));
2438 self.request_lsp(buffer, PrepareRename { position }, cx)
2439 }
2440
2441 pub fn perform_rename<T: ToPointUtf16>(
2442 &self,
2443 buffer: ModelHandle<Buffer>,
2444 position: T,
2445 new_name: String,
2446 push_to_history: bool,
2447 cx: &mut ModelContext<Self>,
2448 ) -> Task<Result<ProjectTransaction>> {
2449 let position = position.to_point_utf16(buffer.read(cx));
2450 self.request_lsp(
2451 buffer,
2452 PerformRename {
2453 position,
2454 new_name,
2455 push_to_history,
2456 },
2457 cx,
2458 )
2459 }
2460
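    // Run a project-wide search. Locally this scans visible files on background
    // threads and searches the matching buffers in parallel; on remote projects
    // the query is forwarded to the host.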
2461 pub fn search(
2462 &self,
2463 query: SearchQuery,
2464 cx: &mut ModelContext<Self>,
2465 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2466 if self.is_local() {
2467 let snapshots = self
2468 .visible_worktrees(cx)
2469 .filter_map(|tree| {
2470 let tree = tree.read(cx).as_local()?;
2471 Some(tree.snapshot())
2472 })
2473 .collect::<Vec<_>>();
2474
2475 let background = cx.background().clone();
2476 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2477 if path_count == 0 {
2478 return Task::ready(Ok(Default::default()));
2479 }
2480 let workers = background.num_cpus().min(path_count);
2481 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2482 cx.background()
2483 .spawn({
2484 let fs = self.fs.clone();
2485 let background = cx.background().clone();
2486 let query = query.clone();
2487 async move {
2488 let fs = &fs;
2489 let query = &query;
2490 let matching_paths_tx = &matching_paths_tx;
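                        // Divide the visible files evenly across the workers (ceiling division).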
2491 let paths_per_worker = (path_count + workers - 1) / workers;
2492 let snapshots = &snapshots;
2493 background
2494 .scoped(|scope| {
2495 for worker_ix in 0..workers {
2496 let worker_start_ix = worker_ix * paths_per_worker;
2497 let worker_end_ix = worker_start_ix + paths_per_worker;
2498 scope.spawn(async move {
2499 let mut snapshot_start_ix = 0;
2500 let mut abs_path = PathBuf::new();
2501 for snapshot in snapshots {
2502 let snapshot_end_ix =
2503 snapshot_start_ix + snapshot.visible_file_count();
2504 if worker_end_ix <= snapshot_start_ix {
2505 break;
2506 } else if worker_start_ix > snapshot_end_ix {
2507 snapshot_start_ix = snapshot_end_ix;
2508 continue;
2509 } else {
2510 let start_in_snapshot = worker_start_ix
2511 .saturating_sub(snapshot_start_ix);
2512 let end_in_snapshot =
2513 cmp::min(worker_end_ix, snapshot_end_ix)
2514 - snapshot_start_ix;
2515
2516 for entry in snapshot
2517 .files(false, start_in_snapshot)
2518 .take(end_in_snapshot - start_in_snapshot)
2519 {
2520 if matching_paths_tx.is_closed() {
2521 break;
2522 }
2523
2524 abs_path.clear();
2525 abs_path.push(&snapshot.abs_path());
2526 abs_path.push(&entry.path);
2527 let matches = if let Some(file) =
2528 fs.open_sync(&abs_path).await.log_err()
2529 {
2530 query.detect(file).unwrap_or(false)
2531 } else {
2532 false
2533 };
2534
2535 if matches {
2536 let project_path =
2537 (snapshot.id(), entry.path.clone());
2538 if matching_paths_tx
2539 .send(project_path)
2540 .await
2541 .is_err()
2542 {
2543 break;
2544 }
2545 }
2546 }
2547
2548 snapshot_start_ix = snapshot_end_ix;
2549 }
2550 }
2551 });
2552 }
2553 })
2554 .await;
2555 }
2556 })
2557 .detach();
2558
2559 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2560 let open_buffers = self
2561 .opened_buffers
2562 .values()
2563 .filter_map(|b| b.upgrade(cx))
2564 .collect::<HashSet<_>>();
2565 cx.spawn(|this, cx| async move {
2566 for buffer in &open_buffers {
2567 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2568 buffers_tx.send((buffer.clone(), snapshot)).await?;
2569 }
2570
2571 let open_buffers = Rc::new(RefCell::new(open_buffers));
2572 while let Some(project_path) = matching_paths_rx.next().await {
2573 if buffers_tx.is_closed() {
2574 break;
2575 }
2576
2577 let this = this.clone();
2578 let open_buffers = open_buffers.clone();
2579 let buffers_tx = buffers_tx.clone();
2580 cx.spawn(|mut cx| async move {
2581 if let Some(buffer) = this
2582 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2583 .await
2584 .log_err()
2585 {
2586 if open_buffers.borrow_mut().insert(buffer.clone()) {
2587 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2588 buffers_tx.send((buffer, snapshot)).await?;
2589 }
2590 }
2591
2592 Ok::<_, anyhow::Error>(())
2593 })
2594 .detach();
2595 }
2596
2597 Ok::<_, anyhow::Error>(())
2598 })
2599 .detach_and_log_err(cx);
2600
2601 let background = cx.background().clone();
2602 cx.background().spawn(async move {
2603 let query = &query;
2604 let mut matched_buffers = Vec::new();
2605 for _ in 0..workers {
2606 matched_buffers.push(HashMap::default());
2607 }
2608 background
2609 .scoped(|scope| {
2610 for worker_matched_buffers in matched_buffers.iter_mut() {
2611 let mut buffers_rx = buffers_rx.clone();
2612 scope.spawn(async move {
2613 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2614 let buffer_matches = query
2615 .search(snapshot.as_rope())
2616 .await
2617 .iter()
2618 .map(|range| {
2619 snapshot.anchor_before(range.start)
2620 ..snapshot.anchor_after(range.end)
2621 })
2622 .collect::<Vec<_>>();
2623 if !buffer_matches.is_empty() {
2624 worker_matched_buffers
2625 .insert(buffer.clone(), buffer_matches);
2626 }
2627 }
2628 });
2629 }
2630 })
2631 .await;
2632 Ok(matched_buffers.into_iter().flatten().collect())
2633 })
2634 } else if let Some(project_id) = self.remote_id() {
2635 let request = self.client.request(query.to_proto(project_id));
2636 cx.spawn(|this, mut cx| async move {
2637 let response = request.await?;
2638 let mut result = HashMap::default();
2639 for location in response.locations {
2640 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2641 let target_buffer = this
2642 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2643 .await?;
2644 let start = location
2645 .start
2646 .and_then(deserialize_anchor)
2647 .ok_or_else(|| anyhow!("missing target start"))?;
2648 let end = location
2649 .end
2650 .and_then(deserialize_anchor)
2651 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
2656 }
2657 Ok(result)
2658 })
2659 } else {
2660 Task::ready(Ok(Default::default()))
2661 }
2662 }
2663
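    // Dispatch a typed LSP command for the given buffer, either to the buffer's
    // local language server or, for remote projects, over RPC to the host.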
2664 fn request_lsp<R: LspCommand>(
2665 &self,
2666 buffer_handle: ModelHandle<Buffer>,
2667 request: R,
2668 cx: &mut ModelContext<Self>,
2669 ) -> Task<Result<R::Response>>
2670 where
2671 <R::LspRequest as lsp::request::Request>::Result: Send,
2672 {
2673 let buffer = buffer_handle.read(cx);
2674 if self.is_local() {
2675 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2676 if let Some((file, language_server)) =
2677 file.zip(self.language_server_for_buffer(&buffer_handle, cx).cloned())
2678 {
2679 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2680 return cx.spawn(|this, cx| async move {
2681 if !request.check_capabilities(language_server.capabilities()) {
2682 return Ok(Default::default());
2683 }
2684
2685 let response = language_server
2686 .request::<R::LspRequest>(lsp_params)
2687 .await
2688 .context("lsp request failed")?;
2689 request
2690 .response_from_lsp(response, this, buffer_handle, cx)
2691 .await
2692 });
2693 }
2694 } else if let Some(project_id) = self.remote_id() {
2695 let rpc = self.client.clone();
2696 let message = request.to_proto(project_id, buffer);
2697 return cx.spawn(|this, cx| async move {
2698 let response = rpc.request(message).await?;
2699 request
2700 .response_from_proto(response, this, buffer_handle, cx)
2701 .await
2702 });
2703 }
2704 Task::ready(Ok(Default::default()))
2705 }
2706
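    // Return the worktree containing `abs_path`, creating a new local worktree
    // if the path is not covered by any existing one.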
2707 pub fn find_or_create_local_worktree(
2708 &mut self,
2709 abs_path: impl AsRef<Path>,
2710 visible: bool,
2711 cx: &mut ModelContext<Self>,
2712 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2713 let abs_path = abs_path.as_ref();
2714 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2715 Task::ready(Ok((tree.clone(), relative_path.into())))
2716 } else {
2717 let worktree = self.create_local_worktree(abs_path, visible, cx);
2718 cx.foreground()
2719 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2720 }
2721 }
2722
2723 pub fn find_local_worktree(
2724 &self,
2725 abs_path: &Path,
2726 cx: &AppContext,
2727 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2728 for tree in self.worktrees(cx) {
2729 if let Some(relative_path) = tree
2730 .read(cx)
2731 .as_local()
2732 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2733 {
2734 return Some((tree.clone(), relative_path.into()));
2735 }
2736 }
2737 None
2738 }
2739
2740 pub fn is_shared(&self) -> bool {
2741 match &self.client_state {
2742 ProjectClientState::Local { is_shared, .. } => *is_shared,
2743 ProjectClientState::Remote { .. } => false,
2744 }
2745 }
2746
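    // Create a worktree for the given path, deduplicating concurrent requests
    // via `loading_local_worktrees` and registering or sharing the worktree with
    // the server when the project has a remote id.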
2747 fn create_local_worktree(
2748 &mut self,
2749 abs_path: impl AsRef<Path>,
2750 visible: bool,
2751 cx: &mut ModelContext<Self>,
2752 ) -> Task<Result<ModelHandle<Worktree>>> {
2753 let fs = self.fs.clone();
2754 let client = self.client.clone();
2755 let path: Arc<Path> = abs_path.as_ref().into();
2756 let task = self
2757 .loading_local_worktrees
2758 .entry(path.clone())
2759 .or_insert_with(|| {
2760 cx.spawn(|project, mut cx| {
2761 async move {
2762 let worktree =
2763 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2764 .await;
2765 project.update(&mut cx, |project, _| {
2766 project.loading_local_worktrees.remove(&path);
2767 });
2768 let worktree = worktree?;
2769
2770 let (remote_project_id, is_shared) =
2771 project.update(&mut cx, |project, cx| {
2772 project.add_worktree(&worktree, cx);
2773 (project.remote_id(), project.is_shared())
2774 });
2775
2776 if let Some(project_id) = remote_project_id {
2777 if is_shared {
2778 worktree
2779 .update(&mut cx, |worktree, cx| {
2780 worktree.as_local_mut().unwrap().share(project_id, cx)
2781 })
2782 .await?;
2783 } else {
2784 worktree
2785 .update(&mut cx, |worktree, cx| {
2786 worktree.as_local_mut().unwrap().register(project_id, cx)
2787 })
2788 .await?;
2789 }
2790 }
2791
2792 Ok(worktree)
2793 }
2794 .map_err(|err| Arc::new(err))
2795 })
2796 .shared()
2797 })
2798 .clone();
2799 cx.foreground().spawn(async move {
2800 match task.await {
2801 Ok(worktree) => Ok(worktree),
2802 Err(err) => Err(anyhow!("{}", err)),
2803 }
2804 })
2805 }
2806
2807 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2808 self.worktrees.retain(|worktree| {
2809 worktree
2810 .upgrade(cx)
2811 .map_or(false, |w| w.read(cx).id() != id)
2812 });
2813 cx.notify();
2814 }
2815
2816 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2817 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2818 if worktree.read(cx).is_local() {
2819 cx.subscribe(&worktree, |this, worktree, _, cx| {
2820 this.update_local_worktree_buffers(worktree, cx);
2821 })
2822 .detach();
2823 }
2824
2825 let push_strong_handle = {
2826 let worktree = worktree.read(cx);
2827 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2828 };
2829 if push_strong_handle {
2830 self.worktrees
2831 .push(WorktreeHandle::Strong(worktree.clone()));
2832 } else {
2833 cx.observe_release(&worktree, |this, _, cx| {
2834 this.worktrees
2835 .retain(|worktree| worktree.upgrade(cx).is_some());
2836 cx.notify();
2837 })
2838 .detach();
2839 self.worktrees
2840 .push(WorktreeHandle::Weak(worktree.downgrade()));
2841 }
2842 cx.notify();
2843 }
2844
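    // Refresh the `File` metadata of open buffers after the given worktree's
    // snapshot changes, notifying remote collaborators of the new file state and
    // pruning buffers that have been dropped.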
2845 fn update_local_worktree_buffers(
2846 &mut self,
2847 worktree_handle: ModelHandle<Worktree>,
2848 cx: &mut ModelContext<Self>,
2849 ) {
2850 let snapshot = worktree_handle.read(cx).snapshot();
2851 let mut buffers_to_delete = Vec::new();
2852 for (buffer_id, buffer) in &self.opened_buffers {
2853 if let Some(buffer) = buffer.upgrade(cx) {
2854 buffer.update(cx, |buffer, cx| {
2855 if let Some(old_file) = File::from_dyn(buffer.file()) {
2856 if old_file.worktree != worktree_handle {
2857 return;
2858 }
2859
2860 let new_file = if let Some(entry) = old_file
2861 .entry_id
2862 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2863 {
2864 File {
2865 is_local: true,
2866 entry_id: Some(entry.id),
2867 mtime: entry.mtime,
2868 path: entry.path.clone(),
2869 worktree: worktree_handle.clone(),
2870 }
2871 } else if let Some(entry) =
2872 snapshot.entry_for_path(old_file.path().as_ref())
2873 {
2874 File {
2875 is_local: true,
2876 entry_id: Some(entry.id),
2877 mtime: entry.mtime,
2878 path: entry.path.clone(),
2879 worktree: worktree_handle.clone(),
2880 }
2881 } else {
2882 File {
2883 is_local: true,
2884 entry_id: None,
2885 path: old_file.path().clone(),
2886 mtime: old_file.mtime(),
2887 worktree: worktree_handle.clone(),
2888 }
2889 };
2890
2891 if let Some(project_id) = self.remote_id() {
2892 self.client
2893 .send(proto::UpdateBufferFile {
2894 project_id,
2895 buffer_id: *buffer_id as u64,
2896 file: Some(new_file.to_proto()),
2897 })
2898 .log_err();
2899 }
2900 buffer.file_updated(Box::new(new_file), cx).detach();
2901 }
2902 });
2903 } else {
2904 buffers_to_delete.push(*buffer_id);
2905 }
2906 }
2907
2908 for buffer_id in buffers_to_delete {
2909 self.opened_buffers.remove(&buffer_id);
2910 }
2911 }
2912
2913 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2914 let new_active_entry = entry.and_then(|project_path| {
2915 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2916 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2917 Some(ProjectEntry {
2918 worktree_id: project_path.worktree_id,
2919 entry_id: entry.id,
2920 })
2921 });
2922 if new_active_entry != self.active_entry {
2923 self.active_entry = new_active_entry;
2924 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2925 }
2926 }
2927
2928 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2929 self.language_servers_with_diagnostics_running > 0
2930 }
2931
2932 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2933 let mut summary = DiagnosticSummary::default();
2934 for (_, path_summary) in self.diagnostic_summaries(cx) {
2935 summary.error_count += path_summary.error_count;
2936 summary.warning_count += path_summary.warning_count;
2937 summary.info_count += path_summary.info_count;
2938 summary.hint_count += path_summary.hint_count;
2939 }
2940 summary
2941 }
2942
2943 pub fn diagnostic_summaries<'a>(
2944 &'a self,
2945 cx: &'a AppContext,
2946 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2947 self.worktrees(cx).flat_map(move |worktree| {
2948 let worktree = worktree.read(cx);
2949 let worktree_id = worktree.id();
2950 worktree
2951 .diagnostic_summaries()
2952 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2953 })
2954 }
2955
2956 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2957 self.language_servers_with_diagnostics_running += 1;
2958 if self.language_servers_with_diagnostics_running == 1 {
2959 cx.emit(Event::DiskBasedDiagnosticsStarted);
2960 }
2961 }
2962
2963 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2964 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2965 self.language_servers_with_diagnostics_running -= 1;
2966 if self.language_servers_with_diagnostics_running == 0 {
2967 cx.emit(Event::DiskBasedDiagnosticsFinished);
2968 }
2969 }
2970
2971 pub fn active_entry(&self) -> Option<ProjectEntry> {
2972 self.active_entry
2973 }
2974
2975 // RPC message handlers
2976
2977 async fn handle_unshare_project(
2978 this: ModelHandle<Self>,
2979 _: TypedEnvelope<proto::UnshareProject>,
2980 _: Arc<Client>,
2981 mut cx: AsyncAppContext,
2982 ) -> Result<()> {
2983 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2984 Ok(())
2985 }
2986
2987 async fn handle_add_collaborator(
2988 this: ModelHandle<Self>,
2989 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2990 _: Arc<Client>,
2991 mut cx: AsyncAppContext,
2992 ) -> Result<()> {
2993 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2994 let collaborator = envelope
2995 .payload
2996 .collaborator
2997 .take()
2998 .ok_or_else(|| anyhow!("empty collaborator"))?;
2999
3000 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
3001 this.update(&mut cx, |this, cx| {
3002 this.collaborators
3003 .insert(collaborator.peer_id, collaborator);
3004 cx.notify();
3005 });
3006
3007 Ok(())
3008 }
3009
3010 async fn handle_remove_collaborator(
3011 this: ModelHandle<Self>,
3012 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
3013 _: Arc<Client>,
3014 mut cx: AsyncAppContext,
3015 ) -> Result<()> {
3016 this.update(&mut cx, |this, cx| {
3017 let peer_id = PeerId(envelope.payload.peer_id);
3018 let replica_id = this
3019 .collaborators
3020 .remove(&peer_id)
3021 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3022 .replica_id;
3023 for (_, buffer) in &this.opened_buffers {
3024 if let Some(buffer) = buffer.upgrade(cx) {
3025 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3026 }
3027 }
3028 cx.notify();
3029 Ok(())
3030 })
3031 }
3032
3033 async fn handle_register_worktree(
3034 this: ModelHandle<Self>,
3035 envelope: TypedEnvelope<proto::RegisterWorktree>,
3036 client: Arc<Client>,
3037 mut cx: AsyncAppContext,
3038 ) -> Result<()> {
3039 this.update(&mut cx, |this, cx| {
3040 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3041 let replica_id = this.replica_id();
3042 let worktree = proto::Worktree {
3043 id: envelope.payload.worktree_id,
3044 root_name: envelope.payload.root_name,
3045 entries: Default::default(),
3046 diagnostic_summaries: Default::default(),
3047 visible: envelope.payload.visible,
3048 };
3049 let (worktree, load_task) =
3050 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3051 this.add_worktree(&worktree, cx);
3052 load_task.detach();
3053 Ok(())
3054 })
3055 }
3056
3057 async fn handle_unregister_worktree(
3058 this: ModelHandle<Self>,
3059 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3060 _: Arc<Client>,
3061 mut cx: AsyncAppContext,
3062 ) -> Result<()> {
3063 this.update(&mut cx, |this, cx| {
3064 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3065 this.remove_worktree(worktree_id, cx);
3066 Ok(())
3067 })
3068 }
3069
3070 async fn handle_update_worktree(
3071 this: ModelHandle<Self>,
3072 envelope: TypedEnvelope<proto::UpdateWorktree>,
3073 _: Arc<Client>,
3074 mut cx: AsyncAppContext,
3075 ) -> Result<()> {
3076 this.update(&mut cx, |this, cx| {
3077 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3078 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3079 worktree.update(cx, |worktree, _| {
3080 let worktree = worktree.as_remote_mut().unwrap();
3081 worktree.update_from_remote(envelope)
3082 })?;
3083 }
3084 Ok(())
3085 })
3086 }
3087
3088 async fn handle_update_diagnostic_summary(
3089 this: ModelHandle<Self>,
3090 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3091 _: Arc<Client>,
3092 mut cx: AsyncAppContext,
3093 ) -> Result<()> {
3094 this.update(&mut cx, |this, cx| {
3095 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3096 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3097 if let Some(summary) = envelope.payload.summary {
3098 let project_path = ProjectPath {
3099 worktree_id,
3100 path: Path::new(&summary.path).into(),
3101 };
3102 worktree.update(cx, |worktree, _| {
3103 worktree
3104 .as_remote_mut()
3105 .unwrap()
3106 .update_diagnostic_summary(project_path.path.clone(), &summary);
3107 });
3108 cx.emit(Event::DiagnosticsUpdated(project_path));
3109 }
3110 }
3111 Ok(())
3112 })
3113 }
3114
3115 async fn handle_disk_based_diagnostics_updating(
3116 this: ModelHandle<Self>,
3117 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
3118 _: Arc<Client>,
3119 mut cx: AsyncAppContext,
3120 ) -> Result<()> {
3121 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
3122 Ok(())
3123 }
3124
3125 async fn handle_disk_based_diagnostics_updated(
3126 this: ModelHandle<Self>,
3127 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
3128 _: Arc<Client>,
3129 mut cx: AsyncAppContext,
3130 ) -> Result<()> {
3131 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3132 Ok(())
3133 }
3134
3135 async fn handle_update_buffer(
3136 this: ModelHandle<Self>,
3137 envelope: TypedEnvelope<proto::UpdateBuffer>,
3138 _: Arc<Client>,
3139 mut cx: AsyncAppContext,
3140 ) -> Result<()> {
3141 this.update(&mut cx, |this, cx| {
3142 let payload = envelope.payload.clone();
3143 let buffer_id = payload.buffer_id;
3144 let ops = payload
3145 .operations
3146 .into_iter()
                .map(language::proto::deserialize_operation)
3148 .collect::<Result<Vec<_>, _>>()?;
3149 match this.opened_buffers.entry(buffer_id) {
3150 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3151 OpenBuffer::Strong(buffer) => {
3152 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3153 }
3154 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3155 OpenBuffer::Weak(_) => {}
3156 },
3157 hash_map::Entry::Vacant(e) => {
3158 e.insert(OpenBuffer::Loading(ops));
3159 }
3160 }
3161 Ok(())
3162 })
3163 }
3164
3165 async fn handle_update_buffer_file(
3166 this: ModelHandle<Self>,
3167 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3168 _: Arc<Client>,
3169 mut cx: AsyncAppContext,
3170 ) -> Result<()> {
3171 this.update(&mut cx, |this, cx| {
3172 let payload = envelope.payload.clone();
3173 let buffer_id = payload.buffer_id;
3174 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3175 let worktree = this
3176 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3177 .ok_or_else(|| anyhow!("no such worktree"))?;
3178 let file = File::from_proto(file, worktree.clone(), cx)?;
3179 let buffer = this
3180 .opened_buffers
3181 .get_mut(&buffer_id)
3182 .and_then(|b| b.upgrade(cx))
3183 .ok_or_else(|| anyhow!("no such buffer"))?;
3184 buffer.update(cx, |buffer, cx| {
3185 buffer.file_updated(Box::new(file), cx).detach();
3186 });
3187 Ok(())
3188 })
3189 }
3190
3191 async fn handle_save_buffer(
3192 this: ModelHandle<Self>,
3193 envelope: TypedEnvelope<proto::SaveBuffer>,
3194 _: Arc<Client>,
3195 mut cx: AsyncAppContext,
3196 ) -> Result<proto::BufferSaved> {
3197 let buffer_id = envelope.payload.buffer_id;
3198 let requested_version = deserialize_version(envelope.payload.version);
3199
3200 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3201 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3202 let buffer = this
3203 .opened_buffers
3204 .get(&buffer_id)
3205 .map(|buffer| buffer.upgrade(cx).unwrap())
3206 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3207 Ok::<_, anyhow::Error>((project_id, buffer))
3208 })?;
3209 buffer
3210 .update(&mut cx, |buffer, _| {
3211 buffer.wait_for_version(requested_version)
3212 })
3213 .await;
3214
3215 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3216 Ok(proto::BufferSaved {
3217 project_id,
3218 buffer_id,
3219 version: serialize_version(&saved_version),
3220 mtime: Some(mtime.into()),
3221 })
3222 }
3223
3224 async fn handle_format_buffers(
3225 this: ModelHandle<Self>,
3226 envelope: TypedEnvelope<proto::FormatBuffers>,
3227 _: Arc<Client>,
3228 mut cx: AsyncAppContext,
3229 ) -> Result<proto::FormatBuffersResponse> {
3230 let sender_id = envelope.original_sender_id()?;
3231 let format = this.update(&mut cx, |this, cx| {
3232 let mut buffers = HashSet::default();
3233 for buffer_id in &envelope.payload.buffer_ids {
3234 buffers.insert(
3235 this.opened_buffers
3236 .get(buffer_id)
3237 .map(|buffer| buffer.upgrade(cx).unwrap())
3238 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3239 );
3240 }
3241 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3242 })?;
3243
3244 let project_transaction = format.await?;
3245 let project_transaction = this.update(&mut cx, |this, cx| {
3246 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3247 });
3248 Ok(proto::FormatBuffersResponse {
3249 transaction: Some(project_transaction),
3250 })
3251 }
3252
3253 async fn handle_get_completions(
3254 this: ModelHandle<Self>,
3255 envelope: TypedEnvelope<proto::GetCompletions>,
3256 _: Arc<Client>,
3257 mut cx: AsyncAppContext,
3258 ) -> Result<proto::GetCompletionsResponse> {
3259 let position = envelope
3260 .payload
3261 .position
3262 .and_then(language::proto::deserialize_anchor)
3263 .ok_or_else(|| anyhow!("invalid position"))?;
3264 let version = deserialize_version(envelope.payload.version);
3265 let buffer = this.read_with(&cx, |this, cx| {
3266 this.opened_buffers
3267 .get(&envelope.payload.buffer_id)
3268 .map(|buffer| buffer.upgrade(cx).unwrap())
3269 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3270 })?;
3271 buffer
3272 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3273 .await;
3274 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3275 let completions = this
3276 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3277 .await?;
3278
3279 Ok(proto::GetCompletionsResponse {
3280 completions: completions
3281 .iter()
3282 .map(language::proto::serialize_completion)
3283 .collect(),
3284 version: serialize_version(&version),
3285 })
3286 }
3287
3288 async fn handle_apply_additional_edits_for_completion(
3289 this: ModelHandle<Self>,
3290 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3291 _: Arc<Client>,
3292 mut cx: AsyncAppContext,
3293 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3294 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3295 let buffer = this
3296 .opened_buffers
3297 .get(&envelope.payload.buffer_id)
3298 .map(|buffer| buffer.upgrade(cx).unwrap())
3299 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3300 let language = buffer.read(cx).language();
3301 let completion = language::proto::deserialize_completion(
3302 envelope
3303 .payload
3304 .completion
3305 .ok_or_else(|| anyhow!("invalid completion"))?,
3306 language,
3307 )?;
3308 Ok::<_, anyhow::Error>(
3309 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3310 )
3311 })?;
3312
3313 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3314 transaction: apply_additional_edits
3315 .await?
3316 .as_ref()
3317 .map(language::proto::serialize_transaction),
3318 })
3319 }
3320
3321 async fn handle_get_code_actions(
3322 this: ModelHandle<Self>,
3323 envelope: TypedEnvelope<proto::GetCodeActions>,
3324 _: Arc<Client>,
3325 mut cx: AsyncAppContext,
3326 ) -> Result<proto::GetCodeActionsResponse> {
3327 let start = envelope
3328 .payload
3329 .start
3330 .and_then(language::proto::deserialize_anchor)
3331 .ok_or_else(|| anyhow!("invalid start"))?;
3332 let end = envelope
3333 .payload
3334 .end
3335 .and_then(language::proto::deserialize_anchor)
3336 .ok_or_else(|| anyhow!("invalid end"))?;
3337 let buffer = this.update(&mut cx, |this, cx| {
3338 this.opened_buffers
3339 .get(&envelope.payload.buffer_id)
3340 .map(|buffer| buffer.upgrade(cx).unwrap())
3341 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3342 })?;
3343 buffer
3344 .update(&mut cx, |buffer, _| {
3345 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3346 })
3347 .await;
3348
3349 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3350 let code_actions = this.update(&mut cx, |this, cx| {
3351 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3352 })?;
3353
3354 Ok(proto::GetCodeActionsResponse {
3355 actions: code_actions
3356 .await?
3357 .iter()
3358 .map(language::proto::serialize_code_action)
3359 .collect(),
3360 version: serialize_version(&version),
3361 })
3362 }
3363
3364 async fn handle_apply_code_action(
3365 this: ModelHandle<Self>,
3366 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3367 _: Arc<Client>,
3368 mut cx: AsyncAppContext,
3369 ) -> Result<proto::ApplyCodeActionResponse> {
3370 let sender_id = envelope.original_sender_id()?;
3371 let action = language::proto::deserialize_code_action(
3372 envelope
3373 .payload
3374 .action
3375 .ok_or_else(|| anyhow!("invalid action"))?,
3376 )?;
3377 let apply_code_action = this.update(&mut cx, |this, cx| {
3378 let buffer = this
3379 .opened_buffers
3380 .get(&envelope.payload.buffer_id)
3381 .map(|buffer| buffer.upgrade(cx).unwrap())
3382 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3383 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3384 })?;
3385
3386 let project_transaction = apply_code_action.await?;
3387 let project_transaction = this.update(&mut cx, |this, cx| {
3388 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3389 });
3390 Ok(proto::ApplyCodeActionResponse {
3391 transaction: Some(project_transaction),
3392 })
3393 }
3394
3395 async fn handle_lsp_command<T: LspCommand>(
3396 this: ModelHandle<Self>,
3397 envelope: TypedEnvelope<T::ProtoRequest>,
3398 _: Arc<Client>,
3399 mut cx: AsyncAppContext,
3400 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3401 where
3402 <T::LspRequest as lsp::request::Request>::Result: Send,
3403 {
3404 let sender_id = envelope.original_sender_id()?;
3405 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3406 let buffer_handle = this.read_with(&cx, |this, _| {
3407 this.opened_buffers
3408 .get(&buffer_id)
3409 .map(|buffer| buffer.upgrade(&cx).unwrap())
3410 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3411 })?;
3412 let request = T::from_proto(
3413 envelope.payload,
3414 this.clone(),
3415 buffer_handle.clone(),
3416 cx.clone(),
3417 )
3418 .await?;
3419 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3420 let response = this
3421 .update(&mut cx, |this, cx| {
3422 this.request_lsp(buffer_handle, request, cx)
3423 })
3424 .await?;
3425 this.update(&mut cx, |this, cx| {
3426 Ok(T::response_to_proto(
3427 response,
3428 this,
3429 sender_id,
3430 &buffer_version,
3431 cx,
3432 ))
3433 })
3434 }
3435
3436 async fn handle_get_project_symbols(
3437 this: ModelHandle<Self>,
3438 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3439 _: Arc<Client>,
3440 mut cx: AsyncAppContext,
3441 ) -> Result<proto::GetProjectSymbolsResponse> {
3442 let symbols = this
3443 .update(&mut cx, |this, cx| {
3444 this.symbols(&envelope.payload.query, cx)
3445 })
3446 .await?;
3447
3448 Ok(proto::GetProjectSymbolsResponse {
3449 symbols: symbols.iter().map(serialize_symbol).collect(),
3450 })
3451 }
3452
3453 async fn handle_search_project(
3454 this: ModelHandle<Self>,
3455 envelope: TypedEnvelope<proto::SearchProject>,
3456 _: Arc<Client>,
3457 mut cx: AsyncAppContext,
3458 ) -> Result<proto::SearchProjectResponse> {
3459 let peer_id = envelope.original_sender_id()?;
3460 let query = SearchQuery::from_proto(envelope.payload)?;
3461 let result = this
3462 .update(&mut cx, |this, cx| this.search(query, cx))
3463 .await?;
3464
3465 this.update(&mut cx, |this, cx| {
3466 let mut locations = Vec::new();
3467 for (buffer, ranges) in result {
3468 for range in ranges {
3469 let start = serialize_anchor(&range.start);
3470 let end = serialize_anchor(&range.end);
3471 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3472 locations.push(proto::Location {
3473 buffer: Some(buffer),
3474 start: Some(start),
3475 end: Some(end),
3476 });
3477 }
3478 }
3479 Ok(proto::SearchProjectResponse { locations })
3480 })
3481 }
3482
3483 async fn handle_open_buffer_for_symbol(
3484 this: ModelHandle<Self>,
3485 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3486 _: Arc<Client>,
3487 mut cx: AsyncAppContext,
3488 ) -> Result<proto::OpenBufferForSymbolResponse> {
3489 let peer_id = envelope.original_sender_id()?;
3490 let symbol = envelope
3491 .payload
3492 .symbol
3493 .ok_or_else(|| anyhow!("invalid symbol"))?;
3494 let symbol = this.read_with(&cx, |this, _| {
3495 let symbol = this.deserialize_symbol(symbol)?;
3496 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3497 if signature == symbol.signature {
3498 Ok(symbol)
3499 } else {
3500 Err(anyhow!("invalid symbol signature"))
3501 }
3502 })?;
3503 let buffer = this
3504 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3505 .await?;
3506
3507 Ok(proto::OpenBufferForSymbolResponse {
3508 buffer: Some(this.update(&mut cx, |this, cx| {
3509 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3510 })),
3511 })
3512 }
3513
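    // Compute an opaque signature for a symbol path using this project's private
    // nonce, so that `handle_open_buffer_for_symbol` can verify a requested
    // symbol actually originated from this project.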
3514 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3515 let mut hasher = Sha256::new();
3516 hasher.update(worktree_id.to_proto().to_be_bytes());
3517 hasher.update(path.to_string_lossy().as_bytes());
3518 hasher.update(self.nonce.to_be_bytes());
3519 hasher.finalize().as_slice().try_into().unwrap()
3520 }
3521
3522 async fn handle_open_buffer(
3523 this: ModelHandle<Self>,
3524 envelope: TypedEnvelope<proto::OpenBuffer>,
3525 _: Arc<Client>,
3526 mut cx: AsyncAppContext,
3527 ) -> Result<proto::OpenBufferResponse> {
3528 let peer_id = envelope.original_sender_id()?;
3529 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3530 let open_buffer = this.update(&mut cx, |this, cx| {
3531 this.open_buffer(
3532 ProjectPath {
3533 worktree_id,
3534 path: PathBuf::from(envelope.payload.path).into(),
3535 },
3536 cx,
3537 )
3538 });
3539
3540 let buffer = open_buffer.await?;
3541 this.update(&mut cx, |this, cx| {
3542 Ok(proto::OpenBufferResponse {
3543 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3544 })
3545 })
3546 }
3547
3548 fn serialize_project_transaction_for_peer(
3549 &mut self,
3550 project_transaction: ProjectTransaction,
3551 peer_id: PeerId,
3552 cx: &AppContext,
3553 ) -> proto::ProjectTransaction {
3554 let mut serialized_transaction = proto::ProjectTransaction {
3555 buffers: Default::default(),
3556 transactions: Default::default(),
3557 };
3558 for (buffer, transaction) in project_transaction.0 {
3559 serialized_transaction
3560 .buffers
3561 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3562 serialized_transaction
3563 .transactions
3564 .push(language::proto::serialize_transaction(&transaction));
3565 }
3566 serialized_transaction
3567 }
3568
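    // Rebuild a `ProjectTransaction` received over RPC, waiting for the edits in
    // each transaction to arrive before optionally pushing them to undo history.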
3569 fn deserialize_project_transaction(
3570 &mut self,
3571 message: proto::ProjectTransaction,
3572 push_to_history: bool,
3573 cx: &mut ModelContext<Self>,
3574 ) -> Task<Result<ProjectTransaction>> {
3575 cx.spawn(|this, mut cx| async move {
3576 let mut project_transaction = ProjectTransaction::default();
3577 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3578 let buffer = this
3579 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3580 .await?;
3581 let transaction = language::proto::deserialize_transaction(transaction)?;
3582 project_transaction.0.insert(buffer, transaction);
3583 }
3584
3585 for (buffer, transaction) in &project_transaction.0 {
3586 buffer
3587 .update(&mut cx, |buffer, _| {
3588 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3589 })
3590 .await;
3591
3592 if push_to_history {
3593 buffer.update(&mut cx, |buffer, _| {
3594 buffer.push_transaction(transaction.clone(), Instant::now());
3595 });
3596 }
3597 }
3598
3599 Ok(project_transaction)
3600 })
3601 }
3602
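    // Serialize a buffer for a peer, sending the full buffer state only the
    // first time a given buffer is shared with that peer and just its id
    // afterwards.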
3603 fn serialize_buffer_for_peer(
3604 &mut self,
3605 buffer: &ModelHandle<Buffer>,
3606 peer_id: PeerId,
3607 cx: &AppContext,
3608 ) -> proto::Buffer {
3609 let buffer_id = buffer.read(cx).remote_id();
3610 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3611 if shared_buffers.insert(buffer_id) {
3612 proto::Buffer {
3613 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3614 }
3615 } else {
3616 proto::Buffer {
3617 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3618 }
3619 }
3620 }
3621
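    // Materialize a buffer received from the host: either wait for an
    // already-announced buffer id to finish opening, or construct a new buffer
    // from the serialized state.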
3622 fn deserialize_buffer(
3623 &mut self,
3624 buffer: proto::Buffer,
3625 cx: &mut ModelContext<Self>,
3626 ) -> Task<Result<ModelHandle<Buffer>>> {
3627 let replica_id = self.replica_id();
3628
3629 let opened_buffer_tx = self.opened_buffer.0.clone();
3630 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3631 cx.spawn(|this, mut cx| async move {
3632 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3633 proto::buffer::Variant::Id(id) => {
3634 let buffer = loop {
3635 let buffer = this.read_with(&cx, |this, cx| {
3636 this.opened_buffers
3637 .get(&id)
3638 .and_then(|buffer| buffer.upgrade(cx))
3639 });
3640 if let Some(buffer) = buffer {
3641 break buffer;
3642 }
3643 opened_buffer_rx
3644 .next()
3645 .await
3646 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3647 };
3648 Ok(buffer)
3649 }
3650 proto::buffer::Variant::State(mut buffer) => {
3651 let mut buffer_worktree = None;
3652 let mut buffer_file = None;
3653 if let Some(file) = buffer.file.take() {
3654 this.read_with(&cx, |this, cx| {
3655 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3656 let worktree =
3657 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3658 anyhow!("no worktree found for id {}", file.worktree_id)
3659 })?;
3660 buffer_file =
3661 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3662 as Box<dyn language::File>);
3663 buffer_worktree = Some(worktree);
3664 Ok::<_, anyhow::Error>(())
3665 })?;
3666 }
3667
3668 let buffer = cx.add_model(|cx| {
3669 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3670 });
3671
3672 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3673
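                    // Signal the `opened_buffer` watch so that any
                    // `deserialize_buffer` calls waiting on a buffer id can
                    // re-check the open buffers.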
3674 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3675 Ok(buffer)
3676 }
3677 }
3678 })
3679 }
3680
3681 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3682 let language = self
3683 .languages
3684 .get_language(&serialized_symbol.language_name);
3685 let start = serialized_symbol
3686 .start
3687 .ok_or_else(|| anyhow!("invalid start"))?;
3688 let end = serialized_symbol
3689 .end
3690 .ok_or_else(|| anyhow!("invalid end"))?;
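        // The proto `kind` field carries the raw LSP symbol-kind value, so it is
        // assumed here to share its representation with `lsp::SymbolKind`.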
3691 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3692 Ok(Symbol {
3693 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3694 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3695 language_name: serialized_symbol.language_name.clone(),
3696 label: language
3697 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3698 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3699 name: serialized_symbol.name,
3700 path: PathBuf::from(serialized_symbol.path),
3701 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3702 kind,
3703 signature: serialized_symbol
3704 .signature
3705 .try_into()
3706 .map_err(|_| anyhow!("invalid signature"))?,
3707 })
3708 }
3709
3710 async fn handle_buffer_saved(
3711 this: ModelHandle<Self>,
3712 envelope: TypedEnvelope<proto::BufferSaved>,
3713 _: Arc<Client>,
3714 mut cx: AsyncAppContext,
3715 ) -> Result<()> {
3716 let version = deserialize_version(envelope.payload.version);
3717 let mtime = envelope
3718 .payload
3719 .mtime
3720 .ok_or_else(|| anyhow!("missing mtime"))?
3721 .into();
3722
3723 this.update(&mut cx, |this, cx| {
3724 let buffer = this
3725 .opened_buffers
3726 .get(&envelope.payload.buffer_id)
3727 .and_then(|buffer| buffer.upgrade(cx));
3728 if let Some(buffer) = buffer {
3729 buffer.update(cx, |buffer, cx| {
3730 buffer.did_save(version, mtime, None, cx);
3731 });
3732 }
3733 Ok(())
3734 })
3735 }
3736
3737 async fn handle_buffer_reloaded(
3738 this: ModelHandle<Self>,
3739 envelope: TypedEnvelope<proto::BufferReloaded>,
3740 _: Arc<Client>,
3741 mut cx: AsyncAppContext,
3742 ) -> Result<()> {
3743 let payload = envelope.payload.clone();
3744 let version = deserialize_version(payload.version);
3745 let mtime = payload
3746 .mtime
3747 .ok_or_else(|| anyhow!("missing mtime"))?
3748 .into();
3749 this.update(&mut cx, |this, cx| {
3750 let buffer = this
3751 .opened_buffers
3752 .get(&payload.buffer_id)
3753 .and_then(|buffer| buffer.upgrade(cx));
3754 if let Some(buffer) = buffer {
3755 buffer.update(cx, |buffer, cx| {
3756 buffer.did_reload(version, mtime, cx);
3757 });
3758 }
3759 Ok(())
3760 })
3761 }
3762
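    // Fuzzy-matches `query` against the paths of all visible worktrees, returning up
    // to `max_results` matches. The matching itself runs on the background executor.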
3763 pub fn match_paths<'a>(
3764 &self,
3765 query: &'a str,
3766 include_ignored: bool,
3767 smart_case: bool,
3768 max_results: usize,
3769 cancel_flag: &'a AtomicBool,
3770 cx: &AppContext,
3771 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3772 let worktrees = self
3773 .worktrees(cx)
3774 .filter(|worktree| worktree.read(cx).is_visible())
3775 .collect::<Vec<_>>();
3776 let include_root_name = worktrees.len() > 1;
3777 let candidate_sets = worktrees
3778 .into_iter()
3779 .map(|worktree| CandidateSet {
3780 snapshot: worktree.read(cx).snapshot(),
3781 include_ignored,
3782 include_root_name,
3783 })
3784 .collect::<Vec<_>>();
3785
3786 let background = cx.background().clone();
3787 async move {
3788 fuzzy::match_paths(
3789 candidate_sets.as_slice(),
3790 query,
3791 smart_case,
3792 max_results,
3793 cancel_flag,
3794 background,
3795 )
3796 .await
3797 }
3798 }
3799
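    // Converts a set of LSP text edits into anchor-based edits on the buffer,
    // resolving them against the snapshot that was sent to the language server at the
    // given document version (or against the current contents if no version is given).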
3800 fn edits_from_lsp(
3801 &mut self,
3802 buffer: &ModelHandle<Buffer>,
3803 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
3804 version: Option<i32>,
3805 cx: &mut ModelContext<Self>,
3806 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
3807 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
3808 cx.background().spawn(async move {
3809 let snapshot = snapshot?;
3810 let mut lsp_edits = lsp_edits
3811 .into_iter()
3812 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
3813 .peekable();
3814
3815 let mut edits = Vec::new();
3816 while let Some((mut range, mut new_text)) = lsp_edits.next() {
3817 // Combine any LSP edits that are adjacent.
3818 //
3819 // Also, combine LSP edits that are separated from each other by only
3820 // a newline. This is important because for some code actions,
3821 // Rust-analyzer rewrites the entire buffer via a series of edits that
3822 // are separated by unchanged newline characters.
3823 //
3824 // In order for the diffing logic below to work properly, any edits that
3825 // cancel each other out must be combined into one.
3826 while let Some((next_range, next_text)) = lsp_edits.peek() {
3827 if next_range.start > range.end {
3828 if next_range.start.row > range.end.row + 1
3829 || next_range.start.column > 0
3830 || snapshot.clip_point_utf16(
3831 PointUtf16::new(range.end.row, u32::MAX),
3832 Bias::Left,
3833 ) > range.end
3834 {
3835 break;
3836 }
3837 new_text.push('\n');
3838 }
3839 range.end = next_range.end;
3840 new_text.push_str(&next_text);
3841 lsp_edits.next();
3842 }
3843
3844 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
3845 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
3846 {
3847 return Err(anyhow!("invalid edits received from language server"));
3848 }
3849
3850 // For multiline edits, perform a diff of the old and new text so that
3851 // we can identify the changes more precisely, preserving the locations
3852 // of any anchors positioned in the unchanged regions.
3853 if range.end.row > range.start.row {
3854 let mut offset = range.start.to_offset(&snapshot);
3855 let old_text = snapshot.text_for_range(range).collect::<String>();
3856
3857 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
3858 let mut moved_since_edit = true;
3859 for change in diff.iter_all_changes() {
3860 let tag = change.tag();
3861 let value = change.value();
3862 match tag {
3863 ChangeTag::Equal => {
3864 offset += value.len();
3865 moved_since_edit = true;
3866 }
3867 ChangeTag::Delete => {
3868 let start = snapshot.anchor_after(offset);
3869 let end = snapshot.anchor_before(offset + value.len());
3870 if moved_since_edit {
3871 edits.push((start..end, String::new()));
3872 } else {
3873 edits.last_mut().unwrap().0.end = end;
3874 }
3875 offset += value.len();
3876 moved_since_edit = false;
3877 }
3878 ChangeTag::Insert => {
3879 if moved_since_edit {
3880 let anchor = snapshot.anchor_after(offset);
3881 edits.push((anchor.clone()..anchor, value.to_string()));
3882 } else {
3883 edits.last_mut().unwrap().1.push_str(value);
3884 }
3885 moved_since_edit = false;
3886 }
3887 }
3888 }
3889 } else if range.end == range.start {
3890 let anchor = snapshot.anchor_after(range.start);
3891 edits.push((anchor.clone()..anchor, new_text));
3892 } else {
3893 let edit_start = snapshot.anchor_after(range.start);
3894 let edit_end = snapshot.anchor_before(range.end);
3895 edits.push((edit_start..edit_end, new_text));
3896 }
3897 }
3898
3899 Ok(edits)
3900 })
3901 }
3902
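    // Returns the buffer snapshot that was current at the given LSP document version,
    // discarding snapshots more than `OLD_VERSIONS_TO_RETAIN` versions older than the
    // requested one.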
3903 fn buffer_snapshot_for_lsp_version(
3904 &mut self,
3905 buffer: &ModelHandle<Buffer>,
3906 version: Option<i32>,
3907 cx: &AppContext,
3908 ) -> Result<TextBufferSnapshot> {
3909 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
3910
3911 if let Some(version) = version {
3912 let buffer_id = buffer.read(cx).remote_id();
3913 let snapshots = self
3914 .buffer_snapshots
3915 .get_mut(&buffer_id)
3916 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
3917 let mut found_snapshot = None;
3918 snapshots.retain(|(snapshot_version, snapshot)| {
3919 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
3920 false
3921 } else {
3922 if *snapshot_version == version {
3923 found_snapshot = Some(snapshot.clone());
3924 }
3925 true
3926 }
3927 });
3928
3929 found_snapshot.ok_or_else(|| {
3930 anyhow!(
3931 "snapshot not found for buffer {} at version {}",
3932 buffer_id,
3933 version
3934 )
3935 })
3936 } else {
3937 Ok((**buffer.read(cx)).clone())
3938 }
3939 }
3940
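    // Returns the running language server associated with the buffer's worktree and
    // language, if any.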
3941 fn language_server_for_buffer(
3942 &self,
3943 buffer: &ModelHandle<Buffer>,
3944 cx: &AppContext,
3945 ) -> Option<&Arc<LanguageServer>> {
3946 let buffer = buffer.read(cx);
3947 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
3948 let worktree_id = file.worktree_id(cx);
3949 self.language_servers.get(&(worktree_id, language.name()))
3950 } else {
3951 None
3952 }
3953 }
3954}
3955
3956impl WorktreeHandle {
3957 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3958 match self {
3959 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3960 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3961 }
3962 }
3963}
3964
3965impl OpenBuffer {
3966 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3967 match self {
3968 OpenBuffer::Strong(handle) => Some(handle.clone()),
3969 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3970 OpenBuffer::Loading(_) => None,
3971 }
3972 }
3973}
3974
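// Adapts a worktree snapshot to the `PathMatchCandidateSet` interface that the fuzzy
// matcher consumes in `match_paths`.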
3975struct CandidateSet {
3976 snapshot: Snapshot,
3977 include_ignored: bool,
3978 include_root_name: bool,
3979}
3980
3981impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3982 type Candidates = CandidateSetIter<'a>;
3983
3984 fn id(&self) -> usize {
3985 self.snapshot.id().to_usize()
3986 }
3987
3988 fn len(&self) -> usize {
3989 if self.include_ignored {
3990 self.snapshot.file_count()
3991 } else {
3992 self.snapshot.visible_file_count()
3993 }
3994 }
3995
3996 fn prefix(&self) -> Arc<str> {
3997 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3998 self.snapshot.root_name().into()
3999 } else if self.include_root_name {
4000 format!("{}/", self.snapshot.root_name()).into()
4001 } else {
4002 "".into()
4003 }
4004 }
4005
4006 fn candidates(&'a self, start: usize) -> Self::Candidates {
4007 CandidateSetIter {
4008 traversal: self.snapshot.files(self.include_ignored, start),
4009 }
4010 }
4011}
4012
4013struct CandidateSetIter<'a> {
4014 traversal: Traversal<'a>,
4015}
4016
4017impl<'a> Iterator for CandidateSetIter<'a> {
4018 type Item = PathMatchCandidate<'a>;
4019
4020 fn next(&mut self) -> Option<Self::Item> {
4021 self.traversal.next().map(|entry| {
4022 if let EntryKind::File(char_bag) = entry.kind {
4023 PathMatchCandidate {
4024 path: &entry.path,
4025 char_bag,
4026 }
4027 } else {
4028 unreachable!()
4029 }
4030 })
4031 }
4032}
4033
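// When the project model is released, notify the server so the project is unregistered
// (if it was shared locally) or left (if it was joined remotely). When the app quits,
// give every running language server a chance to shut down first.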
4034impl Entity for Project {
4035 type Event = Event;
4036
4037 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4038 match &self.client_state {
4039 ProjectClientState::Local { remote_id_rx, .. } => {
4040 if let Some(project_id) = *remote_id_rx.borrow() {
4041 self.client
4042 .send(proto::UnregisterProject { project_id })
4043 .log_err();
4044 }
4045 }
4046 ProjectClientState::Remote { remote_id, .. } => {
4047 self.client
4048 .send(proto::LeaveProject {
4049 project_id: *remote_id,
4050 })
4051 .log_err();
4052 }
4053 }
4054 }
4055
4056 fn app_will_quit(
4057 &mut self,
4058 _: &mut MutableAppContext,
4059 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4060 let shutdown_futures = self
4061 .language_servers
4062 .drain()
4063 .filter_map(|(_, server)| server.shutdown())
4064 .collect::<Vec<_>>();
4065 Some(
4066 async move {
4067 futures::future::join_all(shutdown_futures).await;
4068 }
4069 .boxed(),
4070 )
4071 }
4072}
4073
4074impl Collaborator {
4075 fn from_proto(
4076 message: proto::Collaborator,
4077 user_store: &ModelHandle<UserStore>,
4078 cx: &mut AsyncAppContext,
4079 ) -> impl Future<Output = Result<Self>> {
4080 let user = user_store.update(cx, |user_store, cx| {
4081 user_store.fetch_user(message.user_id, cx)
4082 });
4083
4084 async move {
4085 Ok(Self {
4086 peer_id: PeerId(message.peer_id),
4087 user: user.await?,
4088 replica_id: message.replica_id as ReplicaId,
4089 })
4090 }
4091 }
4092}
4093
4094impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4095 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4096 Self {
4097 worktree_id,
4098 path: path.as_ref().into(),
4099 }
4100 }
4101}
4102
4103impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4104 fn from(options: lsp::CreateFileOptions) -> Self {
4105 Self {
4106 overwrite: options.overwrite.unwrap_or(false),
4107 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4108 }
4109 }
4110}
4111
4112impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4113 fn from(options: lsp::RenameFileOptions) -> Self {
4114 Self {
4115 overwrite: options.overwrite.unwrap_or(false),
4116 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4117 }
4118 }
4119}
4120
4121impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4122 fn from(options: lsp::DeleteFileOptions) -> Self {
4123 Self {
4124 recursive: options.recursive.unwrap_or(false),
4125 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4126 }
4127 }
4128}
4129
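// Converts a `Symbol` into its protobuf representation so it can be sent to collaborators.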
4130fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4131 proto::Symbol {
4132 source_worktree_id: symbol.source_worktree_id.to_proto(),
4133 worktree_id: symbol.worktree_id.to_proto(),
4134 language_name: symbol.language_name.clone(),
4135 name: symbol.name.clone(),
4136 kind: unsafe { mem::transmute(symbol.kind) },
4137 path: symbol.path.to_string_lossy().to_string(),
4138 start: Some(proto::Point {
4139 row: symbol.range.start.row,
4140 column: symbol.range.start.column,
4141 }),
4142 end: Some(proto::Point {
4143 row: symbol.range.end.row,
4144 column: symbol.range.end.column,
4145 }),
4146 signature: symbol.signature.to_vec(),
4147 }
4148}
4149
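// Computes `path` relative to `base`, inserting `..` components wherever the two
// paths diverge.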
4150fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4151 let mut path_components = path.components();
4152 let mut base_components = base.components();
4153 let mut components: Vec<Component> = Vec::new();
4154 loop {
4155 match (path_components.next(), base_components.next()) {
4156 (None, None) => break,
4157 (Some(a), None) => {
4158 components.push(a);
4159 components.extend(path_components.by_ref());
4160 break;
4161 }
4162 (None, _) => components.push(Component::ParentDir),
4163 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4164 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4165 (Some(a), Some(_)) => {
4166 components.push(Component::ParentDir);
4167 for _ in base_components {
4168 components.push(Component::ParentDir);
4169 }
4170 components.push(a);
4171 components.extend(path_components.by_ref());
4172 break;
4173 }
4174 }
4175 }
4176 components.iter().map(|c| c.as_os_str()).collect()
4177}
4178
4179#[cfg(test)]
4180mod tests {
4181 use super::{Event, *};
4182 use fs::RealFs;
4183 use futures::StreamExt;
4184 use gpui::test::subscribe;
4185 use language::{
4186 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4187 ToPoint,
4188 };
4189 use lsp::Url;
4190 use serde_json::json;
4191 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4192 use unindent::Unindent as _;
4193 use util::test::temp_tree;
4194 use worktree::WorktreeHandle as _;
4195
4196 #[gpui::test]
4197 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4198 let dir = temp_tree(json!({
4199 "root": {
4200 "apple": "",
4201 "banana": {
4202 "carrot": {
4203 "date": "",
4204 "endive": "",
4205 }
4206 },
4207 "fennel": {
4208 "grape": "",
4209 }
4210 }
4211 }));
4212
4213 let root_link_path = dir.path().join("root_link");
4214 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4215 unix::fs::symlink(
4216 &dir.path().join("root/fennel"),
4217 &dir.path().join("root/finnochio"),
4218 )
4219 .unwrap();
4220
4221 let project = Project::test(Arc::new(RealFs), cx);
4222
4223 let (tree, _) = project
4224 .update(cx, |project, cx| {
4225 project.find_or_create_local_worktree(&root_link_path, true, cx)
4226 })
4227 .await
4228 .unwrap();
4229
4230 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4231 .await;
4232 cx.read(|cx| {
4233 let tree = tree.read(cx);
4234 assert_eq!(tree.file_count(), 5);
4235 assert_eq!(
4236 tree.inode_for_path("fennel/grape"),
4237 tree.inode_for_path("finnochio/grape")
4238 );
4239 });
4240
4241 let cancel_flag = Default::default();
4242 let results = project
4243 .read_with(cx, |project, cx| {
4244 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4245 })
4246 .await;
4247 assert_eq!(
4248 results
4249 .into_iter()
4250 .map(|result| result.path)
4251 .collect::<Vec<Arc<Path>>>(),
4252 vec![
4253 PathBuf::from("banana/carrot/date").into(),
4254 PathBuf::from("banana/carrot/endive").into(),
4255 ]
4256 );
4257 }
4258
4259 #[gpui::test]
4260 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4261 cx.foreground().forbid_parking();
4262
4263 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4264 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4265 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4266 completion_provider: Some(lsp::CompletionOptions {
4267 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4268 ..Default::default()
4269 }),
4270 ..Default::default()
4271 });
4272 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4273 completion_provider: Some(lsp::CompletionOptions {
4274 trigger_characters: Some(vec![":".to_string()]),
4275 ..Default::default()
4276 }),
4277 ..Default::default()
4278 });
4279
4280 let rust_language = Arc::new(Language::new(
4281 LanguageConfig {
4282 name: "Rust".into(),
4283 path_suffixes: vec!["rs".to_string()],
4284 language_server: Some(rust_lsp_config),
4285 ..Default::default()
4286 },
4287 Some(tree_sitter_rust::language()),
4288 ));
4289 let json_language = Arc::new(Language::new(
4290 LanguageConfig {
4291 name: "JSON".into(),
4292 path_suffixes: vec!["json".to_string()],
4293 language_server: Some(json_lsp_config),
4294 ..Default::default()
4295 },
4296 None,
4297 ));
4298
4299 let fs = FakeFs::new(cx.background());
4300 fs.insert_tree(
4301 "/the-root",
4302 json!({
4303 "test.rs": "const A: i32 = 1;",
4304 "test2.rs": "",
4305 "Cargo.toml": "a = 1",
4306 "package.json": "{\"a\": 1}",
4307 }),
4308 )
4309 .await;
4310
4311 let project = Project::test(fs, cx);
4312 project.update(cx, |project, _| {
4313 project.languages.add(rust_language);
4314 project.languages.add(json_language);
4315 });
4316
4317 let worktree_id = project
4318 .update(cx, |project, cx| {
4319 project.find_or_create_local_worktree("/the-root", true, cx)
4320 })
4321 .await
4322 .unwrap()
4323 .0
4324 .read_with(cx, |tree, _| tree.id());
4325
4326 // Open a buffer without an associated language server.
4327 let toml_buffer = project
4328 .update(cx, |project, cx| {
4329 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4330 })
4331 .await
4332 .unwrap();
4333
4334 // Open a buffer with an associated language server.
4335 let rust_buffer = project
4336 .update(cx, |project, cx| {
4337 project.open_buffer((worktree_id, "test.rs"), cx)
4338 })
4339 .await
4340 .unwrap();
4341
4342 // A server is started up, and it is notified about both open buffers.
4343 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4344 assert_eq!(
4345 fake_rust_server
4346 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4347 .await
4348 .text_document,
4349 lsp::TextDocumentItem {
4350 uri: lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
4351 version: 0,
4352 text: "a = 1".to_string(),
4353 language_id: Default::default()
4354 }
4355 );
4356 assert_eq!(
4357 fake_rust_server
4358 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4359 .await
4360 .text_document,
4361 lsp::TextDocumentItem {
4362 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4363 version: 0,
4364 text: "const A: i32 = 1;".to_string(),
4365 language_id: Default::default()
4366 }
4367 );
4368
4369 // The buffer is configured based on the language server's capabilities.
4370 rust_buffer.read_with(cx, |buffer, _| {
4371 assert_eq!(
4372 buffer.completion_triggers(),
4373 &[".".to_string(), "::".to_string()]
4374 );
4375 });
4376 toml_buffer.read_with(cx, |buffer, _| {
4377 assert!(buffer.completion_triggers().is_empty());
4378 });
4379
4380 // Edit a buffer. The changes are reported to the language server.
4381 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4382 assert_eq!(
4383 fake_rust_server
4384 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4385 .await
4386 .text_document,
4387 lsp::VersionedTextDocumentIdentifier::new(
4388 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4389 1
4390 )
4391 );
4392
4393 // Open a third buffer with a different associated language server.
4394 let json_buffer = project
4395 .update(cx, |project, cx| {
4396 project.open_buffer((worktree_id, "package.json"), cx)
4397 })
4398 .await
4399 .unwrap();
4400
4401 // Another language server is started up, and it is notified about
4402 // all three open buffers.
4403 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4404 assert_eq!(
4405 fake_json_server
4406 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4407 .await
4408 .text_document,
4409 lsp::TextDocumentItem {
4410 uri: lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
4411 version: 0,
4412 text: "a = 1".to_string(),
4413 language_id: Default::default()
4414 }
4415 );
4416 assert_eq!(
4417 fake_json_server
4418 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4419 .await
4420 .text_document,
4421 lsp::TextDocumentItem {
4422 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4423 version: 0,
4424 text: "{\"a\": 1}".to_string(),
4425 language_id: Default::default()
4426 }
4427 );
4428 assert_eq!(
4429 fake_json_server
4430 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4431 .await
4432 .text_document,
4433 lsp::TextDocumentItem {
4434 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4435 version: 1,
4436 text: "const A: i32 = 12;".to_string(),
4437 language_id: Default::default()
4438 }
4439 );
4440
4441 // This buffer is configured based on the second language server's
4442 // capabilities.
4443 json_buffer.read_with(cx, |buffer, _| {
4444 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4445 });
4446
4447 // The first language server is also notified about the new open buffer.
4448 assert_eq!(
4449 fake_rust_server
4450 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4451 .await
4452 .text_document,
4453 lsp::TextDocumentItem {
4454 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4455 version: 0,
4456 text: "{\"a\": 1}".to_string(),
4457 language_id: Default::default()
4458 }
4459 );
4460
4461 // When opening another buffer whose language server is already running,
4462 // it is also configured based on the existing language server's capabilities.
4463 let rust_buffer2 = project
4464 .update(cx, |project, cx| {
4465 project.open_buffer((worktree_id, "test2.rs"), cx)
4466 })
4467 .await
4468 .unwrap();
4469 rust_buffer2.read_with(cx, |buffer, _| {
4470 assert_eq!(
4471 buffer.completion_triggers(),
4472 &[".".to_string(), "::".to_string()]
4473 );
4474 });
4475
4476        // Edit a buffer. The changes are reported to both language servers.
4477 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4478 assert_eq!(
4479 fake_rust_server
4480 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4481 .await
4482 .text_document,
4483 lsp::VersionedTextDocumentIdentifier::new(
4484 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
4485 1
4486 )
4487 );
4488 assert_eq!(
4489 fake_json_server
4490 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4491 .await,
4492 lsp::DidChangeTextDocumentParams {
4493 text_document: lsp::VersionedTextDocumentIdentifier::new(
4494 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap(),
4495 1
4496 ),
4497 content_changes: vec![lsp::TextDocumentContentChangeEvent {
4498 range: Some(lsp::Range::new(
4499 lsp::Position::new(0, 5),
4500 lsp::Position::new(0, 5)
4501 )),
4502 range_length: None,
4503 text: "23".to_string(),
4504 }],
4505 },
4506 );
4507
4508 // Close a buffer. Both language servers are notified.
4509 cx.update(|_| drop(json_buffer));
4510 let close_message = lsp::DidCloseTextDocumentParams {
4511 text_document: lsp::TextDocumentIdentifier::new(
4512 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4513 ),
4514 };
4515 assert_eq!(
4516 fake_json_server
4517 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4518 .await,
4519 close_message,
4520 );
4521 assert_eq!(
4522 fake_rust_server
4523 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4524 .await,
4525 close_message,
4526 );
4527 }
4528
4529 #[gpui::test]
4530 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4531 cx.foreground().forbid_parking();
4532
4533 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4534 let progress_token = language_server_config
4535 .disk_based_diagnostics_progress_token
4536 .clone()
4537 .unwrap();
4538
4539 let language = Arc::new(Language::new(
4540 LanguageConfig {
4541 name: "Rust".into(),
4542 path_suffixes: vec!["rs".to_string()],
4543 language_server: Some(language_server_config),
4544 ..Default::default()
4545 },
4546 Some(tree_sitter_rust::language()),
4547 ));
4548
4549 let fs = FakeFs::new(cx.background());
4550 fs.insert_tree(
4551 "/dir",
4552 json!({
4553 "a.rs": "fn a() { A }",
4554 "b.rs": "const y: i32 = 1",
4555 }),
4556 )
4557 .await;
4558
4559 let project = Project::test(fs, cx);
4560 project.update(cx, |project, _| project.languages.add(language));
4561
4562 let (tree, _) = project
4563 .update(cx, |project, cx| {
4564 project.find_or_create_local_worktree("/dir", true, cx)
4565 })
4566 .await
4567 .unwrap();
4568 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4569
4570 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4571 .await;
4572
4573 // Cause worktree to start the fake language server
4574 let _buffer = project
4575 .update(cx, |project, cx| {
4576 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4577 })
4578 .await
4579 .unwrap();
4580
4581 let mut events = subscribe(&project, cx);
4582
4583 let mut fake_server = fake_servers.next().await.unwrap();
4584 fake_server.start_progress(&progress_token).await;
4585 assert_eq!(
4586 events.next().await.unwrap(),
4587 Event::DiskBasedDiagnosticsStarted
4588 );
4589
4590 fake_server.start_progress(&progress_token).await;
4591 fake_server.end_progress(&progress_token).await;
4592 fake_server.start_progress(&progress_token).await;
4593
4594 fake_server
4595 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
4596 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4597 version: None,
4598 diagnostics: vec![lsp::Diagnostic {
4599 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4600 severity: Some(lsp::DiagnosticSeverity::ERROR),
4601 message: "undefined variable 'A'".to_string(),
4602 ..Default::default()
4603 }],
4604 })
4605 .await;
4606 assert_eq!(
4607 events.next().await.unwrap(),
4608 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4609 );
4610
4611 fake_server.end_progress(&progress_token).await;
4612 fake_server.end_progress(&progress_token).await;
4613 assert_eq!(
4614 events.next().await.unwrap(),
4615 Event::DiskBasedDiagnosticsUpdated
4616 );
4617 assert_eq!(
4618 events.next().await.unwrap(),
4619 Event::DiskBasedDiagnosticsFinished
4620 );
4621
4622 let buffer = project
4623 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4624 .await
4625 .unwrap();
4626
4627 buffer.read_with(cx, |buffer, _| {
4628 let snapshot = buffer.snapshot();
4629 let diagnostics = snapshot
4630 .diagnostics_in_range::<_, Point>(0..buffer.len())
4631 .collect::<Vec<_>>();
4632 assert_eq!(
4633 diagnostics,
4634 &[DiagnosticEntry {
4635 range: Point::new(0, 9)..Point::new(0, 10),
4636 diagnostic: Diagnostic {
4637 severity: lsp::DiagnosticSeverity::ERROR,
4638 message: "undefined variable 'A'".to_string(),
4639 group_id: 0,
4640 is_primary: true,
4641 ..Default::default()
4642 }
4643 }]
4644 )
4645 });
4646 }
4647
4648 #[gpui::test]
4649 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4650 cx.foreground().forbid_parking();
4651
4652 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4653 lsp_config
4654 .disk_based_diagnostic_sources
4655 .insert("disk".to_string());
4656 let language = Arc::new(Language::new(
4657 LanguageConfig {
4658 name: "Rust".into(),
4659 path_suffixes: vec!["rs".to_string()],
4660 language_server: Some(lsp_config),
4661 ..Default::default()
4662 },
4663 Some(tree_sitter_rust::language()),
4664 ));
4665
4666 let text = "
4667 fn a() { A }
4668 fn b() { BB }
4669 fn c() { CCC }
4670 "
4671 .unindent();
4672
4673 let fs = FakeFs::new(cx.background());
4674 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4675
4676 let project = Project::test(fs, cx);
4677 project.update(cx, |project, _| project.languages.add(language));
4678
4679 let worktree_id = project
4680 .update(cx, |project, cx| {
4681 project.find_or_create_local_worktree("/dir", true, cx)
4682 })
4683 .await
4684 .unwrap()
4685 .0
4686 .read_with(cx, |tree, _| tree.id());
4687
4688 let buffer = project
4689 .update(cx, |project, cx| {
4690 project.open_buffer((worktree_id, "a.rs"), cx)
4691 })
4692 .await
4693 .unwrap();
4694
4695 let mut fake_server = fake_servers.next().await.unwrap();
4696 let open_notification = fake_server
4697 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4698 .await;
4699
4700 // Edit the buffer, moving the content down
4701 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4702 let change_notification_1 = fake_server
4703 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4704 .await;
4705 assert!(
4706 change_notification_1.text_document.version > open_notification.text_document.version
4707 );
4708
4709 // Report some diagnostics for the initial version of the buffer
4710 fake_server
4711 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
4712 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4713 version: Some(open_notification.text_document.version),
4714 diagnostics: vec![
4715 lsp::Diagnostic {
4716 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4717 severity: Some(DiagnosticSeverity::ERROR),
4718 message: "undefined variable 'A'".to_string(),
4719 source: Some("disk".to_string()),
4720 ..Default::default()
4721 },
4722 lsp::Diagnostic {
4723 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4724 severity: Some(DiagnosticSeverity::ERROR),
4725 message: "undefined variable 'BB'".to_string(),
4726 source: Some("disk".to_string()),
4727 ..Default::default()
4728 },
4729 lsp::Diagnostic {
4730 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4731 severity: Some(DiagnosticSeverity::ERROR),
4732 source: Some("disk".to_string()),
4733 message: "undefined variable 'CCC'".to_string(),
4734 ..Default::default()
4735 },
4736 ],
4737 })
4738 .await;
4739
4740 // The diagnostics have moved down since they were created.
4741 buffer.next_notification(cx).await;
4742 buffer.read_with(cx, |buffer, _| {
4743 assert_eq!(
4744 buffer
4745 .snapshot()
4746 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
4747 .collect::<Vec<_>>(),
4748 &[
4749 DiagnosticEntry {
4750 range: Point::new(3, 9)..Point::new(3, 11),
4751 diagnostic: Diagnostic {
4752 severity: DiagnosticSeverity::ERROR,
4753 message: "undefined variable 'BB'".to_string(),
4754 is_disk_based: true,
4755 group_id: 1,
4756 is_primary: true,
4757 ..Default::default()
4758 },
4759 },
4760 DiagnosticEntry {
4761 range: Point::new(4, 9)..Point::new(4, 12),
4762 diagnostic: Diagnostic {
4763 severity: DiagnosticSeverity::ERROR,
4764 message: "undefined variable 'CCC'".to_string(),
4765 is_disk_based: true,
4766 group_id: 2,
4767 is_primary: true,
4768 ..Default::default()
4769 }
4770 }
4771 ]
4772 );
4773 assert_eq!(
4774 chunks_with_diagnostics(buffer, 0..buffer.len()),
4775 [
4776 ("\n\nfn a() { ".to_string(), None),
4777 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
4778 (" }\nfn b() { ".to_string(), None),
4779 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
4780 (" }\nfn c() { ".to_string(), None),
4781 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
4782 (" }\n".to_string(), None),
4783 ]
4784 );
4785 assert_eq!(
4786 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
4787 [
4788 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
4789 (" }\nfn c() { ".to_string(), None),
4790 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
4791 ]
4792 );
4793 });
4794
4795 // Ensure overlapping diagnostics are highlighted correctly.
4796 fake_server
4797 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
4798 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4799 version: Some(open_notification.text_document.version),
4800 diagnostics: vec![
4801 lsp::Diagnostic {
4802 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4803 severity: Some(DiagnosticSeverity::ERROR),
4804 message: "undefined variable 'A'".to_string(),
4805 source: Some("disk".to_string()),
4806 ..Default::default()
4807 },
4808 lsp::Diagnostic {
4809 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
4810 severity: Some(DiagnosticSeverity::WARNING),
4811 message: "unreachable statement".to_string(),
4812 source: Some("disk".to_string()),
4813 ..Default::default()
4814 },
4815 ],
4816 })
4817 .await;
4818
4819 buffer.next_notification(cx).await;
4820 buffer.read_with(cx, |buffer, _| {
4821 assert_eq!(
4822 buffer
4823 .snapshot()
4824 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
4825 .collect::<Vec<_>>(),
4826 &[
4827 DiagnosticEntry {
4828 range: Point::new(2, 9)..Point::new(2, 12),
4829 diagnostic: Diagnostic {
4830 severity: DiagnosticSeverity::WARNING,
4831 message: "unreachable statement".to_string(),
4832 is_disk_based: true,
4833 group_id: 1,
4834 is_primary: true,
4835 ..Default::default()
4836 }
4837 },
4838 DiagnosticEntry {
4839 range: Point::new(2, 9)..Point::new(2, 10),
4840 diagnostic: Diagnostic {
4841 severity: DiagnosticSeverity::ERROR,
4842 message: "undefined variable 'A'".to_string(),
4843 is_disk_based: true,
4844 group_id: 0,
4845 is_primary: true,
4846 ..Default::default()
4847 },
4848 }
4849 ]
4850 );
4851 assert_eq!(
4852 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
4853 [
4854 ("fn a() { ".to_string(), None),
4855 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
4856 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
4857 ("\n".to_string(), None),
4858 ]
4859 );
4860 assert_eq!(
4861 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
4862 [
4863 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
4864 ("\n".to_string(), None),
4865 ]
4866 );
4867 });
4868
4869 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
4870 // changes since the last save.
4871 buffer.update(cx, |buffer, cx| {
4872 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
4873 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
4874 });
4875 let change_notification_2 =
4876 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
4877 assert!(
4878 change_notification_2.await.text_document.version
4879 > change_notification_1.text_document.version
4880 );
4881
4882 // Handle out-of-order diagnostics
4883 fake_server
4884 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
4885 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4886 version: Some(open_notification.text_document.version),
4887 diagnostics: vec![
4888 lsp::Diagnostic {
4889 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4890 severity: Some(DiagnosticSeverity::ERROR),
4891 message: "undefined variable 'BB'".to_string(),
4892 source: Some("disk".to_string()),
4893 ..Default::default()
4894 },
4895 lsp::Diagnostic {
4896 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4897 severity: Some(DiagnosticSeverity::WARNING),
4898 message: "undefined variable 'A'".to_string(),
4899 source: Some("disk".to_string()),
4900 ..Default::default()
4901 },
4902 ],
4903 })
4904 .await;
4905
4906 buffer.next_notification(cx).await;
4907 buffer.read_with(cx, |buffer, _| {
4908 assert_eq!(
4909 buffer
4910 .snapshot()
4911 .diagnostics_in_range::<_, Point>(0..buffer.len())
4912 .collect::<Vec<_>>(),
4913 &[
4914 DiagnosticEntry {
4915 range: Point::new(2, 21)..Point::new(2, 22),
4916 diagnostic: Diagnostic {
4917 severity: DiagnosticSeverity::WARNING,
4918 message: "undefined variable 'A'".to_string(),
4919 is_disk_based: true,
4920 group_id: 1,
4921 is_primary: true,
4922 ..Default::default()
4923 }
4924 },
4925 DiagnosticEntry {
4926 range: Point::new(3, 9)..Point::new(3, 11),
4927 diagnostic: Diagnostic {
4928 severity: DiagnosticSeverity::ERROR,
4929 message: "undefined variable 'BB'".to_string(),
4930 is_disk_based: true,
4931 group_id: 0,
4932 is_primary: true,
4933 ..Default::default()
4934 },
4935 }
4936 ]
4937 );
4938 });
4939 }
4940
4941 #[gpui::test]
4942 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
4943 cx.foreground().forbid_parking();
4944
4945 let text = concat!(
4946 "let one = ;\n", //
4947 "let two = \n",
4948 "let three = 3;\n",
4949 );
4950
4951 let fs = FakeFs::new(cx.background());
4952 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4953
4954 let project = Project::test(fs, cx);
4955 let worktree_id = project
4956 .update(cx, |project, cx| {
4957 project.find_or_create_local_worktree("/dir", true, cx)
4958 })
4959 .await
4960 .unwrap()
4961 .0
4962 .read_with(cx, |tree, _| tree.id());
4963
4964 let buffer = project
4965 .update(cx, |project, cx| {
4966 project.open_buffer((worktree_id, "a.rs"), cx)
4967 })
4968 .await
4969 .unwrap();
4970
4971 project.update(cx, |project, cx| {
4972 project
4973 .update_buffer_diagnostics(
4974 &buffer,
4975 vec![
4976 DiagnosticEntry {
4977 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
4978 diagnostic: Diagnostic {
4979 severity: DiagnosticSeverity::ERROR,
4980 message: "syntax error 1".to_string(),
4981 ..Default::default()
4982 },
4983 },
4984 DiagnosticEntry {
4985 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
4986 diagnostic: Diagnostic {
4987 severity: DiagnosticSeverity::ERROR,
4988 message: "syntax error 2".to_string(),
4989 ..Default::default()
4990 },
4991 },
4992 ],
4993 None,
4994 cx,
4995 )
4996 .unwrap();
4997 });
4998
4999 // An empty range is extended forward to include the following character.
5000 // At the end of a line, an empty range is extended backward to include
5001 // the preceding character.
5002 buffer.read_with(cx, |buffer, _| {
5003 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
5004 assert_eq!(
5005 chunks
5006 .iter()
5007 .map(|(s, d)| (s.as_str(), *d))
5008 .collect::<Vec<_>>(),
5009 &[
5010 ("let one = ", None),
5011 (";", Some(DiagnosticSeverity::ERROR)),
5012 ("\nlet two =", None),
5013 (" ", Some(DiagnosticSeverity::ERROR)),
5014 ("\nlet three = 3;\n", None)
5015 ]
5016 );
5017 });
5018 }
5019
5020 #[gpui::test]
5021 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
5022 cx.foreground().forbid_parking();
5023
5024 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
5025 let language = Arc::new(Language::new(
5026 LanguageConfig {
5027 name: "Rust".into(),
5028 path_suffixes: vec!["rs".to_string()],
5029 language_server: Some(lsp_config),
5030 ..Default::default()
5031 },
5032 Some(tree_sitter_rust::language()),
5033 ));
5034
5035 let text = "
5036 fn a() {
5037 f1();
5038 }
5039 fn b() {
5040 f2();
5041 }
5042 fn c() {
5043 f3();
5044 }
5045 "
5046 .unindent();
5047
5048 let fs = FakeFs::new(cx.background());
5049 fs.insert_tree(
5050 "/dir",
5051 json!({
5052 "a.rs": text.clone(),
5053 }),
5054 )
5055 .await;
5056
5057 let project = Project::test(fs, cx);
5058 project.update(cx, |project, _| project.languages.add(language));
5059
5060 let worktree_id = project
5061 .update(cx, |project, cx| {
5062 project.find_or_create_local_worktree("/dir", true, cx)
5063 })
5064 .await
5065 .unwrap()
5066 .0
5067 .read_with(cx, |tree, _| tree.id());
5068
5069 let buffer = project
5070 .update(cx, |project, cx| {
5071 project.open_buffer((worktree_id, "a.rs"), cx)
5072 })
5073 .await
5074 .unwrap();
5075
5076 let mut fake_server = fake_servers.next().await.unwrap();
5077 let lsp_document_version = fake_server
5078 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5079 .await
5080 .text_document
5081 .version;
5082
5083 // Simulate editing the buffer after the language server computes some edits.
5084 buffer.update(cx, |buffer, cx| {
5085 buffer.edit(
5086 [Point::new(0, 0)..Point::new(0, 0)],
5087 "// above first function\n",
5088 cx,
5089 );
5090 buffer.edit(
5091 [Point::new(2, 0)..Point::new(2, 0)],
5092 " // inside first function\n",
5093 cx,
5094 );
5095 buffer.edit(
5096 [Point::new(6, 4)..Point::new(6, 4)],
5097 "// inside second function ",
5098 cx,
5099 );
5100
5101 assert_eq!(
5102 buffer.text(),
5103 "
5104 // above first function
5105 fn a() {
5106 // inside first function
5107 f1();
5108 }
5109 fn b() {
5110 // inside second function f2();
5111 }
5112 fn c() {
5113 f3();
5114 }
5115 "
5116 .unindent()
5117 );
5118 });
5119
5120 let edits = project
5121 .update(cx, |project, cx| {
5122 project.edits_from_lsp(
5123 &buffer,
5124 vec![
5125 // replace body of first function
5126 lsp::TextEdit {
5127 range: lsp::Range::new(
5128 lsp::Position::new(0, 0),
5129 lsp::Position::new(3, 0),
5130 ),
5131 new_text: "
5132 fn a() {
5133 f10();
5134 }
5135 "
5136 .unindent(),
5137 },
5138 // edit inside second function
5139 lsp::TextEdit {
5140 range: lsp::Range::new(
5141 lsp::Position::new(4, 6),
5142 lsp::Position::new(4, 6),
5143 ),
5144 new_text: "00".into(),
5145 },
5146 // edit inside third function via two distinct edits
5147 lsp::TextEdit {
5148 range: lsp::Range::new(
5149 lsp::Position::new(7, 5),
5150 lsp::Position::new(7, 5),
5151 ),
5152 new_text: "4000".into(),
5153 },
5154 lsp::TextEdit {
5155 range: lsp::Range::new(
5156 lsp::Position::new(7, 5),
5157 lsp::Position::new(7, 6),
5158 ),
5159 new_text: "".into(),
5160 },
5161 ],
5162 Some(lsp_document_version),
5163 cx,
5164 )
5165 })
5166 .await
5167 .unwrap();
5168
5169 buffer.update(cx, |buffer, cx| {
5170 for (range, new_text) in edits {
5171 buffer.edit([range], new_text, cx);
5172 }
5173 assert_eq!(
5174 buffer.text(),
5175 "
5176 // above first function
5177 fn a() {
5178 // inside first function
5179 f10();
5180 }
5181 fn b() {
5182 // inside second function f200();
5183 }
5184 fn c() {
5185 f4000();
5186 }
5187 "
5188 .unindent()
5189 );
5190 });
5191 }
5192
5193 #[gpui::test]
5194 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5195 cx.foreground().forbid_parking();
5196
5197 let text = "
5198 use a::b;
5199 use a::c;
5200
5201 fn f() {
5202 b();
5203 c();
5204 }
5205 "
5206 .unindent();
5207
5208 let fs = FakeFs::new(cx.background());
5209 fs.insert_tree(
5210 "/dir",
5211 json!({
5212 "a.rs": text.clone(),
5213 }),
5214 )
5215 .await;
5216
5217 let project = Project::test(fs, cx);
5218 let worktree_id = project
5219 .update(cx, |project, cx| {
5220 project.find_or_create_local_worktree("/dir", true, cx)
5221 })
5222 .await
5223 .unwrap()
5224 .0
5225 .read_with(cx, |tree, _| tree.id());
5226
5227 let buffer = project
5228 .update(cx, |project, cx| {
5229 project.open_buffer((worktree_id, "a.rs"), cx)
5230 })
5231 .await
5232 .unwrap();
5233
5234 // Simulate the language server sending us a small edit in the form of a very large diff.
5235 // Rust-analyzer does this when performing a merge-imports code action.
5236 let edits = project
5237 .update(cx, |project, cx| {
5238 project.edits_from_lsp(
5239 &buffer,
5240 [
5241 // Replace the first use statement without editing the semicolon.
5242 lsp::TextEdit {
5243 range: lsp::Range::new(
5244 lsp::Position::new(0, 4),
5245 lsp::Position::new(0, 8),
5246 ),
5247 new_text: "a::{b, c}".into(),
5248 },
5249 // Reinsert the remainder of the file between the semicolon and the final
5250 // newline of the file.
5251 lsp::TextEdit {
5252 range: lsp::Range::new(
5253 lsp::Position::new(0, 9),
5254 lsp::Position::new(0, 9),
5255 ),
5256 new_text: "\n\n".into(),
5257 },
5258 lsp::TextEdit {
5259 range: lsp::Range::new(
5260 lsp::Position::new(0, 9),
5261 lsp::Position::new(0, 9),
5262 ),
5263 new_text: "
5264 fn f() {
5265 b();
5266 c();
5267 }"
5268 .unindent(),
5269 },
5270 // Delete everything after the first newline of the file.
5271 lsp::TextEdit {
5272 range: lsp::Range::new(
5273 lsp::Position::new(1, 0),
5274 lsp::Position::new(7, 0),
5275 ),
5276 new_text: "".into(),
5277 },
5278 ],
5279 None,
5280 cx,
5281 )
5282 })
5283 .await
5284 .unwrap();
5285
5286 buffer.update(cx, |buffer, cx| {
5287 let edits = edits
5288 .into_iter()
5289 .map(|(range, text)| {
5290 (
5291 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5292 text,
5293 )
5294 })
5295 .collect::<Vec<_>>();
5296
5297 assert_eq!(
5298 edits,
5299 [
5300 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5301 (Point::new(1, 0)..Point::new(2, 0), "".into())
5302 ]
5303 );
5304
5305 for (range, new_text) in edits {
5306 buffer.edit([range], new_text, cx);
5307 }
5308 assert_eq!(
5309 buffer.text(),
5310 "
5311 use a::{b, c};
5312
5313 fn f() {
5314 b();
5315 c();
5316 }
5317 "
5318 .unindent()
5319 );
5320 });
5321 }
5322
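    // Collects the chunks of `buffer` within `range`, merging adjacent chunks that have
    // the same diagnostic severity so tests can assert on contiguous highlighted regions.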
5323 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5324 buffer: &Buffer,
5325 range: Range<T>,
5326 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5327 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5328 for chunk in buffer.snapshot().chunks(range, true) {
5329 if chunks
5330 .last()
5331 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5332 {
5333 chunks.last_mut().unwrap().0.push_str(chunk.text);
5334 } else {
5335 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5336 }
5337 }
5338 chunks
5339 }
5340
5341 #[gpui::test]
5342 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5343 let dir = temp_tree(json!({
5344 "root": {
5345 "dir1": {},
5346 "dir2": {
5347 "dir3": {}
5348 }
5349 }
5350 }));
5351
5352 let project = Project::test(Arc::new(RealFs), cx);
5353 let (tree, _) = project
5354 .update(cx, |project, cx| {
5355 project.find_or_create_local_worktree(&dir.path(), true, cx)
5356 })
5357 .await
5358 .unwrap();
5359
5360 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5361 .await;
5362
5363 let cancel_flag = Default::default();
5364 let results = project
5365 .read_with(cx, |project, cx| {
5366 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5367 })
5368 .await;
5369
5370 assert!(results.is_empty());
5371 }
5372
5373 #[gpui::test]
5374 async fn test_definition(cx: &mut gpui::TestAppContext) {
5375 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5376 let language = Arc::new(Language::new(
5377 LanguageConfig {
5378 name: "Rust".into(),
5379 path_suffixes: vec!["rs".to_string()],
5380 language_server: Some(language_server_config),
5381 ..Default::default()
5382 },
5383 Some(tree_sitter_rust::language()),
5384 ));
5385
5386 let fs = FakeFs::new(cx.background());
5387 fs.insert_tree(
5388 "/dir",
5389 json!({
5390 "a.rs": "const fn a() { A }",
5391 "b.rs": "const y: i32 = crate::a()",
5392 }),
5393 )
5394 .await;
5395
5396 let project = Project::test(fs, cx);
5397 project.update(cx, |project, _| {
5398 Arc::get_mut(&mut project.languages).unwrap().add(language);
5399 });
5400
5401 let (tree, _) = project
5402 .update(cx, |project, cx| {
5403 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5404 })
5405 .await
5406 .unwrap();
5407 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5408 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5409 .await;
5410
5411 let buffer = project
5412 .update(cx, |project, cx| {
5413 project.open_buffer(
5414 ProjectPath {
5415 worktree_id,
5416 path: Path::new("").into(),
5417 },
5418 cx,
5419 )
5420 })
5421 .await
5422 .unwrap();
5423
5424 let mut fake_server = fake_servers.next().await.unwrap();
5425 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5426 let params = params.text_document_position_params;
5427 assert_eq!(
5428 params.text_document.uri.to_file_path().unwrap(),
5429 Path::new("/dir/b.rs"),
5430 );
5431 assert_eq!(params.position, lsp::Position::new(0, 22));
5432
5433 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5434 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5435 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5436 )))
5437 });
5438
5439 let mut definitions = project
5440 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5441 .await
5442 .unwrap();
5443
5444 assert_eq!(definitions.len(), 1);
5445 let definition = definitions.pop().unwrap();
5446 cx.update(|cx| {
5447 let target_buffer = definition.buffer.read(cx);
5448 assert_eq!(
5449 target_buffer
5450 .file()
5451 .unwrap()
5452 .as_local()
5453 .unwrap()
5454 .abs_path(cx),
5455 Path::new("/dir/a.rs"),
5456 );
5457 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5458 assert_eq!(
5459 list_worktrees(&project, cx),
5460 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5461 );
5462
5463 drop(definition);
5464 });
5465 cx.read(|cx| {
5466 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5467 });
5468
5469 fn list_worktrees<'a>(
5470 project: &'a ModelHandle<Project>,
5471 cx: &'a AppContext,
5472 ) -> Vec<(&'a Path, bool)> {
5473 project
5474 .read(cx)
5475 .worktrees(cx)
5476 .map(|worktree| {
5477 let worktree = worktree.read(cx);
5478 (
5479 worktree.as_local().unwrap().abs_path().as_ref(),
5480 worktree.is_visible(),
5481 )
5482 })
5483 .collect::<Vec<_>>()
5484 }
5485 }
5486
5487 #[gpui::test]
5488 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5489 let fs = FakeFs::new(cx.background());
5490 fs.insert_tree(
5491 "/dir",
5492 json!({
5493 "file1": "the old contents",
5494 }),
5495 )
5496 .await;
5497
5498 let project = Project::test(fs.clone(), cx);
5499 let worktree_id = project
5500 .update(cx, |p, cx| {
5501 p.find_or_create_local_worktree("/dir", true, cx)
5502 })
5503 .await
5504 .unwrap()
5505 .0
5506 .read_with(cx, |tree, _| tree.id());
5507
5508 let buffer = project
5509 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5510 .await
5511 .unwrap();
5512 buffer
5513 .update(cx, |buffer, cx| {
5514 assert_eq!(buffer.text(), "the old contents");
5515 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5516 buffer.save(cx)
5517 })
5518 .await
5519 .unwrap();
5520
5521 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5522 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5523 }
5524
5525 #[gpui::test]
5526 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5527 let fs = FakeFs::new(cx.background());
5528 fs.insert_tree(
5529 "/dir",
5530 json!({
5531 "file1": "the old contents",
5532 }),
5533 )
5534 .await;
5535
5536 let project = Project::test(fs.clone(), cx);
5537 let worktree_id = project
5538 .update(cx, |p, cx| {
5539 p.find_or_create_local_worktree("/dir/file1", true, cx)
5540 })
5541 .await
5542 .unwrap()
5543 .0
5544 .read_with(cx, |tree, _| tree.id());
5545
5546 let buffer = project
5547 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5548 .await
5549 .unwrap();
5550 buffer
5551 .update(cx, |buffer, cx| {
5552 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5553 buffer.save(cx)
5554 })
5555 .await
5556 .unwrap();
5557
5558 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5559 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5560 }
5561
5562 #[gpui::test]
5563 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5564 let fs = FakeFs::new(cx.background());
5565 fs.insert_tree("/dir", json!({})).await;
5566
5567 let project = Project::test(fs.clone(), cx);
5568 let (worktree, _) = project
5569 .update(cx, |project, cx| {
5570 project.find_or_create_local_worktree("/dir", true, cx)
5571 })
5572 .await
5573 .unwrap();
5574 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5575
5576 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5577 buffer.update(cx, |buffer, cx| {
5578 buffer.edit([0..0], "abc", cx);
5579 assert!(buffer.is_dirty());
5580 assert!(!buffer.has_conflict());
5581 });
5582 project
5583 .update(cx, |project, cx| {
5584 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5585 })
5586 .await
5587 .unwrap();
5588 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5589 buffer.read_with(cx, |buffer, cx| {
5590 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5591 assert!(!buffer.is_dirty());
5592 assert!(!buffer.has_conflict());
5593 });
5594
5595 let opened_buffer = project
5596 .update(cx, |project, cx| {
5597 project.open_buffer((worktree_id, "file1"), cx)
5598 })
5599 .await
5600 .unwrap();
5601 assert_eq!(opened_buffer, buffer);
5602 }
5603
5604 #[gpui::test(retries = 5)]
5605 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5606 let dir = temp_tree(json!({
5607 "a": {
5608 "file1": "",
5609 "file2": "",
5610 "file3": "",
5611 },
5612 "b": {
5613 "c": {
5614 "file4": "",
5615 "file5": "",
5616 }
5617 }
5618 }));
5619
5620 let project = Project::test(Arc::new(RealFs), cx);
5621 let rpc = project.read_with(cx, |p, _| p.client.clone());
5622
5623 let (tree, _) = project
5624 .update(cx, |p, cx| {
5625 p.find_or_create_local_worktree(dir.path(), true, cx)
5626 })
5627 .await
5628 .unwrap();
5629 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5630
5631 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5632 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5633 async move { buffer.await.unwrap() }
5634 };
5635 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5636 tree.read_with(cx, |tree, _| {
5637 tree.entry_for_path(path)
5638 .expect(&format!("no entry for path {}", path))
5639 .id
5640 })
5641 };
5642
5643 let buffer2 = buffer_for_path("a/file2", cx).await;
5644 let buffer3 = buffer_for_path("a/file3", cx).await;
5645 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5646 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5647
5648 let file2_id = id_for_path("a/file2", &cx);
5649 let file3_id = id_for_path("a/file3", &cx);
5650 let file4_id = id_for_path("b/c/file4", &cx);
5651
5652 // Wait for the initial scan.
5653 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5654 .await;
5655
5656 // Create a remote copy of this worktree.
5657 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5658 let (remote, load_task) = cx.update(|cx| {
5659 Worktree::remote(
5660 1,
5661 1,
5662 initial_snapshot.to_proto(&Default::default(), true),
5663 rpc.clone(),
5664 cx,
5665 )
5666 });
5667 load_task.await;
5668
5669 cx.read(|cx| {
5670 assert!(!buffer2.read(cx).is_dirty());
5671 assert!(!buffer3.read(cx).is_dirty());
5672 assert!(!buffer4.read(cx).is_dirty());
5673 assert!(!buffer5.read(cx).is_dirty());
5674 });
5675
5676 // Rename and delete files and directories.
5677 tree.flush_fs_events(&cx).await;
5678 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5679 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5680 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5681 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5682 tree.flush_fs_events(&cx).await;
5683
5684 let expected_paths = vec![
5685 "a",
5686 "a/file1",
5687 "a/file2.new",
5688 "b",
5689 "d",
5690 "d/file3",
5691 "d/file4",
5692 ];
5693
5694 cx.read(|app| {
5695 assert_eq!(
5696 tree.read(app)
5697 .paths()
5698 .map(|p| p.to_str().unwrap())
5699 .collect::<Vec<_>>(),
5700 expected_paths
5701 );
5702
5703 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5704 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5705 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5706
5707 assert_eq!(
5708 buffer2.read(app).file().unwrap().path().as_ref(),
5709 Path::new("a/file2.new")
5710 );
5711 assert_eq!(
5712 buffer3.read(app).file().unwrap().path().as_ref(),
5713 Path::new("d/file3")
5714 );
5715 assert_eq!(
5716 buffer4.read(app).file().unwrap().path().as_ref(),
5717 Path::new("d/file4")
5718 );
5719 assert_eq!(
5720 buffer5.read(app).file().unwrap().path().as_ref(),
5721 Path::new("b/c/file5")
5722 );
5723
5724 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5725 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5726 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5727 assert!(buffer5.read(app).file().unwrap().is_deleted());
5728 });
5729
5730 // Update the remote worktree. Check that it becomes consistent with the
5731 // local worktree.
5732 remote.update(cx, |remote, cx| {
5733 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5734 &initial_snapshot,
5735 1,
5736 1,
5737 true,
5738 );
5739 remote
5740 .as_remote_mut()
5741 .unwrap()
5742 .snapshot
5743 .apply_remote_update(update_message)
5744 .unwrap();
5745
5746 assert_eq!(
5747 remote
5748 .paths()
5749 .map(|p| p.to_str().unwrap())
5750 .collect::<Vec<_>>(),
5751 expected_paths
5752 );
5753 });
5754 }
5755
5756 #[gpui::test]
5757 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5758 let fs = FakeFs::new(cx.background());
5759 fs.insert_tree(
5760 "/the-dir",
5761 json!({
5762 "a.txt": "a-contents",
5763 "b.txt": "b-contents",
5764 }),
5765 )
5766 .await;
5767
5768 let project = Project::test(fs.clone(), cx);
5769 let worktree_id = project
5770 .update(cx, |p, cx| {
5771 p.find_or_create_local_worktree("/the-dir", true, cx)
5772 })
5773 .await
5774 .unwrap()
5775 .0
5776 .read_with(cx, |tree, _| tree.id());
5777
5778 // Spawn multiple tasks to open paths, repeating some paths.
5779 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5780 (
5781 p.open_buffer((worktree_id, "a.txt"), cx),
5782 p.open_buffer((worktree_id, "b.txt"), cx),
5783 p.open_buffer((worktree_id, "a.txt"), cx),
5784 )
5785 });
5786
5787 let buffer_a_1 = buffer_a_1.await.unwrap();
5788 let buffer_a_2 = buffer_a_2.await.unwrap();
5789 let buffer_b = buffer_b.await.unwrap();
5790 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
5791 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
5792
5793 // There is only one buffer per path.
5794 let buffer_a_id = buffer_a_1.id();
5795 assert_eq!(buffer_a_2.id(), buffer_a_id);
5796
5797 // Open the same path again while it is still open.
5798 drop(buffer_a_1);
5799 let buffer_a_3 = project
5800 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
5801 .await
5802 .unwrap();
5803
5804 // There's still only one buffer per path.
5805 assert_eq!(buffer_a_3.id(), buffer_a_id);
5806 }
5807
5808 #[gpui::test]
5809 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5810 use std::fs;
5811
5812 let dir = temp_tree(json!({
5813 "file1": "abc",
5814 "file2": "def",
5815 "file3": "ghi",
5816 }));
5817
5818 let project = Project::test(Arc::new(RealFs), cx);
5819 let (worktree, _) = project
5820 .update(cx, |p, cx| {
5821 p.find_or_create_local_worktree(dir.path(), true, cx)
5822 })
5823 .await
5824 .unwrap();
5825 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5826
5827 worktree.flush_fs_events(&cx).await;
5828 worktree
5829 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
5830 .await;
5831
5832 let buffer1 = project
5833 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5834 .await
5835 .unwrap();
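        // Collect the buffer's non-operation events so they can be asserted on below.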
5836 let events = Rc::new(RefCell::new(Vec::new()));
5837
        // Initially, the buffer isn't dirty.
5839 buffer1.update(cx, |buffer, cx| {
5840 cx.subscribe(&buffer1, {
5841 let events = events.clone();
5842 move |_, _, event, _| match event {
5843 BufferEvent::Operation(_) => {}
5844 _ => events.borrow_mut().push(event.clone()),
5845 }
5846 })
5847 .detach();
5848
5849 assert!(!buffer.is_dirty());
5850 assert!(events.borrow().is_empty());
5851
5852 buffer.edit(vec![1..2], "", cx);
5853 });
5854
        // After the first edit, the buffer is dirty and emits a dirtied event.
5856 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
5858 assert!(buffer.is_dirty());
5859 assert_eq!(
5860 *events.borrow(),
5861 &[language::Event::Edited, language::Event::Dirtied]
5862 );
5863 events.borrow_mut().clear();
5864 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
5865 });
5866
        // After saving, the buffer is not dirty and emits a saved event.
5868 buffer1.update(cx, |buffer, cx| {
5869 assert!(!buffer.is_dirty());
5870 assert_eq!(*events.borrow(), &[language::Event::Saved]);
5871 events.borrow_mut().clear();
5872
5873 buffer.edit(vec![1..1], "B", cx);
5874 buffer.edit(vec![2..2], "D", cx);
5875 });
5876
        // After editing again, the buffer is dirty and emits another dirtied event.
5878 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
5880 assert!(buffer.is_dirty());
5881 assert_eq!(
5882 *events.borrow(),
5883 &[
5884 language::Event::Edited,
5885 language::Event::Dirtied,
5886 language::Event::Edited,
5887 ],
5888 );
5889 events.borrow_mut().clear();
5890
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, it is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
5895 assert!(buffer.is_dirty());
5896 });
5897
5898 assert_eq!(*events.borrow(), &[language::Event::Edited]);
5899
5900 // When a file is deleted, the buffer is considered dirty.
5901 let events = Rc::new(RefCell::new(Vec::new()));
5902 let buffer2 = project
5903 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
5904 .await
5905 .unwrap();
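        // Record the buffer's events so we can check what is emitted when its file is deleted.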
5906 buffer2.update(cx, |_, cx| {
5907 cx.subscribe(&buffer2, {
5908 let events = events.clone();
5909 move |_, _, event, _| events.borrow_mut().push(event.clone())
5910 })
5911 .detach();
5912 });
5913
5914 fs::remove_file(dir.path().join("file2")).unwrap();
5915 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
5916 assert_eq!(
5917 *events.borrow(),
5918 &[language::Event::Dirtied, language::Event::FileHandleChanged]
5919 );
5920
        // If a buffer is already dirty when its file is deleted, no additional Dirtied event is emitted.
5922 let events = Rc::new(RefCell::new(Vec::new()));
5923 let buffer3 = project
5924 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
5925 .await
5926 .unwrap();
5927 buffer3.update(cx, |_, cx| {
5928 cx.subscribe(&buffer3, {
5929 let events = events.clone();
5930 move |_, _, event, _| events.borrow_mut().push(event.clone())
5931 })
5932 .detach();
5933 });
5934
5935 worktree.flush_fs_events(&cx).await;
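        // Dirty the buffer by editing it before deleting the underlying file.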
5936 buffer3.update(cx, |buffer, cx| {
5937 buffer.edit(Some(0..0), "x", cx);
5938 });
5939 events.borrow_mut().clear();
5940 fs::remove_file(dir.path().join("file3")).unwrap();
5941 buffer3
5942 .condition(&cx, |_, _| !events.borrow().is_empty())
5943 .await;
5944 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
5945 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
5946 }
5947
5948 #[gpui::test]
5949 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
5950 use std::fs;
5951
5952 let initial_contents = "aaa\nbbbbb\nc\n";
5953 let dir = temp_tree(json!({ "the-file": initial_contents }));
5954
5955 let project = Project::test(Arc::new(RealFs), cx);
5956 let (worktree, _) = project
5957 .update(cx, |p, cx| {
5958 p.find_or_create_local_worktree(dir.path(), true, cx)
5959 })
5960 .await
5961 .unwrap();
5962 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
5963
5964 worktree
5965 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
5966 .await;
5967
5968 let abs_path = dir.path().join("the-file");
5969 let buffer = project
5970 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
5971 .await
5972 .unwrap();
5973
5974 // TODO
5975 // Add a cursor on each row.
5976 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
5977 // assert!(!buffer.is_dirty());
5978 // buffer.add_selection_set(
5979 // &(0..3)
5980 // .map(|row| Selection {
5981 // id: row as usize,
5982 // start: Point::new(row, 1),
5983 // end: Point::new(row, 1),
5984 // reversed: false,
5985 // goal: SelectionGoal::None,
5986 // })
5987 // .collect::<Vec<_>>(),
5988 // cx,
5989 // )
5990 // });
5991
5992 // Change the file on disk, adding two new lines of text, and removing
5993 // one line.
5994 buffer.read_with(cx, |buffer, _| {
5995 assert!(!buffer.is_dirty());
5996 assert!(!buffer.has_conflict());
5997 });
5998 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
5999 fs::write(&abs_path, new_contents).unwrap();
6000
6001 // Because the buffer was not modified, it is reloaded from disk. Its
6002 // contents are edited according to the diff between the old and new
6003 // file contents.
6004 buffer
6005 .condition(&cx, |buffer, _| buffer.text() == new_contents)
6006 .await;
6007
6008 buffer.update(cx, |buffer, _| {
6009 assert_eq!(buffer.text(), new_contents);
6010 assert!(!buffer.is_dirty());
6011 assert!(!buffer.has_conflict());
6012
6013 // TODO
6014 // let cursor_positions = buffer
6015 // .selection_set(selection_set_id)
6016 // .unwrap()
6017 // .selections::<Point>(&*buffer)
6018 // .map(|selection| {
6019 // assert_eq!(selection.start, selection.end);
6020 // selection.start
6021 // })
6022 // .collect::<Vec<_>>();
6023 // assert_eq!(
6024 // cursor_positions,
6025 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
6026 // );
6027 });
6028
        // Modify the buffer.
6030 buffer.update(cx, |buffer, cx| {
6031 buffer.edit(vec![0..0], " ", cx);
6032 assert!(buffer.is_dirty());
6033 assert!(!buffer.has_conflict());
6034 });
6035
6036 // Change the file on disk again, adding blank lines to the beginning.
6037 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
6038
6039 // Because the buffer is modified, it doesn't reload from disk, but is
6040 // marked as having a conflict.
6041 buffer
6042 .condition(&cx, |buffer, _| buffer.has_conflict())
6043 .await;
6044 }
6045
6046 #[gpui::test]
6047 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
6048 cx.foreground().forbid_parking();
6049
6050 let fs = FakeFs::new(cx.background());
6051 fs.insert_tree(
6052 "/the-dir",
6053 json!({
6054 "a.rs": "
6055 fn foo(mut v: Vec<usize>) {
6056 for x in &v {
6057 v.push(1);
6058 }
6059 }
6060 "
6061 .unindent(),
6062 }),
6063 )
6064 .await;
6065
6066 let project = Project::test(fs.clone(), cx);
6067 let (worktree, _) = project
6068 .update(cx, |p, cx| {
6069 p.find_or_create_local_worktree("/the-dir", true, cx)
6070 })
6071 .await
6072 .unwrap();
6073 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6074
6075 let buffer = project
6076 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6077 .await
6078 .unwrap();
6079
6080 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
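        // Simulate an LSP `textDocument/publishDiagnostics` notification containing two
        // groups of related diagnostics: a warning with one associated hint, and an
        // error with two associated hints.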
6081 let message = lsp::PublishDiagnosticsParams {
6082 uri: buffer_uri.clone(),
6083 diagnostics: vec![
6084 lsp::Diagnostic {
6085 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6086 severity: Some(DiagnosticSeverity::WARNING),
6087 message: "error 1".to_string(),
6088 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6089 location: lsp::Location {
6090 uri: buffer_uri.clone(),
6091 range: lsp::Range::new(
6092 lsp::Position::new(1, 8),
6093 lsp::Position::new(1, 9),
6094 ),
6095 },
6096 message: "error 1 hint 1".to_string(),
6097 }]),
6098 ..Default::default()
6099 },
6100 lsp::Diagnostic {
6101 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6102 severity: Some(DiagnosticSeverity::HINT),
6103 message: "error 1 hint 1".to_string(),
6104 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6105 location: lsp::Location {
6106 uri: buffer_uri.clone(),
6107 range: lsp::Range::new(
6108 lsp::Position::new(1, 8),
6109 lsp::Position::new(1, 9),
6110 ),
6111 },
6112 message: "original diagnostic".to_string(),
6113 }]),
6114 ..Default::default()
6115 },
6116 lsp::Diagnostic {
6117 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6118 severity: Some(DiagnosticSeverity::ERROR),
6119 message: "error 2".to_string(),
6120 related_information: Some(vec![
6121 lsp::DiagnosticRelatedInformation {
6122 location: lsp::Location {
6123 uri: buffer_uri.clone(),
6124 range: lsp::Range::new(
6125 lsp::Position::new(1, 13),
6126 lsp::Position::new(1, 15),
6127 ),
6128 },
6129 message: "error 2 hint 1".to_string(),
6130 },
6131 lsp::DiagnosticRelatedInformation {
6132 location: lsp::Location {
6133 uri: buffer_uri.clone(),
6134 range: lsp::Range::new(
6135 lsp::Position::new(1, 13),
6136 lsp::Position::new(1, 15),
6137 ),
6138 },
6139 message: "error 2 hint 2".to_string(),
6140 },
6141 ]),
6142 ..Default::default()
6143 },
6144 lsp::Diagnostic {
6145 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6146 severity: Some(DiagnosticSeverity::HINT),
6147 message: "error 2 hint 1".to_string(),
6148 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6149 location: lsp::Location {
6150 uri: buffer_uri.clone(),
6151 range: lsp::Range::new(
6152 lsp::Position::new(2, 8),
6153 lsp::Position::new(2, 17),
6154 ),
6155 },
6156 message: "original diagnostic".to_string(),
6157 }]),
6158 ..Default::default()
6159 },
6160 lsp::Diagnostic {
6161 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6162 severity: Some(DiagnosticSeverity::HINT),
6163 message: "error 2 hint 2".to_string(),
6164 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6165 location: lsp::Location {
6166 uri: buffer_uri.clone(),
6167 range: lsp::Range::new(
6168 lsp::Position::new(2, 8),
6169 lsp::Position::new(2, 17),
6170 ),
6171 },
6172 message: "original diagnostic".to_string(),
6173 }]),
6174 ..Default::default()
6175 },
6176 ],
6177 version: None,
6178 };
6179
6180 project
6181 .update(cx, |p, cx| {
6182 p.update_diagnostics(message, &Default::default(), cx)
6183 })
6184 .unwrap();
6185 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6186
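        // All diagnostics, including the hints supplied as related information, are
        // returned in position order and share a group id with their primary diagnostic.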
6187 assert_eq!(
6188 buffer
6189 .diagnostics_in_range::<_, Point>(0..buffer.len())
6190 .collect::<Vec<_>>(),
6191 &[
6192 DiagnosticEntry {
6193 range: Point::new(1, 8)..Point::new(1, 9),
6194 diagnostic: Diagnostic {
6195 severity: DiagnosticSeverity::WARNING,
6196 message: "error 1".to_string(),
6197 group_id: 0,
6198 is_primary: true,
6199 ..Default::default()
6200 }
6201 },
6202 DiagnosticEntry {
6203 range: Point::new(1, 8)..Point::new(1, 9),
6204 diagnostic: Diagnostic {
6205 severity: DiagnosticSeverity::HINT,
6206 message: "error 1 hint 1".to_string(),
6207 group_id: 0,
6208 is_primary: false,
6209 ..Default::default()
6210 }
6211 },
6212 DiagnosticEntry {
6213 range: Point::new(1, 13)..Point::new(1, 15),
6214 diagnostic: Diagnostic {
6215 severity: DiagnosticSeverity::HINT,
6216 message: "error 2 hint 1".to_string(),
6217 group_id: 1,
6218 is_primary: false,
6219 ..Default::default()
6220 }
6221 },
6222 DiagnosticEntry {
6223 range: Point::new(1, 13)..Point::new(1, 15),
6224 diagnostic: Diagnostic {
6225 severity: DiagnosticSeverity::HINT,
6226 message: "error 2 hint 2".to_string(),
6227 group_id: 1,
6228 is_primary: false,
6229 ..Default::default()
6230 }
6231 },
6232 DiagnosticEntry {
6233 range: Point::new(2, 8)..Point::new(2, 17),
6234 diagnostic: Diagnostic {
6235 severity: DiagnosticSeverity::ERROR,
6236 message: "error 2".to_string(),
6237 group_id: 1,
6238 is_primary: true,
6239 ..Default::default()
6240 }
6241 }
6242 ]
6243 );
6244
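        // Each diagnostic group can also be fetched individually by its group id.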
6245 assert_eq!(
6246 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6247 &[
6248 DiagnosticEntry {
6249 range: Point::new(1, 8)..Point::new(1, 9),
6250 diagnostic: Diagnostic {
6251 severity: DiagnosticSeverity::WARNING,
6252 message: "error 1".to_string(),
6253 group_id: 0,
6254 is_primary: true,
6255 ..Default::default()
6256 }
6257 },
6258 DiagnosticEntry {
6259 range: Point::new(1, 8)..Point::new(1, 9),
6260 diagnostic: Diagnostic {
6261 severity: DiagnosticSeverity::HINT,
6262 message: "error 1 hint 1".to_string(),
6263 group_id: 0,
6264 is_primary: false,
6265 ..Default::default()
6266 }
6267 },
6268 ]
6269 );
6270 assert_eq!(
6271 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6272 &[
6273 DiagnosticEntry {
6274 range: Point::new(1, 13)..Point::new(1, 15),
6275 diagnostic: Diagnostic {
6276 severity: DiagnosticSeverity::HINT,
6277 message: "error 2 hint 1".to_string(),
6278 group_id: 1,
6279 is_primary: false,
6280 ..Default::default()
6281 }
6282 },
6283 DiagnosticEntry {
6284 range: Point::new(1, 13)..Point::new(1, 15),
6285 diagnostic: Diagnostic {
6286 severity: DiagnosticSeverity::HINT,
6287 message: "error 2 hint 2".to_string(),
6288 group_id: 1,
6289 is_primary: false,
6290 ..Default::default()
6291 }
6292 },
6293 DiagnosticEntry {
6294 range: Point::new(2, 8)..Point::new(2, 17),
6295 diagnostic: Diagnostic {
6296 severity: DiagnosticSeverity::ERROR,
6297 message: "error 2".to_string(),
6298 group_id: 1,
6299 is_primary: true,
6300 ..Default::default()
6301 }
6302 }
6303 ]
6304 );
6305 }
6306
6307 #[gpui::test]
6308 async fn test_rename(cx: &mut gpui::TestAppContext) {
6309 cx.foreground().forbid_parking();
6310
6311 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6312 let language = Arc::new(Language::new(
6313 LanguageConfig {
6314 name: "Rust".into(),
6315 path_suffixes: vec!["rs".to_string()],
6316 language_server: Some(language_server_config),
6317 ..Default::default()
6318 },
6319 Some(tree_sitter_rust::language()),
6320 ));
6321
6322 let fs = FakeFs::new(cx.background());
6323 fs.insert_tree(
6324 "/dir",
6325 json!({
6326 "one.rs": "const ONE: usize = 1;",
6327 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6328 }),
6329 )
6330 .await;
6331
6332 let project = Project::test(fs.clone(), cx);
6333 project.update(cx, |project, _| {
6334 Arc::get_mut(&mut project.languages).unwrap().add(language);
6335 });
6336
6337 let (tree, _) = project
6338 .update(cx, |project, cx| {
6339 project.find_or_create_local_worktree("/dir", true, cx)
6340 })
6341 .await
6342 .unwrap();
6343 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6344 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6345 .await;
6346
6347 let buffer = project
6348 .update(cx, |project, cx| {
6349 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6350 })
6351 .await
6352 .unwrap();
6353
6354 let mut fake_server = fake_servers.next().await.unwrap();
6355
6356 let response = project.update(cx, |project, cx| {
6357 project.prepare_rename(buffer.clone(), 7, cx)
6358 });
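        // Have the fake language server answer the prepare-rename request with the
        // range of the symbol under the cursor.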
6359 fake_server
6360 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6361 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6362 assert_eq!(params.position, lsp::Position::new(0, 7));
6363 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6364 lsp::Position::new(0, 6),
6365 lsp::Position::new(0, 9),
6366 )))
6367 })
6368 .next()
6369 .await
6370 .unwrap();
6371 let range = response.await.unwrap().unwrap();
6372 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6373 assert_eq!(range, 6..9);
6374
6375 let response = project.update(cx, |project, cx| {
6376 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6377 });
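        // Have the fake language server answer the rename request with a workspace
        // edit that touches both files referencing the constant.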
6378 fake_server
6379 .handle_request::<lsp::request::Rename, _>(|params, _| {
6380 assert_eq!(
6381 params.text_document_position.text_document.uri.as_str(),
6382 "file:///dir/one.rs"
6383 );
6384 assert_eq!(
6385 params.text_document_position.position,
6386 lsp::Position::new(0, 7)
6387 );
6388 assert_eq!(params.new_name, "THREE");
6389 Some(lsp::WorkspaceEdit {
6390 changes: Some(
6391 [
6392 (
6393 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6394 vec![lsp::TextEdit::new(
6395 lsp::Range::new(
6396 lsp::Position::new(0, 6),
6397 lsp::Position::new(0, 9),
6398 ),
6399 "THREE".to_string(),
6400 )],
6401 ),
6402 (
6403 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6404 vec![
6405 lsp::TextEdit::new(
6406 lsp::Range::new(
6407 lsp::Position::new(0, 24),
6408 lsp::Position::new(0, 27),
6409 ),
6410 "THREE".to_string(),
6411 ),
6412 lsp::TextEdit::new(
6413 lsp::Range::new(
6414 lsp::Position::new(0, 35),
6415 lsp::Position::new(0, 38),
6416 ),
6417 "THREE".to_string(),
6418 ),
6419 ],
6420 ),
6421 ]
6422 .into_iter()
6423 .collect(),
6424 ),
6425 ..Default::default()
6426 })
6427 })
6428 .next()
6429 .await
6430 .unwrap();
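        // The rename produces a project transaction containing both edited buffers.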
6431 let mut transaction = response.await.unwrap().0;
6432 assert_eq!(transaction.len(), 2);
6433 assert_eq!(
6434 transaction
6435 .remove_entry(&buffer)
6436 .unwrap()
6437 .0
6438 .read_with(cx, |buffer, _| buffer.text()),
6439 "const THREE: usize = 1;"
6440 );
6441 assert_eq!(
6442 transaction
6443 .into_keys()
6444 .next()
6445 .unwrap()
6446 .read_with(cx, |buffer, _| buffer.text()),
6447 "const TWO: usize = one::THREE + one::THREE;"
6448 );
6449 }
6450
6451 #[gpui::test]
6452 async fn test_search(cx: &mut gpui::TestAppContext) {
6453 let fs = FakeFs::new(cx.background());
6454 fs.insert_tree(
6455 "/dir",
6456 json!({
6457 "one.rs": "const ONE: usize = 1;",
6458 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6459 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6460 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6461 }),
6462 )
6463 .await;
6464 let project = Project::test(fs.clone(), cx);
6465 let (tree, _) = project
6466 .update(cx, |project, cx| {
6467 project.find_or_create_local_worktree("/dir", true, cx)
6468 })
6469 .await
6470 .unwrap();
6471 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6472 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6473 .await;
6474
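        // Searching the project finds matches in files that aren't open in any buffer.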
6475 assert_eq!(
6476 search(&project, SearchQuery::text("TWO", false, true), cx)
6477 .await
6478 .unwrap(),
6479 HashMap::from_iter([
6480 ("two.rs".to_string(), vec![6..9]),
6481 ("three.rs".to_string(), vec![37..40])
6482 ])
6483 );
6484
6485 let buffer_4 = project
6486 .update(cx, |project, cx| {
6487 project.open_buffer((worktree_id, "four.rs"), cx)
6488 })
6489 .await
6490 .unwrap();
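        // Edit the open buffer so that it also matches the query; the search should
        // reflect the buffer's unsaved, in-memory contents.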
6491 buffer_4.update(cx, |buffer, cx| {
6492 buffer.edit([20..28, 31..43], "two::TWO", cx);
6493 });
6494
6495 assert_eq!(
6496 search(&project, SearchQuery::text("TWO", false, true), cx)
6497 .await
6498 .unwrap(),
6499 HashMap::from_iter([
6500 ("two.rs".to_string(), vec![6..9]),
6501 ("three.rs".to_string(), vec![37..40]),
6502 ("four.rs".to_string(), vec![25..28, 36..39])
6503 ])
6504 );
6505
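        // Helper: run a project-wide search and convert the results into a map from
        // file path to match offset ranges within that file.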
6506 async fn search(
6507 project: &ModelHandle<Project>,
6508 query: SearchQuery,
6509 cx: &mut gpui::TestAppContext,
6510 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6511 let results = project
6512 .update(cx, |project, cx| project.search(query, cx))
6513 .await?;
6514
6515 Ok(results
6516 .into_iter()
6517 .map(|(buffer, ranges)| {
6518 buffer.read_with(cx, |buffer, _| {
6519 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6520 let ranges = ranges
6521 .into_iter()
6522 .map(|range| range.to_offset(buffer))
6523 .collect::<Vec<_>>();
6524 (path, ranges)
6525 })
6526 })
6527 .collect())
6528 }
6529 }
6530}