1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, Bias, Buffer, CodeAction, CodeLabel, Completion, Diagnostic,
20 DiagnosticEntry, DiagnosticSet, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 LocalFile, OffsetRangeExt, Operation, PointUtf16, TextBufferSnapshot, ToLspPosition, ToOffset,
22 ToPointUtf16, Transaction,
23};
24use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
25use lsp_command::*;
26use postage::watch;
27use rand::prelude::*;
28use search::SearchQuery;
29use sha2::{Digest, Sha256};
30use similar::{ChangeTag, TextDiff};
31use smol::block_on;
32use std::{
33 cell::RefCell,
34 cmp::{self, Ordering},
35 convert::TryInto,
36 hash::Hash,
37 mem,
38 ops::Range,
39 path::{Component, Path, PathBuf},
40 rc::Rc,
41 sync::{atomic::AtomicBool, Arc},
42 time::Instant,
43};
44use util::{post_inc, ResultExt, TryFutureExt as _};
45
46pub use fs::*;
47pub use worktree::*;
48
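/// The top-level unit of collaboration and language intelligence: a project owns a set of
/// worktrees, the buffers opened within them, and one language server per (worktree, language)
/// pair, and it synchronizes this state with collaborators over RPC when shared or joined remotely.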
49pub struct Project {
50 worktrees: Vec<WorktreeHandle>,
51 active_entry: Option<ProjectEntry>,
52 languages: Arc<LanguageRegistry>,
53 language_servers: HashMap<(WorktreeId, Arc<str>), Arc<LanguageServer>>,
54 started_language_servers: HashMap<(WorktreeId, Arc<str>), Task<Option<Arc<LanguageServer>>>>,
55 client: Arc<client::Client>,
56 user_store: ModelHandle<UserStore>,
57 fs: Arc<dyn Fs>,
58 client_state: ProjectClientState,
59 collaborators: HashMap<PeerId, Collaborator>,
60 subscriptions: Vec<client::Subscription>,
61 language_servers_with_diagnostics_running: isize,
62 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
63 shared_buffers: HashMap<PeerId, HashSet<u64>>,
64 loading_buffers: HashMap<
65 ProjectPath,
66 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
67 >,
68 loading_local_worktrees:
69 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
70 opened_buffers: HashMap<u64, OpenBuffer>,
71 buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
72 nonce: u128,
73}
74
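/// The project's handle on an open buffer. `Strong` keeps the buffer alive (used while the
/// project is shared or remote), `Weak` lets an unshared local buffer drop when nothing else is
/// using it, and `Loading` accumulates operations (typically from collaborators) that arrive
/// before the buffer has finished opening; they are applied in `register_buffer`.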
75enum OpenBuffer {
76 Strong(ModelHandle<Buffer>),
77 Weak(WeakModelHandle<Buffer>),
78 Loading(Vec<Operation>),
79}
80
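/// Worktrees are upgraded to strong handles when the project is shared; on unshare, worktrees
/// that aren't visible are downgraded to weak handles so they can be released once nothing else
/// is using them.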
81enum WorktreeHandle {
82 Strong(ModelHandle<Worktree>),
83 Weak(WeakModelHandle<Worktree>),
84}
85
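/// Whether this project is hosted in this process or joined from another peer. `Local` tracks the
/// server-assigned remote id (once registered) and whether the project is currently shared;
/// `Remote` records the project id, this peer's replica id, and whether the host has stopped sharing.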
86enum ProjectClientState {
87 Local {
88 is_shared: bool,
89 remote_id_tx: watch::Sender<Option<u64>>,
90 remote_id_rx: watch::Receiver<Option<u64>>,
91 _maintain_remote_id_task: Task<Option<()>>,
92 },
93 Remote {
94 sharing_has_stopped: bool,
95 remote_id: u64,
96 replica_id: ReplicaId,
97 _detect_unshare_task: Task<Option<()>>,
98 },
99}
100
101#[derive(Clone, Debug)]
102pub struct Collaborator {
103 pub user: Arc<User>,
104 pub peer_id: PeerId,
105 pub replica_id: ReplicaId,
106}
107
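/// Events emitted by a [`Project`] so observers (such as the workspace UI) can react to changes
/// in the active entry, worktrees, and diagnostics.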
108#[derive(Clone, Debug, PartialEq)]
109pub enum Event {
110 ActiveEntryChanged(Option<ProjectEntry>),
111 WorktreeRemoved(WorktreeId),
112 DiskBasedDiagnosticsStarted,
113 DiskBasedDiagnosticsUpdated,
114 DiskBasedDiagnosticsFinished,
115 DiagnosticsUpdated(ProjectPath),
116}
117
118#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
119pub struct ProjectPath {
120 pub worktree_id: WorktreeId,
121 pub path: Arc<Path>,
122}
123
124#[derive(Clone, Debug, Default, PartialEq)]
125pub struct DiagnosticSummary {
126 pub error_count: usize,
127 pub warning_count: usize,
128 pub info_count: usize,
129 pub hint_count: usize,
130}
131
132#[derive(Debug)]
133pub struct Location {
134 pub buffer: ModelHandle<Buffer>,
135 pub range: Range<language::Anchor>,
136}
137
138#[derive(Debug)]
139pub struct DocumentHighlight {
140 pub range: Range<language::Anchor>,
141 pub kind: DocumentHighlightKind,
142}
143
144#[derive(Clone, Debug)]
145pub struct Symbol {
146 pub source_worktree_id: WorktreeId,
147 pub worktree_id: WorktreeId,
148 pub language_name: String,
149 pub path: PathBuf,
150 pub label: CodeLabel,
151 pub name: String,
152 pub kind: lsp::SymbolKind,
153 pub range: Range<PointUtf16>,
154 pub signature: [u8; 32],
155}
156
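/// The set of buffer transactions produced by a project-wide operation such as formatting or
/// applying a code action, keyed by the buffer in which each transaction occurred.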
157#[derive(Default)]
158pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
159
160impl DiagnosticSummary {
161 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
162 let mut this = Self {
163 error_count: 0,
164 warning_count: 0,
165 info_count: 0,
166 hint_count: 0,
167 };
168
169 for entry in diagnostics {
170 if entry.diagnostic.is_primary {
171 match entry.diagnostic.severity {
172 DiagnosticSeverity::ERROR => this.error_count += 1,
173 DiagnosticSeverity::WARNING => this.warning_count += 1,
174 DiagnosticSeverity::INFORMATION => this.info_count += 1,
175 DiagnosticSeverity::HINT => this.hint_count += 1,
176 _ => {}
177 }
178 }
179 }
180
181 this
182 }
183
184 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
185 proto::DiagnosticSummary {
186 path: path.to_string_lossy().to_string(),
187 error_count: self.error_count as u32,
188 warning_count: self.warning_count as u32,
189 info_count: self.info_count as u32,
190 hint_count: self.hint_count as u32,
191 }
192 }
193}
194
195#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
196pub struct ProjectEntry {
197 pub worktree_id: WorktreeId,
198 pub entry_id: usize,
199}
200
201impl Project {
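/// Registers the RPC message and request handlers that route incoming `proto` messages to the
/// corresponding `Project` methods.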
202 pub fn init(client: &Arc<Client>) {
203 client.add_entity_message_handler(Self::handle_add_collaborator);
204 client.add_entity_message_handler(Self::handle_buffer_reloaded);
205 client.add_entity_message_handler(Self::handle_buffer_saved);
206 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
207 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
208 client.add_entity_message_handler(Self::handle_remove_collaborator);
209 client.add_entity_message_handler(Self::handle_register_worktree);
210 client.add_entity_message_handler(Self::handle_unregister_worktree);
211 client.add_entity_message_handler(Self::handle_unshare_project);
212 client.add_entity_message_handler(Self::handle_update_buffer_file);
213 client.add_entity_message_handler(Self::handle_update_buffer);
214 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
215 client.add_entity_message_handler(Self::handle_update_worktree);
216 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
217 client.add_entity_request_handler(Self::handle_apply_code_action);
218 client.add_entity_request_handler(Self::handle_format_buffers);
219 client.add_entity_request_handler(Self::handle_get_code_actions);
220 client.add_entity_request_handler(Self::handle_get_completions);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
224 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
225 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
226 client.add_entity_request_handler(Self::handle_search_project);
227 client.add_entity_request_handler(Self::handle_get_project_symbols);
228 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
229 client.add_entity_request_handler(Self::handle_open_buffer);
230 client.add_entity_request_handler(Self::handle_save_buffer);
231 }
232
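/// Creates a project hosted in this process. A background task keeps the project registered with
/// the server whenever the client is connected, re-registering its worktrees and recording the
/// server-assigned remote id.
///
/// A minimal usage sketch (illustrative only; assumes `client`, `user_store`, `languages`, `fs`,
/// a worktree with id `worktree_id`, and a `cx: &mut MutableAppContext` are already available):
///
/// ```ignore
/// let project = Project::local(client, user_store, languages, fs, cx);
/// let open_buffer = project.update(cx, |project, cx| {
///     project.open_buffer(
///         ProjectPath {
///             worktree_id,
///             path: Path::new("src/main.rs").into(),
///         },
///         cx,
///     )
/// });
/// ```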
233 pub fn local(
234 client: Arc<Client>,
235 user_store: ModelHandle<UserStore>,
236 languages: Arc<LanguageRegistry>,
237 fs: Arc<dyn Fs>,
238 cx: &mut MutableAppContext,
239 ) -> ModelHandle<Self> {
240 cx.add_model(|cx: &mut ModelContext<Self>| {
241 let (remote_id_tx, remote_id_rx) = watch::channel();
242 let _maintain_remote_id_task = cx.spawn_weak({
243 let rpc = client.clone();
244 move |this, mut cx| {
245 async move {
246 let mut status = rpc.status();
247 while let Some(status) = status.next().await {
248 if let Some(this) = this.upgrade(&cx) {
249 let remote_id = if status.is_connected() {
250 let response = rpc.request(proto::RegisterProject {}).await?;
251 Some(response.project_id)
252 } else {
253 None
254 };
255
256 if let Some(project_id) = remote_id {
257 let mut registrations = Vec::new();
258 this.update(&mut cx, |this, cx| {
259 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
260 registrations.push(worktree.update(
261 cx,
262 |worktree, cx| {
263 let worktree = worktree.as_local_mut().unwrap();
264 worktree.register(project_id, cx)
265 },
266 ));
267 }
268 });
269 for registration in registrations {
270 registration.await?;
271 }
272 }
273 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
274 }
275 }
276 Ok(())
277 }
278 .log_err()
279 }
280 });
281
282 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
283 Self {
284 worktrees: Default::default(),
285 collaborators: Default::default(),
286 opened_buffers: Default::default(),
287 shared_buffers: Default::default(),
288 loading_buffers: Default::default(),
289 loading_local_worktrees: Default::default(),
290 buffer_snapshots: Default::default(),
291 client_state: ProjectClientState::Local {
292 is_shared: false,
293 remote_id_tx,
294 remote_id_rx,
295 _maintain_remote_id_task,
296 },
297 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
298 subscriptions: Vec::new(),
299 active_entry: None,
300 languages,
301 client,
302 user_store,
303 fs,
304 language_servers_with_diagnostics_running: 0,
305 language_servers: Default::default(),
306 started_language_servers: Default::default(),
307 nonce: StdRng::from_entropy().gen(),
308 }
309 })
310 }
311
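/// Joins a project hosted by another peer: authenticates and connects the client, asks the server
/// to join the project with the given id, constructs remote worktrees from the response, and loads
/// the current set of collaborators.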
312 pub async fn remote(
313 remote_id: u64,
314 client: Arc<Client>,
315 user_store: ModelHandle<UserStore>,
316 languages: Arc<LanguageRegistry>,
317 fs: Arc<dyn Fs>,
318 cx: &mut AsyncAppContext,
319 ) -> Result<ModelHandle<Self>> {
320 client.authenticate_and_connect(&cx).await?;
321
322 let response = client
323 .request(proto::JoinProject {
324 project_id: remote_id,
325 })
326 .await?;
327
328 let replica_id = response.replica_id as ReplicaId;
329
330 let mut worktrees = Vec::new();
331 for worktree in response.worktrees {
332 let (worktree, load_task) = cx
333 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
334 worktrees.push(worktree);
335 load_task.detach();
336 }
337
338 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
339 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
340 let mut this = Self {
341 worktrees: Vec::new(),
342 loading_buffers: Default::default(),
343 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
344 shared_buffers: Default::default(),
345 loading_local_worktrees: Default::default(),
346 active_entry: None,
347 collaborators: Default::default(),
348 languages,
349 user_store: user_store.clone(),
350 fs,
351 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
352 client: client.clone(),
353 client_state: ProjectClientState::Remote {
354 sharing_has_stopped: false,
355 remote_id,
356 replica_id,
357 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
358 async move {
359 let mut status = client.status();
360 let is_connected =
361 status.next().await.map_or(false, |s| s.is_connected());
362 // Even if we're initially connected, any later change in the status means we were momentarily disconnected.
363 if !is_connected || status.next().await.is_some() {
364 if let Some(this) = this.upgrade(&cx) {
365 this.update(&mut cx, |this, cx| this.project_unshared(cx))
366 }
367 }
368 Ok(())
369 }
370 .log_err()
371 }),
372 },
373 language_servers_with_diagnostics_running: 0,
374 language_servers: Default::default(),
375 started_language_servers: Default::default(),
376 opened_buffers: Default::default(),
377 buffer_snapshots: Default::default(),
378 nonce: StdRng::from_entropy().gen(),
379 };
380 for worktree in worktrees {
381 this.add_worktree(&worktree, cx);
382 }
383 this
384 });
385
386 let user_ids = response
387 .collaborators
388 .iter()
389 .map(|peer| peer.user_id)
390 .collect();
391 user_store
392 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
393 .await?;
394 let mut collaborators = HashMap::default();
395 for message in response.collaborators {
396 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
397 collaborators.insert(collaborator.peer_id, collaborator);
398 }
399
400 this.update(cx, |this, _| {
401 this.collaborators = collaborators;
402 });
403
404 Ok(this)
405 }
406
407 #[cfg(any(test, feature = "test-support"))]
408 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
409 let languages = Arc::new(LanguageRegistry::test());
410 let http_client = client::test::FakeHttpClient::with_404_response();
411 let client = client::Client::new(http_client.clone());
412 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
413 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
414 }
415
416 #[cfg(any(test, feature = "test-support"))]
417 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
418 self.opened_buffers
419 .get(&remote_id)
420 .and_then(|buffer| buffer.upgrade(cx))
421 }
422
423 #[cfg(any(test, feature = "test-support"))]
424 pub fn languages(&self) -> &Arc<LanguageRegistry> {
425 &self.languages
426 }
427
428 #[cfg(any(test, feature = "test-support"))]
429 pub fn check_invariants(&self, cx: &AppContext) {
430 if self.is_local() {
431 let mut worktree_root_paths = HashMap::default();
432 for worktree in self.worktrees(cx) {
433 let worktree = worktree.read(cx);
434 let abs_path = worktree.as_local().unwrap().abs_path().clone();
435 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
436 assert_eq!(
437 prev_worktree_id,
438 None,
439 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
440 abs_path,
441 worktree.id(),
442 prev_worktree_id
443 )
444 }
445 } else {
446 let replica_id = self.replica_id();
447 for buffer in self.opened_buffers.values() {
448 if let Some(buffer) = buffer.upgrade(cx) {
449 let buffer = buffer.read(cx);
450 assert_eq!(
451 buffer.deferred_ops_len(),
452 0,
453 "replica {}, buffer {} has deferred operations",
454 replica_id,
455 buffer.remote_id()
456 );
457 }
458 }
459 }
460 }
461
462 #[cfg(any(test, feature = "test-support"))]
463 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
464 let path = path.into();
465 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
466 self.opened_buffers.iter().any(|(_, buffer)| {
467 if let Some(buffer) = buffer.upgrade(cx) {
468 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
469 if file.worktree == worktree && file.path() == &path.path {
470 return true;
471 }
472 }
473 }
474 false
475 })
476 } else {
477 false
478 }
479 }
480
481 pub fn fs(&self) -> &Arc<dyn Fs> {
482 &self.fs
483 }
484
485 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
486 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
487 *remote_id_tx.borrow_mut() = remote_id;
488 }
489
490 self.subscriptions.clear();
491 if let Some(remote_id) = remote_id {
492 self.subscriptions
493 .push(self.client.add_model_for_remote_entity(remote_id, cx));
494 }
495 }
496
497 pub fn remote_id(&self) -> Option<u64> {
498 match &self.client_state {
499 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
500 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
501 }
502 }
503
504 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
505 let mut id = None;
506 let mut watch = None;
507 match &self.client_state {
508 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
509 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
510 }
511
512 async move {
513 if let Some(id) = id {
514 return id;
515 }
516 let mut watch = watch.unwrap();
517 loop {
518 let id = *watch.borrow();
519 if let Some(id) = id {
520 return id;
521 }
522 watch.next().await;
523 }
524 }
525 }
526
527 pub fn replica_id(&self) -> ReplicaId {
528 match &self.client_state {
529 ProjectClientState::Local { .. } => 0,
530 ProjectClientState::Remote { replica_id, .. } => *replica_id,
531 }
532 }
533
534 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
535 &self.collaborators
536 }
537
538 pub fn worktrees<'a>(
539 &'a self,
540 cx: &'a AppContext,
541 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
542 self.worktrees
543 .iter()
544 .filter_map(move |worktree| worktree.upgrade(cx))
545 }
546
547 pub fn visible_worktrees<'a>(
548 &'a self,
549 cx: &'a AppContext,
550 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
551 self.worktrees.iter().filter_map(|worktree| {
552 worktree.upgrade(cx).and_then(|worktree| {
553 if worktree.read(cx).is_visible() {
554 Some(worktree)
555 } else {
556 None
557 }
558 })
559 })
560 }
561
562 pub fn worktree_for_id(
563 &self,
564 id: WorktreeId,
565 cx: &AppContext,
566 ) -> Option<ModelHandle<Worktree>> {
567 self.worktrees(cx)
568 .find(|worktree| worktree.read(cx).id() == id)
569 }
570
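/// Makes this local project available to collaborators. Open buffers and worktrees are upgraded to
/// strong handles so they stay alive while shared, then the project and each of its worktrees are
/// shared with the server.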
571 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
572 let rpc = self.client.clone();
573 cx.spawn(|this, mut cx| async move {
574 let project_id = this.update(&mut cx, |this, cx| {
575 if let ProjectClientState::Local {
576 is_shared,
577 remote_id_rx,
578 ..
579 } = &mut this.client_state
580 {
581 *is_shared = true;
582
583 for open_buffer in this.opened_buffers.values_mut() {
584 match open_buffer {
585 OpenBuffer::Strong(_) => {}
586 OpenBuffer::Weak(buffer) => {
587 if let Some(buffer) = buffer.upgrade(cx) {
588 *open_buffer = OpenBuffer::Strong(buffer);
589 }
590 }
591 OpenBuffer::Loading(_) => unreachable!(),
592 }
593 }
594
595 for worktree_handle in this.worktrees.iter_mut() {
596 match worktree_handle {
597 WorktreeHandle::Strong(_) => {}
598 WorktreeHandle::Weak(worktree) => {
599 if let Some(worktree) = worktree.upgrade(cx) {
600 *worktree_handle = WorktreeHandle::Strong(worktree);
601 }
602 }
603 }
604 }
605
606 remote_id_rx
607 .borrow()
608 .ok_or_else(|| anyhow!("no project id"))
609 } else {
610 Err(anyhow!("can't share a remote project"))
611 }
612 })?;
613
614 rpc.request(proto::ShareProject { project_id }).await?;
615
616 let mut tasks = Vec::new();
617 this.update(&mut cx, |this, cx| {
618 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
619 worktree.update(cx, |worktree, cx| {
620 let worktree = worktree.as_local_mut().unwrap();
621 tasks.push(worktree.share(project_id, cx));
622 });
623 }
624 });
625 for task in tasks {
626 task.await?;
627 }
628 this.update(&mut cx, |_, cx| cx.notify());
629 Ok(())
630 })
631 }
632
633 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
634 let rpc = self.client.clone();
635 cx.spawn(|this, mut cx| async move {
636 let project_id = this.update(&mut cx, |this, cx| {
637 if let ProjectClientState::Local {
638 is_shared,
639 remote_id_rx,
640 ..
641 } = &mut this.client_state
642 {
643 *is_shared = false;
644
645 for open_buffer in this.opened_buffers.values_mut() {
646 match open_buffer {
647 OpenBuffer::Strong(buffer) => {
648 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
649 }
650 _ => {}
651 }
652 }
653
654 for worktree_handle in this.worktrees.iter_mut() {
655 match worktree_handle {
656 WorktreeHandle::Strong(worktree) => {
657 if !worktree.read(cx).is_visible() {
658 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
659 }
660 }
661 _ => {}
662 }
663 }
664
665 remote_id_rx
666 .borrow()
667 .ok_or_else(|| anyhow!("no project id"))
668 } else {
669 Err(anyhow!("can't share a remote project"))
670 }
671 })?;
672
673 rpc.send(proto::UnshareProject { project_id })?;
674 this.update(&mut cx, |this, cx| {
675 this.collaborators.clear();
676 this.shared_buffers.clear();
677 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
678 worktree.update(cx, |worktree, _| {
679 worktree.as_local_mut().unwrap().unshare();
680 });
681 }
682 cx.notify()
683 });
684 Ok(())
685 })
686 }
687
688 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
689 if let ProjectClientState::Remote {
690 sharing_has_stopped,
691 ..
692 } = &mut self.client_state
693 {
694 *sharing_has_stopped = true;
695 self.collaborators.clear();
696 cx.notify();
697 }
698 }
699
700 pub fn is_read_only(&self) -> bool {
701 match &self.client_state {
702 ProjectClientState::Local { .. } => false,
703 ProjectClientState::Remote {
704 sharing_has_stopped,
705 ..
706 } => *sharing_has_stopped,
707 }
708 }
709
710 pub fn is_local(&self) -> bool {
711 match &self.client_state {
712 ProjectClientState::Local { .. } => true,
713 ProjectClientState::Remote { .. } => false,
714 }
715 }
716
717 pub fn is_remote(&self) -> bool {
718 !self.is_local()
719 }
720
721 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
722 if self.is_remote() {
723 return Err(anyhow!("creating buffers as a guest is not supported yet"));
724 }
725
726 let buffer = cx.add_model(|cx| {
727 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
728 });
729 self.register_buffer(&buffer, cx)?;
730 Ok(buffer)
731 }
732
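/// Opens the buffer at the given project path, returning the existing buffer handle if it is
/// already open. Concurrent requests for the same path are deduplicated: the first request stores
/// a watch channel in `loading_buffers`, and later requests wait on it.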
733 pub fn open_buffer(
734 &mut self,
735 path: impl Into<ProjectPath>,
736 cx: &mut ModelContext<Self>,
737 ) -> Task<Result<ModelHandle<Buffer>>> {
738 let project_path = path.into();
739 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
740 worktree
741 } else {
742 return Task::ready(Err(anyhow!("no such worktree")));
743 };
744
745 // If there is already a buffer for the given path, then return it.
746 let existing_buffer = self.get_open_buffer(&project_path, cx);
747 if let Some(existing_buffer) = existing_buffer {
748 return Task::ready(Ok(existing_buffer));
749 }
750
751 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
752 // If the given path is already being loaded, then wait for that existing
753 // task to complete and return the same buffer.
754 hash_map::Entry::Occupied(e) => e.get().clone(),
755
756 // Otherwise, record the fact that this path is now being loaded.
757 hash_map::Entry::Vacant(entry) => {
758 let (mut tx, rx) = postage::watch::channel();
759 entry.insert(rx.clone());
760
761 let load_buffer = if worktree.read(cx).is_local() {
762 self.open_local_buffer(&project_path.path, &worktree, cx)
763 } else {
764 self.open_remote_buffer(&project_path.path, &worktree, cx)
765 };
766
767 cx.spawn(move |this, mut cx| async move {
768 let load_result = load_buffer.await;
769 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
770 // Record the fact that the buffer is no longer loading.
771 this.loading_buffers.remove(&project_path);
772 let buffer = load_result.map_err(Arc::new)?;
773 Ok(buffer)
774 }));
775 })
776 .detach();
777 rx
778 }
779 };
780
781 cx.foreground().spawn(async move {
782 loop {
783 if let Some(result) = loading_watch.borrow().as_ref() {
784 match result {
785 Ok(buffer) => return Ok(buffer.clone()),
786 Err(error) => return Err(anyhow!("{}", error)),
787 }
788 }
789 loading_watch.next().await;
790 }
791 })
792 }
793
794 fn open_local_buffer(
795 &mut self,
796 path: &Arc<Path>,
797 worktree: &ModelHandle<Worktree>,
798 cx: &mut ModelContext<Self>,
799 ) -> Task<Result<ModelHandle<Buffer>>> {
800 let load_buffer = worktree.update(cx, |worktree, cx| {
801 let worktree = worktree.as_local_mut().unwrap();
802 worktree.load_buffer(path, cx)
803 });
804 cx.spawn(|this, mut cx| async move {
805 let buffer = load_buffer.await?;
806 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
807 Ok(buffer)
808 })
809 }
810
811 fn open_remote_buffer(
812 &mut self,
813 path: &Arc<Path>,
814 worktree: &ModelHandle<Worktree>,
815 cx: &mut ModelContext<Self>,
816 ) -> Task<Result<ModelHandle<Buffer>>> {
817 let rpc = self.client.clone();
818 let project_id = self.remote_id().unwrap();
819 let remote_worktree_id = worktree.read(cx).id();
820 let path = path.clone();
821 let path_string = path.to_string_lossy().to_string();
822 cx.spawn(|this, mut cx| async move {
823 let response = rpc
824 .request(proto::OpenBuffer {
825 project_id,
826 worktree_id: remote_worktree_id.to_proto(),
827 path: path_string,
828 })
829 .await?;
830 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
831 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
832 .await
833 })
834 }
835
836 fn open_local_buffer_via_lsp(
837 &mut self,
838 abs_path: lsp::Url,
839 lang_name: Arc<str>,
840 lang_server: Arc<LanguageServer>,
841 cx: &mut ModelContext<Self>,
842 ) -> Task<Result<ModelHandle<Buffer>>> {
843 cx.spawn(|this, mut cx| async move {
844 let abs_path = abs_path
845 .to_file_path()
846 .map_err(|_| anyhow!("can't convert URI to path"))?;
847 let (worktree, relative_path) = if let Some(result) =
848 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
849 {
850 result
851 } else {
852 let worktree = this
853 .update(&mut cx, |this, cx| {
854 this.create_local_worktree(&abs_path, false, cx)
855 })
856 .await?;
857 this.update(&mut cx, |this, cx| {
858 this.language_servers
859 .insert((worktree.read(cx).id(), lang_name), lang_server);
860 });
861 (worktree, PathBuf::new())
862 };
863
864 let project_path = ProjectPath {
865 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
866 path: relative_path.into(),
867 };
868 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
869 .await
870 })
871 }
872
873 pub fn save_buffer_as(
874 &mut self,
875 buffer: ModelHandle<Buffer>,
876 abs_path: PathBuf,
877 cx: &mut ModelContext<Project>,
878 ) -> Task<Result<()>> {
879 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
880 cx.spawn(|this, mut cx| async move {
881 let (worktree, path) = worktree_task.await?;
882 worktree
883 .update(&mut cx, |worktree, cx| {
884 worktree
885 .as_local_mut()
886 .unwrap()
887 .save_buffer_as(buffer.clone(), path, cx)
888 })
889 .await?;
890 this.update(&mut cx, |this, cx| {
891 this.assign_language_to_buffer(&buffer, cx);
892 this.register_buffer_with_language_server(&buffer, cx);
893 });
894 Ok(())
895 })
896 }
897
898 pub fn get_open_buffer(
899 &mut self,
900 path: &ProjectPath,
901 cx: &mut ModelContext<Self>,
902 ) -> Option<ModelHandle<Buffer>> {
903 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
904 self.opened_buffers.values().find_map(|buffer| {
905 let buffer = buffer.upgrade(cx)?;
906 let file = File::from_dyn(buffer.read(cx).file())?;
907 if file.worktree == worktree && file.path() == &path.path {
908 Some(buffer)
909 } else {
910 None
911 }
912 })
913 }
914
915 fn register_buffer(
916 &mut self,
917 buffer: &ModelHandle<Buffer>,
918 cx: &mut ModelContext<Self>,
919 ) -> Result<()> {
920 let remote_id = buffer.read(cx).remote_id();
921 let open_buffer = if self.is_remote() || self.is_shared() {
922 OpenBuffer::Strong(buffer.clone())
923 } else {
924 OpenBuffer::Weak(buffer.downgrade())
925 };
926
927 match self.opened_buffers.insert(remote_id, open_buffer) {
928 None => {}
929 Some(OpenBuffer::Loading(operations)) => {
930 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
931 }
932 Some(OpenBuffer::Weak(existing_handle)) => {
933 if existing_handle.upgrade(cx).is_some() {
934 Err(anyhow!(
935 "already registered buffer with remote id {}",
936 remote_id
937 ))?
938 }
939 }
940 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
941 "already registered buffer with remote id {}",
942 remote_id
943 ))?,
944 }
945 cx.subscribe(buffer, |this, buffer, event, cx| {
946 this.on_buffer_event(buffer, event, cx);
947 })
948 .detach();
949
950 self.assign_language_to_buffer(buffer, cx);
951 self.register_buffer_with_language_server(buffer, cx);
952
953 Ok(())
954 }
955
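/// Sends `textDocument/didOpen` for the buffer to its language server (if one is running), seeds
/// diagnostics previously published for the buffer's path, records the snapshot used for
/// incremental synchronization, and arranges for `textDocument/didClose` to be sent when the
/// buffer is released.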
956 fn register_buffer_with_language_server(
957 &mut self,
958 buffer_handle: &ModelHandle<Buffer>,
959 cx: &mut ModelContext<Self>,
960 ) {
961 let buffer = buffer_handle.read(cx);
962 let buffer_id = buffer.remote_id();
963 if let Some(file) = File::from_dyn(buffer.file()) {
964 if file.is_local() {
965 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
966 let initial_snapshot = buffer.text_snapshot();
967 let language_server = self.language_server_for_buffer(buffer, cx).cloned();
968
969 if let Some(local_worktree) = file.worktree.read(cx).as_local() {
970 if let Some(diagnostics) = local_worktree.diagnostics_for_path(file.path()) {
971 self.update_buffer_diagnostics(&buffer_handle, diagnostics, None, cx)
972 .log_err();
973 }
974 }
975
976 if let Some(server) = language_server {
977 server
978 .notify::<lsp::notification::DidOpenTextDocument>(
979 lsp::DidOpenTextDocumentParams {
980 text_document: lsp::TextDocumentItem::new(
981 uri,
982 Default::default(),
983 0,
984 initial_snapshot.text(),
985 ),
986 }
987 .clone(),
988 )
989 .log_err();
990 buffer_handle.update(cx, |buffer, cx| {
991 buffer.set_completion_triggers(
992 server
993 .capabilities()
994 .completion_provider
995 .as_ref()
996 .and_then(|provider| provider.trigger_characters.clone())
997 .unwrap_or(Vec::new()),
998 cx,
999 )
1000 });
1001 self.buffer_snapshots
1002 .insert(buffer_id, vec![(0, initial_snapshot)]);
1003 }
1004
1005 cx.observe_release(buffer_handle, |this, buffer, cx| {
1006 if let Some(file) = File::from_dyn(buffer.file()) {
1007 if file.is_local() {
1008 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1009 if let Some(server) = this.language_server_for_buffer(buffer, cx) {
1010 server
1011 .notify::<lsp::notification::DidCloseTextDocument>(
1012 lsp::DidCloseTextDocumentParams {
1013 text_document: lsp::TextDocumentIdentifier::new(
1014 uri.clone(),
1015 ),
1016 },
1017 )
1018 .log_err();
1019 }
1020 }
1021 }
1022 })
1023 .detach();
1024 }
1025 }
1026 }
1027
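/// Reacts to buffer events: forwards operations to collaborators via `UpdateBuffer`, translates
/// edits into incremental `textDocument/didChange` notifications, and sends
/// `textDocument/didSave` to every language server in the buffer's worktree.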
1028 fn on_buffer_event(
1029 &mut self,
1030 buffer: ModelHandle<Buffer>,
1031 event: &BufferEvent,
1032 cx: &mut ModelContext<Self>,
1033 ) -> Option<()> {
1034 match event {
1035 BufferEvent::Operation(operation) => {
1036 let project_id = self.remote_id()?;
1037 let request = self.client.request(proto::UpdateBuffer {
1038 project_id,
1039 buffer_id: buffer.read(cx).remote_id(),
1040 operations: vec![language::proto::serialize_operation(&operation)],
1041 });
1042 cx.background().spawn(request).detach_and_log_err(cx);
1043 }
1044 BufferEvent::Edited => {
1045 let language_server = self
1046 .language_server_for_buffer(buffer.read(cx), cx)?
1047 .clone();
1048 let buffer = buffer.read(cx);
1049 let file = File::from_dyn(buffer.file())?;
1050 let abs_path = file.as_local()?.abs_path(cx);
1051 let uri = lsp::Url::from_file_path(abs_path).unwrap();
1052 let buffer_snapshots = self.buffer_snapshots.entry(buffer.remote_id()).or_default();
1053 let (version, prev_snapshot) = buffer_snapshots.last()?;
1054 let next_snapshot = buffer.text_snapshot();
1055 let next_version = version + 1;
1056
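// Express each edit since the last snapshot we sent as an LSP incremental change: the range
// starts at the edit's position in the new text (which already accounts for earlier edits in
// this batch) and spans the length of the replaced text, while the replacement text itself is
// read from the new snapshot.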
1057 let content_changes = buffer
1058 .edits_since::<(PointUtf16, usize)>(prev_snapshot.version())
1059 .map(|edit| {
1060 let edit_start = edit.new.start.0;
1061 let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
1062 let new_text = next_snapshot
1063 .text_for_range(edit.new.start.1..edit.new.end.1)
1064 .collect();
1065 lsp::TextDocumentContentChangeEvent {
1066 range: Some(lsp::Range::new(
1067 edit_start.to_lsp_position(),
1068 edit_end.to_lsp_position(),
1069 )),
1070 range_length: None,
1071 text: new_text,
1072 }
1073 })
1074 .collect();
1075
1076 buffer_snapshots.push((next_version, next_snapshot));
1077
1078 language_server
1079 .notify::<lsp::notification::DidChangeTextDocument>(
1080 lsp::DidChangeTextDocumentParams {
1081 text_document: lsp::VersionedTextDocumentIdentifier::new(
1082 uri,
1083 next_version,
1084 ),
1085 content_changes,
1086 },
1087 )
1088 .log_err();
1089 }
1090 BufferEvent::Saved => {
1091 let file = File::from_dyn(buffer.read(cx).file())?;
1092 let worktree_id = file.worktree_id(cx);
1093 let abs_path = file.as_local()?.abs_path(cx);
1094 let text_document = lsp::TextDocumentIdentifier {
1095 uri: lsp::Url::from_file_path(abs_path).unwrap(),
1096 };
1097
1098 for (_, server) in self.language_servers_for_worktree(worktree_id) {
1099 server
1100 .notify::<lsp::notification::DidSaveTextDocument>(
1101 lsp::DidSaveTextDocumentParams {
1102 text_document: text_document.clone(),
1103 text: None,
1104 },
1105 )
1106 .log_err();
1107 }
1108 }
1109 _ => {}
1110 }
1111
1112 None
1113 }
1114
1115 fn language_servers_for_worktree(
1116 &self,
1117 worktree_id: WorktreeId,
1118 ) -> impl Iterator<Item = (&str, &Arc<LanguageServer>)> {
1119 self.language_servers.iter().filter_map(
1120 move |((language_server_worktree_id, language_name), server)| {
1121 if *language_server_worktree_id == worktree_id {
1122 Some((language_name.as_ref(), server))
1123 } else {
1124 None
1125 }
1126 },
1127 )
1128 }
1129
1130 fn assign_language_to_buffer(
1131 &mut self,
1132 buffer: &ModelHandle<Buffer>,
1133 cx: &mut ModelContext<Self>,
1134 ) -> Option<()> {
1135 // If the buffer has a language, set it and start the language server if we haven't already.
1136 let full_path = buffer.read(cx).file()?.full_path(cx);
1137 let language = self.languages.select_language(&full_path)?;
1138 buffer.update(cx, |buffer, cx| {
1139 buffer.set_language(Some(language.clone()), cx);
1140 });
1141
1142 let file = File::from_dyn(buffer.read(cx).file())?;
1143 let worktree = file.worktree.read(cx).as_local()?;
1144 let worktree_id = worktree.id();
1145 let worktree_abs_path = worktree.abs_path().clone();
1146 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1147
1148 None
1149 }
1150
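/// Starts a language server for the given worktree and language if one hasn't been started
/// already. Once the server is initialized, its `publishDiagnostics` and `$/progress`
/// notifications are forwarded through a channel and turned into diagnostic updates and
/// disk-based-diagnostics start/finish events, and every already-open buffer matching the
/// worktree and language is announced to the server via `textDocument/didOpen`.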
1151 fn start_language_server(
1152 &mut self,
1153 worktree_id: WorktreeId,
1154 worktree_path: Arc<Path>,
1155 language: Arc<Language>,
1156 cx: &mut ModelContext<Self>,
1157 ) {
1158 enum LspEvent {
1159 DiagnosticsStart,
1160 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
1161 DiagnosticsFinish,
1162 }
1163
1164 let key = (worktree_id, language.name());
1165 self.started_language_servers
1166 .entry(key.clone())
1167 .or_insert_with(|| {
1168 let language_server = self.languages.start_language_server(
1169 language.clone(),
1170 worktree_path,
1171 self.client.http_client(),
1172 cx,
1173 );
1174 let rpc = self.client.clone();
1175 cx.spawn_weak(|this, mut cx| async move {
1176 let mut language_server = language_server?.await.log_err()?;
1177 let this = this.upgrade(&cx)?;
1178
1179 let disk_based_sources = language
1180 .disk_based_diagnostic_sources()
1181 .cloned()
1182 .unwrap_or_default();
1183 let disk_based_diagnostics_progress_token =
1184 language.disk_based_diagnostics_progress_token().cloned();
1185 let has_disk_based_diagnostic_progress_token =
1186 disk_based_diagnostics_progress_token.is_some();
1187 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1188
1189 // Listen for `PublishDiagnostics` notifications.
1190 language_server
1191 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1192 let diagnostics_tx = diagnostics_tx.clone();
1193 move |params| {
1194 if !has_disk_based_diagnostic_progress_token {
1195 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1196 }
1197 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1198 .ok();
1199 if !has_disk_based_diagnostic_progress_token {
1200 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1201 }
1202 }
1203 })
1204 .detach();
1205
1206 // Listen for `Progress` notifications. Send an event when the language server
1207 // transitions between running jobs and not running any jobs.
1208 let mut running_jobs_for_this_server: i32 = 0;
1209 language_server
1210 .on_notification::<lsp::notification::Progress, _>(move |params| {
1211 let token = match params.token {
1212 lsp::NumberOrString::Number(_) => None,
1213 lsp::NumberOrString::String(token) => Some(token),
1214 };
1215
1216 if token == disk_based_diagnostics_progress_token {
1217 match params.value {
1218 lsp::ProgressParamsValue::WorkDone(progress) => {
1219 match progress {
1220 lsp::WorkDoneProgress::Begin(_) => {
1221 running_jobs_for_this_server += 1;
1222 if running_jobs_for_this_server == 1 {
1223 block_on(
1224 diagnostics_tx
1225 .send(LspEvent::DiagnosticsStart),
1226 )
1227 .ok();
1228 }
1229 }
1230 lsp::WorkDoneProgress::End(_) => {
1231 running_jobs_for_this_server -= 1;
1232 if running_jobs_for_this_server == 0 {
1233 block_on(
1234 diagnostics_tx
1235 .send(LspEvent::DiagnosticsFinish),
1236 )
1237 .ok();
1238 }
1239 }
1240 _ => {}
1241 }
1242 }
1243 }
1244 }
1245 })
1246 .detach();
1247
1248 // Process all the LSP events.
1249 cx.spawn(|mut cx| {
1250 let this = this.downgrade();
1251 async move {
1252 while let Ok(message) = diagnostics_rx.recv().await {
1253 let this = this.upgrade(&cx)?;
1254 match message {
1255 LspEvent::DiagnosticsStart => {
1256 this.update(&mut cx, |this, cx| {
1257 this.disk_based_diagnostics_started(cx);
1258 if let Some(project_id) = this.remote_id() {
1259 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1260 project_id,
1261 })
1262 .log_err();
1263 }
1264 });
1265 }
1266 LspEvent::DiagnosticsUpdate(mut params) => {
1267 language.process_diagnostics(&mut params);
1268 this.update(&mut cx, |this, cx| {
1269 this.update_diagnostics(
1270 params,
1271 &disk_based_sources,
1272 cx,
1273 )
1274 .log_err();
1275 });
1276 }
1277 LspEvent::DiagnosticsFinish => {
1278 this.update(&mut cx, |this, cx| {
1279 this.disk_based_diagnostics_finished(cx);
1280 if let Some(project_id) = this.remote_id() {
1281 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1282 project_id,
1283 })
1284 .log_err();
1285 }
1286 });
1287 }
1288 }
1289 }
1290 Some(())
1291 }
1292 })
1293 .detach();
1294
1295 let language_server = language_server.initialize().await.log_err()?;
1296 this.update(&mut cx, |this, cx| {
1297 this.language_servers
1298 .insert(key.clone(), language_server.clone());
1299
1300 // Tell the language server about every open buffer in the worktree that matches the language.
1301 for buffer in this.opened_buffers.values() {
1302 if let Some(buffer_handle) = buffer.upgrade(cx) {
1303 let buffer = buffer_handle.read(cx);
1304 let file = if let Some(file) = File::from_dyn(buffer.file()) {
1305 file
1306 } else {
1307 continue;
1308 };
1309 let language = if let Some(language) = buffer.language() {
1310 language
1311 } else {
1312 continue;
1313 };
1314 if (file.worktree.read(cx).id(), language.name()) != key {
1315 continue;
1316 }
1317
1318 let file = file.as_local()?;
1319 let versions = this
1320 .buffer_snapshots
1321 .entry(buffer.remote_id())
1322 .or_insert_with(|| vec![(0, buffer.text_snapshot())]);
1323 let (version, initial_snapshot) = versions.last().unwrap();
1324 let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
1325 language_server
1326 .notify::<lsp::notification::DidOpenTextDocument>(
1327 lsp::DidOpenTextDocumentParams {
1328 text_document: lsp::TextDocumentItem::new(
1329 uri,
1330 Default::default(),
1331 *version,
1332 initial_snapshot.text(),
1333 ),
1334 },
1335 )
1336 .log_err()?;
1337 buffer_handle.update(cx, |buffer, cx| {
1338 buffer.set_completion_triggers(
1339 language_server
1340 .capabilities()
1341 .completion_provider
1342 .as_ref()
1343 .and_then(|provider| {
1344 provider.trigger_characters.clone()
1345 })
1346 .unwrap_or(Vec::new()),
1347 cx,
1348 )
1349 });
1350 }
1351 }
1352
1353 Some(())
1354 });
1355
1356 Some(language_server)
1357 })
1358 });
1359 }
1360
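/// Converts an LSP `publishDiagnostics` notification into the project's diagnostic entries.
/// Diagnostics that merely point back at a primary diagnostic through `relatedInformation` are
/// folded into that diagnostic's group as supporting entries; all others start a new group and
/// are marked disk-based when their source appears in `disk_based_sources`.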
1361 pub fn update_diagnostics(
1362 &mut self,
1363 params: lsp::PublishDiagnosticsParams,
1364 disk_based_sources: &HashSet<String>,
1365 cx: &mut ModelContext<Self>,
1366 ) -> Result<()> {
1367 let abs_path = params
1368 .uri
1369 .to_file_path()
1370 .map_err(|_| anyhow!("URI is not a file"))?;
1371 let mut next_group_id = 0;
1372 let mut diagnostics = Vec::default();
1373 let mut primary_diagnostic_group_ids = HashMap::default();
1374 let mut sources_by_group_id = HashMap::default();
1375 let mut supporting_diagnostic_severities = HashMap::default();
1376 for diagnostic in &params.diagnostics {
1377 let source = diagnostic.source.as_ref();
1378 let code = diagnostic.code.as_ref().map(|code| match code {
1379 lsp::NumberOrString::Number(code) => code.to_string(),
1380 lsp::NumberOrString::String(code) => code.clone(),
1381 });
1382 let range = range_from_lsp(diagnostic.range);
1383 let is_supporting = diagnostic
1384 .related_information
1385 .as_ref()
1386 .map_or(false, |infos| {
1387 infos.iter().any(|info| {
1388 primary_diagnostic_group_ids.contains_key(&(
1389 source,
1390 code.clone(),
1391 range_from_lsp(info.location.range),
1392 ))
1393 })
1394 });
1395
1396 if is_supporting {
1397 if let Some(severity) = diagnostic.severity {
1398 supporting_diagnostic_severities
1399 .insert((source, code.clone(), range), severity);
1400 }
1401 } else {
1402 let group_id = post_inc(&mut next_group_id);
1403 let is_disk_based =
1404 source.map_or(false, |source| disk_based_sources.contains(source));
1405
1406 sources_by_group_id.insert(group_id, source);
1407 primary_diagnostic_group_ids
1408 .insert((source, code.clone(), range.clone()), group_id);
1409
1410 diagnostics.push(DiagnosticEntry {
1411 range,
1412 diagnostic: Diagnostic {
1413 code: code.clone(),
1414 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1415 message: diagnostic.message.clone(),
1416 group_id,
1417 is_primary: true,
1418 is_valid: true,
1419 is_disk_based,
1420 },
1421 });
1422 if let Some(infos) = &diagnostic.related_information {
1423 for info in infos {
1424 if info.location.uri == params.uri && !info.message.is_empty() {
1425 let range = range_from_lsp(info.location.range);
1426 diagnostics.push(DiagnosticEntry {
1427 range,
1428 diagnostic: Diagnostic {
1429 code: code.clone(),
1430 severity: DiagnosticSeverity::INFORMATION,
1431 message: info.message.clone(),
1432 group_id,
1433 is_primary: false,
1434 is_valid: true,
1435 is_disk_based,
1436 },
1437 });
1438 }
1439 }
1440 }
1441 }
1442 }
1443
1444 for entry in &mut diagnostics {
1445 let diagnostic = &mut entry.diagnostic;
1446 if !diagnostic.is_primary {
1447 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1448 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1449 source,
1450 diagnostic.code.clone(),
1451 entry.range.clone(),
1452 )) {
1453 diagnostic.severity = severity;
1454 }
1455 }
1456 }
1457
1458 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1459 Ok(())
1460 }
1461
1462 pub fn update_diagnostic_entries(
1463 &mut self,
1464 abs_path: PathBuf,
1465 version: Option<i32>,
1466 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1467 cx: &mut ModelContext<Project>,
1468 ) -> Result<(), anyhow::Error> {
1469 let (worktree, relative_path) = self
1470 .find_local_worktree(&abs_path, cx)
1471 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1472 if !worktree.read(cx).is_visible() {
1473 return Ok(());
1474 }
1475
1476 let project_path = ProjectPath {
1477 worktree_id: worktree.read(cx).id(),
1478 path: relative_path.into(),
1479 };
1480
1481 for buffer in self.opened_buffers.values() {
1482 if let Some(buffer) = buffer.upgrade(cx) {
1483 if buffer
1484 .read(cx)
1485 .file()
1486 .map_or(false, |file| *file.path() == project_path.path)
1487 {
1488 self.update_buffer_diagnostics(&buffer, diagnostics.clone(), version, cx)?;
1489 break;
1490 }
1491 }
1492 }
1493 worktree.update(cx, |worktree, cx| {
1494 worktree
1495 .as_local_mut()
1496 .ok_or_else(|| anyhow!("not a local worktree"))?
1497 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1498 })?;
1499 cx.emit(Event::DiagnosticsUpdated(project_path));
1500 Ok(())
1501 }
1502
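/// Applies a new set of diagnostics to a buffer. Entries from disk-based sources have their
/// ranges translated through any edits made since the buffer was last saved (entries that
/// intersect an unsaved edit are dropped), and empty ranges are expanded by one character so
/// they remain visible.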
1503 fn update_buffer_diagnostics(
1504 &mut self,
1505 buffer: &ModelHandle<Buffer>,
1506 mut diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1507 version: Option<i32>,
1508 cx: &mut ModelContext<Self>,
1509 ) -> Result<()> {
1510 fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
1511 Ordering::Equal
1512 .then_with(|| b.is_primary.cmp(&a.is_primary))
1513 .then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
1514 .then_with(|| a.severity.cmp(&b.severity))
1515 .then_with(|| a.message.cmp(&b.message))
1516 }
1517
1518 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx)?;
1519
1520 diagnostics.sort_unstable_by(|a, b| {
1521 Ordering::Equal
1522 .then_with(|| a.range.start.cmp(&b.range.start))
1523 .then_with(|| b.range.end.cmp(&a.range.end))
1524 .then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
1525 });
1526
1527 let mut sanitized_diagnostics = Vec::new();
1528 let mut edits_since_save = snapshot
1529 .edits_since::<PointUtf16>(buffer.read(cx).saved_version())
1530 .peekable();
1531 let mut last_edit_old_end = PointUtf16::zero();
1532 let mut last_edit_new_end = PointUtf16::zero();
1533 'outer: for entry in diagnostics {
1534 let mut start = entry.range.start;
1535 let mut end = entry.range.end;
1536
1537 // Some diagnostics are based on files on disk instead of buffers'
1538 // current contents. Adjust these diagnostics' ranges to reflect
1539 // any unsaved edits.
1540 if entry.diagnostic.is_disk_based {
1541 while let Some(edit) = edits_since_save.peek() {
1542 if edit.old.end <= start {
1543 last_edit_old_end = edit.old.end;
1544 last_edit_new_end = edit.new.end;
1545 edits_since_save.next();
1546 } else if edit.old.start <= end && edit.old.end >= start {
1547 continue 'outer;
1548 } else {
1549 break;
1550 }
1551 }
1552
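// Keep the diagnostic's offset past the last edit's old end and re-apply that offset after the
// edit's new end, mapping the on-disk position into the buffer's current coordinates.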
1553 let start_overshoot = start - last_edit_old_end;
1554 start = last_edit_new_end;
1555 start += start_overshoot;
1556
1557 let end_overshoot = end - last_edit_old_end;
1558 end = last_edit_new_end;
1559 end += end_overshoot;
1560 }
1561
1562 let mut range = snapshot.clip_point_utf16(start, Bias::Left)
1563 ..snapshot.clip_point_utf16(end, Bias::Right);
1564
1565 // Expand empty ranges by one character
1566 if range.start == range.end {
1567 range.end.column += 1;
1568 range.end = snapshot.clip_point_utf16(range.end, Bias::Right);
1569 if range.start == range.end && range.end.column > 0 {
1570 range.start.column -= 1;
1571 range.start = snapshot.clip_point_utf16(range.start, Bias::Left);
1572 }
1573 }
1574
1575 sanitized_diagnostics.push(DiagnosticEntry {
1576 range,
1577 diagnostic: entry.diagnostic,
1578 });
1579 }
1580 drop(edits_since_save);
1581
1582 let set = DiagnosticSet::new(sanitized_diagnostics, &snapshot);
1583 buffer.update(cx, |buffer, cx| buffer.update_diagnostics(set, cx));
1584 Ok(())
1585 }
1586
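/// Formats the given buffers and returns the resulting transactions. Buffers owned by another
/// peer are formatted via a `FormatBuffers` request; local buffers are formatted through the
/// language server's document (or, failing that, range) formatting support, applying the edits
/// in a single transaction per buffer and optionally pushing it to the undo history.
///
/// A hypothetical call site (illustrative only; assumes `buffers_to_format` and `cx` are in scope):
///
/// ```ignore
/// let format_task = project.update(cx, |project, cx| {
///     project.format(buffers_to_format, true, cx)
/// });
/// ```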
1587 pub fn format(
1588 &self,
1589 buffers: HashSet<ModelHandle<Buffer>>,
1590 push_to_history: bool,
1591 cx: &mut ModelContext<Project>,
1592 ) -> Task<Result<ProjectTransaction>> {
1593 let mut local_buffers = Vec::new();
1594 let mut remote_buffers = None;
1595 for buffer_handle in buffers {
1596 let buffer = buffer_handle.read(cx);
1597 if let Some(file) = File::from_dyn(buffer.file()) {
1598 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1599 if let Some(server) = self.language_server_for_buffer(buffer, cx) {
1600 local_buffers.push((buffer_handle, buffer_abs_path, server.clone()));
1601 }
1602 } else {
1603 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1604 }
1605 } else {
1606 return Task::ready(Ok(Default::default()));
1607 }
1608 }
1609
1610 let remote_buffers = self.remote_id().zip(remote_buffers);
1611 let client = self.client.clone();
1612
1613 cx.spawn(|this, mut cx| async move {
1614 let mut project_transaction = ProjectTransaction::default();
1615
1616 if let Some((project_id, remote_buffers)) = remote_buffers {
1617 let response = client
1618 .request(proto::FormatBuffers {
1619 project_id,
1620 buffer_ids: remote_buffers
1621 .iter()
1622 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1623 .collect(),
1624 })
1625 .await?
1626 .transaction
1627 .ok_or_else(|| anyhow!("missing transaction"))?;
1628 project_transaction = this
1629 .update(&mut cx, |this, cx| {
1630 this.deserialize_project_transaction(response, push_to_history, cx)
1631 })
1632 .await?;
1633 }
1634
1635 for (buffer, buffer_abs_path, language_server) in local_buffers {
1636 let text_document = lsp::TextDocumentIdentifier::new(
1637 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1638 );
1639 let capabilities = &language_server.capabilities();
1640 let lsp_edits = if capabilities
1641 .document_formatting_provider
1642 .as_ref()
1643 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1644 {
1645 language_server
1646 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1647 text_document,
1648 options: Default::default(),
1649 work_done_progress_params: Default::default(),
1650 })
1651 .await?
1652 } else if capabilities
1653 .document_range_formatting_provider
1654 .as_ref()
1655 .map_or(false, |provider| *provider != lsp::OneOf::Left(false))
1656 {
1657 let buffer_start = lsp::Position::new(0, 0);
1658 let buffer_end = buffer
1659 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1660 .to_lsp_position();
1661 language_server
1662 .request::<lsp::request::RangeFormatting>(
1663 lsp::DocumentRangeFormattingParams {
1664 text_document,
1665 range: lsp::Range::new(buffer_start, buffer_end),
1666 options: Default::default(),
1667 work_done_progress_params: Default::default(),
1668 },
1669 )
1670 .await?
1671 } else {
1672 continue;
1673 };
1674
1675 if let Some(lsp_edits) = lsp_edits {
1676 let edits = this
1677 .update(&mut cx, |this, cx| {
1678 this.edits_from_lsp(&buffer, lsp_edits, None, cx)
1679 })
1680 .await?;
1681 buffer.update(&mut cx, |buffer, cx| {
1682 buffer.finalize_last_transaction();
1683 buffer.start_transaction();
1684 for (range, text) in edits {
1685 buffer.edit([range], text, cx);
1686 }
1687 if buffer.end_transaction(cx).is_some() {
1688 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1689 if !push_to_history {
1690 buffer.forget_transaction(transaction.id);
1691 }
1692 project_transaction.0.insert(cx.handle(), transaction);
1693 }
1694 });
1695 }
1696 }
1697
1698 Ok(project_transaction)
1699 })
1700 }
1701
1702 pub fn definition<T: ToPointUtf16>(
1703 &self,
1704 buffer: &ModelHandle<Buffer>,
1705 position: T,
1706 cx: &mut ModelContext<Self>,
1707 ) -> Task<Result<Vec<Location>>> {
1708 let position = position.to_point_utf16(buffer.read(cx));
1709 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1710 }
1711
1712 pub fn references<T: ToPointUtf16>(
1713 &self,
1714 buffer: &ModelHandle<Buffer>,
1715 position: T,
1716 cx: &mut ModelContext<Self>,
1717 ) -> Task<Result<Vec<Location>>> {
1718 let position = position.to_point_utf16(buffer.read(cx));
1719 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1720 }
1721
1722 pub fn document_highlights<T: ToPointUtf16>(
1723 &self,
1724 buffer: &ModelHandle<Buffer>,
1725 position: T,
1726 cx: &mut ModelContext<Self>,
1727 ) -> Task<Result<Vec<DocumentHighlight>>> {
1728 let position = position.to_point_utf16(buffer.read(cx));
1729
1730 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1731 }
1732
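/// Searches for workspace symbols matching `query`. Locally, a `workspace/symbol` request is sent
/// to every running language server and the results are resolved back to project paths; remotely,
/// the query is forwarded to the host via `GetProjectSymbols`.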
1733 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1734 if self.is_local() {
1735 let mut language_servers = HashMap::default();
1736 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1737 if let Some((worktree, language)) = self
1738 .worktree_for_id(*worktree_id, cx)
1739 .and_then(|worktree| worktree.read(cx).as_local())
1740 .zip(self.languages.get_language(language_name))
1741 {
1742 language_servers
1743 .entry(Arc::as_ptr(language_server))
1744 .or_insert((
1745 language_server.clone(),
1746 *worktree_id,
1747 worktree.abs_path().clone(),
1748 language.clone(),
1749 ));
1750 }
1751 }
1752
1753 let mut requests = Vec::new();
1754 for (language_server, _, _, _) in language_servers.values() {
1755 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1756 lsp::WorkspaceSymbolParams {
1757 query: query.to_string(),
1758 ..Default::default()
1759 },
1760 ));
1761 }
1762
1763 cx.spawn_weak(|this, cx| async move {
1764 let responses = futures::future::try_join_all(requests).await?;
1765
1766 let mut symbols = Vec::new();
1767 if let Some(this) = this.upgrade(&cx) {
1768 this.read_with(&cx, |this, cx| {
1769 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1770 language_servers.into_values().zip(responses)
1771 {
1772 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1773 |lsp_symbol| {
1774 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1775 let mut worktree_id = source_worktree_id;
1776 let path;
1777 if let Some((worktree, rel_path)) =
1778 this.find_local_worktree(&abs_path, cx)
1779 {
1780 worktree_id = worktree.read(cx).id();
1781 path = rel_path;
1782 } else {
1783 path = relativize_path(&worktree_abs_path, &abs_path);
1784 }
1785
1786 let label = language
1787 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1788 .unwrap_or_else(|| {
1789 CodeLabel::plain(lsp_symbol.name.clone(), None)
1790 });
1791 let signature = this.symbol_signature(worktree_id, &path);
1792
1793 Some(Symbol {
1794 source_worktree_id,
1795 worktree_id,
1796 language_name: language.name().to_string(),
1797 name: lsp_symbol.name,
1798 kind: lsp_symbol.kind,
1799 label,
1800 path,
1801 range: range_from_lsp(lsp_symbol.location.range),
1802 signature,
1803 })
1804 },
1805 ));
1806 }
1807 })
1808 }
1809
1810 Ok(symbols)
1811 })
1812 } else if let Some(project_id) = self.remote_id() {
1813 let request = self.client.request(proto::GetProjectSymbols {
1814 project_id,
1815 query: query.to_string(),
1816 });
1817 cx.spawn_weak(|this, cx| async move {
1818 let response = request.await?;
1819 let mut symbols = Vec::new();
1820 if let Some(this) = this.upgrade(&cx) {
1821 this.read_with(&cx, |this, _| {
1822 symbols.extend(
1823 response
1824 .symbols
1825 .into_iter()
1826 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1827 );
1828 })
1829 }
1830 Ok(symbols)
1831 })
1832 } else {
1833 Task::ready(Ok(Default::default()))
1834 }
1835 }
1836
1837 pub fn open_buffer_for_symbol(
1838 &mut self,
1839 symbol: &Symbol,
1840 cx: &mut ModelContext<Self>,
1841 ) -> Task<Result<ModelHandle<Buffer>>> {
1842 if self.is_local() {
1843 let language_server = if let Some(server) = self.language_servers.get(&(
1844 symbol.source_worktree_id,
1845 Arc::from(symbol.language_name.as_str()),
1846 )) {
1847 server.clone()
1848 } else {
1849 return Task::ready(Err(anyhow!(
1850 "language server for worktree and language not found"
1851 )));
1852 };
1853
1854 let worktree_abs_path = if let Some(worktree_abs_path) = self
1855 .worktree_for_id(symbol.worktree_id, cx)
1856 .and_then(|worktree| worktree.read(cx).as_local())
1857 .map(|local_worktree| local_worktree.abs_path())
1858 {
1859 worktree_abs_path
1860 } else {
1861 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1862 };
1863 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1864 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1865 uri
1866 } else {
1867 return Task::ready(Err(anyhow!("invalid symbol path")));
1868 };
1869
1870 self.open_local_buffer_via_lsp(
1871 symbol_uri,
1872 Arc::from(symbol.language_name.as_str()),
1873 language_server,
1874 cx,
1875 )
1876 } else if let Some(project_id) = self.remote_id() {
1877 let request = self.client.request(proto::OpenBufferForSymbol {
1878 project_id,
1879 symbol: Some(serialize_symbol(symbol)),
1880 });
1881 cx.spawn(|this, mut cx| async move {
1882 let response = request.await?;
1883 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1884 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1885 .await
1886 })
1887 } else {
1888 Task::ready(Err(anyhow!("project does not have a remote id")))
1889 }
1890 }
1891
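/// Requests completions at the given position. For local buffers, a `textDocument/completion`
/// request is sent to the buffer's language server and each returned text edit is anchored into
/// the buffer; for remote buffers, the request is forwarded to the host as a `GetCompletions`
/// message.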
1892 pub fn completions<T: ToPointUtf16>(
1893 &self,
1894 source_buffer_handle: &ModelHandle<Buffer>,
1895 position: T,
1896 cx: &mut ModelContext<Self>,
1897 ) -> Task<Result<Vec<Completion>>> {
1898 let source_buffer_handle = source_buffer_handle.clone();
1899 let source_buffer = source_buffer_handle.read(cx);
1900 let buffer_id = source_buffer.remote_id();
1901 let language = source_buffer.language().cloned();
1902 let worktree;
1903 let buffer_abs_path;
1904 if let Some(file) = File::from_dyn(source_buffer.file()) {
1905 worktree = file.worktree.clone();
1906 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1907 } else {
1908 return Task::ready(Ok(Default::default()));
1909 };
1910
1911 let position = position.to_point_utf16(source_buffer);
1912 let anchor = source_buffer.anchor_after(position);
1913
1914 if worktree.read(cx).as_local().is_some() {
1915 let buffer_abs_path = buffer_abs_path.unwrap();
1916 let lang_server =
1917 if let Some(server) = self.language_server_for_buffer(source_buffer, cx) {
1918 server.clone()
1919 } else {
1920 return Task::ready(Ok(Default::default()));
1921 };
1922
1923 cx.spawn(|_, cx| async move {
1924 let completions = lang_server
1925 .request::<lsp::request::Completion>(lsp::CompletionParams {
1926 text_document_position: lsp::TextDocumentPositionParams::new(
1927 lsp::TextDocumentIdentifier::new(
1928 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1929 ),
1930 position.to_lsp_position(),
1931 ),
1932 context: Default::default(),
1933 work_done_progress_params: Default::default(),
1934 partial_result_params: Default::default(),
1935 })
1936 .await
1937 .context("lsp completion request failed")?;
1938
1939 let completions = if let Some(completions) = completions {
1940 match completions {
1941 lsp::CompletionResponse::Array(completions) => completions,
1942 lsp::CompletionResponse::List(list) => list.items,
1943 }
1944 } else {
1945 Default::default()
1946 };
1947
1948 source_buffer_handle.read_with(&cx, |this, _| {
1949 Ok(completions
1950 .into_iter()
1951 .filter_map(|lsp_completion| {
1952 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1953 lsp::CompletionTextEdit::Edit(edit) => {
1954 (range_from_lsp(edit.range), edit.new_text.clone())
1955 }
1956 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1957 log::info!("unsupported insert/replace completion");
1958 return None;
1959 }
1960 };
1961
1962 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1963 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1964 if clipped_start == old_range.start && clipped_end == old_range.end {
1965 Some(Completion {
1966 old_range: this.anchor_before(old_range.start)
1967 ..this.anchor_after(old_range.end),
1968 new_text,
1969 label: language
1970 .as_ref()
1971 .and_then(|l| l.label_for_completion(&lsp_completion))
1972 .unwrap_or_else(|| {
1973 CodeLabel::plain(
1974 lsp_completion.label.clone(),
1975 lsp_completion.filter_text.as_deref(),
1976 )
1977 }),
1978 lsp_completion,
1979 })
1980 } else {
1981 None
1982 }
1983 })
1984 .collect())
1985 })
1986 })
1987 } else if let Some(project_id) = self.remote_id() {
1988 let rpc = self.client.clone();
1989 let message = proto::GetCompletions {
1990 project_id,
1991 buffer_id,
1992 position: Some(language::proto::serialize_anchor(&anchor)),
1993 version: serialize_version(&source_buffer.version()),
1994 };
1995 cx.spawn_weak(|_, mut cx| async move {
1996 let response = rpc.request(message).await?;
1997
1998 source_buffer_handle
1999 .update(&mut cx, |buffer, _| {
2000 buffer.wait_for_version(deserialize_version(response.version))
2001 })
2002 .await;
2003
2004 response
2005 .completions
2006 .into_iter()
2007 .map(|completion| {
2008 language::proto::deserialize_completion(completion, language.as_ref())
2009 })
2010 .collect()
2011 })
2012 } else {
2013 Task::ready(Ok(Default::default()))
2014 }
2015 }
2016
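/// Resolves the given completion with the language server and applies the
/// `additionalTextEdits` it returns, producing the resulting transaction (or
/// `None` if there was nothing to apply). When `push_to_history` is false the
/// transaction is forgotten so it won't show up in the buffer's undo history.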
2017 pub fn apply_additional_edits_for_completion(
2018 &self,
2019 buffer_handle: ModelHandle<Buffer>,
2020 completion: Completion,
2021 push_to_history: bool,
2022 cx: &mut ModelContext<Self>,
2023 ) -> Task<Result<Option<Transaction>>> {
2024 let buffer = buffer_handle.read(cx);
2025 let buffer_id = buffer.remote_id();
2026
2027 if self.is_local() {
2028 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2029 server.clone()
2030 } else {
2031 return Task::ready(Ok(Default::default()));
2032 };
2033
2034 cx.spawn(|this, mut cx| async move {
2035 let resolved_completion = lang_server
2036 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
2037 .await?;
2038 if let Some(edits) = resolved_completion.additional_text_edits {
2039 let edits = this
2040 .update(&mut cx, |this, cx| {
2041 this.edits_from_lsp(&buffer_handle, edits, None, cx)
2042 })
2043 .await?;
2044 buffer_handle.update(&mut cx, |buffer, cx| {
2045 buffer.finalize_last_transaction();
2046 buffer.start_transaction();
2047 for (range, text) in edits {
2048 buffer.edit([range], text, cx);
2049 }
2050 let transaction = if buffer.end_transaction(cx).is_some() {
2051 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2052 if !push_to_history {
2053 buffer.forget_transaction(transaction.id);
2054 }
2055 Some(transaction)
2056 } else {
2057 None
2058 };
2059 Ok(transaction)
2060 })
2061 } else {
2062 Ok(None)
2063 }
2064 })
2065 } else if let Some(project_id) = self.remote_id() {
2066 let client = self.client.clone();
2067 cx.spawn(|_, mut cx| async move {
2068 let response = client
2069 .request(proto::ApplyCompletionAdditionalEdits {
2070 project_id,
2071 buffer_id,
2072 completion: Some(language::proto::serialize_completion(&completion)),
2073 })
2074 .await?;
2075
2076 if let Some(transaction) = response.transaction {
2077 let transaction = language::proto::deserialize_transaction(transaction)?;
2078 buffer_handle
2079 .update(&mut cx, |buffer, _| {
2080 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
2081 })
2082 .await;
2083 if push_to_history {
2084 buffer_handle.update(&mut cx, |buffer, _| {
2085 buffer.push_transaction(transaction.clone(), Instant::now());
2086 });
2087 }
2088 Ok(Some(transaction))
2089 } else {
2090 Ok(None)
2091 }
2092 })
2093 } else {
2094 Task::ready(Err(anyhow!("project does not have a remote id")))
2095 }
2096 }
2097
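/// Fetches the code actions that apply to the given range.
///
/// Locally this sends a `textDocument/codeAction` request restricted to
/// quickfix and refactor kinds; remotely it issues a `GetCodeActions` RPC
/// request and waits for the buffer to reach the host's version before
/// deserializing the actions.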
2098 pub fn code_actions<T: ToOffset>(
2099 &self,
2100 buffer_handle: &ModelHandle<Buffer>,
2101 range: Range<T>,
2102 cx: &mut ModelContext<Self>,
2103 ) -> Task<Result<Vec<CodeAction>>> {
2104 let buffer_handle = buffer_handle.clone();
2105 let buffer = buffer_handle.read(cx);
2106 let buffer_id = buffer.remote_id();
2107 let worktree;
2108 let buffer_abs_path;
2109 if let Some(file) = File::from_dyn(buffer.file()) {
2110 worktree = file.worktree.clone();
2111 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
2112 } else {
2113 return Task::ready(Ok(Default::default()));
2114 };
2115 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
2116
2117 if worktree.read(cx).as_local().is_some() {
2118 let buffer_abs_path = buffer_abs_path.unwrap();
2119 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2120 server.clone()
2121 } else {
2122 return Task::ready(Ok(Default::default()));
2123 };
2124
2125 let lsp_range = lsp::Range::new(
2126 range.start.to_point_utf16(buffer).to_lsp_position(),
2127 range.end.to_point_utf16(buffer).to_lsp_position(),
2128 );
2129 cx.foreground().spawn(async move {
2130 if lang_server.capabilities().code_action_provider.is_none() {
2131 return Ok(Default::default());
2132 }
2133
2134 Ok(lang_server
2135 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
2136 text_document: lsp::TextDocumentIdentifier::new(
2137 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
2138 ),
2139 range: lsp_range,
2140 work_done_progress_params: Default::default(),
2141 partial_result_params: Default::default(),
2142 context: lsp::CodeActionContext {
2143 diagnostics: Default::default(),
2144 only: Some(vec![
2145 lsp::CodeActionKind::QUICKFIX,
2146 lsp::CodeActionKind::REFACTOR,
2147 lsp::CodeActionKind::REFACTOR_EXTRACT,
2148 ]),
2149 },
2150 })
2151 .await?
2152 .unwrap_or_default()
2153 .into_iter()
2154 .filter_map(|entry| {
2155 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
2156 Some(CodeAction {
2157 range: range.clone(),
2158 lsp_action,
2159 })
2160 } else {
2161 None
2162 }
2163 })
2164 .collect())
2165 })
2166 } else if let Some(project_id) = self.remote_id() {
2167 let rpc = self.client.clone();
2168 let version = buffer.version();
2169 cx.spawn_weak(|_, mut cx| async move {
2170 let response = rpc
2171 .request(proto::GetCodeActions {
2172 project_id,
2173 buffer_id,
2174 start: Some(language::proto::serialize_anchor(&range.start)),
2175 end: Some(language::proto::serialize_anchor(&range.end)),
2176 version: serialize_version(&version),
2177 })
2178 .await?;
2179
2180 buffer_handle
2181 .update(&mut cx, |buffer, _| {
2182 buffer.wait_for_version(deserialize_version(response.version))
2183 })
2184 .await;
2185
2186 response
2187 .actions
2188 .into_iter()
2189 .map(language::proto::deserialize_code_action)
2190 .collect()
2191 })
2192 } else {
2193 Task::ready(Ok(Default::default()))
2194 }
2195 }
2196
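/// Applies a code action to the project.
///
/// If the action still needs to be resolved, it is either resolved via
/// `codeAction/resolve` or re-requested and matched by title, and any resulting
/// workspace edit is applied across the affected buffers. The returned
/// `ProjectTransaction` records the transaction created in each edited buffer.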
2197 pub fn apply_code_action(
2198 &self,
2199 buffer_handle: ModelHandle<Buffer>,
2200 mut action: CodeAction,
2201 push_to_history: bool,
2202 cx: &mut ModelContext<Self>,
2203 ) -> Task<Result<ProjectTransaction>> {
2204 if self.is_local() {
2205 let buffer = buffer_handle.read(cx);
2206 let lang_name = if let Some(lang) = buffer.language() {
2207 lang.name()
2208 } else {
2209 return Task::ready(Ok(Default::default()));
2210 };
2211 let lang_server = if let Some(server) = self.language_server_for_buffer(buffer, cx) {
2212 server.clone()
2213 } else {
2214 return Task::ready(Ok(Default::default()));
2215 };
2216 let range = action.range.to_point_utf16(buffer);
2217
2218 cx.spawn(|this, mut cx| async move {
2219 if let Some(lsp_range) = action
2220 .lsp_action
2221 .data
2222 .as_mut()
2223 .and_then(|d| d.get_mut("codeActionParams"))
2224 .and_then(|d| d.get_mut("range"))
2225 {
2226 *lsp_range = serde_json::to_value(&lsp::Range::new(
2227 range.start.to_lsp_position(),
2228 range.end.to_lsp_position(),
2229 ))
2230 .unwrap();
2231 action.lsp_action = lang_server
2232 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2233 .await?;
2234 } else {
2235 let actions = this
2236 .update(&mut cx, |this, cx| {
2237 this.code_actions(&buffer_handle, action.range, cx)
2238 })
2239 .await?;
2240 action.lsp_action = actions
2241 .into_iter()
2242 .find(|a| a.lsp_action.title == action.lsp_action.title)
2243 .ok_or_else(|| anyhow!("code action is outdated"))?
2244 .lsp_action;
2245 }
2246
2247 if let Some(edit) = action.lsp_action.edit {
2248 Self::deserialize_workspace_edit(
2249 this,
2250 edit,
2251 push_to_history,
2252 lang_name,
2253 lang_server,
2254 &mut cx,
2255 )
2256 .await
2257 } else {
2258 Ok(ProjectTransaction::default())
2259 }
2260 })
2261 } else if let Some(project_id) = self.remote_id() {
2262 let client = self.client.clone();
2263 let request = proto::ApplyCodeAction {
2264 project_id,
2265 buffer_id: buffer_handle.read(cx).remote_id(),
2266 action: Some(language::proto::serialize_code_action(&action)),
2267 };
2268 cx.spawn(|this, mut cx| async move {
2269 let response = client
2270 .request(request)
2271 .await?
2272 .transaction
2273 .ok_or_else(|| anyhow!("missing transaction"))?;
2274 this.update(&mut cx, |this, cx| {
2275 this.deserialize_project_transaction(response, push_to_history, cx)
2276 })
2277 .await
2278 })
2279 } else {
2280 Task::ready(Err(anyhow!("project does not have a remote id")))
2281 }
2282 }
2283
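/// Applies an LSP workspace edit to the local project: file creations, renames,
/// and deletions are performed through the project's `Fs`, and text edits are
/// applied to the corresponding buffers. Returns the transactions produced for
/// each edited buffer.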
2284 async fn deserialize_workspace_edit(
2285 this: ModelHandle<Self>,
2286 edit: lsp::WorkspaceEdit,
2287 push_to_history: bool,
2288 language_name: Arc<str>,
2289 language_server: Arc<LanguageServer>,
2290 cx: &mut AsyncAppContext,
2291 ) -> Result<ProjectTransaction> {
2292 let fs = this.read_with(cx, |this, _| this.fs.clone());
2293 let mut operations = Vec::new();
2294 if let Some(document_changes) = edit.document_changes {
2295 match document_changes {
2296 lsp::DocumentChanges::Edits(edits) => {
2297 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2298 }
2299 lsp::DocumentChanges::Operations(ops) => operations = ops,
2300 }
2301 } else if let Some(changes) = edit.changes {
2302 operations.extend(changes.into_iter().map(|(uri, edits)| {
2303 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2304 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2305 uri,
2306 version: None,
2307 },
2308 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2309 })
2310 }));
2311 }
2312
2313 let mut project_transaction = ProjectTransaction::default();
2314 for operation in operations {
2315 match operation {
2316 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2317 let abs_path = op
2318 .uri
2319 .to_file_path()
2320 .map_err(|_| anyhow!("can't convert URI to path"))?;
2321
2322 if let Some(parent_path) = abs_path.parent() {
2323 fs.create_dir(parent_path).await?;
2324 }
// `Path::ends_with` compares whole path components and never matches a trailing
// separator, so inspect the rendered path string to detect directory URIs.
2325 if abs_path.to_string_lossy().ends_with('/') {
2326 fs.create_dir(&abs_path).await?;
2327 } else {
2328 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2329 .await?;
2330 }
2331 }
2332 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2333 let source_abs_path = op
2334 .old_uri
2335 .to_file_path()
2336 .map_err(|_| anyhow!("can't convert URI to path"))?;
2337 let target_abs_path = op
2338 .new_uri
2339 .to_file_path()
2340 .map_err(|_| anyhow!("can't convert URI to path"))?;
2341 fs.rename(
2342 &source_abs_path,
2343 &target_abs_path,
2344 op.options.map(Into::into).unwrap_or_default(),
2345 )
2346 .await?;
2347 }
2348 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2349 let abs_path = op
2350 .uri
2351 .to_file_path()
2352 .map_err(|_| anyhow!("can't convert URI to path"))?;
2353 let options = op.options.map(Into::into).unwrap_or_default();
// As above, detect directory URIs via the path string rather than
// `Path::ends_with`, which only matches whole components.
2354 if abs_path.to_string_lossy().ends_with('/') {
2355 fs.remove_dir(&abs_path, options).await?;
2356 } else {
2357 fs.remove_file(&abs_path, options).await?;
2358 }
2359 }
2360 lsp::DocumentChangeOperation::Edit(op) => {
2361 let buffer_to_edit = this
2362 .update(cx, |this, cx| {
2363 this.open_local_buffer_via_lsp(
2364 op.text_document.uri,
2365 language_name.clone(),
2366 language_server.clone(),
2367 cx,
2368 )
2369 })
2370 .await?;
2371
2372 let edits = this
2373 .update(cx, |this, cx| {
2374 let edits = op.edits.into_iter().map(|edit| match edit {
2375 lsp::OneOf::Left(edit) => edit,
2376 lsp::OneOf::Right(edit) => edit.text_edit,
2377 });
2378 this.edits_from_lsp(
2379 &buffer_to_edit,
2380 edits,
2381 op.text_document.version,
2382 cx,
2383 )
2384 })
2385 .await?;
2386
2387 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2388 buffer.finalize_last_transaction();
2389 buffer.start_transaction();
2390 for (range, text) in edits {
2391 buffer.edit([range], text, cx);
2392 }
2393 let transaction = if buffer.end_transaction(cx).is_some() {
2394 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2395 if !push_to_history {
2396 buffer.forget_transaction(transaction.id);
2397 }
2398 Some(transaction)
2399 } else {
2400 None
2401 };
2402
2403 transaction
2404 });
2405 if let Some(transaction) = transaction {
2406 project_transaction.0.insert(buffer_to_edit, transaction);
2407 }
2408 }
2409 }
2410 }
2411
2412 Ok(project_transaction)
2413 }
2414
2415 pub fn prepare_rename<T: ToPointUtf16>(
2416 &self,
2417 buffer: ModelHandle<Buffer>,
2418 position: T,
2419 cx: &mut ModelContext<Self>,
2420 ) -> Task<Result<Option<Range<Anchor>>>> {
2421 let position = position.to_point_utf16(buffer.read(cx));
2422 self.request_lsp(buffer, PrepareRename { position }, cx)
2423 }
2424
2425 pub fn perform_rename<T: ToPointUtf16>(
2426 &self,
2427 buffer: ModelHandle<Buffer>,
2428 position: T,
2429 new_name: String,
2430 push_to_history: bool,
2431 cx: &mut ModelContext<Self>,
2432 ) -> Task<Result<ProjectTransaction>> {
2433 let position = position.to_point_utf16(buffer.read(cx));
2434 self.request_lsp(
2435 buffer,
2436 PerformRename {
2437 position,
2438 new_name,
2439 push_to_history,
2440 },
2441 cx,
2442 )
2443 }
2444
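/// Searches the project for the given query.
///
/// Locally this runs a two-stage pipeline: background workers scan the files of
/// every visible worktree to find candidate paths, those paths (plus any
/// already-open buffers) are opened, and a second pool of workers searches the
/// buffer snapshots for matches. Remotely the query is forwarded to the host and
/// the returned locations are resolved into local buffer handles.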
2445 pub fn search(
2446 &self,
2447 query: SearchQuery,
2448 cx: &mut ModelContext<Self>,
2449 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2450 if self.is_local() {
2451 let snapshots = self
2452 .visible_worktrees(cx)
2453 .filter_map(|tree| {
2454 let tree = tree.read(cx).as_local()?;
2455 Some(tree.snapshot())
2456 })
2457 .collect::<Vec<_>>();
2458
2459 let background = cx.background().clone();
2460 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2461 if path_count == 0 {
2462 return Task::ready(Ok(Default::default()));
2463 }
2464 let workers = background.num_cpus().min(path_count);
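// Stage one: fan the visible files out across background workers, streaming
// every path whose contents match the query into `matching_paths_tx`.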
2465 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2466 cx.background()
2467 .spawn({
2468 let fs = self.fs.clone();
2469 let background = cx.background().clone();
2470 let query = query.clone();
2471 async move {
2472 let fs = &fs;
2473 let query = &query;
2474 let matching_paths_tx = &matching_paths_tx;
2475 let paths_per_worker = (path_count + workers - 1) / workers;
2476 let snapshots = &snapshots;
2477 background
2478 .scoped(|scope| {
2479 for worker_ix in 0..workers {
2480 let worker_start_ix = worker_ix * paths_per_worker;
2481 let worker_end_ix = worker_start_ix + paths_per_worker;
2482 scope.spawn(async move {
2483 let mut snapshot_start_ix = 0;
2484 let mut abs_path = PathBuf::new();
2485 for snapshot in snapshots {
2486 let snapshot_end_ix =
2487 snapshot_start_ix + snapshot.visible_file_count();
2488 if worker_end_ix <= snapshot_start_ix {
2489 break;
2490 } else if worker_start_ix > snapshot_end_ix {
2491 snapshot_start_ix = snapshot_end_ix;
2492 continue;
2493 } else {
2494 let start_in_snapshot = worker_start_ix
2495 .saturating_sub(snapshot_start_ix);
2496 let end_in_snapshot =
2497 cmp::min(worker_end_ix, snapshot_end_ix)
2498 - snapshot_start_ix;
2499
2500 for entry in snapshot
2501 .files(false, start_in_snapshot)
2502 .take(end_in_snapshot - start_in_snapshot)
2503 {
2504 if matching_paths_tx.is_closed() {
2505 break;
2506 }
2507
2508 abs_path.clear();
2509 abs_path.push(&snapshot.abs_path());
2510 abs_path.push(&entry.path);
2511 let matches = if let Some(file) =
2512 fs.open_sync(&abs_path).await.log_err()
2513 {
2514 query.detect(file).unwrap_or(false)
2515 } else {
2516 false
2517 };
2518
2519 if matches {
2520 let project_path =
2521 (snapshot.id(), entry.path.clone());
2522 if matching_paths_tx
2523 .send(project_path)
2524 .await
2525 .is_err()
2526 {
2527 break;
2528 }
2529 }
2530 }
2531
2532 snapshot_start_ix = snapshot_end_ix;
2533 }
2534 }
2535 });
2536 }
2537 })
2538 .await;
2539 }
2540 })
2541 .detach();
2542
2543 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2544 let open_buffers = self
2545 .opened_buffers
2546 .values()
2547 .filter_map(|b| b.upgrade(cx))
2548 .collect::<HashSet<_>>();
2549 cx.spawn(|this, cx| async move {
2550 for buffer in &open_buffers {
2551 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2552 buffers_tx.send((buffer.clone(), snapshot)).await?;
2553 }
2554
2555 let open_buffers = Rc::new(RefCell::new(open_buffers));
2556 while let Some(project_path) = matching_paths_rx.next().await {
2557 if buffers_tx.is_closed() {
2558 break;
2559 }
2560
2561 let this = this.clone();
2562 let open_buffers = open_buffers.clone();
2563 let buffers_tx = buffers_tx.clone();
2564 cx.spawn(|mut cx| async move {
2565 if let Some(buffer) = this
2566 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2567 .await
2568 .log_err()
2569 {
2570 if open_buffers.borrow_mut().insert(buffer.clone()) {
2571 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2572 buffers_tx.send((buffer, snapshot)).await?;
2573 }
2574 }
2575
2576 Ok::<_, anyhow::Error>(())
2577 })
2578 .detach();
2579 }
2580
2581 Ok::<_, anyhow::Error>(())
2582 })
2583 .detach_and_log_err(cx);
2584
2585 let background = cx.background().clone();
2586 cx.background().spawn(async move {
2587 let query = &query;
2588 let mut matched_buffers = Vec::new();
2589 for _ in 0..workers {
2590 matched_buffers.push(HashMap::default());
2591 }
2592 background
2593 .scoped(|scope| {
2594 for worker_matched_buffers in matched_buffers.iter_mut() {
2595 let mut buffers_rx = buffers_rx.clone();
2596 scope.spawn(async move {
2597 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2598 let buffer_matches = query
2599 .search(snapshot.as_rope())
2600 .await
2601 .iter()
2602 .map(|range| {
2603 snapshot.anchor_before(range.start)
2604 ..snapshot.anchor_after(range.end)
2605 })
2606 .collect::<Vec<_>>();
2607 if !buffer_matches.is_empty() {
2608 worker_matched_buffers
2609 .insert(buffer.clone(), buffer_matches);
2610 }
2611 }
2612 });
2613 }
2614 })
2615 .await;
2616 Ok(matched_buffers.into_iter().flatten().collect())
2617 })
2618 } else if let Some(project_id) = self.remote_id() {
2619 let request = self.client.request(query.to_proto(project_id));
2620 cx.spawn(|this, mut cx| async move {
2621 let response = request.await?;
2622 let mut result = HashMap::default();
2623 for location in response.locations {
2624 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2625 let target_buffer = this
2626 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2627 .await?;
2628 let start = location
2629 .start
2630 .and_then(deserialize_anchor)
2631 .ok_or_else(|| anyhow!("missing target start"))?;
2632 let end = location
2633 .end
2634 .and_then(deserialize_anchor)
2635 .ok_or_else(|| anyhow!("missing target end"))?;
2636 result
2637 .entry(target_buffer)
2638 .or_default()
2639 .push(start..end);
2640 }
2641 Ok(result)
2642 })
2643 } else {
2644 Task::ready(Ok(Default::default()))
2645 }
2646 }
2647
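/// Dispatches a typed LSP command: locally it is translated into the
/// corresponding LSP request and sent to the buffer's language server (after
/// checking the server's capabilities); remotely it is serialized and sent to
/// the host over RPC. Falls back to a default response when neither applies.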
2648 fn request_lsp<R: LspCommand>(
2649 &self,
2650 buffer_handle: ModelHandle<Buffer>,
2651 request: R,
2652 cx: &mut ModelContext<Self>,
2653 ) -> Task<Result<R::Response>>
2654 where
2655 <R::LspRequest as lsp::request::Request>::Result: Send,
2656 {
2657 let buffer = buffer_handle.read(cx);
2658 if self.is_local() {
2659 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2660 if let Some((file, language_server)) =
2661 file.zip(self.language_server_for_buffer(buffer, cx).cloned())
2662 {
2663 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2664 return cx.spawn(|this, cx| async move {
2665 if !request.check_capabilities(&language_server.capabilities()) {
2666 return Ok(Default::default());
2667 }
2668
2669 let response = language_server
2670 .request::<R::LspRequest>(lsp_params)
2671 .await
2672 .context("lsp request failed")?;
2673 request
2674 .response_from_lsp(response, this, buffer_handle, cx)
2675 .await
2676 });
2677 }
2678 } else if let Some(project_id) = self.remote_id() {
2679 let rpc = self.client.clone();
2680 let message = request.to_proto(project_id, buffer);
2681 return cx.spawn(|this, cx| async move {
2682 let response = rpc.request(message).await?;
2683 request
2684 .response_from_proto(response, this, buffer_handle, cx)
2685 .await
2686 });
2687 }
2688 Task::ready(Ok(Default::default()))
2689 }
2690
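/// Returns the local worktree containing `abs_path` along with the path
/// relative to that worktree, creating a new worktree rooted at `abs_path` if
/// no existing one contains it.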
2691 pub fn find_or_create_local_worktree(
2692 &mut self,
2693 abs_path: impl AsRef<Path>,
2694 visible: bool,
2695 cx: &mut ModelContext<Self>,
2696 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2697 let abs_path = abs_path.as_ref();
2698 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2699 Task::ready(Ok((tree.clone(), relative_path.into())))
2700 } else {
2701 let worktree = self.create_local_worktree(abs_path, visible, cx);
2702 cx.foreground()
2703 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2704 }
2705 }
2706
2707 pub fn find_local_worktree(
2708 &self,
2709 abs_path: &Path,
2710 cx: &AppContext,
2711 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2712 for tree in self.worktrees(cx) {
2713 if let Some(relative_path) = tree
2714 .read(cx)
2715 .as_local()
2716 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2717 {
2718 return Some((tree.clone(), relative_path.into()));
2719 }
2720 }
2721 None
2722 }
2723
2724 pub fn is_shared(&self) -> bool {
2725 match &self.client_state {
2726 ProjectClientState::Local { is_shared, .. } => *is_shared,
2727 ProjectClientState::Remote { .. } => false,
2728 }
2729 }
2730
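/// Creates a local worktree rooted at `abs_path`, deduplicating concurrent
/// requests for the same path via `loading_local_worktrees`. Once loaded, the
/// worktree is added to the project and registered or shared with the server if
/// the project has a remote id.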
2731 fn create_local_worktree(
2732 &mut self,
2733 abs_path: impl AsRef<Path>,
2734 visible: bool,
2735 cx: &mut ModelContext<Self>,
2736 ) -> Task<Result<ModelHandle<Worktree>>> {
2737 let fs = self.fs.clone();
2738 let client = self.client.clone();
2739 let path: Arc<Path> = abs_path.as_ref().into();
2740 let task = self
2741 .loading_local_worktrees
2742 .entry(path.clone())
2743 .or_insert_with(|| {
2744 cx.spawn(|project, mut cx| {
2745 async move {
2746 let worktree =
2747 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2748 .await;
2749 project.update(&mut cx, |project, _| {
2750 project.loading_local_worktrees.remove(&path);
2751 });
2752 let worktree = worktree?;
2753
2754 let (remote_project_id, is_shared) =
2755 project.update(&mut cx, |project, cx| {
2756 project.add_worktree(&worktree, cx);
2757 (project.remote_id(), project.is_shared())
2758 });
2759
2760 if let Some(project_id) = remote_project_id {
2761 if is_shared {
2762 worktree
2763 .update(&mut cx, |worktree, cx| {
2764 worktree.as_local_mut().unwrap().share(project_id, cx)
2765 })
2766 .await?;
2767 } else {
2768 worktree
2769 .update(&mut cx, |worktree, cx| {
2770 worktree.as_local_mut().unwrap().register(project_id, cx)
2771 })
2772 .await?;
2773 }
2774 }
2775
2776 Ok(worktree)
2777 }
2778 .map_err(Arc::new)
2779 })
2780 .shared()
2781 })
2782 .clone();
2783 cx.foreground().spawn(async move {
2784 match task.await {
2785 Ok(worktree) => Ok(worktree),
2786 Err(err) => Err(anyhow!("{}", err)),
2787 }
2788 })
2789 }
2790
2791 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2792 self.worktrees.retain(|worktree| {
2793 worktree
2794 .upgrade(cx)
2795 .map_or(false, |w| w.read(cx).id() != id)
2796 });
2797 cx.notify();
2798 }
2799
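/// Adds a worktree to the project, subscribing to its updates. Visible, remote,
/// or shared worktrees are retained strongly; otherwise only a weak handle is
/// kept so the worktree can be released when it is no longer used elsewhere.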
2800 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2801 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2802 if worktree.read(cx).is_local() {
2803 cx.subscribe(&worktree, |this, worktree, _, cx| {
2804 this.update_local_worktree_buffers(worktree, cx);
2805 })
2806 .detach();
2807 }
2808
2809 let push_strong_handle = {
2810 let worktree = worktree.read(cx);
2811 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2812 };
2813 if push_strong_handle {
2814 self.worktrees
2815 .push(WorktreeHandle::Strong(worktree.clone()));
2816 } else {
2817 cx.observe_release(&worktree, |this, _, cx| {
2818 this.worktrees
2819 .retain(|worktree| worktree.upgrade(cx).is_some());
2820 cx.notify();
2821 })
2822 .detach();
2823 self.worktrees
2824 .push(WorktreeHandle::Weak(worktree.downgrade()));
2825 }
2826 cx.notify();
2827 }
2828
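/// Reconciles open buffers with a local worktree's latest snapshot: each
/// buffer's `File` is refreshed (by entry id, then by path), collaborators are
/// notified of the change, and buffers whose handles have been dropped are
/// removed from `opened_buffers`.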
2829 fn update_local_worktree_buffers(
2830 &mut self,
2831 worktree_handle: ModelHandle<Worktree>,
2832 cx: &mut ModelContext<Self>,
2833 ) {
2834 let snapshot = worktree_handle.read(cx).snapshot();
2835 let mut buffers_to_delete = Vec::new();
2836 for (buffer_id, buffer) in &self.opened_buffers {
2837 if let Some(buffer) = buffer.upgrade(cx) {
2838 buffer.update(cx, |buffer, cx| {
2839 if let Some(old_file) = File::from_dyn(buffer.file()) {
2840 if old_file.worktree != worktree_handle {
2841 return;
2842 }
2843
2844 let new_file = if let Some(entry) = old_file
2845 .entry_id
2846 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2847 {
2848 File {
2849 is_local: true,
2850 entry_id: Some(entry.id),
2851 mtime: entry.mtime,
2852 path: entry.path.clone(),
2853 worktree: worktree_handle.clone(),
2854 }
2855 } else if let Some(entry) =
2856 snapshot.entry_for_path(old_file.path().as_ref())
2857 {
2858 File {
2859 is_local: true,
2860 entry_id: Some(entry.id),
2861 mtime: entry.mtime,
2862 path: entry.path.clone(),
2863 worktree: worktree_handle.clone(),
2864 }
2865 } else {
2866 File {
2867 is_local: true,
2868 entry_id: None,
2869 path: old_file.path().clone(),
2870 mtime: old_file.mtime(),
2871 worktree: worktree_handle.clone(),
2872 }
2873 };
2874
2875 if let Some(project_id) = self.remote_id() {
2876 self.client
2877 .send(proto::UpdateBufferFile {
2878 project_id,
2879 buffer_id: *buffer_id as u64,
2880 file: Some(new_file.to_proto()),
2881 })
2882 .log_err();
2883 }
2884 buffer.file_updated(Box::new(new_file), cx).detach();
2885 }
2886 });
2887 } else {
2888 buffers_to_delete.push(*buffer_id);
2889 }
2890 }
2891
2892 for buffer_id in buffers_to_delete {
2893 self.opened_buffers.remove(&buffer_id);
2894 }
2895 }
2896
2897 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2898 let new_active_entry = entry.and_then(|project_path| {
2899 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2900 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2901 Some(ProjectEntry {
2902 worktree_id: project_path.worktree_id,
2903 entry_id: entry.id,
2904 })
2905 });
2906 if new_active_entry != self.active_entry {
2907 self.active_entry = new_active_entry;
2908 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2909 }
2910 }
2911
2912 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2913 self.language_servers_with_diagnostics_running > 0
2914 }
2915
2916 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2917 let mut summary = DiagnosticSummary::default();
2918 for (_, path_summary) in self.diagnostic_summaries(cx) {
2919 summary.error_count += path_summary.error_count;
2920 summary.warning_count += path_summary.warning_count;
2921 summary.info_count += path_summary.info_count;
2922 summary.hint_count += path_summary.hint_count;
2923 }
2924 summary
2925 }
2926
2927 pub fn diagnostic_summaries<'a>(
2928 &'a self,
2929 cx: &'a AppContext,
2930 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2931 self.worktrees(cx).flat_map(move |worktree| {
2932 let worktree = worktree.read(cx);
2933 let worktree_id = worktree.id();
2934 worktree
2935 .diagnostic_summaries()
2936 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2937 })
2938 }
2939
2940 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2941 self.language_servers_with_diagnostics_running += 1;
2942 if self.language_servers_with_diagnostics_running == 1 {
2943 cx.emit(Event::DiskBasedDiagnosticsStarted);
2944 }
2945 }
2946
2947 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2948 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2949 self.language_servers_with_diagnostics_running -= 1;
2950 if self.language_servers_with_diagnostics_running == 0 {
2951 cx.emit(Event::DiskBasedDiagnosticsFinished);
2952 }
2953 }
2954
2955 pub fn active_entry(&self) -> Option<ProjectEntry> {
2956 self.active_entry
2957 }
2958
2959 // RPC message handlers
2960
2961 async fn handle_unshare_project(
2962 this: ModelHandle<Self>,
2963 _: TypedEnvelope<proto::UnshareProject>,
2964 _: Arc<Client>,
2965 mut cx: AsyncAppContext,
2966 ) -> Result<()> {
2967 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2968 Ok(())
2969 }
2970
2971 async fn handle_add_collaborator(
2972 this: ModelHandle<Self>,
2973 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2974 _: Arc<Client>,
2975 mut cx: AsyncAppContext,
2976 ) -> Result<()> {
2977 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2978 let collaborator = envelope
2979 .payload
2980 .collaborator
2981 .take()
2982 .ok_or_else(|| anyhow!("empty collaborator"))?;
2983
2984 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2985 this.update(&mut cx, |this, cx| {
2986 this.collaborators
2987 .insert(collaborator.peer_id, collaborator);
2988 cx.notify();
2989 });
2990
2991 Ok(())
2992 }
2993
2994 async fn handle_remove_collaborator(
2995 this: ModelHandle<Self>,
2996 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2997 _: Arc<Client>,
2998 mut cx: AsyncAppContext,
2999 ) -> Result<()> {
3000 this.update(&mut cx, |this, cx| {
3001 let peer_id = PeerId(envelope.payload.peer_id);
3002 let replica_id = this
3003 .collaborators
3004 .remove(&peer_id)
3005 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
3006 .replica_id;
3007 for buffer in this.opened_buffers.values() {
3008 if let Some(buffer) = buffer.upgrade(cx) {
3009 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
3010 }
3011 }
3012 cx.notify();
3013 Ok(())
3014 })
3015 }
3016
3017 async fn handle_register_worktree(
3018 this: ModelHandle<Self>,
3019 envelope: TypedEnvelope<proto::RegisterWorktree>,
3020 client: Arc<Client>,
3021 mut cx: AsyncAppContext,
3022 ) -> Result<()> {
3023 this.update(&mut cx, |this, cx| {
3024 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
3025 let replica_id = this.replica_id();
3026 let worktree = proto::Worktree {
3027 id: envelope.payload.worktree_id,
3028 root_name: envelope.payload.root_name,
3029 entries: Default::default(),
3030 diagnostic_summaries: Default::default(),
3031 visible: envelope.payload.visible,
3032 };
3033 let (worktree, load_task) =
3034 Worktree::remote(remote_id, replica_id, worktree, client, cx);
3035 this.add_worktree(&worktree, cx);
3036 load_task.detach();
3037 Ok(())
3038 })
3039 }
3040
3041 async fn handle_unregister_worktree(
3042 this: ModelHandle<Self>,
3043 envelope: TypedEnvelope<proto::UnregisterWorktree>,
3044 _: Arc<Client>,
3045 mut cx: AsyncAppContext,
3046 ) -> Result<()> {
3047 this.update(&mut cx, |this, cx| {
3048 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3049 this.remove_worktree(worktree_id, cx);
3050 Ok(())
3051 })
3052 }
3053
3054 async fn handle_update_worktree(
3055 this: ModelHandle<Self>,
3056 envelope: TypedEnvelope<proto::UpdateWorktree>,
3057 _: Arc<Client>,
3058 mut cx: AsyncAppContext,
3059 ) -> Result<()> {
3060 this.update(&mut cx, |this, cx| {
3061 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3062 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3063 worktree.update(cx, |worktree, _| {
3064 let worktree = worktree.as_remote_mut().unwrap();
3065 worktree.update_from_remote(envelope)
3066 })?;
3067 }
3068 Ok(())
3069 })
3070 }
3071
3072 async fn handle_update_diagnostic_summary(
3073 this: ModelHandle<Self>,
3074 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
3075 _: Arc<Client>,
3076 mut cx: AsyncAppContext,
3077 ) -> Result<()> {
3078 this.update(&mut cx, |this, cx| {
3079 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3080 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
3081 if let Some(summary) = envelope.payload.summary {
3082 let project_path = ProjectPath {
3083 worktree_id,
3084 path: Path::new(&summary.path).into(),
3085 };
3086 worktree.update(cx, |worktree, _| {
3087 worktree
3088 .as_remote_mut()
3089 .unwrap()
3090 .update_diagnostic_summary(project_path.path.clone(), &summary);
3091 });
3092 cx.emit(Event::DiagnosticsUpdated(project_path));
3093 }
3094 }
3095 Ok(())
3096 })
3097 }
3098
3099 async fn handle_disk_based_diagnostics_updating(
3100 this: ModelHandle<Self>,
3101 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
3102 _: Arc<Client>,
3103 mut cx: AsyncAppContext,
3104 ) -> Result<()> {
3105 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
3106 Ok(())
3107 }
3108
3109 async fn handle_disk_based_diagnostics_updated(
3110 this: ModelHandle<Self>,
3111 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
3112 _: Arc<Client>,
3113 mut cx: AsyncAppContext,
3114 ) -> Result<()> {
3115 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
3116 Ok(())
3117 }
3118
3119 async fn handle_update_buffer(
3120 this: ModelHandle<Self>,
3121 envelope: TypedEnvelope<proto::UpdateBuffer>,
3122 _: Arc<Client>,
3123 mut cx: AsyncAppContext,
3124 ) -> Result<()> {
3125 this.update(&mut cx, |this, cx| {
3126 let payload = envelope.payload.clone();
3127 let buffer_id = payload.buffer_id;
3128 let ops = payload
3129 .operations
3130 .into_iter()
3131 .map(language::proto::deserialize_operation)
3132 .collect::<Result<Vec<_>, _>>()?;
3133 match this.opened_buffers.entry(buffer_id) {
3134 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
3135 OpenBuffer::Strong(buffer) => {
3136 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
3137 }
3138 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
3139 OpenBuffer::Weak(_) => {}
3140 },
3141 hash_map::Entry::Vacant(e) => {
3142 e.insert(OpenBuffer::Loading(ops));
3143 }
3144 }
3145 Ok(())
3146 })
3147 }
3148
3149 async fn handle_update_buffer_file(
3150 this: ModelHandle<Self>,
3151 envelope: TypedEnvelope<proto::UpdateBufferFile>,
3152 _: Arc<Client>,
3153 mut cx: AsyncAppContext,
3154 ) -> Result<()> {
3155 this.update(&mut cx, |this, cx| {
3156 let payload = envelope.payload.clone();
3157 let buffer_id = payload.buffer_id;
3158 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
3159 let worktree = this
3160 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
3161 .ok_or_else(|| anyhow!("no such worktree"))?;
3162 let file = File::from_proto(file, worktree.clone(), cx)?;
3163 let buffer = this
3164 .opened_buffers
3165 .get_mut(&buffer_id)
3166 .and_then(|b| b.upgrade(cx))
3167 .ok_or_else(|| anyhow!("no such buffer"))?;
3168 buffer.update(cx, |buffer, cx| {
3169 buffer.file_updated(Box::new(file), cx).detach();
3170 });
3171 Ok(())
3172 })
3173 }
3174
3175 async fn handle_save_buffer(
3176 this: ModelHandle<Self>,
3177 envelope: TypedEnvelope<proto::SaveBuffer>,
3178 _: Arc<Client>,
3179 mut cx: AsyncAppContext,
3180 ) -> Result<proto::BufferSaved> {
3181 let buffer_id = envelope.payload.buffer_id;
3182 let requested_version = deserialize_version(envelope.payload.version);
3183
3184 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
3185 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
3186 let buffer = this
3187 .opened_buffers
3188 .get(&buffer_id)
3189 .map(|buffer| buffer.upgrade(cx).unwrap())
3190 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3191 Ok::<_, anyhow::Error>((project_id, buffer))
3192 })?;
3193 buffer
3194 .update(&mut cx, |buffer, _| {
3195 buffer.wait_for_version(requested_version)
3196 })
3197 .await;
3198
3199 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
3200 Ok(proto::BufferSaved {
3201 project_id,
3202 buffer_id,
3203 version: serialize_version(&saved_version),
3204 mtime: Some(mtime.into()),
3205 })
3206 }
3207
3208 async fn handle_format_buffers(
3209 this: ModelHandle<Self>,
3210 envelope: TypedEnvelope<proto::FormatBuffers>,
3211 _: Arc<Client>,
3212 mut cx: AsyncAppContext,
3213 ) -> Result<proto::FormatBuffersResponse> {
3214 let sender_id = envelope.original_sender_id()?;
3215 let format = this.update(&mut cx, |this, cx| {
3216 let mut buffers = HashSet::default();
3217 for buffer_id in &envelope.payload.buffer_ids {
3218 buffers.insert(
3219 this.opened_buffers
3220 .get(buffer_id)
3221 .map(|buffer| buffer.upgrade(cx).unwrap())
3222 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3223 );
3224 }
3225 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3226 })?;
3227
3228 let project_transaction = format.await?;
3229 let project_transaction = this.update(&mut cx, |this, cx| {
3230 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3231 });
3232 Ok(proto::FormatBuffersResponse {
3233 transaction: Some(project_transaction),
3234 })
3235 }
3236
3237 async fn handle_get_completions(
3238 this: ModelHandle<Self>,
3239 envelope: TypedEnvelope<proto::GetCompletions>,
3240 _: Arc<Client>,
3241 mut cx: AsyncAppContext,
3242 ) -> Result<proto::GetCompletionsResponse> {
3243 let position = envelope
3244 .payload
3245 .position
3246 .and_then(language::proto::deserialize_anchor)
3247 .ok_or_else(|| anyhow!("invalid position"))?;
3248 let version = deserialize_version(envelope.payload.version);
3249 let buffer = this.read_with(&cx, |this, cx| {
3250 this.opened_buffers
3251 .get(&envelope.payload.buffer_id)
3252 .map(|buffer| buffer.upgrade(cx).unwrap())
3253 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3254 })?;
3255 buffer
3256 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3257 .await;
3258 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3259 let completions = this
3260 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3261 .await?;
3262
3263 Ok(proto::GetCompletionsResponse {
3264 completions: completions
3265 .iter()
3266 .map(language::proto::serialize_completion)
3267 .collect(),
3268 version: serialize_version(&version),
3269 })
3270 }
3271
3272 async fn handle_apply_additional_edits_for_completion(
3273 this: ModelHandle<Self>,
3274 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3275 _: Arc<Client>,
3276 mut cx: AsyncAppContext,
3277 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3278 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3279 let buffer = this
3280 .opened_buffers
3281 .get(&envelope.payload.buffer_id)
3282 .map(|buffer| buffer.upgrade(cx).unwrap())
3283 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3284 let language = buffer.read(cx).language();
3285 let completion = language::proto::deserialize_completion(
3286 envelope
3287 .payload
3288 .completion
3289 .ok_or_else(|| anyhow!("invalid completion"))?,
3290 language,
3291 )?;
3292 Ok::<_, anyhow::Error>(
3293 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3294 )
3295 })?;
3296
3297 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3298 transaction: apply_additional_edits
3299 .await?
3300 .as_ref()
3301 .map(language::proto::serialize_transaction),
3302 })
3303 }
3304
3305 async fn handle_get_code_actions(
3306 this: ModelHandle<Self>,
3307 envelope: TypedEnvelope<proto::GetCodeActions>,
3308 _: Arc<Client>,
3309 mut cx: AsyncAppContext,
3310 ) -> Result<proto::GetCodeActionsResponse> {
3311 let start = envelope
3312 .payload
3313 .start
3314 .and_then(language::proto::deserialize_anchor)
3315 .ok_or_else(|| anyhow!("invalid start"))?;
3316 let end = envelope
3317 .payload
3318 .end
3319 .and_then(language::proto::deserialize_anchor)
3320 .ok_or_else(|| anyhow!("invalid end"))?;
3321 let buffer = this.update(&mut cx, |this, cx| {
3322 this.opened_buffers
3323 .get(&envelope.payload.buffer_id)
3324 .map(|buffer| buffer.upgrade(cx).unwrap())
3325 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3326 })?;
3327 buffer
3328 .update(&mut cx, |buffer, _| {
3329 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3330 })
3331 .await;
3332
3333 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3334 let code_actions = this.update(&mut cx, |this, cx| {
3335 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3336 })?;
3337
3338 Ok(proto::GetCodeActionsResponse {
3339 actions: code_actions
3340 .await?
3341 .iter()
3342 .map(language::proto::serialize_code_action)
3343 .collect(),
3344 version: serialize_version(&version),
3345 })
3346 }
3347
3348 async fn handle_apply_code_action(
3349 this: ModelHandle<Self>,
3350 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3351 _: Arc<Client>,
3352 mut cx: AsyncAppContext,
3353 ) -> Result<proto::ApplyCodeActionResponse> {
3354 let sender_id = envelope.original_sender_id()?;
3355 let action = language::proto::deserialize_code_action(
3356 envelope
3357 .payload
3358 .action
3359 .ok_or_else(|| anyhow!("invalid action"))?,
3360 )?;
3361 let apply_code_action = this.update(&mut cx, |this, cx| {
3362 let buffer = this
3363 .opened_buffers
3364 .get(&envelope.payload.buffer_id)
3365 .map(|buffer| buffer.upgrade(cx).unwrap())
3366 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3367 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3368 })?;
3369
3370 let project_transaction = apply_code_action.await?;
3371 let project_transaction = this.update(&mut cx, |this, cx| {
3372 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3373 });
3374 Ok(proto::ApplyCodeActionResponse {
3375 transaction: Some(project_transaction),
3376 })
3377 }
3378
3379 async fn handle_lsp_command<T: LspCommand>(
3380 this: ModelHandle<Self>,
3381 envelope: TypedEnvelope<T::ProtoRequest>,
3382 _: Arc<Client>,
3383 mut cx: AsyncAppContext,
3384 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3385 where
3386 <T::LspRequest as lsp::request::Request>::Result: Send,
3387 {
3388 let sender_id = envelope.original_sender_id()?;
3389 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3390 let buffer_handle = this.read_with(&cx, |this, _| {
3391 this.opened_buffers
3392 .get(&buffer_id)
3393 .map(|buffer| buffer.upgrade(&cx).unwrap())
3394 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3395 })?;
3396 let request = T::from_proto(
3397 envelope.payload,
3398 this.clone(),
3399 buffer_handle.clone(),
3400 cx.clone(),
3401 )
3402 .await?;
3403 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3404 let response = this
3405 .update(&mut cx, |this, cx| {
3406 this.request_lsp(buffer_handle, request, cx)
3407 })
3408 .await?;
3409 this.update(&mut cx, |this, cx| {
3410 Ok(T::response_to_proto(
3411 response,
3412 this,
3413 sender_id,
3414 &buffer_version,
3415 cx,
3416 ))
3417 })
3418 }
3419
3420 async fn handle_get_project_symbols(
3421 this: ModelHandle<Self>,
3422 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3423 _: Arc<Client>,
3424 mut cx: AsyncAppContext,
3425 ) -> Result<proto::GetProjectSymbolsResponse> {
3426 let symbols = this
3427 .update(&mut cx, |this, cx| {
3428 this.symbols(&envelope.payload.query, cx)
3429 })
3430 .await?;
3431
3432 Ok(proto::GetProjectSymbolsResponse {
3433 symbols: symbols.iter().map(serialize_symbol).collect(),
3434 })
3435 }
3436
3437 async fn handle_search_project(
3438 this: ModelHandle<Self>,
3439 envelope: TypedEnvelope<proto::SearchProject>,
3440 _: Arc<Client>,
3441 mut cx: AsyncAppContext,
3442 ) -> Result<proto::SearchProjectResponse> {
3443 let peer_id = envelope.original_sender_id()?;
3444 let query = SearchQuery::from_proto(envelope.payload)?;
3445 let result = this
3446 .update(&mut cx, |this, cx| this.search(query, cx))
3447 .await?;
3448
3449 this.update(&mut cx, |this, cx| {
3450 let mut locations = Vec::new();
3451 for (buffer, ranges) in result {
3452 for range in ranges {
3453 let start = serialize_anchor(&range.start);
3454 let end = serialize_anchor(&range.end);
3455 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3456 locations.push(proto::Location {
3457 buffer: Some(buffer),
3458 start: Some(start),
3459 end: Some(end),
3460 });
3461 }
3462 }
3463 Ok(proto::SearchProjectResponse { locations })
3464 })
3465 }
3466
3467 async fn handle_open_buffer_for_symbol(
3468 this: ModelHandle<Self>,
3469 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3470 _: Arc<Client>,
3471 mut cx: AsyncAppContext,
3472 ) -> Result<proto::OpenBufferForSymbolResponse> {
3473 let peer_id = envelope.original_sender_id()?;
3474 let symbol = envelope
3475 .payload
3476 .symbol
3477 .ok_or_else(|| anyhow!("invalid symbol"))?;
3478 let symbol = this.read_with(&cx, |this, _| {
3479 let symbol = this.deserialize_symbol(symbol)?;
3480 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3481 if signature == symbol.signature {
3482 Ok(symbol)
3483 } else {
3484 Err(anyhow!("invalid symbol signature"))
3485 }
3486 })?;
3487 let buffer = this
3488 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3489 .await?;
3490
3491 Ok(proto::OpenBufferForSymbolResponse {
3492 buffer: Some(this.update(&mut cx, |this, cx| {
3493 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3494 })),
3495 })
3496 }
3497
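/// Computes a SHA-256 signature binding a symbol to this project's nonce, so
/// that `OpenBufferForSymbol` requests can be validated against symbols the
/// host actually produced.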
3498 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3499 let mut hasher = Sha256::new();
3500 hasher.update(worktree_id.to_proto().to_be_bytes());
3501 hasher.update(path.to_string_lossy().as_bytes());
3502 hasher.update(self.nonce.to_be_bytes());
3503 hasher.finalize().as_slice().try_into().unwrap()
3504 }
3505
3506 async fn handle_open_buffer(
3507 this: ModelHandle<Self>,
3508 envelope: TypedEnvelope<proto::OpenBuffer>,
3509 _: Arc<Client>,
3510 mut cx: AsyncAppContext,
3511 ) -> Result<proto::OpenBufferResponse> {
3512 let peer_id = envelope.original_sender_id()?;
3513 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3514 let open_buffer = this.update(&mut cx, |this, cx| {
3515 this.open_buffer(
3516 ProjectPath {
3517 worktree_id,
3518 path: PathBuf::from(envelope.payload.path).into(),
3519 },
3520 cx,
3521 )
3522 });
3523
3524 let buffer = open_buffer.await?;
3525 this.update(&mut cx, |this, cx| {
3526 Ok(proto::OpenBufferResponse {
3527 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3528 })
3529 })
3530 }
3531
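/// Serializes a project transaction for a peer, sending full buffer state for
/// any buffer that peer hasn't seen yet.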
3532 fn serialize_project_transaction_for_peer(
3533 &mut self,
3534 project_transaction: ProjectTransaction,
3535 peer_id: PeerId,
3536 cx: &AppContext,
3537 ) -> proto::ProjectTransaction {
3538 let mut serialized_transaction = proto::ProjectTransaction {
3539 buffers: Default::default(),
3540 transactions: Default::default(),
3541 };
3542 for (buffer, transaction) in project_transaction.0 {
3543 serialized_transaction
3544 .buffers
3545 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3546 serialized_transaction
3547 .transactions
3548 .push(language::proto::serialize_transaction(&transaction));
3549 }
3550 serialized_transaction
3551 }
3552
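/// Reconstructs a project transaction received from the host: each buffer is
/// resolved (or created) locally, and we wait for its edits to arrive before
/// optionally pushing the transaction onto the buffer's undo history.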
3553 fn deserialize_project_transaction(
3554 &mut self,
3555 message: proto::ProjectTransaction,
3556 push_to_history: bool,
3557 cx: &mut ModelContext<Self>,
3558 ) -> Task<Result<ProjectTransaction>> {
3559 cx.spawn(|this, mut cx| async move {
3560 let mut project_transaction = ProjectTransaction::default();
3561 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3562 let buffer = this
3563 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3564 .await?;
3565 let transaction = language::proto::deserialize_transaction(transaction)?;
3566 project_transaction.0.insert(buffer, transaction);
3567 }
3568
3569 for (buffer, transaction) in &project_transaction.0 {
3570 buffer
3571 .update(&mut cx, |buffer, _| {
3572 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3573 })
3574 .await;
3575
3576 if push_to_history {
3577 buffer.update(&mut cx, |buffer, _| {
3578 buffer.push_transaction(transaction.clone(), Instant::now());
3579 });
3580 }
3581 }
3582
3583 Ok(project_transaction)
3584 })
3585 }
3586
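/// Serializes a buffer for the given peer, sending the complete buffer state
/// the first time the buffer is shared with that peer and just its id
/// thereafter.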
3587 fn serialize_buffer_for_peer(
3588 &mut self,
3589 buffer: &ModelHandle<Buffer>,
3590 peer_id: PeerId,
3591 cx: &AppContext,
3592 ) -> proto::Buffer {
3593 let buffer_id = buffer.read(cx).remote_id();
3594 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3595 if shared_buffers.insert(buffer_id) {
3596 proto::Buffer {
3597 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3598 }
3599 } else {
3600 proto::Buffer {
3601 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3602 }
3603 }
3604 }
3605
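/// Resolves a `proto::Buffer` into a local buffer handle: an id variant waits
/// for the corresponding buffer to finish opening, while a state variant
/// constructs a new buffer (and its `File`) from the serialized data and
/// registers it with the project.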
3606 fn deserialize_buffer(
3607 &mut self,
3608 buffer: proto::Buffer,
3609 cx: &mut ModelContext<Self>,
3610 ) -> Task<Result<ModelHandle<Buffer>>> {
3611 let replica_id = self.replica_id();
3612
3613 let opened_buffer_tx = self.opened_buffer.0.clone();
3614 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3615 cx.spawn(|this, mut cx| async move {
3616 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3617 proto::buffer::Variant::Id(id) => {
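// Only the buffer's id was sent, so its full state either already arrived
// or is still in flight; keep checking `opened_buffers`, waiting on the
// open notification between attempts.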
3618 let buffer = loop {
3619 let buffer = this.read_with(&cx, |this, cx| {
3620 this.opened_buffers
3621 .get(&id)
3622 .and_then(|buffer| buffer.upgrade(cx))
3623 });
3624 if let Some(buffer) = buffer {
3625 break buffer;
3626 }
3627 opened_buffer_rx
3628 .next()
3629 .await
3630 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3631 };
3632 Ok(buffer)
3633 }
3634 proto::buffer::Variant::State(mut buffer) => {
3635 let mut buffer_worktree = None;
3636 let mut buffer_file = None;
3637 if let Some(file) = buffer.file.take() {
3638 this.read_with(&cx, |this, cx| {
3639 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3640 let worktree =
3641 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3642 anyhow!("no worktree found for id {}", file.worktree_id)
3643 })?;
3644 buffer_file =
3645 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3646 as Box<dyn language::File>);
3647 buffer_worktree = Some(worktree);
3648 Ok::<_, anyhow::Error>(())
3649 })?;
3650 }
3651
3652 let buffer = cx.add_model(|cx| {
3653 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3654 });
3655
3656 this.update(&mut cx, |this, cx| this.register_buffer(&buffer, cx))?;
3657
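// Writing to the watch channel notifies any `deserialize_buffer` calls that
// are waiting for this buffer to arrive.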
3658 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3659 Ok(buffer)
3660 }
3661 }
3662 })
3663 }
3664
3665 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3666 let language = self
3667 .languages
3668 .get_language(&serialized_symbol.language_name);
3669 let start = serialized_symbol
3670 .start
3671 .ok_or_else(|| anyhow!("invalid start"))?;
3672 let end = serialized_symbol
3673 .end
3674 .ok_or_else(|| anyhow!("invalid end"))?;
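// Assumes the peer sent a valid `lsp::SymbolKind` discriminant; the value
// arrives as a raw integer over the wire and is transmuted back here.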
3675 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3676 Ok(Symbol {
3677 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3678 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3679 language_name: serialized_symbol.language_name.clone(),
3680 label: language
3681 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3682 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3683 name: serialized_symbol.name,
3684 path: PathBuf::from(serialized_symbol.path),
3685 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3686 kind,
3687 signature: serialized_symbol
3688 .signature
3689 .try_into()
3690 .map_err(|_| anyhow!("invalid signature"))?,
3691 })
3692 }
3693
3694 async fn handle_buffer_saved(
3695 this: ModelHandle<Self>,
3696 envelope: TypedEnvelope<proto::BufferSaved>,
3697 _: Arc<Client>,
3698 mut cx: AsyncAppContext,
3699 ) -> Result<()> {
3700 let version = deserialize_version(envelope.payload.version);
3701 let mtime = envelope
3702 .payload
3703 .mtime
3704 .ok_or_else(|| anyhow!("missing mtime"))?
3705 .into();
3706
3707 this.update(&mut cx, |this, cx| {
3708 let buffer = this
3709 .opened_buffers
3710 .get(&envelope.payload.buffer_id)
3711 .and_then(|buffer| buffer.upgrade(cx));
3712 if let Some(buffer) = buffer {
3713 buffer.update(cx, |buffer, cx| {
3714 buffer.did_save(version, mtime, None, cx);
3715 });
3716 }
3717 Ok(())
3718 })
3719 }
3720
3721 async fn handle_buffer_reloaded(
3722 this: ModelHandle<Self>,
3723 envelope: TypedEnvelope<proto::BufferReloaded>,
3724 _: Arc<Client>,
3725 mut cx: AsyncAppContext,
3726 ) -> Result<()> {
3727 let payload = envelope.payload.clone();
3728 let version = deserialize_version(payload.version);
3729 let mtime = payload
3730 .mtime
3731 .ok_or_else(|| anyhow!("missing mtime"))?
3732 .into();
3733 this.update(&mut cx, |this, cx| {
3734 let buffer = this
3735 .opened_buffers
3736 .get(&payload.buffer_id)
3737 .and_then(|buffer| buffer.upgrade(cx));
3738 if let Some(buffer) = buffer {
3739 buffer.update(cx, |buffer, cx| {
3740 buffer.did_reload(version, mtime, cx);
3741 });
3742 }
3743 Ok(())
3744 })
3745 }
3746
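/// Fuzzy-matches `query` against the paths of all visible worktrees, returning
/// at most `max_results` matches. Root names are included in matching only when
/// more than one worktree is visible.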
3747 pub fn match_paths<'a>(
3748 &self,
3749 query: &'a str,
3750 include_ignored: bool,
3751 smart_case: bool,
3752 max_results: usize,
3753 cancel_flag: &'a AtomicBool,
3754 cx: &AppContext,
3755 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3756 let worktrees = self
3757 .worktrees(cx)
3758 .filter(|worktree| worktree.read(cx).is_visible())
3759 .collect::<Vec<_>>();
3760 let include_root_name = worktrees.len() > 1;
3761 let candidate_sets = worktrees
3762 .into_iter()
3763 .map(|worktree| CandidateSet {
3764 snapshot: worktree.read(cx).snapshot(),
3765 include_ignored,
3766 include_root_name,
3767 })
3768 .collect::<Vec<_>>();
3769
3770 let background = cx.background().clone();
3771 async move {
3772 fuzzy::match_paths(
3773 candidate_sets.as_slice(),
3774 query,
3775 smart_case,
3776 max_results,
3777 cancel_flag,
3778 background,
3779 )
3780 .await
3781 }
3782 }
3783
3784 fn edits_from_lsp(
3785 &mut self,
3786 buffer: &ModelHandle<Buffer>,
3787 lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
3788 version: Option<i32>,
3789 cx: &mut ModelContext<Self>,
3790 ) -> Task<Result<Vec<(Range<Anchor>, String)>>> {
3791 let snapshot = self.buffer_snapshot_for_lsp_version(buffer, version, cx);
3792 cx.background().spawn(async move {
3793 let snapshot = snapshot?;
3794 let mut lsp_edits = lsp_edits
3795 .into_iter()
3796 .map(|edit| (range_from_lsp(edit.range), edit.new_text))
3797 .peekable();
3798
3799 let mut edits = Vec::new();
3800 while let Some((mut range, mut new_text)) = lsp_edits.next() {
3801 // Combine any LSP edits that are adjacent.
3802 //
3803 // Also, combine LSP edits that are separated from each other by only
3804 // a newline. This is important because for some code actions,
3805 // Rust-analyzer rewrites the entire buffer via a series of edits that
3806 // are separated by unchanged newline characters.
3807 //
3808 // In order for the diffing logic below to work properly, any edits that
3809 // cancel each other out must be combined into one.
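                // A minimal sketch of the merge (illustrative values, not taken from a
                // real language server): given the buffer "aaa\nbbb" and the edits
                //   (0,0)..(0,3) -> "xxx" and (1,0)..(1,3) -> "yyy",
                // the second edit starts on the line directly after the first one ends
                // and only the untouched newline separates them, so they are merged into
                // the single edit (0,0)..(1,3) -> "xxx\nyyy" before diffing.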
3810 while let Some((next_range, next_text)) = lsp_edits.peek() {
3811 if next_range.start > range.end {
3812 if next_range.start.row > range.end.row + 1
3813 || next_range.start.column > 0
3814 || snapshot.clip_point_utf16(
3815 PointUtf16::new(range.end.row, u32::MAX),
3816 Bias::Left,
3817 ) > range.end
3818 {
3819 break;
3820 }
3821 new_text.push('\n');
3822 }
3823 range.end = next_range.end;
3824 new_text.push_str(&next_text);
3825 lsp_edits.next();
3826 }
3827
3828 if snapshot.clip_point_utf16(range.start, Bias::Left) != range.start
3829 || snapshot.clip_point_utf16(range.end, Bias::Left) != range.end
3830 {
3831 return Err(anyhow!("invalid edits received from language server"));
3832 }
3833
3834 // For multiline edits, perform a diff of the old and new text so that
3835 // we can identify the changes more precisely, preserving the locations
3836 // of any anchors positioned in the unchanged regions.
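                // For example, when a server resends a whole function body in which only
                // one line actually changed, the line-level diff below yields edits for
                // just the changed lines, leaving anchors on the untouched lines in place
                // (this is exercised by `test_edits_from_lsp_with_past_version` in the
                // tests module).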
3837 if range.end.row > range.start.row {
3838 let mut offset = range.start.to_offset(&snapshot);
3839 let old_text = snapshot.text_for_range(range).collect::<String>();
3840
3841 let diff = TextDiff::from_lines(old_text.as_str(), &new_text);
3842 let mut moved_since_edit = true;
3843 for change in diff.iter_all_changes() {
3844 let tag = change.tag();
3845 let value = change.value();
3846 match tag {
3847 ChangeTag::Equal => {
3848 offset += value.len();
3849 moved_since_edit = true;
3850 }
3851 ChangeTag::Delete => {
3852 let start = snapshot.anchor_after(offset);
3853 let end = snapshot.anchor_before(offset + value.len());
3854 if moved_since_edit {
3855 edits.push((start..end, String::new()));
3856 } else {
3857 edits.last_mut().unwrap().0.end = end;
3858 }
3859 offset += value.len();
3860 moved_since_edit = false;
3861 }
3862 ChangeTag::Insert => {
3863 if moved_since_edit {
3864 let anchor = snapshot.anchor_after(offset);
3865 edits.push((anchor.clone()..anchor, value.to_string()));
3866 } else {
3867 edits.last_mut().unwrap().1.push_str(value);
3868 }
3869 moved_since_edit = false;
3870 }
3871 }
3872 }
3873 } else if range.end == range.start {
3874 let anchor = snapshot.anchor_after(range.start);
3875 edits.push((anchor.clone()..anchor, new_text));
3876 } else {
3877 let edit_start = snapshot.anchor_after(range.start);
3878 let edit_end = snapshot.anchor_before(range.end);
3879 edits.push((edit_start..edit_end, new_text));
3880 }
3881 }
3882
3883 Ok(edits)
3884 })
3885 }
3886
3887 fn buffer_snapshot_for_lsp_version(
3888 &mut self,
3889 buffer: &ModelHandle<Buffer>,
3890 version: Option<i32>,
3891 cx: &AppContext,
3892 ) -> Result<TextBufferSnapshot> {
3893 const OLD_VERSIONS_TO_RETAIN: i32 = 10;
3894
3895 if let Some(version) = version {
3896 let buffer_id = buffer.read(cx).remote_id();
3897 let snapshots = self
3898 .buffer_snapshots
3899 .get_mut(&buffer_id)
3900 .ok_or_else(|| anyhow!("no snapshot found for buffer {}", buffer_id))?;
3901 let mut found_snapshot = None;
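            // Prune stale snapshots while searching: with OLD_VERSIONS_TO_RETAIN == 10,
            // a request for version 25 keeps snapshots for versions 15..=25 and drops
            // anything older.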
3902 snapshots.retain(|(snapshot_version, snapshot)| {
3903 if snapshot_version + OLD_VERSIONS_TO_RETAIN < version {
3904 false
3905 } else {
3906 if *snapshot_version == version {
3907 found_snapshot = Some(snapshot.clone());
3908 }
3909 true
3910 }
3911 });
3912
3913 found_snapshot.ok_or_else(|| {
3914 anyhow!(
3915 "snapshot not found for buffer {} at version {}",
3916 buffer_id,
3917 version
3918 )
3919 })
3920 } else {
            Ok(buffer.read(cx).text_snapshot())
3922 }
3923 }
3924
3925 fn language_server_for_buffer(
3926 &self,
3927 buffer: &Buffer,
3928 cx: &AppContext,
3929 ) -> Option<&Arc<LanguageServer>> {
3930 if let Some((file, language)) = File::from_dyn(buffer.file()).zip(buffer.language()) {
3931 let worktree_id = file.worktree_id(cx);
3932 self.language_servers.get(&(worktree_id, language.name()))
3933 } else {
3934 None
3935 }
3936 }
3937}
3938
3939impl WorktreeHandle {
3940 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3941 match self {
3942 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3943 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3944 }
3945 }
3946}
3947
3948impl OpenBuffer {
3949 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3950 match self {
3951 OpenBuffer::Strong(handle) => Some(handle.clone()),
3952 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3953 OpenBuffer::Loading(_) => None,
3954 }
3955 }
3956}
3957
3958struct CandidateSet {
3959 snapshot: Snapshot,
3960 include_ignored: bool,
3961 include_root_name: bool,
3962}
3963
3964impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3965 type Candidates = CandidateSetIter<'a>;
3966
3967 fn id(&self) -> usize {
3968 self.snapshot.id().to_usize()
3969 }
3970
3971 fn len(&self) -> usize {
3972 if self.include_ignored {
3973 self.snapshot.file_count()
3974 } else {
3975 self.snapshot.visible_file_count()
3976 }
3977 }
3978
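    // The prefix is prepended to every candidate path when matching: the file name itself
    // for a single-file worktree, "<root-name>/" when the project has more than one
    // visible worktree, and nothing otherwise.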
3979 fn prefix(&self) -> Arc<str> {
3980 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3981 self.snapshot.root_name().into()
3982 } else if self.include_root_name {
3983 format!("{}/", self.snapshot.root_name()).into()
3984 } else {
3985 "".into()
3986 }
3987 }
3988
3989 fn candidates(&'a self, start: usize) -> Self::Candidates {
3990 CandidateSetIter {
3991 traversal: self.snapshot.files(self.include_ignored, start),
3992 }
3993 }
3994}
3995
3996struct CandidateSetIter<'a> {
3997 traversal: Traversal<'a>,
3998}
3999
4000impl<'a> Iterator for CandidateSetIter<'a> {
4001 type Item = PathMatchCandidate<'a>;
4002
4003 fn next(&mut self) -> Option<Self::Item> {
4004 self.traversal.next().map(|entry| {
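            // The traversal produced by `Snapshot::files` only yields file entries, so
            // any other entry kind here is unreachable.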
4005 if let EntryKind::File(char_bag) = entry.kind {
4006 PathMatchCandidate {
4007 path: &entry.path,
4008 char_bag,
4009 }
4010 } else {
4011 unreachable!()
4012 }
4013 })
4014 }
4015}
4016
4017impl Entity for Project {
4018 type Event = Event;
4019
4020 fn release(&mut self, _: &mut gpui::MutableAppContext) {
4021 match &self.client_state {
4022 ProjectClientState::Local { remote_id_rx, .. } => {
4023 if let Some(project_id) = *remote_id_rx.borrow() {
4024 self.client
4025 .send(proto::UnregisterProject { project_id })
4026 .log_err();
4027 }
4028 }
4029 ProjectClientState::Remote { remote_id, .. } => {
4030 self.client
4031 .send(proto::LeaveProject {
4032 project_id: *remote_id,
4033 })
4034 .log_err();
4035 }
4036 }
4037 }
4038
4039 fn app_will_quit(
4040 &mut self,
4041 _: &mut MutableAppContext,
4042 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
4043 let shutdown_futures = self
4044 .language_servers
4045 .drain()
4046 .filter_map(|(_, server)| server.shutdown())
4047 .collect::<Vec<_>>();
4048 Some(
4049 async move {
4050 futures::future::join_all(shutdown_futures).await;
4051 }
4052 .boxed(),
4053 )
4054 }
4055}
4056
4057impl Collaborator {
4058 fn from_proto(
4059 message: proto::Collaborator,
4060 user_store: &ModelHandle<UserStore>,
4061 cx: &mut AsyncAppContext,
4062 ) -> impl Future<Output = Result<Self>> {
4063 let user = user_store.update(cx, |user_store, cx| {
4064 user_store.fetch_user(message.user_id, cx)
4065 });
4066
4067 async move {
4068 Ok(Self {
4069 peer_id: PeerId(message.peer_id),
4070 user: user.await?,
4071 replica_id: message.replica_id as ReplicaId,
4072 })
4073 }
4074 }
4075}
4076
4077impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
4078 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
4079 Self {
4080 worktree_id,
4081 path: path.as_ref().into(),
4082 }
4083 }
4084}
4085
4086impl From<lsp::CreateFileOptions> for fs::CreateOptions {
4087 fn from(options: lsp::CreateFileOptions) -> Self {
4088 Self {
4089 overwrite: options.overwrite.unwrap_or(false),
4090 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4091 }
4092 }
4093}
4094
4095impl From<lsp::RenameFileOptions> for fs::RenameOptions {
4096 fn from(options: lsp::RenameFileOptions) -> Self {
4097 Self {
4098 overwrite: options.overwrite.unwrap_or(false),
4099 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
4100 }
4101 }
4102}
4103
4104impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
4105 fn from(options: lsp::DeleteFileOptions) -> Self {
4106 Self {
4107 recursive: options.recursive.unwrap_or(false),
4108 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
4109 }
4110 }
4111}
4112
4113fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
4114 proto::Symbol {
4115 source_worktree_id: symbol.source_worktree_id.to_proto(),
4116 worktree_id: symbol.worktree_id.to_proto(),
4117 language_name: symbol.language_name.clone(),
4118 name: symbol.name.clone(),
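        // Send the symbol kind as its raw integer representation; `deserialize_symbol`
        // reverses this with the matching transmute.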
4119 kind: unsafe { mem::transmute(symbol.kind) },
4120 path: symbol.path.to_string_lossy().to_string(),
4121 start: Some(proto::Point {
4122 row: symbol.range.start.row,
4123 column: symbol.range.start.column,
4124 }),
4125 end: Some(proto::Point {
4126 row: symbol.range.end.row,
4127 column: symbol.range.end.column,
4128 }),
4129 signature: symbol.signature.to_vec(),
4130 }
4131}
4132
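// Compute `path` relative to `base`, e.g. "/a/b/c" relative to the base "/a/x" becomes
// "../b/c".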
4133fn relativize_path(base: &Path, path: &Path) -> PathBuf {
4134 let mut path_components = path.components();
4135 let mut base_components = base.components();
4136 let mut components: Vec<Component> = Vec::new();
4137 loop {
4138 match (path_components.next(), base_components.next()) {
4139 (None, None) => break,
4140 (Some(a), None) => {
4141 components.push(a);
4142 components.extend(path_components.by_ref());
4143 break;
4144 }
4145 (None, _) => components.push(Component::ParentDir),
4146 (Some(a), Some(b)) if components.is_empty() && a == b => (),
4147 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
4148 (Some(a), Some(_)) => {
4149 components.push(Component::ParentDir);
4150 for _ in base_components {
4151 components.push(Component::ParentDir);
4152 }
4153 components.push(a);
4154 components.extend(path_components.by_ref());
4155 break;
4156 }
4157 }
4158 }
4159 components.iter().map(|c| c.as_os_str()).collect()
4160}
4161
4162#[cfg(test)]
4163mod tests {
4164 use super::{Event, *};
4165 use fs::RealFs;
4166 use futures::StreamExt;
4167 use gpui::test::subscribe;
4168 use language::{
4169 tree_sitter_rust, Diagnostic, LanguageConfig, LanguageServerConfig, OffsetRangeExt, Point,
4170 ToPoint,
4171 };
4172 use lsp::Url;
4173 use serde_json::json;
4174 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
4175 use unindent::Unindent as _;
4176 use util::test::temp_tree;
4177 use worktree::WorktreeHandle as _;
4178
4179 #[gpui::test]
4180 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
4181 let dir = temp_tree(json!({
4182 "root": {
4183 "apple": "",
4184 "banana": {
4185 "carrot": {
4186 "date": "",
4187 "endive": "",
4188 }
4189 },
4190 "fennel": {
4191 "grape": "",
4192 }
4193 }
4194 }));
4195
4196 let root_link_path = dir.path().join("root_link");
4197 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
4198 unix::fs::symlink(
4199 &dir.path().join("root/fennel"),
4200 &dir.path().join("root/finnochio"),
4201 )
4202 .unwrap();
4203
4204 let project = Project::test(Arc::new(RealFs), cx);
4205
4206 let (tree, _) = project
4207 .update(cx, |project, cx| {
4208 project.find_or_create_local_worktree(&root_link_path, true, cx)
4209 })
4210 .await
4211 .unwrap();
4212
4213 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4214 .await;
4215 cx.read(|cx| {
4216 let tree = tree.read(cx);
4217 assert_eq!(tree.file_count(), 5);
4218 assert_eq!(
4219 tree.inode_for_path("fennel/grape"),
4220 tree.inode_for_path("finnochio/grape")
4221 );
4222 });
4223
4224 let cancel_flag = Default::default();
4225 let results = project
4226 .read_with(cx, |project, cx| {
4227 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
4228 })
4229 .await;
4230 assert_eq!(
4231 results
4232 .into_iter()
4233 .map(|result| result.path)
4234 .collect::<Vec<Arc<Path>>>(),
4235 vec![
4236 PathBuf::from("banana/carrot/date").into(),
4237 PathBuf::from("banana/carrot/endive").into(),
4238 ]
4239 );
4240 }
4241
4242 #[gpui::test]
4243 async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
4244 cx.foreground().forbid_parking();
4245
4246 let (mut rust_lsp_config, mut fake_rust_servers) = LanguageServerConfig::fake();
4247 let (mut json_lsp_config, mut fake_json_servers) = LanguageServerConfig::fake();
4248 rust_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4249 completion_provider: Some(lsp::CompletionOptions {
4250 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
4251 ..Default::default()
4252 }),
4253 ..Default::default()
4254 });
4255 json_lsp_config.set_fake_capabilities(lsp::ServerCapabilities {
4256 completion_provider: Some(lsp::CompletionOptions {
4257 trigger_characters: Some(vec![":".to_string()]),
4258 ..Default::default()
4259 }),
4260 ..Default::default()
4261 });
4262
4263 let rust_language = Arc::new(Language::new(
4264 LanguageConfig {
4265 name: "Rust".into(),
4266 path_suffixes: vec!["rs".to_string()],
4267 language_server: Some(rust_lsp_config),
4268 ..Default::default()
4269 },
4270 Some(tree_sitter_rust::language()),
4271 ));
4272 let json_language = Arc::new(Language::new(
4273 LanguageConfig {
4274 name: "JSON".into(),
4275 path_suffixes: vec!["json".to_string()],
4276 language_server: Some(json_lsp_config),
4277 ..Default::default()
4278 },
4279 None,
4280 ));
4281
4282 let fs = FakeFs::new(cx.background());
4283 fs.insert_tree(
4284 "/the-root",
4285 json!({
4286 "test.rs": "const A: i32 = 1;",
4287 "test2.rs": "",
4288 "Cargo.toml": "a = 1",
4289 "package.json": "{\"a\": 1}",
4290 }),
4291 )
4292 .await;
4293
4294 let project = Project::test(fs, cx);
4295 project.update(cx, |project, _| {
4296 project.languages.add(rust_language);
4297 project.languages.add(json_language);
4298 });
4299
4300 let worktree_id = project
4301 .update(cx, |project, cx| {
4302 project.find_or_create_local_worktree("/the-root", true, cx)
4303 })
4304 .await
4305 .unwrap()
4306 .0
4307 .read_with(cx, |tree, _| tree.id());
4308
4309 // Open a buffer without an associated language server.
4310 let toml_buffer = project
4311 .update(cx, |project, cx| {
4312 project.open_buffer((worktree_id, "Cargo.toml"), cx)
4313 })
4314 .await
4315 .unwrap();
4316
4317 // Open a buffer with an associated language server.
4318 let rust_buffer = project
4319 .update(cx, |project, cx| {
4320 project.open_buffer((worktree_id, "test.rs"), cx)
4321 })
4322 .await
4323 .unwrap();
4324
4325 // A server is started up, and it is notified about Rust files.
4326 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
4327 assert_eq!(
4328 fake_rust_server
4329 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4330 .await
4331 .text_document,
4332 lsp::TextDocumentItem {
4333 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4334 version: 0,
4335 text: "const A: i32 = 1;".to_string(),
4336 language_id: Default::default()
4337 }
4338 );
4339
4340 // The buffer is configured based on the language server's capabilities.
4341 rust_buffer.read_with(cx, |buffer, _| {
4342 assert_eq!(
4343 buffer.completion_triggers(),
4344 &[".".to_string(), "::".to_string()]
4345 );
4346 });
4347 toml_buffer.read_with(cx, |buffer, _| {
4348 assert!(buffer.completion_triggers().is_empty());
4349 });
4350
4351 // Edit a buffer. The changes are reported to the language server.
4352 rust_buffer.update(cx, |buffer, cx| buffer.edit([16..16], "2", cx));
4353 assert_eq!(
4354 fake_rust_server
4355 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4356 .await
4357 .text_document,
4358 lsp::VersionedTextDocumentIdentifier::new(
4359 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
4360 1
4361 )
4362 );
4363
4364 // Open a third buffer with a different associated language server.
4365 let json_buffer = project
4366 .update(cx, |project, cx| {
4367 project.open_buffer((worktree_id, "package.json"), cx)
4368 })
4369 .await
4370 .unwrap();
4371
4372 // Another language server is started up, and it is notified about
4373 // all three open buffers.
4374 let mut fake_json_server = fake_json_servers.next().await.unwrap();
4375 assert_eq!(
4376 fake_json_server
4377 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4378 .await
4379 .text_document,
4380 lsp::TextDocumentItem {
4381 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4382 version: 0,
4383 text: "{\"a\": 1}".to_string(),
4384 language_id: Default::default()
4385 }
4386 );
4387
4388 // This buffer is configured based on the second language server's
4389 // capabilities.
4390 json_buffer.read_with(cx, |buffer, _| {
4391 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
4392 });
4393
4394 // When opening another buffer whose language server is already running,
4395 // it is also configured based on the existing language server's capabilities.
4396 let rust_buffer2 = project
4397 .update(cx, |project, cx| {
4398 project.open_buffer((worktree_id, "test2.rs"), cx)
4399 })
4400 .await
4401 .unwrap();
4402 rust_buffer2.read_with(cx, |buffer, _| {
4403 assert_eq!(
4404 buffer.completion_triggers(),
4405 &[".".to_string(), "::".to_string()]
4406 );
4407 });
4408
4409 // Changes are reported only to servers matching the buffer's language.
4410 toml_buffer.update(cx, |buffer, cx| buffer.edit([5..5], "23", cx));
4411 rust_buffer2.update(cx, |buffer, cx| buffer.edit([0..0], "let x = 1;", cx));
4412 assert_eq!(
4413 fake_rust_server
4414 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4415 .await
4416 .text_document,
4417 lsp::VersionedTextDocumentIdentifier::new(
4418 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
4419 1
4420 )
4421 );
4422
4423 // Save notifications are reported to all servers.
4424 toml_buffer
4425 .update(cx, |buffer, cx| buffer.save(cx))
4426 .await
4427 .unwrap();
4428 assert_eq!(
4429 fake_rust_server
4430 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4431 .await
4432 .text_document,
4433 lsp::TextDocumentIdentifier::new(
4434 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4435 )
4436 );
4437 assert_eq!(
4438 fake_json_server
4439 .receive_notification::<lsp::notification::DidSaveTextDocument>()
4440 .await
4441 .text_document,
4442 lsp::TextDocumentIdentifier::new(
4443 lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap()
4444 )
4445 );
4446
4447 // Close notifications are reported only to servers matching the buffer's language.
4448 cx.update(|_| drop(json_buffer));
4449 let close_message = lsp::DidCloseTextDocumentParams {
4450 text_document: lsp::TextDocumentIdentifier::new(
4451 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
4452 ),
4453 };
4454 assert_eq!(
4455 fake_json_server
4456 .receive_notification::<lsp::notification::DidCloseTextDocument>()
4457 .await,
4458 close_message,
4459 );
4460 }
4461
4462 #[gpui::test]
4463 async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
4464 cx.foreground().forbid_parking();
4465
4466 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4467 let progress_token = language_server_config
4468 .disk_based_diagnostics_progress_token
4469 .clone()
4470 .unwrap();
4471
4472 let language = Arc::new(Language::new(
4473 LanguageConfig {
4474 name: "Rust".into(),
4475 path_suffixes: vec!["rs".to_string()],
4476 language_server: Some(language_server_config),
4477 ..Default::default()
4478 },
4479 Some(tree_sitter_rust::language()),
4480 ));
4481
4482 let fs = FakeFs::new(cx.background());
4483 fs.insert_tree(
4484 "/dir",
4485 json!({
4486 "a.rs": "fn a() { A }",
4487 "b.rs": "const y: i32 = 1",
4488 }),
4489 )
4490 .await;
4491
4492 let project = Project::test(fs, cx);
4493 project.update(cx, |project, _| project.languages.add(language));
4494
4495 let (tree, _) = project
4496 .update(cx, |project, cx| {
4497 project.find_or_create_local_worktree("/dir", true, cx)
4498 })
4499 .await
4500 .unwrap();
4501 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4502
4503 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4504 .await;
4505
        // Cause the worktree to start the fake language server.
4507 let _buffer = project
4508 .update(cx, |project, cx| {
4509 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
4510 })
4511 .await
4512 .unwrap();
4513
4514 let mut events = subscribe(&project, cx);
4515
4516 let mut fake_server = fake_servers.next().await.unwrap();
4517 fake_server.start_progress(&progress_token).await;
4518 assert_eq!(
4519 events.next().await.unwrap(),
4520 Event::DiskBasedDiagnosticsStarted
4521 );
4522
4523 fake_server.start_progress(&progress_token).await;
4524 fake_server.end_progress(&progress_token).await;
4525 fake_server.start_progress(&progress_token).await;
4526
4527 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4528 lsp::PublishDiagnosticsParams {
4529 uri: Url::from_file_path("/dir/a.rs").unwrap(),
4530 version: None,
4531 diagnostics: vec![lsp::Diagnostic {
4532 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4533 severity: Some(lsp::DiagnosticSeverity::ERROR),
4534 message: "undefined variable 'A'".to_string(),
4535 ..Default::default()
4536 }],
4537 },
4538 );
4539 assert_eq!(
4540 events.next().await.unwrap(),
4541 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
4542 );
4543
4544 fake_server.end_progress(&progress_token).await;
4545 fake_server.end_progress(&progress_token).await;
4546 assert_eq!(
4547 events.next().await.unwrap(),
4548 Event::DiskBasedDiagnosticsUpdated
4549 );
4550 assert_eq!(
4551 events.next().await.unwrap(),
4552 Event::DiskBasedDiagnosticsFinished
4553 );
4554
4555 let buffer = project
4556 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4557 .await
4558 .unwrap();
4559
4560 buffer.read_with(cx, |buffer, _| {
4561 let snapshot = buffer.snapshot();
4562 let diagnostics = snapshot
4563 .diagnostics_in_range::<_, Point>(0..buffer.len())
4564 .collect::<Vec<_>>();
4565 assert_eq!(
4566 diagnostics,
4567 &[DiagnosticEntry {
4568 range: Point::new(0, 9)..Point::new(0, 10),
4569 diagnostic: Diagnostic {
4570 severity: lsp::DiagnosticSeverity::ERROR,
4571 message: "undefined variable 'A'".to_string(),
4572 group_id: 0,
4573 is_primary: true,
4574 ..Default::default()
4575 }
4576 }]
4577 )
4578 });
4579 }
4580
4581 #[gpui::test]
4582 async fn test_transforming_disk_based_diagnostics(cx: &mut gpui::TestAppContext) {
4583 cx.foreground().forbid_parking();
4584
4585 let (mut lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4586 lsp_config
4587 .disk_based_diagnostic_sources
4588 .insert("disk".to_string());
4589 let language = Arc::new(Language::new(
4590 LanguageConfig {
4591 name: "Rust".into(),
4592 path_suffixes: vec!["rs".to_string()],
4593 language_server: Some(lsp_config),
4594 ..Default::default()
4595 },
4596 Some(tree_sitter_rust::language()),
4597 ));
4598
4599 let text = "
4600 fn a() { A }
4601 fn b() { BB }
4602 fn c() { CCC }
4603 "
4604 .unindent();
4605
4606 let fs = FakeFs::new(cx.background());
4607 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4608
4609 let project = Project::test(fs, cx);
4610 project.update(cx, |project, _| project.languages.add(language));
4611
4612 let worktree_id = project
4613 .update(cx, |project, cx| {
4614 project.find_or_create_local_worktree("/dir", true, cx)
4615 })
4616 .await
4617 .unwrap()
4618 .0
4619 .read_with(cx, |tree, _| tree.id());
4620
4621 let buffer = project
4622 .update(cx, |project, cx| {
4623 project.open_buffer((worktree_id, "a.rs"), cx)
4624 })
4625 .await
4626 .unwrap();
4627
4628 let mut fake_server = fake_servers.next().await.unwrap();
4629 let open_notification = fake_server
4630 .receive_notification::<lsp::notification::DidOpenTextDocument>()
4631 .await;
4632
4633 // Edit the buffer, moving the content down
4634 buffer.update(cx, |buffer, cx| buffer.edit([0..0], "\n\n", cx));
4635 let change_notification_1 = fake_server
4636 .receive_notification::<lsp::notification::DidChangeTextDocument>()
4637 .await;
4638 assert!(
4639 change_notification_1.text_document.version > open_notification.text_document.version
4640 );
4641
4642 // Report some diagnostics for the initial version of the buffer
4643 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4644 lsp::PublishDiagnosticsParams {
4645 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4646 version: Some(open_notification.text_document.version),
4647 diagnostics: vec![
4648 lsp::Diagnostic {
4649 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4650 severity: Some(DiagnosticSeverity::ERROR),
4651 message: "undefined variable 'A'".to_string(),
4652 source: Some("disk".to_string()),
4653 ..Default::default()
4654 },
4655 lsp::Diagnostic {
4656 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4657 severity: Some(DiagnosticSeverity::ERROR),
4658 message: "undefined variable 'BB'".to_string(),
4659 source: Some("disk".to_string()),
4660 ..Default::default()
4661 },
4662 lsp::Diagnostic {
4663 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
4664 severity: Some(DiagnosticSeverity::ERROR),
4665 source: Some("disk".to_string()),
4666 message: "undefined variable 'CCC'".to_string(),
4667 ..Default::default()
4668 },
4669 ],
4670 },
4671 );
4672
4673 // The diagnostics have moved down since they were created.
4674 buffer.next_notification(cx).await;
4675 buffer.read_with(cx, |buffer, _| {
4676 assert_eq!(
4677 buffer
4678 .snapshot()
4679 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0))
4680 .collect::<Vec<_>>(),
4681 &[
4682 DiagnosticEntry {
4683 range: Point::new(3, 9)..Point::new(3, 11),
4684 diagnostic: Diagnostic {
4685 severity: DiagnosticSeverity::ERROR,
4686 message: "undefined variable 'BB'".to_string(),
4687 is_disk_based: true,
4688 group_id: 1,
4689 is_primary: true,
4690 ..Default::default()
4691 },
4692 },
4693 DiagnosticEntry {
4694 range: Point::new(4, 9)..Point::new(4, 12),
4695 diagnostic: Diagnostic {
4696 severity: DiagnosticSeverity::ERROR,
4697 message: "undefined variable 'CCC'".to_string(),
4698 is_disk_based: true,
4699 group_id: 2,
4700 is_primary: true,
4701 ..Default::default()
4702 }
4703 }
4704 ]
4705 );
4706 assert_eq!(
4707 chunks_with_diagnostics(buffer, 0..buffer.len()),
4708 [
4709 ("\n\nfn a() { ".to_string(), None),
4710 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
4711 (" }\nfn b() { ".to_string(), None),
4712 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
4713 (" }\nfn c() { ".to_string(), None),
4714 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
4715 (" }\n".to_string(), None),
4716 ]
4717 );
4718 assert_eq!(
4719 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
4720 [
4721 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
4722 (" }\nfn c() { ".to_string(), None),
4723 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
4724 ]
4725 );
4726 });
4727
4728 // Ensure overlapping diagnostics are highlighted correctly.
4729 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4730 lsp::PublishDiagnosticsParams {
4731 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4732 version: Some(open_notification.text_document.version),
4733 diagnostics: vec![
4734 lsp::Diagnostic {
4735 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4736 severity: Some(DiagnosticSeverity::ERROR),
4737 message: "undefined variable 'A'".to_string(),
4738 source: Some("disk".to_string()),
4739 ..Default::default()
4740 },
4741 lsp::Diagnostic {
4742 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
4743 severity: Some(DiagnosticSeverity::WARNING),
4744 message: "unreachable statement".to_string(),
4745 source: Some("disk".to_string()),
4746 ..Default::default()
4747 },
4748 ],
4749 },
4750 );
4751
4752 buffer.next_notification(cx).await;
4753 buffer.read_with(cx, |buffer, _| {
4754 assert_eq!(
4755 buffer
4756 .snapshot()
4757 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0))
4758 .collect::<Vec<_>>(),
4759 &[
4760 DiagnosticEntry {
4761 range: Point::new(2, 9)..Point::new(2, 12),
4762 diagnostic: Diagnostic {
4763 severity: DiagnosticSeverity::WARNING,
4764 message: "unreachable statement".to_string(),
4765 is_disk_based: true,
4766 group_id: 1,
4767 is_primary: true,
4768 ..Default::default()
4769 }
4770 },
4771 DiagnosticEntry {
4772 range: Point::new(2, 9)..Point::new(2, 10),
4773 diagnostic: Diagnostic {
4774 severity: DiagnosticSeverity::ERROR,
4775 message: "undefined variable 'A'".to_string(),
4776 is_disk_based: true,
4777 group_id: 0,
4778 is_primary: true,
4779 ..Default::default()
4780 },
4781 }
4782 ]
4783 );
4784 assert_eq!(
4785 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
4786 [
4787 ("fn a() { ".to_string(), None),
4788 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
4789 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
4790 ("\n".to_string(), None),
4791 ]
4792 );
4793 assert_eq!(
4794 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
4795 [
4796 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
4797 ("\n".to_string(), None),
4798 ]
4799 );
4800 });
4801
4802 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
4803 // changes since the last save.
4804 buffer.update(cx, |buffer, cx| {
4805 buffer.edit(Some(Point::new(2, 0)..Point::new(2, 0)), " ", cx);
4806 buffer.edit(Some(Point::new(2, 8)..Point::new(2, 10)), "(x: usize)", cx);
4807 });
4808 let change_notification_2 =
4809 fake_server.receive_notification::<lsp::notification::DidChangeTextDocument>();
4810 assert!(
4811 change_notification_2.await.text_document.version
4812 > change_notification_1.text_document.version
4813 );
4814
4815 // Handle out-of-order diagnostics
4816 fake_server.notify::<lsp::notification::PublishDiagnostics>(
4817 lsp::PublishDiagnosticsParams {
4818 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4819 version: Some(open_notification.text_document.version),
4820 diagnostics: vec![
4821 lsp::Diagnostic {
4822 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
4823 severity: Some(DiagnosticSeverity::ERROR),
4824 message: "undefined variable 'BB'".to_string(),
4825 source: Some("disk".to_string()),
4826 ..Default::default()
4827 },
4828 lsp::Diagnostic {
4829 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4830 severity: Some(DiagnosticSeverity::WARNING),
4831 message: "undefined variable 'A'".to_string(),
4832 source: Some("disk".to_string()),
4833 ..Default::default()
4834 },
4835 ],
4836 },
4837 );
4838
4839 buffer.next_notification(cx).await;
4840 buffer.read_with(cx, |buffer, _| {
4841 assert_eq!(
4842 buffer
4843 .snapshot()
4844 .diagnostics_in_range::<_, Point>(0..buffer.len())
4845 .collect::<Vec<_>>(),
4846 &[
4847 DiagnosticEntry {
4848 range: Point::new(2, 21)..Point::new(2, 22),
4849 diagnostic: Diagnostic {
4850 severity: DiagnosticSeverity::WARNING,
4851 message: "undefined variable 'A'".to_string(),
4852 is_disk_based: true,
4853 group_id: 1,
4854 is_primary: true,
4855 ..Default::default()
4856 }
4857 },
4858 DiagnosticEntry {
4859 range: Point::new(3, 9)..Point::new(3, 11),
4860 diagnostic: Diagnostic {
4861 severity: DiagnosticSeverity::ERROR,
4862 message: "undefined variable 'BB'".to_string(),
4863 is_disk_based: true,
4864 group_id: 0,
4865 is_primary: true,
4866 ..Default::default()
4867 },
4868 }
4869 ]
4870 );
4871 });
4872 }
4873
4874 #[gpui::test]
4875 async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
4876 cx.foreground().forbid_parking();
4877
4878 let text = concat!(
4879 "let one = ;\n", //
4880 "let two = \n",
4881 "let three = 3;\n",
4882 );
4883
4884 let fs = FakeFs::new(cx.background());
4885 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
4886
4887 let project = Project::test(fs, cx);
4888 let worktree_id = project
4889 .update(cx, |project, cx| {
4890 project.find_or_create_local_worktree("/dir", true, cx)
4891 })
4892 .await
4893 .unwrap()
4894 .0
4895 .read_with(cx, |tree, _| tree.id());
4896
4897 let buffer = project
4898 .update(cx, |project, cx| {
4899 project.open_buffer((worktree_id, "a.rs"), cx)
4900 })
4901 .await
4902 .unwrap();
4903
4904 project.update(cx, |project, cx| {
4905 project
4906 .update_buffer_diagnostics(
4907 &buffer,
4908 vec![
4909 DiagnosticEntry {
4910 range: PointUtf16::new(0, 10)..PointUtf16::new(0, 10),
4911 diagnostic: Diagnostic {
4912 severity: DiagnosticSeverity::ERROR,
4913 message: "syntax error 1".to_string(),
4914 ..Default::default()
4915 },
4916 },
4917 DiagnosticEntry {
4918 range: PointUtf16::new(1, 10)..PointUtf16::new(1, 10),
4919 diagnostic: Diagnostic {
4920 severity: DiagnosticSeverity::ERROR,
4921 message: "syntax error 2".to_string(),
4922 ..Default::default()
4923 },
4924 },
4925 ],
4926 None,
4927 cx,
4928 )
4929 .unwrap();
4930 });
4931
4932 // An empty range is extended forward to include the following character.
4933 // At the end of a line, an empty range is extended backward to include
4934 // the preceding character.
4935 buffer.read_with(cx, |buffer, _| {
4936 let chunks = chunks_with_diagnostics(&buffer, 0..buffer.len());
4937 assert_eq!(
4938 chunks
4939 .iter()
4940 .map(|(s, d)| (s.as_str(), *d))
4941 .collect::<Vec<_>>(),
4942 &[
4943 ("let one = ", None),
4944 (";", Some(DiagnosticSeverity::ERROR)),
4945 ("\nlet two =", None),
4946 (" ", Some(DiagnosticSeverity::ERROR)),
4947 ("\nlet three = 3;\n", None)
4948 ]
4949 );
4950 });
4951 }
4952
4953 #[gpui::test]
4954 async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
4955 cx.foreground().forbid_parking();
4956
4957 let (lsp_config, mut fake_servers) = LanguageServerConfig::fake();
4958 let language = Arc::new(Language::new(
4959 LanguageConfig {
4960 name: "Rust".into(),
4961 path_suffixes: vec!["rs".to_string()],
4962 language_server: Some(lsp_config),
4963 ..Default::default()
4964 },
4965 Some(tree_sitter_rust::language()),
4966 ));
4967
4968 let text = "
4969 fn a() {
4970 f1();
4971 }
4972 fn b() {
4973 f2();
4974 }
4975 fn c() {
4976 f3();
4977 }
4978 "
4979 .unindent();
4980
4981 let fs = FakeFs::new(cx.background());
4982 fs.insert_tree(
4983 "/dir",
4984 json!({
4985 "a.rs": text.clone(),
4986 }),
4987 )
4988 .await;
4989
4990 let project = Project::test(fs, cx);
4991 project.update(cx, |project, _| project.languages.add(language));
4992
4993 let worktree_id = project
4994 .update(cx, |project, cx| {
4995 project.find_or_create_local_worktree("/dir", true, cx)
4996 })
4997 .await
4998 .unwrap()
4999 .0
5000 .read_with(cx, |tree, _| tree.id());
5001
5002 let buffer = project
5003 .update(cx, |project, cx| {
5004 project.open_buffer((worktree_id, "a.rs"), cx)
5005 })
5006 .await
5007 .unwrap();
5008
5009 let mut fake_server = fake_servers.next().await.unwrap();
5010 let lsp_document_version = fake_server
5011 .receive_notification::<lsp::notification::DidOpenTextDocument>()
5012 .await
5013 .text_document
5014 .version;
5015
5016 // Simulate editing the buffer after the language server computes some edits.
5017 buffer.update(cx, |buffer, cx| {
5018 buffer.edit(
5019 [Point::new(0, 0)..Point::new(0, 0)],
5020 "// above first function\n",
5021 cx,
5022 );
5023 buffer.edit(
5024 [Point::new(2, 0)..Point::new(2, 0)],
5025 " // inside first function\n",
5026 cx,
5027 );
5028 buffer.edit(
5029 [Point::new(6, 4)..Point::new(6, 4)],
5030 "// inside second function ",
5031 cx,
5032 );
5033
5034 assert_eq!(
5035 buffer.text(),
5036 "
5037 // above first function
5038 fn a() {
5039 // inside first function
5040 f1();
5041 }
5042 fn b() {
5043 // inside second function f2();
5044 }
5045 fn c() {
5046 f3();
5047 }
5048 "
5049 .unindent()
5050 );
5051 });
5052
5053 let edits = project
5054 .update(cx, |project, cx| {
5055 project.edits_from_lsp(
5056 &buffer,
5057 vec![
5058 // replace body of first function
5059 lsp::TextEdit {
5060 range: lsp::Range::new(
5061 lsp::Position::new(0, 0),
5062 lsp::Position::new(3, 0),
5063 ),
5064 new_text: "
5065 fn a() {
5066 f10();
5067 }
5068 "
5069 .unindent(),
5070 },
5071 // edit inside second function
5072 lsp::TextEdit {
5073 range: lsp::Range::new(
5074 lsp::Position::new(4, 6),
5075 lsp::Position::new(4, 6),
5076 ),
5077 new_text: "00".into(),
5078 },
5079 // edit inside third function via two distinct edits
5080 lsp::TextEdit {
5081 range: lsp::Range::new(
5082 lsp::Position::new(7, 5),
5083 lsp::Position::new(7, 5),
5084 ),
5085 new_text: "4000".into(),
5086 },
5087 lsp::TextEdit {
5088 range: lsp::Range::new(
5089 lsp::Position::new(7, 5),
5090 lsp::Position::new(7, 6),
5091 ),
5092 new_text: "".into(),
5093 },
5094 ],
5095 Some(lsp_document_version),
5096 cx,
5097 )
5098 })
5099 .await
5100 .unwrap();
5101
5102 buffer.update(cx, |buffer, cx| {
5103 for (range, new_text) in edits {
5104 buffer.edit([range], new_text, cx);
5105 }
5106 assert_eq!(
5107 buffer.text(),
5108 "
5109 // above first function
5110 fn a() {
5111 // inside first function
5112 f10();
5113 }
5114 fn b() {
5115 // inside second function f200();
5116 }
5117 fn c() {
5118 f4000();
5119 }
5120 "
5121 .unindent()
5122 );
5123 });
5124 }
5125
5126 #[gpui::test]
5127 async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
5128 cx.foreground().forbid_parking();
5129
5130 let text = "
5131 use a::b;
5132 use a::c;
5133
5134 fn f() {
5135 b();
5136 c();
5137 }
5138 "
5139 .unindent();
5140
5141 let fs = FakeFs::new(cx.background());
5142 fs.insert_tree(
5143 "/dir",
5144 json!({
5145 "a.rs": text.clone(),
5146 }),
5147 )
5148 .await;
5149
5150 let project = Project::test(fs, cx);
5151 let worktree_id = project
5152 .update(cx, |project, cx| {
5153 project.find_or_create_local_worktree("/dir", true, cx)
5154 })
5155 .await
5156 .unwrap()
5157 .0
5158 .read_with(cx, |tree, _| tree.id());
5159
5160 let buffer = project
5161 .update(cx, |project, cx| {
5162 project.open_buffer((worktree_id, "a.rs"), cx)
5163 })
5164 .await
5165 .unwrap();
5166
5167 // Simulate the language server sending us a small edit in the form of a very large diff.
5168 // Rust-analyzer does this when performing a merge-imports code action.
5169 let edits = project
5170 .update(cx, |project, cx| {
5171 project.edits_from_lsp(
5172 &buffer,
5173 [
5174 // Replace the first use statement without editing the semicolon.
5175 lsp::TextEdit {
5176 range: lsp::Range::new(
5177 lsp::Position::new(0, 4),
5178 lsp::Position::new(0, 8),
5179 ),
5180 new_text: "a::{b, c}".into(),
5181 },
5182 // Reinsert the remainder of the file between the semicolon and the final
5183 // newline of the file.
5184 lsp::TextEdit {
5185 range: lsp::Range::new(
5186 lsp::Position::new(0, 9),
5187 lsp::Position::new(0, 9),
5188 ),
5189 new_text: "\n\n".into(),
5190 },
5191 lsp::TextEdit {
5192 range: lsp::Range::new(
5193 lsp::Position::new(0, 9),
5194 lsp::Position::new(0, 9),
5195 ),
5196 new_text: "
5197 fn f() {
5198 b();
5199 c();
5200 }"
5201 .unindent(),
5202 },
5203 // Delete everything after the first newline of the file.
5204 lsp::TextEdit {
5205 range: lsp::Range::new(
5206 lsp::Position::new(1, 0),
5207 lsp::Position::new(7, 0),
5208 ),
5209 new_text: "".into(),
5210 },
5211 ],
5212 None,
5213 cx,
5214 )
5215 })
5216 .await
5217 .unwrap();
5218
5219 buffer.update(cx, |buffer, cx| {
5220 let edits = edits
5221 .into_iter()
5222 .map(|(range, text)| {
5223 (
5224 range.start.to_point(&buffer)..range.end.to_point(&buffer),
5225 text,
5226 )
5227 })
5228 .collect::<Vec<_>>();
5229
5230 assert_eq!(
5231 edits,
5232 [
5233 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
5234 (Point::new(1, 0)..Point::new(2, 0), "".into())
5235 ]
5236 );
5237
5238 for (range, new_text) in edits {
5239 buffer.edit([range], new_text, cx);
5240 }
5241 assert_eq!(
5242 buffer.text(),
5243 "
5244 use a::{b, c};
5245
5246 fn f() {
5247 b();
5248 c();
5249 }
5250 "
5251 .unindent()
5252 );
5253 });
5254 }
5255
5256 fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
5257 buffer: &Buffer,
5258 range: Range<T>,
5259 ) -> Vec<(String, Option<DiagnosticSeverity>)> {
5260 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
5261 for chunk in buffer.snapshot().chunks(range, true) {
5262 if chunks
5263 .last()
5264 .map_or(false, |prev_chunk| prev_chunk.1 == chunk.diagnostic)
5265 {
5266 chunks.last_mut().unwrap().0.push_str(chunk.text);
5267 } else {
5268 chunks.push((chunk.text.to_string(), chunk.diagnostic));
5269 }
5270 }
5271 chunks
5272 }
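
    // A small illustrative test for `relativize_path`; the cases below are a sketch
    // based on reading the implementation above, not taken from an existing test suite.
    #[test]
    fn test_relativize_path_examples() {
        // `path` below `base`: only the extra components remain.
        assert_eq!(
            relativize_path(Path::new("/a/b"), Path::new("/a/b/c/d")),
            PathBuf::from("c/d")
        );
        // Diverging paths walk up out of `base` before descending into `path`.
        assert_eq!(
            relativize_path(Path::new("/a/b/x"), Path::new("/a/b/c/d")),
            PathBuf::from("../c/d")
        );
    }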
5273
5274 #[gpui::test]
5275 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
5276 let dir = temp_tree(json!({
5277 "root": {
5278 "dir1": {},
5279 "dir2": {
5280 "dir3": {}
5281 }
5282 }
5283 }));
5284
5285 let project = Project::test(Arc::new(RealFs), cx);
5286 let (tree, _) = project
5287 .update(cx, |project, cx| {
5288 project.find_or_create_local_worktree(&dir.path(), true, cx)
5289 })
5290 .await
5291 .unwrap();
5292
5293 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5294 .await;
5295
5296 let cancel_flag = Default::default();
5297 let results = project
5298 .read_with(cx, |project, cx| {
5299 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
5300 })
5301 .await;
5302
5303 assert!(results.is_empty());
5304 }
5305
5306 #[gpui::test]
5307 async fn test_definition(cx: &mut gpui::TestAppContext) {
5308 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
5309 let language = Arc::new(Language::new(
5310 LanguageConfig {
5311 name: "Rust".into(),
5312 path_suffixes: vec!["rs".to_string()],
5313 language_server: Some(language_server_config),
5314 ..Default::default()
5315 },
5316 Some(tree_sitter_rust::language()),
5317 ));
5318
5319 let fs = FakeFs::new(cx.background());
5320 fs.insert_tree(
5321 "/dir",
5322 json!({
5323 "a.rs": "const fn a() { A }",
5324 "b.rs": "const y: i32 = crate::a()",
5325 }),
5326 )
5327 .await;
5328
5329 let project = Project::test(fs, cx);
5330 project.update(cx, |project, _| {
5331 Arc::get_mut(&mut project.languages).unwrap().add(language);
5332 });
5333
5334 let (tree, _) = project
5335 .update(cx, |project, cx| {
5336 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
5337 })
5338 .await
5339 .unwrap();
5340 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5341 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5342 .await;
5343
5344 let buffer = project
5345 .update(cx, |project, cx| {
5346 project.open_buffer(
5347 ProjectPath {
5348 worktree_id,
5349 path: Path::new("").into(),
5350 },
5351 cx,
5352 )
5353 })
5354 .await
5355 .unwrap();
5356
5357 let mut fake_server = fake_servers.next().await.unwrap();
5358 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
5359 let params = params.text_document_position_params;
5360 assert_eq!(
5361 params.text_document.uri.to_file_path().unwrap(),
5362 Path::new("/dir/b.rs"),
5363 );
5364 assert_eq!(params.position, lsp::Position::new(0, 22));
5365
5366 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
5367 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
5368 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
5369 )))
5370 });
5371
5372 let mut definitions = project
5373 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
5374 .await
5375 .unwrap();
5376
5377 assert_eq!(definitions.len(), 1);
5378 let definition = definitions.pop().unwrap();
5379 cx.update(|cx| {
5380 let target_buffer = definition.buffer.read(cx);
5381 assert_eq!(
5382 target_buffer
5383 .file()
5384 .unwrap()
5385 .as_local()
5386 .unwrap()
5387 .abs_path(cx),
5388 Path::new("/dir/a.rs"),
5389 );
5390 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
5391 assert_eq!(
5392 list_worktrees(&project, cx),
5393 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
5394 );
5395
5396 drop(definition);
5397 });
5398 cx.read(|cx| {
5399 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
5400 });
5401
5402 fn list_worktrees<'a>(
5403 project: &'a ModelHandle<Project>,
5404 cx: &'a AppContext,
5405 ) -> Vec<(&'a Path, bool)> {
5406 project
5407 .read(cx)
5408 .worktrees(cx)
5409 .map(|worktree| {
5410 let worktree = worktree.read(cx);
5411 (
5412 worktree.as_local().unwrap().abs_path().as_ref(),
5413 worktree.is_visible(),
5414 )
5415 })
5416 .collect::<Vec<_>>()
5417 }
5418 }
5419
5420 #[gpui::test]
5421 async fn test_save_file(cx: &mut gpui::TestAppContext) {
5422 let fs = FakeFs::new(cx.background());
5423 fs.insert_tree(
5424 "/dir",
5425 json!({
5426 "file1": "the old contents",
5427 }),
5428 )
5429 .await;
5430
5431 let project = Project::test(fs.clone(), cx);
5432 let worktree_id = project
5433 .update(cx, |p, cx| {
5434 p.find_or_create_local_worktree("/dir", true, cx)
5435 })
5436 .await
5437 .unwrap()
5438 .0
5439 .read_with(cx, |tree, _| tree.id());
5440
5441 let buffer = project
5442 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5443 .await
5444 .unwrap();
5445 buffer
5446 .update(cx, |buffer, cx| {
5447 assert_eq!(buffer.text(), "the old contents");
5448 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5449 buffer.save(cx)
5450 })
5451 .await
5452 .unwrap();
5453
5454 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5455 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5456 }
5457
5458 #[gpui::test]
5459 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
5460 let fs = FakeFs::new(cx.background());
5461 fs.insert_tree(
5462 "/dir",
5463 json!({
5464 "file1": "the old contents",
5465 }),
5466 )
5467 .await;
5468
5469 let project = Project::test(fs.clone(), cx);
5470 let worktree_id = project
5471 .update(cx, |p, cx| {
5472 p.find_or_create_local_worktree("/dir/file1", true, cx)
5473 })
5474 .await
5475 .unwrap()
5476 .0
5477 .read_with(cx, |tree, _| tree.id());
5478
5479 let buffer = project
5480 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
5481 .await
5482 .unwrap();
5483 buffer
5484 .update(cx, |buffer, cx| {
5485 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
5486 buffer.save(cx)
5487 })
5488 .await
5489 .unwrap();
5490
5491 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
5492 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
5493 }
5494
5495 #[gpui::test]
5496 async fn test_save_as(cx: &mut gpui::TestAppContext) {
5497 let fs = FakeFs::new(cx.background());
5498 fs.insert_tree("/dir", json!({})).await;
5499
5500 let project = Project::test(fs.clone(), cx);
5501 let (worktree, _) = project
5502 .update(cx, |project, cx| {
5503 project.find_or_create_local_worktree("/dir", true, cx)
5504 })
5505 .await
5506 .unwrap();
5507 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5508
5509 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
5510 buffer.update(cx, |buffer, cx| {
5511 buffer.edit([0..0], "abc", cx);
5512 assert!(buffer.is_dirty());
5513 assert!(!buffer.has_conflict());
5514 });
5515 project
5516 .update(cx, |project, cx| {
5517 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
5518 })
5519 .await
5520 .unwrap();
5521 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
5522 buffer.read_with(cx, |buffer, cx| {
5523 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
5524 assert!(!buffer.is_dirty());
5525 assert!(!buffer.has_conflict());
5526 });
5527
5528 let opened_buffer = project
5529 .update(cx, |project, cx| {
5530 project.open_buffer((worktree_id, "file1"), cx)
5531 })
5532 .await
5533 .unwrap();
5534 assert_eq!(opened_buffer, buffer);
5535 }
5536
5537 #[gpui::test(retries = 5)]
5538 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
5539 let dir = temp_tree(json!({
5540 "a": {
5541 "file1": "",
5542 "file2": "",
5543 "file3": "",
5544 },
5545 "b": {
5546 "c": {
5547 "file4": "",
5548 "file5": "",
5549 }
5550 }
5551 }));
5552
5553 let project = Project::test(Arc::new(RealFs), cx);
5554 let rpc = project.read_with(cx, |p, _| p.client.clone());
5555
5556 let (tree, _) = project
5557 .update(cx, |p, cx| {
5558 p.find_or_create_local_worktree(dir.path(), true, cx)
5559 })
5560 .await
5561 .unwrap();
5562 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5563
5564 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
5565 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
5566 async move { buffer.await.unwrap() }
5567 };
5568 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
5569 tree.read_with(cx, |tree, _| {
5570 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
5572 .id
5573 })
5574 };
5575
5576 let buffer2 = buffer_for_path("a/file2", cx).await;
5577 let buffer3 = buffer_for_path("a/file3", cx).await;
5578 let buffer4 = buffer_for_path("b/c/file4", cx).await;
5579 let buffer5 = buffer_for_path("b/c/file5", cx).await;
5580
5581 let file2_id = id_for_path("a/file2", &cx);
5582 let file3_id = id_for_path("a/file3", &cx);
5583 let file4_id = id_for_path("b/c/file4", &cx);
5584
5585 // Wait for the initial scan.
5586 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5587 .await;
5588
5589 // Create a remote copy of this worktree.
5590 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
5591 let (remote, load_task) = cx.update(|cx| {
5592 Worktree::remote(
5593 1,
5594 1,
5595 initial_snapshot.to_proto(&Default::default(), true),
5596 rpc.clone(),
5597 cx,
5598 )
5599 });
5600 load_task.await;
5601
5602 cx.read(|cx| {
5603 assert!(!buffer2.read(cx).is_dirty());
5604 assert!(!buffer3.read(cx).is_dirty());
5605 assert!(!buffer4.read(cx).is_dirty());
5606 assert!(!buffer5.read(cx).is_dirty());
5607 });
5608
5609 // Rename and delete files and directories.
5610 tree.flush_fs_events(&cx).await;
5611 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
5612 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
5613 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
5614 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
5615 tree.flush_fs_events(&cx).await;
5616
5617 let expected_paths = vec![
5618 "a",
5619 "a/file1",
5620 "a/file2.new",
5621 "b",
5622 "d",
5623 "d/file3",
5624 "d/file4",
5625 ];
5626
5627 cx.read(|app| {
5628 assert_eq!(
5629 tree.read(app)
5630 .paths()
5631 .map(|p| p.to_str().unwrap())
5632 .collect::<Vec<_>>(),
5633 expected_paths
5634 );
5635
5636 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
5637 assert_eq!(id_for_path("d/file3", &cx), file3_id);
5638 assert_eq!(id_for_path("d/file4", &cx), file4_id);
5639
5640 assert_eq!(
5641 buffer2.read(app).file().unwrap().path().as_ref(),
5642 Path::new("a/file2.new")
5643 );
5644 assert_eq!(
5645 buffer3.read(app).file().unwrap().path().as_ref(),
5646 Path::new("d/file3")
5647 );
5648 assert_eq!(
5649 buffer4.read(app).file().unwrap().path().as_ref(),
5650 Path::new("d/file4")
5651 );
5652 assert_eq!(
5653 buffer5.read(app).file().unwrap().path().as_ref(),
5654 Path::new("b/c/file5")
5655 );
5656
5657 assert!(!buffer2.read(app).file().unwrap().is_deleted());
5658 assert!(!buffer3.read(app).file().unwrap().is_deleted());
5659 assert!(!buffer4.read(app).file().unwrap().is_deleted());
5660 assert!(buffer5.read(app).file().unwrap().is_deleted());
5661 });
5662
5663 // Update the remote worktree. Check that it becomes consistent with the
5664 // local worktree.
5665 remote.update(cx, |remote, cx| {
5666 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
5667 &initial_snapshot,
5668 1,
5669 1,
5670 true,
5671 );
5672 remote
5673 .as_remote_mut()
5674 .unwrap()
5675 .snapshot
5676 .apply_remote_update(update_message)
5677 .unwrap();
5678
5679 assert_eq!(
5680 remote
5681 .paths()
5682 .map(|p| p.to_str().unwrap())
5683 .collect::<Vec<_>>(),
5684 expected_paths
5685 );
5686 });
5687 }
5688
5689 #[gpui::test]
5690 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
5691 let fs = FakeFs::new(cx.background());
5692 fs.insert_tree(
5693 "/the-dir",
5694 json!({
5695 "a.txt": "a-contents",
5696 "b.txt": "b-contents",
5697 }),
5698 )
5699 .await;
5700
5701 let project = Project::test(fs.clone(), cx);
5702 let worktree_id = project
5703 .update(cx, |p, cx| {
5704 p.find_or_create_local_worktree("/the-dir", true, cx)
5705 })
5706 .await
5707 .unwrap()
5708 .0
5709 .read_with(cx, |tree, _| tree.id());
5710
5711 // Spawn multiple tasks to open paths, repeating some paths.
5712 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
5713 (
5714 p.open_buffer((worktree_id, "a.txt"), cx),
5715 p.open_buffer((worktree_id, "b.txt"), cx),
5716 p.open_buffer((worktree_id, "a.txt"), cx),
5717 )
5718 });
5719
5720 let buffer_a_1 = buffer_a_1.await.unwrap();
5721 let buffer_a_2 = buffer_a_2.await.unwrap();
5722 let buffer_b = buffer_b.await.unwrap();
5723 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
5724 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
5725
5726 // There is only one buffer per path.
5727 let buffer_a_id = buffer_a_1.id();
5728 assert_eq!(buffer_a_2.id(), buffer_a_id);
5729
5730 // Open the same path again while it is still open.
5731 drop(buffer_a_1);
5732 let buffer_a_3 = project
5733 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
5734 .await
5735 .unwrap();
5736
5737 // There's still only one buffer per path.
5738 assert_eq!(buffer_a_3.id(), buffer_a_id);
5739 }
5740
5741 #[gpui::test]
5742 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
5743 use std::fs;
5744
5745 let dir = temp_tree(json!({
5746 "file1": "abc",
5747 "file2": "def",
5748 "file3": "ghi",
5749 }));
5750
5751 let project = Project::test(Arc::new(RealFs), cx);
5752 let (worktree, _) = project
5753 .update(cx, |p, cx| {
5754 p.find_or_create_local_worktree(dir.path(), true, cx)
5755 })
5756 .await
5757 .unwrap();
5758 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
5759
5760 worktree.flush_fs_events(&cx).await;
5761 worktree
5762 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
5763 .await;
5764
5765 let buffer1 = project
5766 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
5767 .await
5768 .unwrap();
5769 let events = Rc::new(RefCell::new(Vec::new()));
5770
5771 // initially, the buffer isn't dirty.
5772 buffer1.update(cx, |buffer, cx| {
5773 cx.subscribe(&buffer1, {
5774 let events = events.clone();
5775 move |_, _, event, _| match event {
5776 BufferEvent::Operation(_) => {}
5777 _ => events.borrow_mut().push(event.clone()),
5778 }
5779 })
5780 .detach();
5781
5782 assert!(!buffer.is_dirty());
5783 assert!(events.borrow().is_empty());
5784
5785 buffer.edit(vec![1..2], "", cx);
5786 });
5787
5788 // after the first edit, the buffer is dirty, and emits a dirtied event.
5789 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
5791 assert!(buffer.is_dirty());
5792 assert_eq!(
5793 *events.borrow(),
5794 &[language::Event::Edited, language::Event::Dirtied]
5795 );
5796 events.borrow_mut().clear();
5797 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
5798 });
5799
5800 // after saving, the buffer is not dirty, and emits a saved event.
5801 buffer1.update(cx, |buffer, cx| {
5802 assert!(!buffer.is_dirty());
5803 assert_eq!(*events.borrow(), &[language::Event::Saved]);
5804 events.borrow_mut().clear();
5805
5806 buffer.edit(vec![1..1], "B", cx);
5807 buffer.edit(vec![2..2], "D", cx);
5808 });
5809
5810 // after editing again, the buffer is dirty, and emits another dirty event.
5811 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
5813 assert!(buffer.is_dirty());
5814 assert_eq!(
5815 *events.borrow(),
5816 &[
5817 language::Event::Edited,
5818 language::Event::Dirtied,
5819 language::Event::Edited,
5820 ],
5821 );
5822 events.borrow_mut().clear();
5823
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
5826 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
5828 assert!(buffer.is_dirty());
5829 });
5830
5831 assert_eq!(*events.borrow(), &[language::Event::Edited]);
5832
5833 // When a file is deleted, the buffer is considered dirty.
5834 let events = Rc::new(RefCell::new(Vec::new()));
5835 let buffer2 = project
5836 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
5837 .await
5838 .unwrap();
5839 buffer2.update(cx, |_, cx| {
5840 cx.subscribe(&buffer2, {
5841 let events = events.clone();
5842 move |_, _, event, _| events.borrow_mut().push(event.clone())
5843 })
5844 .detach();
5845 });
5846
5847 fs::remove_file(dir.path().join("file2")).unwrap();
5848 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
5849 assert_eq!(
5850 *events.borrow(),
5851 &[language::Event::Dirtied, language::Event::FileHandleChanged]
5852 );
5853
5854 // When a file is already dirty when deleted, we don't emit a Dirtied event.
5855 let events = Rc::new(RefCell::new(Vec::new()));
5856 let buffer3 = project
5857 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
5858 .await
5859 .unwrap();
5860 buffer3.update(cx, |_, cx| {
5861 cx.subscribe(&buffer3, {
5862 let events = events.clone();
5863 move |_, _, event, _| events.borrow_mut().push(event.clone())
5864 })
5865 .detach();
5866 });
5867
5868 worktree.flush_fs_events(&cx).await;
5869 buffer3.update(cx, |buffer, cx| {
5870 buffer.edit(Some(0..0), "x", cx);
5871 });
5872 events.borrow_mut().clear();
5873 fs::remove_file(dir.path().join("file3")).unwrap();
5874 buffer3
5875 .condition(&cx, |_, _| !events.borrow().is_empty())
5876 .await;
5877 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
5878 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
5879 }
5880
5881 #[gpui::test]
5882 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
5883 use std::fs;
5884
5885 let initial_contents = "aaa\nbbbbb\nc\n";
5886 let dir = temp_tree(json!({ "the-file": initial_contents }));
5887
5888 let project = Project::test(Arc::new(RealFs), cx);
5889 let (worktree, _) = project
5890 .update(cx, |p, cx| {
5891 p.find_or_create_local_worktree(dir.path(), true, cx)
5892 })
5893 .await
5894 .unwrap();
5895 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
5896
5897 worktree
5898 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
5899 .await;
5900
5901 let abs_path = dir.path().join("the-file");
5902 let buffer = project
5903 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
5904 .await
5905 .unwrap();
5906
5907 // TODO
5908 // Add a cursor on each row.
5909 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
5910 // assert!(!buffer.is_dirty());
5911 // buffer.add_selection_set(
5912 // &(0..3)
5913 // .map(|row| Selection {
5914 // id: row as usize,
5915 // start: Point::new(row, 1),
5916 // end: Point::new(row, 1),
5917 // reversed: false,
5918 // goal: SelectionGoal::None,
5919 // })
5920 // .collect::<Vec<_>>(),
5921 // cx,
5922 // )
5923 // });
5924
5925 // Change the file on disk, adding two new lines of text, and removing
5926 // one line.
5927 buffer.read_with(cx, |buffer, _| {
5928 assert!(!buffer.is_dirty());
5929 assert!(!buffer.has_conflict());
5930 });
5931 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
5932 fs::write(&abs_path, new_contents).unwrap();
5933
5934 // Because the buffer was not modified, it is reloaded from disk. Its
5935 // contents are edited according to the diff between the old and new
5936 // file contents.
5937 buffer
5938 .condition(&cx, |buffer, _| buffer.text() == new_contents)
5939 .await;
5940
5941 buffer.update(cx, |buffer, _| {
5942 assert_eq!(buffer.text(), new_contents);
5943 assert!(!buffer.is_dirty());
5944 assert!(!buffer.has_conflict());
5945
5946 // TODO
5947 // let cursor_positions = buffer
5948 // .selection_set(selection_set_id)
5949 // .unwrap()
5950 // .selections::<Point>(&*buffer)
5951 // .map(|selection| {
5952 // assert_eq!(selection.start, selection.end);
5953 // selection.start
5954 // })
5955 // .collect::<Vec<_>>();
5956 // assert_eq!(
5957 // cursor_positions,
5958 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
5959 // );
5960 });
5961
        // Modify the buffer, making it dirty.
5963 buffer.update(cx, |buffer, cx| {
5964 buffer.edit(vec![0..0], " ", cx);
5965 assert!(buffer.is_dirty());
5966 assert!(!buffer.has_conflict());
5967 });
5968
5969 // Change the file on disk again, adding blank lines to the beginning.
5970 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
5971
5972 // Because the buffer is modified, it doesn't reload from disk, but is
5973 // marked as having a conflict.
5974 buffer
5975 .condition(&cx, |buffer, _| buffer.has_conflict())
5976 .await;
5977 }
5978
5979 #[gpui::test]
5980 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
5981 cx.foreground().forbid_parking();
5982
5983 let fs = FakeFs::new(cx.background());
5984 fs.insert_tree(
5985 "/the-dir",
5986 json!({
5987 "a.rs": "
5988 fn foo(mut v: Vec<usize>) {
5989 for x in &v {
5990 v.push(1);
5991 }
5992 }
5993 "
5994 .unindent(),
5995 }),
5996 )
5997 .await;
5998
5999 let project = Project::test(fs.clone(), cx);
6000 let (worktree, _) = project
6001 .update(cx, |p, cx| {
6002 p.find_or_create_local_worktree("/the-dir", true, cx)
6003 })
6004 .await
6005 .unwrap();
6006 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
6007
6008 let buffer = project
6009 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
6010 .await
6011 .unwrap();
6012
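        // Simulate a language server publishing diagnostics in which hints point back at their
        // primary diagnostics via `related_information`, so they should be grouped together.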
6013 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
6014 let message = lsp::PublishDiagnosticsParams {
6015 uri: buffer_uri.clone(),
6016 diagnostics: vec![
6017 lsp::Diagnostic {
6018 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6019 severity: Some(DiagnosticSeverity::WARNING),
6020 message: "error 1".to_string(),
6021 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6022 location: lsp::Location {
6023 uri: buffer_uri.clone(),
6024 range: lsp::Range::new(
6025 lsp::Position::new(1, 8),
6026 lsp::Position::new(1, 9),
6027 ),
6028 },
6029 message: "error 1 hint 1".to_string(),
6030 }]),
6031 ..Default::default()
6032 },
6033 lsp::Diagnostic {
6034 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
6035 severity: Some(DiagnosticSeverity::HINT),
6036 message: "error 1 hint 1".to_string(),
6037 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6038 location: lsp::Location {
6039 uri: buffer_uri.clone(),
6040 range: lsp::Range::new(
6041 lsp::Position::new(1, 8),
6042 lsp::Position::new(1, 9),
6043 ),
6044 },
6045 message: "original diagnostic".to_string(),
6046 }]),
6047 ..Default::default()
6048 },
6049 lsp::Diagnostic {
6050 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
6051 severity: Some(DiagnosticSeverity::ERROR),
6052 message: "error 2".to_string(),
6053 related_information: Some(vec![
6054 lsp::DiagnosticRelatedInformation {
6055 location: lsp::Location {
6056 uri: buffer_uri.clone(),
6057 range: lsp::Range::new(
6058 lsp::Position::new(1, 13),
6059 lsp::Position::new(1, 15),
6060 ),
6061 },
6062 message: "error 2 hint 1".to_string(),
6063 },
6064 lsp::DiagnosticRelatedInformation {
6065 location: lsp::Location {
6066 uri: buffer_uri.clone(),
6067 range: lsp::Range::new(
6068 lsp::Position::new(1, 13),
6069 lsp::Position::new(1, 15),
6070 ),
6071 },
6072 message: "error 2 hint 2".to_string(),
6073 },
6074 ]),
6075 ..Default::default()
6076 },
6077 lsp::Diagnostic {
6078 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6079 severity: Some(DiagnosticSeverity::HINT),
6080 message: "error 2 hint 1".to_string(),
6081 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6082 location: lsp::Location {
6083 uri: buffer_uri.clone(),
6084 range: lsp::Range::new(
6085 lsp::Position::new(2, 8),
6086 lsp::Position::new(2, 17),
6087 ),
6088 },
6089 message: "original diagnostic".to_string(),
6090 }]),
6091 ..Default::default()
6092 },
6093 lsp::Diagnostic {
6094 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
6095 severity: Some(DiagnosticSeverity::HINT),
6096 message: "error 2 hint 2".to_string(),
6097 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
6098 location: lsp::Location {
6099 uri: buffer_uri.clone(),
6100 range: lsp::Range::new(
6101 lsp::Position::new(2, 8),
6102 lsp::Position::new(2, 17),
6103 ),
6104 },
6105 message: "original diagnostic".to_string(),
6106 }]),
6107 ..Default::default()
6108 },
6109 ],
6110 version: None,
6111 };
6112
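        // Apply the published diagnostics to the project, which groups them and attaches them to the buffer.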
6113 project
6114 .update(cx, |p, cx| {
6115 p.update_diagnostics(message, &Default::default(), cx)
6116 })
6117 .unwrap();
6118 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6119
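        // Diagnostics are returned in position order, and each hint carries the group id of its primary diagnostic.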
6120 assert_eq!(
6121 buffer
6122 .diagnostics_in_range::<_, Point>(0..buffer.len())
6123 .collect::<Vec<_>>(),
6124 &[
6125 DiagnosticEntry {
6126 range: Point::new(1, 8)..Point::new(1, 9),
6127 diagnostic: Diagnostic {
6128 severity: DiagnosticSeverity::WARNING,
6129 message: "error 1".to_string(),
6130 group_id: 0,
6131 is_primary: true,
6132 ..Default::default()
6133 }
6134 },
6135 DiagnosticEntry {
6136 range: Point::new(1, 8)..Point::new(1, 9),
6137 diagnostic: Diagnostic {
6138 severity: DiagnosticSeverity::HINT,
6139 message: "error 1 hint 1".to_string(),
6140 group_id: 0,
6141 is_primary: false,
6142 ..Default::default()
6143 }
6144 },
6145 DiagnosticEntry {
6146 range: Point::new(1, 13)..Point::new(1, 15),
6147 diagnostic: Diagnostic {
6148 severity: DiagnosticSeverity::HINT,
6149 message: "error 2 hint 1".to_string(),
6150 group_id: 1,
6151 is_primary: false,
6152 ..Default::default()
6153 }
6154 },
6155 DiagnosticEntry {
6156 range: Point::new(1, 13)..Point::new(1, 15),
6157 diagnostic: Diagnostic {
6158 severity: DiagnosticSeverity::HINT,
6159 message: "error 2 hint 2".to_string(),
6160 group_id: 1,
6161 is_primary: false,
6162 ..Default::default()
6163 }
6164 },
6165 DiagnosticEntry {
6166 range: Point::new(2, 8)..Point::new(2, 17),
6167 diagnostic: Diagnostic {
6168 severity: DiagnosticSeverity::ERROR,
6169 message: "error 2".to_string(),
6170 group_id: 1,
6171 is_primary: true,
6172 ..Default::default()
6173 }
6174 }
6175 ]
6176 );
6177
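        // Each group can also be fetched individually by its group id.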
6178 assert_eq!(
6179 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
6180 &[
6181 DiagnosticEntry {
6182 range: Point::new(1, 8)..Point::new(1, 9),
6183 diagnostic: Diagnostic {
6184 severity: DiagnosticSeverity::WARNING,
6185 message: "error 1".to_string(),
6186 group_id: 0,
6187 is_primary: true,
6188 ..Default::default()
6189 }
6190 },
6191 DiagnosticEntry {
6192 range: Point::new(1, 8)..Point::new(1, 9),
6193 diagnostic: Diagnostic {
6194 severity: DiagnosticSeverity::HINT,
6195 message: "error 1 hint 1".to_string(),
6196 group_id: 0,
6197 is_primary: false,
6198 ..Default::default()
6199 }
6200 },
6201 ]
6202 );
6203 assert_eq!(
6204 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
6205 &[
6206 DiagnosticEntry {
6207 range: Point::new(1, 13)..Point::new(1, 15),
6208 diagnostic: Diagnostic {
6209 severity: DiagnosticSeverity::HINT,
6210 message: "error 2 hint 1".to_string(),
6211 group_id: 1,
6212 is_primary: false,
6213 ..Default::default()
6214 }
6215 },
6216 DiagnosticEntry {
6217 range: Point::new(1, 13)..Point::new(1, 15),
6218 diagnostic: Diagnostic {
6219 severity: DiagnosticSeverity::HINT,
6220 message: "error 2 hint 2".to_string(),
6221 group_id: 1,
6222 is_primary: false,
6223 ..Default::default()
6224 }
6225 },
6226 DiagnosticEntry {
6227 range: Point::new(2, 8)..Point::new(2, 17),
6228 diagnostic: Diagnostic {
6229 severity: DiagnosticSeverity::ERROR,
6230 message: "error 2".to_string(),
6231 group_id: 1,
6232 is_primary: true,
6233 ..Default::default()
6234 }
6235 }
6236 ]
6237 );
6238 }
6239
6240 #[gpui::test]
6241 async fn test_rename(cx: &mut gpui::TestAppContext) {
6242 cx.foreground().forbid_parking();
6243
6244 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
6245 let language = Arc::new(Language::new(
6246 LanguageConfig {
6247 name: "Rust".into(),
6248 path_suffixes: vec!["rs".to_string()],
6249 language_server: Some(language_server_config),
6250 ..Default::default()
6251 },
6252 Some(tree_sitter_rust::language()),
6253 ));
6254
6255 let fs = FakeFs::new(cx.background());
6256 fs.insert_tree(
6257 "/dir",
6258 json!({
6259 "one.rs": "const ONE: usize = 1;",
6260 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
6261 }),
6262 )
6263 .await;
6264
6265 let project = Project::test(fs.clone(), cx);
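        // Register the Rust language so that opening a `.rs` file starts the fake language server.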
6266 project.update(cx, |project, _| {
6267 Arc::get_mut(&mut project.languages).unwrap().add(language);
6268 });
6269
6270 let (tree, _) = project
6271 .update(cx, |project, cx| {
6272 project.find_or_create_local_worktree("/dir", true, cx)
6273 })
6274 .await
6275 .unwrap();
6276 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6277 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6278 .await;
6279
6280 let buffer = project
6281 .update(cx, |project, cx| {
6282 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
6283 })
6284 .await
6285 .unwrap();
6286
6287 let mut fake_server = fake_servers.next().await.unwrap();
6288
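        // Prepare a rename at offset 7 (inside `ONE`); the fake server responds with the renameable range.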
6289 let response = project.update(cx, |project, cx| {
6290 project.prepare_rename(buffer.clone(), 7, cx)
6291 });
6292 fake_server
6293 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
6294 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
6295 assert_eq!(params.position, lsp::Position::new(0, 7));
6296 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
6297 lsp::Position::new(0, 6),
6298 lsp::Position::new(0, 9),
6299 )))
6300 })
6301 .next()
6302 .await
6303 .unwrap();
6304 let range = response.await.unwrap().unwrap();
6305 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
6306 assert_eq!(range, 6..9);
6307
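        // Perform the rename; the fake server returns a workspace edit spanning both `one.rs` and `two.rs`.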
6308 let response = project.update(cx, |project, cx| {
6309 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
6310 });
6311 fake_server
6312 .handle_request::<lsp::request::Rename, _>(|params, _| {
6313 assert_eq!(
6314 params.text_document_position.text_document.uri.as_str(),
6315 "file:///dir/one.rs"
6316 );
6317 assert_eq!(
6318 params.text_document_position.position,
6319 lsp::Position::new(0, 7)
6320 );
6321 assert_eq!(params.new_name, "THREE");
6322 Some(lsp::WorkspaceEdit {
6323 changes: Some(
6324 [
6325 (
6326 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
6327 vec![lsp::TextEdit::new(
6328 lsp::Range::new(
6329 lsp::Position::new(0, 6),
6330 lsp::Position::new(0, 9),
6331 ),
6332 "THREE".to_string(),
6333 )],
6334 ),
6335 (
6336 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
6337 vec![
6338 lsp::TextEdit::new(
6339 lsp::Range::new(
6340 lsp::Position::new(0, 24),
6341 lsp::Position::new(0, 27),
6342 ),
6343 "THREE".to_string(),
6344 ),
6345 lsp::TextEdit::new(
6346 lsp::Range::new(
6347 lsp::Position::new(0, 35),
6348 lsp::Position::new(0, 38),
6349 ),
6350 "THREE".to_string(),
6351 ),
6352 ],
6353 ),
6354 ]
6355 .into_iter()
6356 .collect(),
6357 ),
6358 ..Default::default()
6359 })
6360 })
6361 .next()
6362 .await
6363 .unwrap();
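        // The transaction contains one entry per buffer edited by the workspace edit.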
6364 let mut transaction = response.await.unwrap().0;
6365 assert_eq!(transaction.len(), 2);
6366 assert_eq!(
6367 transaction
6368 .remove_entry(&buffer)
6369 .unwrap()
6370 .0
6371 .read_with(cx, |buffer, _| buffer.text()),
6372 "const THREE: usize = 1;"
6373 );
6374 assert_eq!(
6375 transaction
6376 .into_keys()
6377 .next()
6378 .unwrap()
6379 .read_with(cx, |buffer, _| buffer.text()),
6380 "const TWO: usize = one::THREE + one::THREE;"
6381 );
6382 }
6383
6384 #[gpui::test]
6385 async fn test_search(cx: &mut gpui::TestAppContext) {
6386 let fs = FakeFs::new(cx.background());
6387 fs.insert_tree(
6388 "/dir",
6389 json!({
6390 "one.rs": "const ONE: usize = 1;",
6391 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
6392 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
6393 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
6394 }),
6395 )
6396 .await;
6397 let project = Project::test(fs.clone(), cx);
6398 let (tree, _) = project
6399 .update(cx, |project, cx| {
6400 project.find_or_create_local_worktree("/dir", true, cx)
6401 })
6402 .await
6403 .unwrap();
6404 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
6405 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
6406 .await;
6407
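        // Searching the project finds matches in files that aren't open in any buffer.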
6408 assert_eq!(
6409 search(&project, SearchQuery::text("TWO", false, true), cx)
6410 .await
6411 .unwrap(),
6412 HashMap::from_iter([
6413 ("two.rs".to_string(), vec![6..9]),
6414 ("three.rs".to_string(), vec![37..40])
6415 ])
6416 );
6417
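        // Open one of the files and edit its buffer without saving; subsequent searches should
        // reflect the unsaved, in-memory contents.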
6418 let buffer_4 = project
6419 .update(cx, |project, cx| {
6420 project.open_buffer((worktree_id, "four.rs"), cx)
6421 })
6422 .await
6423 .unwrap();
6424 buffer_4.update(cx, |buffer, cx| {
6425 buffer.edit([20..28, 31..43], "two::TWO", cx);
6426 });
6427
6428 assert_eq!(
6429 search(&project, SearchQuery::text("TWO", false, true), cx)
6430 .await
6431 .unwrap(),
6432 HashMap::from_iter([
6433 ("two.rs".to_string(), vec![6..9]),
6434 ("three.rs".to_string(), vec![37..40]),
6435 ("four.rs".to_string(), vec![25..28, 36..39])
6436 ])
6437 );
6438
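        // Runs a project-wide search and flattens the results into (path, offset ranges) for comparison.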
6439 async fn search(
6440 project: &ModelHandle<Project>,
6441 query: SearchQuery,
6442 cx: &mut gpui::TestAppContext,
6443 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
6444 let results = project
6445 .update(cx, |project, cx| project.search(query, cx))
6446 .await?;
6447
6448 Ok(results
6449 .into_iter()
6450 .map(|(buffer, ranges)| {
6451 buffer.read_with(cx, |buffer, _| {
6452 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
6453 let ranges = ranges
6454 .into_iter()
6455 .map(|range| range.to_offset(buffer))
6456 .collect::<Vec<_>>();
6457 (path, ranges)
6458 })
6459 })
6460 .collect())
6461 }
6462 }
6463}