1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 loading_local_worktrees:
68 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
69 opened_buffers: HashMap<u64, OpenBuffer>,
70 nonce: u128,
71}
72
73enum OpenBuffer {
74 Strong(ModelHandle<Buffer>),
75 Weak(WeakModelHandle<Buffer>),
76 Loading(Vec<Operation>),
77}
78
79enum WorktreeHandle {
80 Strong(ModelHandle<Worktree>),
81 Weak(WeakModelHandle<Worktree>),
82}
83
84enum ProjectClientState {
85 Local {
86 is_shared: bool,
87 remote_id_tx: watch::Sender<Option<u64>>,
88 remote_id_rx: watch::Receiver<Option<u64>>,
89 _maintain_remote_id_task: Task<Option<()>>,
90 },
91 Remote {
92 sharing_has_stopped: bool,
93 remote_id: u64,
94 replica_id: ReplicaId,
95 _detect_unshare_task: Task<Option<()>>,
96 },
97}
98
99#[derive(Clone, Debug)]
100pub struct Collaborator {
101 pub user: Arc<User>,
102 pub peer_id: PeerId,
103 pub replica_id: ReplicaId,
104}
105
106#[derive(Clone, Debug, PartialEq)]
107pub enum Event {
108 ActiveEntryChanged(Option<ProjectEntry>),
109 WorktreeRemoved(WorktreeId),
110 DiskBasedDiagnosticsStarted,
111 DiskBasedDiagnosticsUpdated,
112 DiskBasedDiagnosticsFinished,
113 DiagnosticsUpdated(ProjectPath),
114}
115
116#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
117pub struct ProjectPath {
118 pub worktree_id: WorktreeId,
119 pub path: Arc<Path>,
120}
121
122#[derive(Clone, Debug, Default, PartialEq)]
123pub struct DiagnosticSummary {
124 pub error_count: usize,
125 pub warning_count: usize,
126 pub info_count: usize,
127 pub hint_count: usize,
128}
129
130#[derive(Debug)]
131pub struct Location {
132 pub buffer: ModelHandle<Buffer>,
133 pub range: Range<language::Anchor>,
134}
135
136#[derive(Debug)]
137pub struct DocumentHighlight {
138 pub range: Range<language::Anchor>,
139 pub kind: DocumentHighlightKind,
140}
141
142#[derive(Clone, Debug)]
143pub struct Symbol {
144 pub source_worktree_id: WorktreeId,
145 pub worktree_id: WorktreeId,
146 pub language_name: String,
147 pub path: PathBuf,
148 pub label: CodeLabel,
149 pub name: String,
150 pub kind: lsp::SymbolKind,
151 pub range: Range<PointUtf16>,
152 pub signature: [u8; 32],
153}
154
155#[derive(Default)]
156pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
157
158impl DiagnosticSummary {
159 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
160 let mut this = Self {
161 error_count: 0,
162 warning_count: 0,
163 info_count: 0,
164 hint_count: 0,
165 };
166
167 for entry in diagnostics {
168 if entry.diagnostic.is_primary {
169 match entry.diagnostic.severity {
170 DiagnosticSeverity::ERROR => this.error_count += 1,
171 DiagnosticSeverity::WARNING => this.warning_count += 1,
172 DiagnosticSeverity::INFORMATION => this.info_count += 1,
173 DiagnosticSeverity::HINT => this.hint_count += 1,
174 _ => {}
175 }
176 }
177 }
178
179 this
180 }
181
182 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
183 proto::DiagnosticSummary {
184 path: path.to_string_lossy().to_string(),
185 error_count: self.error_count as u32,
186 warning_count: self.warning_count as u32,
187 info_count: self.info_count as u32,
188 hint_count: self.hint_count as u32,
189 }
190 }
191}
192
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
194pub struct ProjectEntry {
195 pub worktree_id: WorktreeId,
196 pub entry_id: usize,
197}
198
199impl Project {
200 pub fn init(client: &Arc<Client>) {
201 client.add_entity_message_handler(Self::handle_add_collaborator);
202 client.add_entity_message_handler(Self::handle_buffer_reloaded);
203 client.add_entity_message_handler(Self::handle_buffer_saved);
204 client.add_entity_message_handler(Self::handle_close_buffer);
205 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
206 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
207 client.add_entity_message_handler(Self::handle_remove_collaborator);
208 client.add_entity_message_handler(Self::handle_register_worktree);
209 client.add_entity_message_handler(Self::handle_unregister_worktree);
210 client.add_entity_message_handler(Self::handle_unshare_project);
211 client.add_entity_message_handler(Self::handle_update_buffer_file);
212 client.add_entity_message_handler(Self::handle_update_buffer);
213 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
214 client.add_entity_message_handler(Self::handle_update_worktree);
215 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
216 client.add_entity_request_handler(Self::handle_apply_code_action);
217 client.add_entity_request_handler(Self::handle_format_buffers);
218 client.add_entity_request_handler(Self::handle_get_code_actions);
219 client.add_entity_request_handler(Self::handle_get_completions);
220 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
224 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
225 client.add_entity_request_handler(Self::handle_search_project);
226 client.add_entity_request_handler(Self::handle_get_project_symbols);
227 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
228 client.add_entity_request_handler(Self::handle_open_buffer);
229 client.add_entity_request_handler(Self::handle_save_buffer);
230 }
231
232 pub fn local(
233 client: Arc<Client>,
234 user_store: ModelHandle<UserStore>,
235 languages: Arc<LanguageRegistry>,
236 fs: Arc<dyn Fs>,
237 cx: &mut MutableAppContext,
238 ) -> ModelHandle<Self> {
239 cx.add_model(|cx: &mut ModelContext<Self>| {
240 let (remote_id_tx, remote_id_rx) = watch::channel();
241 let _maintain_remote_id_task = cx.spawn_weak({
242 let rpc = client.clone();
243 move |this, mut cx| {
244 async move {
245 let mut status = rpc.status();
246 while let Some(status) = status.next().await {
247 if let Some(this) = this.upgrade(&cx) {
248 let remote_id = if status.is_connected() {
249 let response = rpc.request(proto::RegisterProject {}).await?;
250 Some(response.project_id)
251 } else {
252 None
253 };
254
255 if let Some(project_id) = remote_id {
256 let mut registrations = Vec::new();
257 this.update(&mut cx, |this, cx| {
258 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
259 registrations.push(worktree.update(
260 cx,
261 |worktree, cx| {
262 let worktree = worktree.as_local_mut().unwrap();
263 worktree.register(project_id, cx)
264 },
265 ));
266 }
267 });
268 for registration in registrations {
269 registration.await?;
270 }
271 }
272 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
273 }
274 }
275 Ok(())
276 }
277 .log_err()
278 }
279 });
280
281 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
282 Self {
283 worktrees: Default::default(),
284 collaborators: Default::default(),
285 opened_buffers: Default::default(),
286 shared_buffers: Default::default(),
287 loading_buffers: Default::default(),
288 loading_local_worktrees: Default::default(),
289 client_state: ProjectClientState::Local {
290 is_shared: false,
291 remote_id_tx,
292 remote_id_rx,
293 _maintain_remote_id_task,
294 },
295 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
296 subscriptions: Vec::new(),
297 active_entry: None,
298 languages,
299 client,
300 user_store,
301 fs,
302 language_servers_with_diagnostics_running: 0,
303 language_servers: Default::default(),
304 started_language_servers: Default::default(),
305 nonce: StdRng::from_entropy().gen(),
306 }
307 })
308 }
309
310 pub async fn remote(
311 remote_id: u64,
312 client: Arc<Client>,
313 user_store: ModelHandle<UserStore>,
314 languages: Arc<LanguageRegistry>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncAppContext,
317 ) -> Result<ModelHandle<Self>> {
318 client.authenticate_and_connect(&cx).await?;
319
320 let response = client
321 .request(proto::JoinProject {
322 project_id: remote_id,
323 })
324 .await?;
325
326 let replica_id = response.replica_id as ReplicaId;
327
328 let mut worktrees = Vec::new();
329 for worktree in response.worktrees {
330 let (worktree, load_task) = cx
331 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
332 worktrees.push(worktree);
333 load_task.detach();
334 }
335
336 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
337 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
338 let mut this = Self {
339 worktrees: Vec::new(),
340 loading_buffers: Default::default(),
341 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
342 shared_buffers: Default::default(),
343 loading_local_worktrees: Default::default(),
344 active_entry: None,
345 collaborators: Default::default(),
346 languages,
347 user_store: user_store.clone(),
348 fs,
349 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
350 client: client.clone(),
351 client_state: ProjectClientState::Remote {
352 sharing_has_stopped: false,
353 remote_id,
354 replica_id,
355 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
356 async move {
357 let mut status = client.status();
358 let is_connected =
359 status.next().await.map_or(false, |s| s.is_connected());
360 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
361 if !is_connected || status.next().await.is_some() {
362 if let Some(this) = this.upgrade(&cx) {
363 this.update(&mut cx, |this, cx| this.project_unshared(cx))
364 }
365 }
366 Ok(())
367 }
368 .log_err()
369 }),
370 },
371 language_servers_with_diagnostics_running: 0,
372 language_servers: Default::default(),
373 started_language_servers: Default::default(),
374 opened_buffers: Default::default(),
375 nonce: StdRng::from_entropy().gen(),
376 };
377 for worktree in worktrees {
378 this.add_worktree(&worktree, cx);
379 }
380 this
381 });
382
383 let user_ids = response
384 .collaborators
385 .iter()
386 .map(|peer| peer.user_id)
387 .collect();
388 user_store
389 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
390 .await?;
391 let mut collaborators = HashMap::default();
392 for message in response.collaborators {
393 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
394 collaborators.insert(collaborator.peer_id, collaborator);
395 }
396
397 this.update(cx, |this, _| {
398 this.collaborators = collaborators;
399 });
400
401 Ok(this)
402 }
403
404 #[cfg(any(test, feature = "test-support"))]
405 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
406 let languages = Arc::new(LanguageRegistry::new());
407 let http_client = client::test::FakeHttpClient::with_404_response();
408 let client = client::Client::new(http_client.clone());
409 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
410 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
411 }
412
413 #[cfg(any(test, feature = "test-support"))]
414 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
415 self.opened_buffers
416 .get(&remote_id)
417 .and_then(|buffer| buffer.upgrade(cx))
418 }
419
420 #[cfg(any(test, feature = "test-support"))]
421 pub fn languages(&self) -> &Arc<LanguageRegistry> {
422 &self.languages
423 }
424
425 #[cfg(any(test, feature = "test-support"))]
426 pub fn check_invariants(&self, cx: &AppContext) {
427 if self.is_local() {
428 let mut worktree_root_paths = HashMap::default();
429 for worktree in self.worktrees(cx) {
430 let worktree = worktree.read(cx);
431 let abs_path = worktree.as_local().unwrap().abs_path().clone();
432 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
433 assert_eq!(
434 prev_worktree_id,
435 None,
436 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
437 abs_path,
438 worktree.id(),
439 prev_worktree_id
440 )
441 }
442 } else {
443 let replica_id = self.replica_id();
444 for buffer in self.opened_buffers.values() {
445 if let Some(buffer) = buffer.upgrade(cx) {
446 let buffer = buffer.read(cx);
447 assert_eq!(
448 buffer.deferred_ops_len(),
449 0,
450 "replica {}, buffer {} has deferred operations",
451 replica_id,
452 buffer.remote_id()
453 );
454 }
455 }
456 }
457 }
458
459 #[cfg(any(test, feature = "test-support"))]
460 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
461 let path = path.into();
462 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
463 self.opened_buffers.iter().any(|(_, buffer)| {
464 if let Some(buffer) = buffer.upgrade(cx) {
465 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
466 if file.worktree == worktree && file.path() == &path.path {
467 return true;
468 }
469 }
470 }
471 false
472 })
473 } else {
474 false
475 }
476 }
477
478 pub fn fs(&self) -> &Arc<dyn Fs> {
479 &self.fs
480 }
481
482 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
483 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
484 *remote_id_tx.borrow_mut() = remote_id;
485 }
486
487 self.subscriptions.clear();
488 if let Some(remote_id) = remote_id {
489 self.subscriptions
490 .push(self.client.add_model_for_remote_entity(remote_id, cx));
491 }
492 }
493
494 pub fn remote_id(&self) -> Option<u64> {
495 match &self.client_state {
496 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
497 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
498 }
499 }
500
501 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
502 let mut id = None;
503 let mut watch = None;
504 match &self.client_state {
505 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
506 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
507 }
508
509 async move {
510 if let Some(id) = id {
511 return id;
512 }
513 let mut watch = watch.unwrap();
514 loop {
515 let id = *watch.borrow();
516 if let Some(id) = id {
517 return id;
518 }
519 watch.next().await;
520 }
521 }
522 }
523
524 pub fn replica_id(&self) -> ReplicaId {
525 match &self.client_state {
526 ProjectClientState::Local { .. } => 0,
527 ProjectClientState::Remote { replica_id, .. } => *replica_id,
528 }
529 }
530
531 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
532 &self.collaborators
533 }
534
535 pub fn worktrees<'a>(
536 &'a self,
537 cx: &'a AppContext,
538 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
539 self.worktrees
540 .iter()
541 .filter_map(move |worktree| worktree.upgrade(cx))
542 }
543
544 pub fn visible_worktrees<'a>(
545 &'a self,
546 cx: &'a AppContext,
547 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
548 self.worktrees.iter().filter_map(|worktree| {
549 worktree.upgrade(cx).and_then(|worktree| {
550 if worktree.read(cx).is_visible() {
551 Some(worktree)
552 } else {
553 None
554 }
555 })
556 })
557 }
558
559 pub fn worktree_for_id(
560 &self,
561 id: WorktreeId,
562 cx: &AppContext,
563 ) -> Option<ModelHandle<Worktree>> {
564 self.worktrees(cx)
565 .find(|worktree| worktree.read(cx).id() == id)
566 }
567
568 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
569 let rpc = self.client.clone();
570 cx.spawn(|this, mut cx| async move {
571 let project_id = this.update(&mut cx, |this, cx| {
572 if let ProjectClientState::Local {
573 is_shared,
574 remote_id_rx,
575 ..
576 } = &mut this.client_state
577 {
578 *is_shared = true;
579
580 for open_buffer in this.opened_buffers.values_mut() {
581 match open_buffer {
582 OpenBuffer::Strong(_) => {}
583 OpenBuffer::Weak(buffer) => {
584 if let Some(buffer) = buffer.upgrade(cx) {
585 *open_buffer = OpenBuffer::Strong(buffer);
586 }
587 }
588 OpenBuffer::Loading(_) => unreachable!(),
589 }
590 }
591
592 for worktree_handle in this.worktrees.iter_mut() {
593 match worktree_handle {
594 WorktreeHandle::Strong(_) => {}
595 WorktreeHandle::Weak(worktree) => {
596 if let Some(worktree) = worktree.upgrade(cx) {
597 *worktree_handle = WorktreeHandle::Strong(worktree);
598 }
599 }
600 }
601 }
602
603 remote_id_rx
604 .borrow()
605 .ok_or_else(|| anyhow!("no project id"))
606 } else {
607 Err(anyhow!("can't share a remote project"))
608 }
609 })?;
610
611 rpc.request(proto::ShareProject { project_id }).await?;
612
613 let mut tasks = Vec::new();
614 this.update(&mut cx, |this, cx| {
615 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
616 worktree.update(cx, |worktree, cx| {
617 let worktree = worktree.as_local_mut().unwrap();
618 tasks.push(worktree.share(project_id, cx));
619 });
620 }
621 });
622 for task in tasks {
623 task.await?;
624 }
625 this.update(&mut cx, |_, cx| cx.notify());
626 Ok(())
627 })
628 }
629
630 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
631 let rpc = self.client.clone();
632 cx.spawn(|this, mut cx| async move {
633 let project_id = this.update(&mut cx, |this, cx| {
634 if let ProjectClientState::Local {
635 is_shared,
636 remote_id_rx,
637 ..
638 } = &mut this.client_state
639 {
640 *is_shared = false;
641
642 for open_buffer in this.opened_buffers.values_mut() {
643 match open_buffer {
644 OpenBuffer::Strong(buffer) => {
645 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
646 }
647 _ => {}
648 }
649 }
650
651 for worktree_handle in this.worktrees.iter_mut() {
652 match worktree_handle {
653 WorktreeHandle::Strong(worktree) => {
654 if !worktree.read(cx).is_visible() {
655 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
656 }
657 }
658 _ => {}
659 }
660 }
661
662 remote_id_rx
663 .borrow()
664 .ok_or_else(|| anyhow!("no project id"))
665 } else {
666 Err(anyhow!("can't share a remote project"))
667 }
668 })?;
669
670 rpc.send(proto::UnshareProject { project_id })?;
671 this.update(&mut cx, |this, cx| {
672 this.collaborators.clear();
673 this.shared_buffers.clear();
674 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
675 worktree.update(cx, |worktree, _| {
676 worktree.as_local_mut().unwrap().unshare();
677 });
678 }
679 cx.notify()
680 });
681 Ok(())
682 })
683 }
684
685 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
686 if let ProjectClientState::Remote {
687 sharing_has_stopped,
688 ..
689 } = &mut self.client_state
690 {
691 *sharing_has_stopped = true;
692 self.collaborators.clear();
693 cx.notify();
694 }
695 }
696
697 pub fn is_read_only(&self) -> bool {
698 match &self.client_state {
699 ProjectClientState::Local { .. } => false,
700 ProjectClientState::Remote {
701 sharing_has_stopped,
702 ..
703 } => *sharing_has_stopped,
704 }
705 }
706
707 pub fn is_local(&self) -> bool {
708 match &self.client_state {
709 ProjectClientState::Local { .. } => true,
710 ProjectClientState::Remote { .. } => false,
711 }
712 }
713
714 pub fn is_remote(&self) -> bool {
715 !self.is_local()
716 }
717
718 pub fn open_buffer(
719 &mut self,
720 path: impl Into<ProjectPath>,
721 cx: &mut ModelContext<Self>,
722 ) -> Task<Result<ModelHandle<Buffer>>> {
723 let project_path = path.into();
724 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
725 worktree
726 } else {
727 return Task::ready(Err(anyhow!("no such worktree")));
728 };
729
730 // If there is already a buffer for the given path, then return it.
731 let existing_buffer = self.get_open_buffer(&project_path, cx);
732 if let Some(existing_buffer) = existing_buffer {
733 return Task::ready(Ok(existing_buffer));
734 }
735
736 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
737 // If the given path is already being loaded, then wait for that existing
738 // task to complete and return the same buffer.
739 hash_map::Entry::Occupied(e) => e.get().clone(),
740
741 // Otherwise, record the fact that this path is now being loaded.
742 hash_map::Entry::Vacant(entry) => {
743 let (mut tx, rx) = postage::watch::channel();
744 entry.insert(rx.clone());
745
746 let load_buffer = if worktree.read(cx).is_local() {
747 self.open_local_buffer(&project_path.path, &worktree, cx)
748 } else {
749 self.open_remote_buffer(&project_path.path, &worktree, cx)
750 };
751
752 cx.spawn(move |this, mut cx| async move {
753 let load_result = load_buffer.await;
754 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
755 // Record the fact that the buffer is no longer loading.
756 this.loading_buffers.remove(&project_path);
757 let buffer = load_result.map_err(Arc::new)?;
758 Ok(buffer)
759 }));
760 })
761 .detach();
762 rx
763 }
764 };
765
766 cx.foreground().spawn(async move {
767 loop {
768 if let Some(result) = loading_watch.borrow().as_ref() {
769 match result {
770 Ok(buffer) => return Ok(buffer.clone()),
771 Err(error) => return Err(anyhow!("{}", error)),
772 }
773 }
774 loading_watch.next().await;
775 }
776 })
777 }
778
779 fn open_local_buffer(
780 &mut self,
781 path: &Arc<Path>,
782 worktree: &ModelHandle<Worktree>,
783 cx: &mut ModelContext<Self>,
784 ) -> Task<Result<ModelHandle<Buffer>>> {
785 let load_buffer = worktree.update(cx, |worktree, cx| {
786 let worktree = worktree.as_local_mut().unwrap();
787 worktree.load_buffer(path, cx)
788 });
789 let worktree = worktree.downgrade();
790 cx.spawn(|this, mut cx| async move {
791 let buffer = load_buffer.await?;
792 let worktree = worktree
793 .upgrade(&cx)
794 .ok_or_else(|| anyhow!("worktree was removed"))?;
795 this.update(&mut cx, |this, cx| {
796 this.register_buffer(&buffer, Some(&worktree), cx)
797 })?;
798 Ok(buffer)
799 })
800 }
801
802 fn open_remote_buffer(
803 &mut self,
804 path: &Arc<Path>,
805 worktree: &ModelHandle<Worktree>,
806 cx: &mut ModelContext<Self>,
807 ) -> Task<Result<ModelHandle<Buffer>>> {
808 let rpc = self.client.clone();
809 let project_id = self.remote_id().unwrap();
810 let remote_worktree_id = worktree.read(cx).id();
811 let path = path.clone();
812 let path_string = path.to_string_lossy().to_string();
813 cx.spawn(|this, mut cx| async move {
814 let response = rpc
815 .request(proto::OpenBuffer {
816 project_id,
817 worktree_id: remote_worktree_id.to_proto(),
818 path: path_string,
819 })
820 .await?;
821 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
822 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
823 .await
824 })
825 }
826
827 fn open_local_buffer_via_lsp(
828 &mut self,
829 abs_path: lsp::Url,
830 lang_name: String,
831 lang_server: Arc<LanguageServer>,
832 cx: &mut ModelContext<Self>,
833 ) -> Task<Result<ModelHandle<Buffer>>> {
834 cx.spawn(|this, mut cx| async move {
835 let abs_path = abs_path
836 .to_file_path()
837 .map_err(|_| anyhow!("can't convert URI to path"))?;
838 let (worktree, relative_path) = if let Some(result) =
839 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
840 {
841 result
842 } else {
843 let worktree = this
844 .update(&mut cx, |this, cx| {
845 this.create_local_worktree(&abs_path, false, cx)
846 })
847 .await?;
848 this.update(&mut cx, |this, cx| {
849 this.language_servers
850 .insert((worktree.read(cx).id(), lang_name), lang_server);
851 });
852 (worktree, PathBuf::new())
853 };
854
855 let project_path = ProjectPath {
856 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
857 path: relative_path.into(),
858 };
859 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
860 .await
861 })
862 }
863
864 pub fn save_buffer_as(
865 &mut self,
866 buffer: ModelHandle<Buffer>,
867 abs_path: PathBuf,
868 cx: &mut ModelContext<Project>,
869 ) -> Task<Result<()>> {
870 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
871 cx.spawn(|this, mut cx| async move {
872 let (worktree, path) = worktree_task.await?;
873 worktree
874 .update(&mut cx, |worktree, cx| {
875 worktree
876 .as_local_mut()
877 .unwrap()
878 .save_buffer_as(buffer.clone(), path, cx)
879 })
880 .await?;
881 this.update(&mut cx, |this, cx| {
882 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
883 });
884 Ok(())
885 })
886 }
887
888 pub fn get_open_buffer(
889 &mut self,
890 path: &ProjectPath,
891 cx: &mut ModelContext<Self>,
892 ) -> Option<ModelHandle<Buffer>> {
893 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
894 self.opened_buffers.values().find_map(|buffer| {
895 let buffer = buffer.upgrade(cx)?;
896 let file = File::from_dyn(buffer.read(cx).file())?;
897 if file.worktree == worktree && file.path() == &path.path {
898 Some(buffer)
899 } else {
900 None
901 }
902 })
903 }
904
905 fn register_buffer(
906 &mut self,
907 buffer: &ModelHandle<Buffer>,
908 worktree: Option<&ModelHandle<Worktree>>,
909 cx: &mut ModelContext<Self>,
910 ) -> Result<()> {
911 let remote_id = buffer.read(cx).remote_id();
912 let open_buffer = if self.is_remote() || self.is_shared() {
913 OpenBuffer::Strong(buffer.clone())
914 } else {
915 OpenBuffer::Weak(buffer.downgrade())
916 };
917
918 match self.opened_buffers.insert(remote_id, open_buffer) {
919 None => {}
920 Some(OpenBuffer::Loading(operations)) => {
921 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
922 }
923 Some(OpenBuffer::Weak(existing_handle)) => {
924 if existing_handle.upgrade(cx).is_some() {
925 Err(anyhow!(
926 "already registered buffer with remote id {}",
927 remote_id
928 ))?
929 }
930 }
931 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
932 "already registered buffer with remote id {}",
933 remote_id
934 ))?,
935 }
936 self.assign_language_to_buffer(&buffer, worktree, cx);
937 Ok(())
938 }
939
940 fn assign_language_to_buffer(
941 &mut self,
942 buffer: &ModelHandle<Buffer>,
943 worktree: Option<&ModelHandle<Worktree>>,
944 cx: &mut ModelContext<Self>,
945 ) -> Option<()> {
946 let (path, full_path) = {
947 let file = buffer.read(cx).file()?;
948 (file.path().clone(), file.full_path(cx))
949 };
950
951 // If the buffer has a language, set it and start/assign the language server
952 if let Some(language) = self.languages.select_language(&full_path) {
953 buffer.update(cx, |buffer, cx| {
954 buffer.set_language(Some(language.clone()), cx);
955 });
956
957 // For local worktrees, start a language server if needed.
958 // Also assign the language server and any previously stored diagnostics to the buffer.
959 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
960 let worktree_id = local_worktree.id();
961 let worktree_abs_path = local_worktree.abs_path().clone();
962 let buffer = buffer.downgrade();
963 let language_server =
964 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
965
966 cx.spawn_weak(|_, mut cx| async move {
967 if let Some(language_server) = language_server.await {
968 if let Some(buffer) = buffer.upgrade(&cx) {
969 buffer.update(&mut cx, |buffer, cx| {
970 buffer.set_language_server(Some(language_server), cx);
971 });
972 }
973 }
974 })
975 .detach();
976 }
977 }
978
979 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
980 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
981 buffer.update(cx, |buffer, cx| {
982 buffer.update_diagnostics(diagnostics, None, cx).log_err();
983 });
984 }
985 }
986
987 None
988 }
989
990 fn start_language_server(
991 &mut self,
992 worktree_id: WorktreeId,
993 worktree_path: Arc<Path>,
994 language: Arc<Language>,
995 cx: &mut ModelContext<Self>,
996 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
997 enum LspEvent {
998 DiagnosticsStart,
999 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
1000 DiagnosticsFinish,
1001 }
1002
1003 let key = (worktree_id, language.name().to_string());
1004 self.started_language_servers
1005 .entry(key.clone())
1006 .or_insert_with(|| {
1007 let language_server = self.languages.start_language_server(
1008 &language,
1009 worktree_path,
1010 self.client.http_client(),
1011 cx,
1012 );
1013 let rpc = self.client.clone();
1014 cx.spawn_weak(|this, mut cx| async move {
1015 let language_server = language_server?.await.log_err()?;
1016 if let Some(this) = this.upgrade(&cx) {
1017 this.update(&mut cx, |this, _| {
1018 this.language_servers.insert(key, language_server.clone());
1019 });
1020 }
1021
1022 let disk_based_sources = language
1023 .disk_based_diagnostic_sources()
1024 .cloned()
1025 .unwrap_or_default();
1026 let disk_based_diagnostics_progress_token =
1027 language.disk_based_diagnostics_progress_token().cloned();
1028 let has_disk_based_diagnostic_progress_token =
1029 disk_based_diagnostics_progress_token.is_some();
1030 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1031
1032 // Listen for `PublishDiagnostics` notifications.
1033 language_server
1034 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1035 let diagnostics_tx = diagnostics_tx.clone();
1036 move |params| {
1037 if !has_disk_based_diagnostic_progress_token {
1038 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1039 }
1040 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1041 .ok();
1042 if !has_disk_based_diagnostic_progress_token {
1043 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1044 }
1045 }
1046 })
1047 .detach();
1048
1049 // Listen for `Progress` notifications. Send an event when the language server
1050 // transitions between running jobs and not running any jobs.
1051 let mut running_jobs_for_this_server: i32 = 0;
1052 language_server
1053 .on_notification::<lsp::notification::Progress, _>(move |params| {
1054 let token = match params.token {
1055 lsp::NumberOrString::Number(_) => None,
1056 lsp::NumberOrString::String(token) => Some(token),
1057 };
1058
1059 if token == disk_based_diagnostics_progress_token {
1060 match params.value {
1061 lsp::ProgressParamsValue::WorkDone(progress) => {
1062 match progress {
1063 lsp::WorkDoneProgress::Begin(_) => {
1064 running_jobs_for_this_server += 1;
1065 if running_jobs_for_this_server == 1 {
1066 block_on(
1067 diagnostics_tx
1068 .send(LspEvent::DiagnosticsStart),
1069 )
1070 .ok();
1071 }
1072 }
1073 lsp::WorkDoneProgress::End(_) => {
1074 running_jobs_for_this_server -= 1;
1075 if running_jobs_for_this_server == 0 {
1076 block_on(
1077 diagnostics_tx
1078 .send(LspEvent::DiagnosticsFinish),
1079 )
1080 .ok();
1081 }
1082 }
1083 _ => {}
1084 }
1085 }
1086 }
1087 }
1088 })
1089 .detach();
1090
1091 // Process all the LSP events.
1092 cx.spawn(|mut cx| async move {
1093 while let Ok(message) = diagnostics_rx.recv().await {
1094 let this = this.upgrade(&cx)?;
1095 match message {
1096 LspEvent::DiagnosticsStart => {
1097 this.update(&mut cx, |this, cx| {
1098 this.disk_based_diagnostics_started(cx);
1099 if let Some(project_id) = this.remote_id() {
1100 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1101 project_id,
1102 })
1103 .log_err();
1104 }
1105 });
1106 }
1107 LspEvent::DiagnosticsUpdate(mut params) => {
1108 language.process_diagnostics(&mut params);
1109 this.update(&mut cx, |this, cx| {
1110 this.update_diagnostics(params, &disk_based_sources, cx)
1111 .log_err();
1112 });
1113 }
1114 LspEvent::DiagnosticsFinish => {
1115 this.update(&mut cx, |this, cx| {
1116 this.disk_based_diagnostics_finished(cx);
1117 if let Some(project_id) = this.remote_id() {
1118 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1119 project_id,
1120 })
1121 .log_err();
1122 }
1123 });
1124 }
1125 }
1126 }
1127 Some(())
1128 })
1129 .detach();
1130
1131 Some(language_server)
1132 })
1133 .shared()
1134 })
1135 .clone()
1136 }
1137
1138 pub fn update_diagnostics(
1139 &mut self,
1140 params: lsp::PublishDiagnosticsParams,
1141 disk_based_sources: &HashSet<String>,
1142 cx: &mut ModelContext<Self>,
1143 ) -> Result<()> {
1144 let abs_path = params
1145 .uri
1146 .to_file_path()
1147 .map_err(|_| anyhow!("URI is not a file"))?;
1148 let mut next_group_id = 0;
1149 let mut diagnostics = Vec::default();
1150 let mut primary_diagnostic_group_ids = HashMap::default();
1151 let mut sources_by_group_id = HashMap::default();
1152 let mut supporting_diagnostic_severities = HashMap::default();
1153 for diagnostic in ¶ms.diagnostics {
1154 let source = diagnostic.source.as_ref();
1155 let code = diagnostic.code.as_ref().map(|code| match code {
1156 lsp::NumberOrString::Number(code) => code.to_string(),
1157 lsp::NumberOrString::String(code) => code.clone(),
1158 });
1159 let range = range_from_lsp(diagnostic.range);
1160 let is_supporting = diagnostic
1161 .related_information
1162 .as_ref()
1163 .map_or(false, |infos| {
1164 infos.iter().any(|info| {
1165 primary_diagnostic_group_ids.contains_key(&(
1166 source,
1167 code.clone(),
1168 range_from_lsp(info.location.range),
1169 ))
1170 })
1171 });
1172
1173 if is_supporting {
1174 if let Some(severity) = diagnostic.severity {
1175 supporting_diagnostic_severities
1176 .insert((source, code.clone(), range), severity);
1177 }
1178 } else {
1179 let group_id = post_inc(&mut next_group_id);
1180 let is_disk_based =
1181 source.map_or(false, |source| disk_based_sources.contains(source));
1182
1183 sources_by_group_id.insert(group_id, source);
1184 primary_diagnostic_group_ids
1185 .insert((source, code.clone(), range.clone()), group_id);
1186
1187 diagnostics.push(DiagnosticEntry {
1188 range,
1189 diagnostic: Diagnostic {
1190 code: code.clone(),
1191 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1192 message: diagnostic.message.clone(),
1193 group_id,
1194 is_primary: true,
1195 is_valid: true,
1196 is_disk_based,
1197 },
1198 });
1199 if let Some(infos) = &diagnostic.related_information {
1200 for info in infos {
1201 if info.location.uri == params.uri && !info.message.is_empty() {
1202 let range = range_from_lsp(info.location.range);
1203 diagnostics.push(DiagnosticEntry {
1204 range,
1205 diagnostic: Diagnostic {
1206 code: code.clone(),
1207 severity: DiagnosticSeverity::INFORMATION,
1208 message: info.message.clone(),
1209 group_id,
1210 is_primary: false,
1211 is_valid: true,
1212 is_disk_based,
1213 },
1214 });
1215 }
1216 }
1217 }
1218 }
1219 }
1220
1221 for entry in &mut diagnostics {
1222 let diagnostic = &mut entry.diagnostic;
1223 if !diagnostic.is_primary {
1224 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1225 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1226 source,
1227 diagnostic.code.clone(),
1228 entry.range.clone(),
1229 )) {
1230 diagnostic.severity = severity;
1231 }
1232 }
1233 }
1234
1235 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1236 Ok(())
1237 }
1238
1239 pub fn update_diagnostic_entries(
1240 &mut self,
1241 abs_path: PathBuf,
1242 version: Option<i32>,
1243 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1244 cx: &mut ModelContext<Project>,
1245 ) -> Result<(), anyhow::Error> {
1246 let (worktree, relative_path) = self
1247 .find_local_worktree(&abs_path, cx)
1248 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1249 if !worktree.read(cx).is_visible() {
1250 return Ok(());
1251 }
1252
1253 let project_path = ProjectPath {
1254 worktree_id: worktree.read(cx).id(),
1255 path: relative_path.into(),
1256 };
1257
1258 for buffer in self.opened_buffers.values() {
1259 if let Some(buffer) = buffer.upgrade(cx) {
1260 if buffer
1261 .read(cx)
1262 .file()
1263 .map_or(false, |file| *file.path() == project_path.path)
1264 {
1265 buffer.update(cx, |buffer, cx| {
1266 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1267 })?;
1268 break;
1269 }
1270 }
1271 }
1272 worktree.update(cx, |worktree, cx| {
1273 worktree
1274 .as_local_mut()
1275 .ok_or_else(|| anyhow!("not a local worktree"))?
1276 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1277 })?;
1278 cx.emit(Event::DiagnosticsUpdated(project_path));
1279 Ok(())
1280 }
1281
1282 pub fn format(
1283 &self,
1284 buffers: HashSet<ModelHandle<Buffer>>,
1285 push_to_history: bool,
1286 cx: &mut ModelContext<Project>,
1287 ) -> Task<Result<ProjectTransaction>> {
1288 let mut local_buffers = Vec::new();
1289 let mut remote_buffers = None;
1290 for buffer_handle in buffers {
1291 let buffer = buffer_handle.read(cx);
1292 let worktree;
1293 if let Some(file) = File::from_dyn(buffer.file()) {
1294 worktree = file.worktree.clone();
1295 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1296 let lang_server;
1297 if let Some(lang) = buffer.language() {
1298 if let Some(server) = self
1299 .language_servers
1300 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1301 {
1302 lang_server = server.clone();
1303 } else {
1304 return Task::ready(Ok(Default::default()));
1305 };
1306 } else {
1307 return Task::ready(Ok(Default::default()));
1308 }
1309
1310 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1311 } else {
1312 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1313 }
1314 } else {
1315 return Task::ready(Ok(Default::default()));
1316 }
1317 }
1318
1319 let remote_buffers = self.remote_id().zip(remote_buffers);
1320 let client = self.client.clone();
1321
1322 cx.spawn(|this, mut cx| async move {
1323 let mut project_transaction = ProjectTransaction::default();
1324
1325 if let Some((project_id, remote_buffers)) = remote_buffers {
1326 let response = client
1327 .request(proto::FormatBuffers {
1328 project_id,
1329 buffer_ids: remote_buffers
1330 .iter()
1331 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1332 .collect(),
1333 })
1334 .await?
1335 .transaction
1336 .ok_or_else(|| anyhow!("missing transaction"))?;
1337 project_transaction = this
1338 .update(&mut cx, |this, cx| {
1339 this.deserialize_project_transaction(response, push_to_history, cx)
1340 })
1341 .await?;
1342 }
1343
1344 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1345 let lsp_edits = lang_server
1346 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1347 text_document: lsp::TextDocumentIdentifier::new(
1348 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1349 ),
1350 options: Default::default(),
1351 work_done_progress_params: Default::default(),
1352 })
1353 .await?;
1354
1355 if let Some(lsp_edits) = lsp_edits {
1356 let edits = buffer
1357 .update(&mut cx, |buffer, cx| {
1358 buffer.edits_from_lsp(lsp_edits, None, cx)
1359 })
1360 .await?;
1361 buffer.update(&mut cx, |buffer, cx| {
1362 buffer.finalize_last_transaction();
1363 buffer.start_transaction();
1364 for (range, text) in edits {
1365 buffer.edit([range], text, cx);
1366 }
1367 if buffer.end_transaction(cx).is_some() {
1368 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1369 if !push_to_history {
1370 buffer.forget_transaction(transaction.id);
1371 }
1372 project_transaction.0.insert(cx.handle(), transaction);
1373 }
1374 });
1375 }
1376 }
1377
1378 Ok(project_transaction)
1379 })
1380 }
1381
1382 pub fn definition<T: ToPointUtf16>(
1383 &self,
1384 buffer: &ModelHandle<Buffer>,
1385 position: T,
1386 cx: &mut ModelContext<Self>,
1387 ) -> Task<Result<Vec<Location>>> {
1388 let position = position.to_point_utf16(buffer.read(cx));
1389 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1390 }
1391
1392 pub fn references<T: ToPointUtf16>(
1393 &self,
1394 buffer: &ModelHandle<Buffer>,
1395 position: T,
1396 cx: &mut ModelContext<Self>,
1397 ) -> Task<Result<Vec<Location>>> {
1398 let position = position.to_point_utf16(buffer.read(cx));
1399 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1400 }
1401
1402 pub fn document_highlights<T: ToPointUtf16>(
1403 &self,
1404 buffer: &ModelHandle<Buffer>,
1405 position: T,
1406 cx: &mut ModelContext<Self>,
1407 ) -> Task<Result<Vec<DocumentHighlight>>> {
1408 let position = position.to_point_utf16(buffer.read(cx));
1409
1410 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1411 }
1412
1413 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1414 if self.is_local() {
1415 let mut language_servers = HashMap::default();
1416 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1417 if let Some((worktree, language)) = self
1418 .worktree_for_id(*worktree_id, cx)
1419 .and_then(|worktree| worktree.read(cx).as_local())
1420 .zip(self.languages.get_language(language_name))
1421 {
1422 language_servers
1423 .entry(Arc::as_ptr(language_server))
1424 .or_insert((
1425 language_server.clone(),
1426 *worktree_id,
1427 worktree.abs_path().clone(),
1428 language.clone(),
1429 ));
1430 }
1431 }
1432
1433 let mut requests = Vec::new();
1434 for (language_server, _, _, _) in language_servers.values() {
1435 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1436 lsp::WorkspaceSymbolParams {
1437 query: query.to_string(),
1438 ..Default::default()
1439 },
1440 ));
1441 }
1442
1443 cx.spawn_weak(|this, cx| async move {
1444 let responses = futures::future::try_join_all(requests).await?;
1445
1446 let mut symbols = Vec::new();
1447 if let Some(this) = this.upgrade(&cx) {
1448 this.read_with(&cx, |this, cx| {
1449 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1450 language_servers.into_values().zip(responses)
1451 {
1452 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1453 |lsp_symbol| {
1454 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1455 let mut worktree_id = source_worktree_id;
1456 let path;
1457 if let Some((worktree, rel_path)) =
1458 this.find_local_worktree(&abs_path, cx)
1459 {
1460 worktree_id = worktree.read(cx).id();
1461 path = rel_path;
1462 } else {
1463 path = relativize_path(&worktree_abs_path, &abs_path);
1464 }
1465
1466 let label = language
1467 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1468 .unwrap_or_else(|| {
1469 CodeLabel::plain(lsp_symbol.name.clone(), None)
1470 });
1471 let signature = this.symbol_signature(worktree_id, &path);
1472
1473 Some(Symbol {
1474 source_worktree_id,
1475 worktree_id,
1476 language_name: language.name().to_string(),
1477 name: lsp_symbol.name,
1478 kind: lsp_symbol.kind,
1479 label,
1480 path,
1481 range: range_from_lsp(lsp_symbol.location.range),
1482 signature,
1483 })
1484 },
1485 ));
1486 }
1487 })
1488 }
1489
1490 Ok(symbols)
1491 })
1492 } else if let Some(project_id) = self.remote_id() {
1493 let request = self.client.request(proto::GetProjectSymbols {
1494 project_id,
1495 query: query.to_string(),
1496 });
1497 cx.spawn_weak(|this, cx| async move {
1498 let response = request.await?;
1499 let mut symbols = Vec::new();
1500 if let Some(this) = this.upgrade(&cx) {
1501 this.read_with(&cx, |this, _| {
1502 symbols.extend(
1503 response
1504 .symbols
1505 .into_iter()
1506 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1507 );
1508 })
1509 }
1510 Ok(symbols)
1511 })
1512 } else {
1513 Task::ready(Ok(Default::default()))
1514 }
1515 }
1516
1517 pub fn open_buffer_for_symbol(
1518 &mut self,
1519 symbol: &Symbol,
1520 cx: &mut ModelContext<Self>,
1521 ) -> Task<Result<ModelHandle<Buffer>>> {
1522 if self.is_local() {
1523 let language_server = if let Some(server) = self
1524 .language_servers
1525 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1526 {
1527 server.clone()
1528 } else {
1529 return Task::ready(Err(anyhow!(
1530 "language server for worktree and language not found"
1531 )));
1532 };
1533
1534 let worktree_abs_path = if let Some(worktree_abs_path) = self
1535 .worktree_for_id(symbol.worktree_id, cx)
1536 .and_then(|worktree| worktree.read(cx).as_local())
1537 .map(|local_worktree| local_worktree.abs_path())
1538 {
1539 worktree_abs_path
1540 } else {
1541 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1542 };
1543 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1544 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1545 uri
1546 } else {
1547 return Task::ready(Err(anyhow!("invalid symbol path")));
1548 };
1549
1550 self.open_local_buffer_via_lsp(
1551 symbol_uri,
1552 symbol.language_name.clone(),
1553 language_server,
1554 cx,
1555 )
1556 } else if let Some(project_id) = self.remote_id() {
1557 let request = self.client.request(proto::OpenBufferForSymbol {
1558 project_id,
1559 symbol: Some(serialize_symbol(symbol)),
1560 });
1561 cx.spawn(|this, mut cx| async move {
1562 let response = request.await?;
1563 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1564 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1565 .await
1566 })
1567 } else {
1568 Task::ready(Err(anyhow!("project does not have a remote id")))
1569 }
1570 }
1571
1572 pub fn completions<T: ToPointUtf16>(
1573 &self,
1574 source_buffer_handle: &ModelHandle<Buffer>,
1575 position: T,
1576 cx: &mut ModelContext<Self>,
1577 ) -> Task<Result<Vec<Completion>>> {
1578 let source_buffer_handle = source_buffer_handle.clone();
1579 let source_buffer = source_buffer_handle.read(cx);
1580 let buffer_id = source_buffer.remote_id();
1581 let language = source_buffer.language().cloned();
1582 let worktree;
1583 let buffer_abs_path;
1584 if let Some(file) = File::from_dyn(source_buffer.file()) {
1585 worktree = file.worktree.clone();
1586 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1587 } else {
1588 return Task::ready(Ok(Default::default()));
1589 };
1590
1591 let position = position.to_point_utf16(source_buffer);
1592 let anchor = source_buffer.anchor_after(position);
1593
1594 if worktree.read(cx).as_local().is_some() {
1595 let buffer_abs_path = buffer_abs_path.unwrap();
1596 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1597 server
1598 } else {
1599 return Task::ready(Ok(Default::default()));
1600 };
1601
1602 cx.spawn(|_, cx| async move {
1603 let completions = lang_server
1604 .request::<lsp::request::Completion>(lsp::CompletionParams {
1605 text_document_position: lsp::TextDocumentPositionParams::new(
1606 lsp::TextDocumentIdentifier::new(
1607 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1608 ),
1609 position.to_lsp_position(),
1610 ),
1611 context: Default::default(),
1612 work_done_progress_params: Default::default(),
1613 partial_result_params: Default::default(),
1614 })
1615 .await
1616 .context("lsp completion request failed")?;
1617
1618 let completions = if let Some(completions) = completions {
1619 match completions {
1620 lsp::CompletionResponse::Array(completions) => completions,
1621 lsp::CompletionResponse::List(list) => list.items,
1622 }
1623 } else {
1624 Default::default()
1625 };
1626
1627 source_buffer_handle.read_with(&cx, |this, _| {
1628 Ok(completions
1629 .into_iter()
1630 .filter_map(|lsp_completion| {
1631 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1632 lsp::CompletionTextEdit::Edit(edit) => {
1633 (range_from_lsp(edit.range), edit.new_text.clone())
1634 }
1635 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1636 log::info!("unsupported insert/replace completion");
1637 return None;
1638 }
1639 };
1640
1641 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1642 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1643 if clipped_start == old_range.start && clipped_end == old_range.end {
1644 Some(Completion {
1645 old_range: this.anchor_before(old_range.start)
1646 ..this.anchor_after(old_range.end),
1647 new_text,
1648 label: language
1649 .as_ref()
1650 .and_then(|l| l.label_for_completion(&lsp_completion))
1651 .unwrap_or_else(|| {
1652 CodeLabel::plain(
1653 lsp_completion.label.clone(),
1654 lsp_completion.filter_text.as_deref(),
1655 )
1656 }),
1657 lsp_completion,
1658 })
1659 } else {
1660 None
1661 }
1662 })
1663 .collect())
1664 })
1665 })
1666 } else if let Some(project_id) = self.remote_id() {
1667 let rpc = self.client.clone();
1668 let message = proto::GetCompletions {
1669 project_id,
1670 buffer_id,
1671 position: Some(language::proto::serialize_anchor(&anchor)),
1672 version: (&source_buffer.version()).into(),
1673 };
1674 cx.spawn_weak(|_, mut cx| async move {
1675 let response = rpc.request(message).await?;
1676
1677 source_buffer_handle
1678 .update(&mut cx, |buffer, _| {
1679 buffer.wait_for_version(response.version.into())
1680 })
1681 .await;
1682
1683 response
1684 .completions
1685 .into_iter()
1686 .map(|completion| {
1687 language::proto::deserialize_completion(completion, language.as_ref())
1688 })
1689 .collect()
1690 })
1691 } else {
1692 Task::ready(Ok(Default::default()))
1693 }
1694 }
1695
1696 pub fn apply_additional_edits_for_completion(
1697 &self,
1698 buffer_handle: ModelHandle<Buffer>,
1699 completion: Completion,
1700 push_to_history: bool,
1701 cx: &mut ModelContext<Self>,
1702 ) -> Task<Result<Option<Transaction>>> {
1703 let buffer = buffer_handle.read(cx);
1704 let buffer_id = buffer.remote_id();
1705
1706 if self.is_local() {
1707 let lang_server = if let Some(language_server) = buffer.language_server() {
1708 language_server.clone()
1709 } else {
1710 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1711 };
1712
1713 cx.spawn(|_, mut cx| async move {
1714 let resolved_completion = lang_server
1715 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1716 .await?;
1717 if let Some(edits) = resolved_completion.additional_text_edits {
1718 let edits = buffer_handle
1719 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1720 .await?;
1721 buffer_handle.update(&mut cx, |buffer, cx| {
1722 buffer.finalize_last_transaction();
1723 buffer.start_transaction();
1724 for (range, text) in edits {
1725 buffer.edit([range], text, cx);
1726 }
1727 let transaction = if buffer.end_transaction(cx).is_some() {
1728 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1729 if !push_to_history {
1730 buffer.forget_transaction(transaction.id);
1731 }
1732 Some(transaction)
1733 } else {
1734 None
1735 };
1736 Ok(transaction)
1737 })
1738 } else {
1739 Ok(None)
1740 }
1741 })
1742 } else if let Some(project_id) = self.remote_id() {
1743 let client = self.client.clone();
1744 cx.spawn(|_, mut cx| async move {
1745 let response = client
1746 .request(proto::ApplyCompletionAdditionalEdits {
1747 project_id,
1748 buffer_id,
1749 completion: Some(language::proto::serialize_completion(&completion)),
1750 })
1751 .await?;
1752
1753 if let Some(transaction) = response.transaction {
1754 let transaction = language::proto::deserialize_transaction(transaction)?;
1755 buffer_handle
1756 .update(&mut cx, |buffer, _| {
1757 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1758 })
1759 .await;
1760 if push_to_history {
1761 buffer_handle.update(&mut cx, |buffer, _| {
1762 buffer.push_transaction(transaction.clone(), Instant::now());
1763 });
1764 }
1765 Ok(Some(transaction))
1766 } else {
1767 Ok(None)
1768 }
1769 })
1770 } else {
1771 Task::ready(Err(anyhow!("project does not have a remote id")))
1772 }
1773 }
1774
1775 pub fn code_actions<T: ToOffset>(
1776 &self,
1777 buffer_handle: &ModelHandle<Buffer>,
1778 range: Range<T>,
1779 cx: &mut ModelContext<Self>,
1780 ) -> Task<Result<Vec<CodeAction>>> {
1781 let buffer_handle = buffer_handle.clone();
1782 let buffer = buffer_handle.read(cx);
1783 let buffer_id = buffer.remote_id();
1784 let worktree;
1785 let buffer_abs_path;
1786 if let Some(file) = File::from_dyn(buffer.file()) {
1787 worktree = file.worktree.clone();
1788 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1789 } else {
1790 return Task::ready(Ok(Default::default()));
1791 };
1792 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1793
1794 if worktree.read(cx).as_local().is_some() {
1795 let buffer_abs_path = buffer_abs_path.unwrap();
1796 let lang_name;
1797 let lang_server;
1798 if let Some(lang) = buffer.language() {
1799 lang_name = lang.name().to_string();
1800 if let Some(server) = self
1801 .language_servers
1802 .get(&(worktree.read(cx).id(), lang_name.clone()))
1803 {
1804 lang_server = server.clone();
1805 } else {
1806 return Task::ready(Ok(Default::default()));
1807 };
1808 } else {
1809 return Task::ready(Ok(Default::default()));
1810 }
1811
1812 let lsp_range = lsp::Range::new(
1813 range.start.to_point_utf16(buffer).to_lsp_position(),
1814 range.end.to_point_utf16(buffer).to_lsp_position(),
1815 );
1816 cx.foreground().spawn(async move {
1817 if !lang_server
1818 .capabilities()
1819 .await
1820 .map_or(false, |capabilities| {
1821 capabilities.code_action_provider.is_some()
1822 })
1823 {
1824 return Ok(Default::default());
1825 }
1826
1827 Ok(lang_server
1828 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1829 text_document: lsp::TextDocumentIdentifier::new(
1830 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1831 ),
1832 range: lsp_range,
1833 work_done_progress_params: Default::default(),
1834 partial_result_params: Default::default(),
1835 context: lsp::CodeActionContext {
1836 diagnostics: Default::default(),
1837 only: Some(vec![
1838 lsp::CodeActionKind::QUICKFIX,
1839 lsp::CodeActionKind::REFACTOR,
1840 lsp::CodeActionKind::REFACTOR_EXTRACT,
1841 ]),
1842 },
1843 })
1844 .await?
1845 .unwrap_or_default()
1846 .into_iter()
1847 .filter_map(|entry| {
1848 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1849 Some(CodeAction {
1850 range: range.clone(),
1851 lsp_action,
1852 })
1853 } else {
1854 None
1855 }
1856 })
1857 .collect())
1858 })
1859 } else if let Some(project_id) = self.remote_id() {
1860 let rpc = self.client.clone();
1861 let version = buffer.version();
1862 cx.spawn_weak(|_, mut cx| async move {
1863 let response = rpc
1864 .request(proto::GetCodeActions {
1865 project_id,
1866 buffer_id,
1867 start: Some(language::proto::serialize_anchor(&range.start)),
1868 end: Some(language::proto::serialize_anchor(&range.end)),
1869 version: (&version).into(),
1870 })
1871 .await?;
1872
1873 buffer_handle
1874 .update(&mut cx, |buffer, _| {
1875 buffer.wait_for_version(response.version.into())
1876 })
1877 .await;
1878
1879 response
1880 .actions
1881 .into_iter()
1882 .map(language::proto::deserialize_code_action)
1883 .collect()
1884 })
1885 } else {
1886 Task::ready(Ok(Default::default()))
1887 }
1888 }
1889
1890 pub fn apply_code_action(
1891 &self,
1892 buffer_handle: ModelHandle<Buffer>,
1893 mut action: CodeAction,
1894 push_to_history: bool,
1895 cx: &mut ModelContext<Self>,
1896 ) -> Task<Result<ProjectTransaction>> {
1897 if self.is_local() {
1898 let buffer = buffer_handle.read(cx);
1899 let lang_name = if let Some(lang) = buffer.language() {
1900 lang.name().to_string()
1901 } else {
1902 return Task::ready(Ok(Default::default()));
1903 };
1904 let lang_server = if let Some(language_server) = buffer.language_server() {
1905 language_server.clone()
1906 } else {
1907 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1908 };
1909 let range = action.range.to_point_utf16(buffer);
1910
1911 cx.spawn(|this, mut cx| async move {
1912 if let Some(lsp_range) = action
1913 .lsp_action
1914 .data
1915 .as_mut()
1916 .and_then(|d| d.get_mut("codeActionParams"))
1917 .and_then(|d| d.get_mut("range"))
1918 {
1919 *lsp_range = serde_json::to_value(&lsp::Range::new(
1920 range.start.to_lsp_position(),
1921 range.end.to_lsp_position(),
1922 ))
1923 .unwrap();
1924 action.lsp_action = lang_server
1925 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1926 .await?;
1927 } else {
1928 let actions = this
1929 .update(&mut cx, |this, cx| {
1930 this.code_actions(&buffer_handle, action.range, cx)
1931 })
1932 .await?;
1933 action.lsp_action = actions
1934 .into_iter()
1935 .find(|a| a.lsp_action.title == action.lsp_action.title)
1936 .ok_or_else(|| anyhow!("code action is outdated"))?
1937 .lsp_action;
1938 }
1939
1940 if let Some(edit) = action.lsp_action.edit {
1941 Self::deserialize_workspace_edit(
1942 this,
1943 edit,
1944 push_to_history,
1945 lang_name,
1946 lang_server,
1947 &mut cx,
1948 )
1949 .await
1950 } else {
1951 Ok(ProjectTransaction::default())
1952 }
1953 })
1954 } else if let Some(project_id) = self.remote_id() {
1955 let client = self.client.clone();
1956 let request = proto::ApplyCodeAction {
1957 project_id,
1958 buffer_id: buffer_handle.read(cx).remote_id(),
1959 action: Some(language::proto::serialize_code_action(&action)),
1960 };
1961 cx.spawn(|this, mut cx| async move {
1962 let response = client
1963 .request(request)
1964 .await?
1965 .transaction
1966 .ok_or_else(|| anyhow!("missing transaction"))?;
1967 this.update(&mut cx, |this, cx| {
1968 this.deserialize_project_transaction(response, push_to_history, cx)
1969 })
1970 .await
1971 })
1972 } else {
1973 Task::ready(Err(anyhow!("project does not have a remote id")))
1974 }
1975 }
1976
1977 async fn deserialize_workspace_edit(
1978 this: ModelHandle<Self>,
1979 edit: lsp::WorkspaceEdit,
1980 push_to_history: bool,
1981 language_name: String,
1982 language_server: Arc<LanguageServer>,
1983 cx: &mut AsyncAppContext,
1984 ) -> Result<ProjectTransaction> {
1985 let fs = this.read_with(cx, |this, _| this.fs.clone());
1986 let mut operations = Vec::new();
1987 if let Some(document_changes) = edit.document_changes {
1988 match document_changes {
1989 lsp::DocumentChanges::Edits(edits) => {
1990 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1991 }
1992 lsp::DocumentChanges::Operations(ops) => operations = ops,
1993 }
1994 } else if let Some(changes) = edit.changes {
1995 operations.extend(changes.into_iter().map(|(uri, edits)| {
1996 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1997 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1998 uri,
1999 version: None,
2000 },
2001 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2002 })
2003 }));
2004 }
2005
2006 let mut project_transaction = ProjectTransaction::default();
2007 for operation in operations {
2008 match operation {
2009 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2010 let abs_path = op
2011 .uri
2012 .to_file_path()
2013 .map_err(|_| anyhow!("can't convert URI to path"))?;
2014
2015 if let Some(parent_path) = abs_path.parent() {
2016 fs.create_dir(parent_path).await?;
2017 }
2018 if abs_path.ends_with("/") {
2019 fs.create_dir(&abs_path).await?;
2020 } else {
2021 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2022 .await?;
2023 }
2024 }
2025 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2026 let source_abs_path = op
2027 .old_uri
2028 .to_file_path()
2029 .map_err(|_| anyhow!("can't convert URI to path"))?;
2030 let target_abs_path = op
2031 .new_uri
2032 .to_file_path()
2033 .map_err(|_| anyhow!("can't convert URI to path"))?;
2034 fs.rename(
2035 &source_abs_path,
2036 &target_abs_path,
2037 op.options.map(Into::into).unwrap_or_default(),
2038 )
2039 .await?;
2040 }
2041 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2042 let abs_path = op
2043 .uri
2044 .to_file_path()
2045 .map_err(|_| anyhow!("can't convert URI to path"))?;
2046 let options = op.options.map(Into::into).unwrap_or_default();
2047 if abs_path.ends_with("/") {
2048 fs.remove_dir(&abs_path, options).await?;
2049 } else {
2050 fs.remove_file(&abs_path, options).await?;
2051 }
2052 }
2053 lsp::DocumentChangeOperation::Edit(op) => {
2054 let buffer_to_edit = this
2055 .update(cx, |this, cx| {
2056 this.open_local_buffer_via_lsp(
2057 op.text_document.uri,
2058 language_name.clone(),
2059 language_server.clone(),
2060 cx,
2061 )
2062 })
2063 .await?;
2064
2065 let edits = buffer_to_edit
2066 .update(cx, |buffer, cx| {
2067 let edits = op.edits.into_iter().map(|edit| match edit {
2068 lsp::OneOf::Left(edit) => edit,
2069 lsp::OneOf::Right(edit) => edit.text_edit,
2070 });
2071 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2072 })
2073 .await?;
2074
2075 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2076 buffer.finalize_last_transaction();
2077 buffer.start_transaction();
2078 for (range, text) in edits {
2079 buffer.edit([range], text, cx);
2080 }
2081 let transaction = if buffer.end_transaction(cx).is_some() {
2082 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2083 if !push_to_history {
2084 buffer.forget_transaction(transaction.id);
2085 }
2086 Some(transaction)
2087 } else {
2088 None
2089 };
2090
2091 transaction
2092 });
2093 if let Some(transaction) = transaction {
2094 project_transaction.0.insert(buffer_to_edit, transaction);
2095 }
2096 }
2097 }
2098 }
2099
2100 Ok(project_transaction)
2101 }
2102
2103 pub fn prepare_rename<T: ToPointUtf16>(
2104 &self,
2105 buffer: ModelHandle<Buffer>,
2106 position: T,
2107 cx: &mut ModelContext<Self>,
2108 ) -> Task<Result<Option<Range<Anchor>>>> {
2109 let position = position.to_point_utf16(buffer.read(cx));
2110 self.request_lsp(buffer, PrepareRename { position }, cx)
2111 }
2112
2113 pub fn perform_rename<T: ToPointUtf16>(
2114 &self,
2115 buffer: ModelHandle<Buffer>,
2116 position: T,
2117 new_name: String,
2118 push_to_history: bool,
2119 cx: &mut ModelContext<Self>,
2120 ) -> Task<Result<ProjectTransaction>> {
2121 let position = position.to_point_utf16(buffer.read(cx));
2122 self.request_lsp(
2123 buffer,
2124 PerformRename {
2125 position,
2126 new_name,
2127 push_to_history,
2128 },
2129 cx,
2130 )
2131 }
2132
2133 pub fn search(
2134 &self,
2135 query: SearchQuery,
2136 cx: &mut ModelContext<Self>,
2137 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2138 if self.is_local() {
2139 let snapshots = self
2140 .visible_worktrees(cx)
2141 .filter_map(|tree| {
2142 let tree = tree.read(cx).as_local()?;
2143 Some(tree.snapshot())
2144 })
2145 .collect::<Vec<_>>();
2146
2147 let background = cx.background().clone();
2148 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2149 if path_count == 0 {
2150 return Task::ready(Ok(Default::default()));
2151 }
2152 let workers = background.num_cpus().min(path_count);
2153 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2154 cx.background()
2155 .spawn({
2156 let fs = self.fs.clone();
2157 let background = cx.background().clone();
2158 let query = query.clone();
2159 async move {
2160 let fs = &fs;
2161 let query = &query;
2162 let matching_paths_tx = &matching_paths_tx;
2163 let paths_per_worker = (path_count + workers - 1) / workers;
2164 let snapshots = &snapshots;
2165 background
2166 .scoped(|scope| {
2167 for worker_ix in 0..workers {
2168 let worker_start_ix = worker_ix * paths_per_worker;
2169 let worker_end_ix = worker_start_ix + paths_per_worker;
2170 scope.spawn(async move {
2171 let mut snapshot_start_ix = 0;
2172 let mut abs_path = PathBuf::new();
2173 for snapshot in snapshots {
2174 let snapshot_end_ix =
2175 snapshot_start_ix + snapshot.visible_file_count();
2176 if worker_end_ix <= snapshot_start_ix {
2177 break;
2178 } else if worker_start_ix > snapshot_end_ix {
2179 snapshot_start_ix = snapshot_end_ix;
2180 continue;
2181 } else {
2182 let start_in_snapshot = worker_start_ix
2183 .saturating_sub(snapshot_start_ix);
2184 let end_in_snapshot =
2185 cmp::min(worker_end_ix, snapshot_end_ix)
2186 - snapshot_start_ix;
2187
2188 for entry in snapshot
2189 .files(false, start_in_snapshot)
2190 .take(end_in_snapshot - start_in_snapshot)
2191 {
2192 if matching_paths_tx.is_closed() {
2193 break;
2194 }
2195
2196 abs_path.clear();
2197 abs_path.push(&snapshot.abs_path());
2198 abs_path.push(&entry.path);
2199 let matches = if let Some(file) =
2200 fs.open_sync(&abs_path).await.log_err()
2201 {
2202 query.detect(file).unwrap_or(false)
2203 } else {
2204 false
2205 };
2206
2207 if matches {
2208 let project_path =
2209 (snapshot.id(), entry.path.clone());
2210 if matching_paths_tx
2211 .send(project_path)
2212 .await
2213 .is_err()
2214 {
2215 break;
2216 }
2217 }
2218 }
2219
2220 snapshot_start_ix = snapshot_end_ix;
2221 }
2222 }
2223 });
2224 }
2225 })
2226 .await;
2227 }
2228 })
2229 .detach();
2230
2231 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2232 let open_buffers = self
2233 .opened_buffers
2234 .values()
2235 .filter_map(|b| b.upgrade(cx))
2236 .collect::<HashSet<_>>();
2237 cx.spawn(|this, cx| async move {
2238 for buffer in &open_buffers {
2239 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2240 buffers_tx.send((buffer.clone(), snapshot)).await?;
2241 }
2242
2243 let open_buffers = Rc::new(RefCell::new(open_buffers));
2244 while let Some(project_path) = matching_paths_rx.next().await {
2245 if buffers_tx.is_closed() {
2246 break;
2247 }
2248
2249 let this = this.clone();
2250 let open_buffers = open_buffers.clone();
2251 let buffers_tx = buffers_tx.clone();
2252 cx.spawn(|mut cx| async move {
2253 if let Some(buffer) = this
2254 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2255 .await
2256 .log_err()
2257 {
2258 if open_buffers.borrow_mut().insert(buffer.clone()) {
2259 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2260 buffers_tx.send((buffer, snapshot)).await?;
2261 }
2262 }
2263
2264 Ok::<_, anyhow::Error>(())
2265 })
2266 .detach();
2267 }
2268
2269 Ok::<_, anyhow::Error>(())
2270 })
2271 .detach_and_log_err(cx);
2272
2273 let background = cx.background().clone();
2274 cx.background().spawn(async move {
2275 let query = &query;
2276 let mut matched_buffers = Vec::new();
2277 for _ in 0..workers {
2278 matched_buffers.push(HashMap::default());
2279 }
2280 background
2281 .scoped(|scope| {
2282 for worker_matched_buffers in matched_buffers.iter_mut() {
2283 let mut buffers_rx = buffers_rx.clone();
2284 scope.spawn(async move {
2285 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2286 let buffer_matches = query
2287 .search(snapshot.as_rope())
2288 .await
2289 .iter()
2290 .map(|range| {
2291 snapshot.anchor_before(range.start)
2292 ..snapshot.anchor_after(range.end)
2293 })
2294 .collect::<Vec<_>>();
2295 if !buffer_matches.is_empty() {
2296 worker_matched_buffers
2297 .insert(buffer.clone(), buffer_matches);
2298 }
2299 }
2300 });
2301 }
2302 })
2303 .await;
2304 Ok(matched_buffers.into_iter().flatten().collect())
2305 })
2306 } else if let Some(project_id) = self.remote_id() {
2307 let request = self.client.request(query.to_proto(project_id));
2308 cx.spawn(|this, mut cx| async move {
2309 let response = request.await?;
2310 let mut result = HashMap::default();
2311 for location in response.locations {
2312 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2313 let target_buffer = this
2314 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2315 .await?;
2316 let start = location
2317 .start
2318 .and_then(deserialize_anchor)
2319 .ok_or_else(|| anyhow!("missing target start"))?;
2320 let end = location
2321 .end
2322 .and_then(deserialize_anchor)
2323 .ok_or_else(|| anyhow!("missing target end"))?;
2324 result
2325 .entry(target_buffer)
2326 .or_insert(Vec::new())
2327 .push(start..end)
2328 }
2329 Ok(result)
2330 })
2331 } else {
2332 Task::ready(Ok(Default::default()))
2333 }
2334 }
2335
2336 fn request_lsp<R: LspCommand>(
2337 &self,
2338 buffer_handle: ModelHandle<Buffer>,
2339 request: R,
2340 cx: &mut ModelContext<Self>,
2341 ) -> Task<Result<R::Response>>
2342 where
2343 <R::LspRequest as lsp::request::Request>::Result: Send,
2344 {
2345 let buffer = buffer_handle.read(cx);
2346 if self.is_local() {
2347 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2348 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2349 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2350 return cx.spawn(|this, cx| async move {
2351 if !language_server
2352 .capabilities()
2353 .await
2354 .map_or(false, |capabilities| {
2355 request.check_capabilities(&capabilities)
2356 })
2357 {
2358 return Ok(Default::default());
2359 }
2360
2361 let response = language_server
2362 .request::<R::LspRequest>(lsp_params)
2363 .await
2364 .context("lsp request failed")?;
2365 request
2366 .response_from_lsp(response, this, buffer_handle, cx)
2367 .await
2368 });
2369 }
2370 } else if let Some(project_id) = self.remote_id() {
2371 let rpc = self.client.clone();
2372 let message = request.to_proto(project_id, buffer);
2373 return cx.spawn(|this, cx| async move {
2374 let response = rpc.request(message).await?;
2375 request
2376 .response_from_proto(response, this, buffer_handle, cx)
2377 .await
2378 });
2379 }
2380 Task::ready(Ok(Default::default()))
2381 }
2382
2383 pub fn find_or_create_local_worktree(
2384 &mut self,
2385 abs_path: impl AsRef<Path>,
2386 visible: bool,
2387 cx: &mut ModelContext<Self>,
2388 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2389 let abs_path = abs_path.as_ref();
2390 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2391 Task::ready(Ok((tree.clone(), relative_path.into())))
2392 } else {
2393 let worktree = self.create_local_worktree(abs_path, visible, cx);
2394 cx.foreground()
2395 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2396 }
2397 }
2398
2399 pub fn find_local_worktree(
2400 &self,
2401 abs_path: &Path,
2402 cx: &AppContext,
2403 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2404 for tree in self.worktrees(cx) {
2405 if let Some(relative_path) = tree
2406 .read(cx)
2407 .as_local()
2408 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2409 {
2410 return Some((tree.clone(), relative_path.into()));
2411 }
2412 }
2413 None
2414 }
2415
2416 pub fn is_shared(&self) -> bool {
2417 match &self.client_state {
2418 ProjectClientState::Local { is_shared, .. } => *is_shared,
2419 ProjectClientState::Remote { .. } => false,
2420 }
2421 }
2422
2423 fn create_local_worktree(
2424 &mut self,
2425 abs_path: impl AsRef<Path>,
2426 visible: bool,
2427 cx: &mut ModelContext<Self>,
2428 ) -> Task<Result<ModelHandle<Worktree>>> {
2429 let fs = self.fs.clone();
2430 let client = self.client.clone();
2431 let path: Arc<Path> = abs_path.as_ref().into();
2432 let task = self
2433 .loading_local_worktrees
2434 .entry(path.clone())
2435 .or_insert_with(|| {
2436 cx.spawn(|project, mut cx| {
2437 async move {
2438 let worktree =
2439 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2440 .await;
2441 project.update(&mut cx, |project, _| {
2442 project.loading_local_worktrees.remove(&path);
2443 });
2444 let worktree = worktree?;
2445
2446 let (remote_project_id, is_shared) =
2447 project.update(&mut cx, |project, cx| {
2448 project.add_worktree(&worktree, cx);
2449 (project.remote_id(), project.is_shared())
2450 });
2451
2452 if let Some(project_id) = remote_project_id {
2453 if is_shared {
2454 worktree
2455 .update(&mut cx, |worktree, cx| {
2456 worktree.as_local_mut().unwrap().share(project_id, cx)
2457 })
2458 .await?;
2459 } else {
2460 worktree
2461 .update(&mut cx, |worktree, cx| {
2462 worktree.as_local_mut().unwrap().register(project_id, cx)
2463 })
2464 .await?;
2465 }
2466 }
2467
2468 Ok(worktree)
2469 }
2470 .map_err(|err| Arc::new(err))
2471 })
2472 .shared()
2473 })
2474 .clone();
2475 cx.foreground().spawn(async move {
2476 match task.await {
2477 Ok(worktree) => Ok(worktree),
2478 Err(err) => Err(anyhow!("{}", err)),
2479 }
2480 })
2481 }
2482
2483 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2484 self.worktrees.retain(|worktree| {
2485 worktree
2486 .upgrade(cx)
2487 .map_or(false, |w| w.read(cx).id() != id)
2488 });
2489 cx.notify();
2490 }
2491
2492 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2493 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2494 if worktree.read(cx).is_local() {
2495 cx.subscribe(&worktree, |this, worktree, _, cx| {
2496 this.update_local_worktree_buffers(worktree, cx);
2497 })
2498 .detach();
2499 }
2500
2501 let push_strong_handle = {
2502 let worktree = worktree.read(cx);
2503 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2504 };
2505 if push_strong_handle {
2506 self.worktrees
2507 .push(WorktreeHandle::Strong(worktree.clone()));
2508 } else {
2509 cx.observe_release(&worktree, |this, cx| {
2510 this.worktrees
2511 .retain(|worktree| worktree.upgrade(cx).is_some());
2512 cx.notify();
2513 })
2514 .detach();
2515 self.worktrees
2516 .push(WorktreeHandle::Weak(worktree.downgrade()));
2517 }
2518 cx.notify();
2519 }
2520
2521 fn update_local_worktree_buffers(
2522 &mut self,
2523 worktree_handle: ModelHandle<Worktree>,
2524 cx: &mut ModelContext<Self>,
2525 ) {
2526 let snapshot = worktree_handle.read(cx).snapshot();
2527 let mut buffers_to_delete = Vec::new();
2528 for (buffer_id, buffer) in &self.opened_buffers {
2529 if let Some(buffer) = buffer.upgrade(cx) {
2530 buffer.update(cx, |buffer, cx| {
2531 if let Some(old_file) = File::from_dyn(buffer.file()) {
2532 if old_file.worktree != worktree_handle {
2533 return;
2534 }
2535
2536 let new_file = if let Some(entry) = old_file
2537 .entry_id
2538 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2539 {
2540 File {
2541 is_local: true,
2542 entry_id: Some(entry.id),
2543 mtime: entry.mtime,
2544 path: entry.path.clone(),
2545 worktree: worktree_handle.clone(),
2546 }
2547 } else if let Some(entry) =
2548 snapshot.entry_for_path(old_file.path().as_ref())
2549 {
2550 File {
2551 is_local: true,
2552 entry_id: Some(entry.id),
2553 mtime: entry.mtime,
2554 path: entry.path.clone(),
2555 worktree: worktree_handle.clone(),
2556 }
2557 } else {
2558 File {
2559 is_local: true,
2560 entry_id: None,
2561 path: old_file.path().clone(),
2562 mtime: old_file.mtime(),
2563 worktree: worktree_handle.clone(),
2564 }
2565 };
2566
2567 if let Some(project_id) = self.remote_id() {
2568 self.client
2569 .send(proto::UpdateBufferFile {
2570 project_id,
2571 buffer_id: *buffer_id as u64,
2572 file: Some(new_file.to_proto()),
2573 })
2574 .log_err();
2575 }
2576 buffer.file_updated(Box::new(new_file), cx).detach();
2577 }
2578 });
2579 } else {
2580 buffers_to_delete.push(*buffer_id);
2581 }
2582 }
2583
2584 for buffer_id in buffers_to_delete {
2585 self.opened_buffers.remove(&buffer_id);
2586 }
2587 }
2588
2589 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2590 let new_active_entry = entry.and_then(|project_path| {
2591 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2592 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2593 Some(ProjectEntry {
2594 worktree_id: project_path.worktree_id,
2595 entry_id: entry.id,
2596 })
2597 });
2598 if new_active_entry != self.active_entry {
2599 self.active_entry = new_active_entry;
2600 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2601 }
2602 }
2603
2604 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2605 self.language_servers_with_diagnostics_running > 0
2606 }
2607
2608 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2609 let mut summary = DiagnosticSummary::default();
2610 for (_, path_summary) in self.diagnostic_summaries(cx) {
2611 summary.error_count += path_summary.error_count;
2612 summary.warning_count += path_summary.warning_count;
2613 summary.info_count += path_summary.info_count;
2614 summary.hint_count += path_summary.hint_count;
2615 }
2616 summary
2617 }
2618
2619 pub fn diagnostic_summaries<'a>(
2620 &'a self,
2621 cx: &'a AppContext,
2622 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2623 self.worktrees(cx).flat_map(move |worktree| {
2624 let worktree = worktree.read(cx);
2625 let worktree_id = worktree.id();
2626 worktree
2627 .diagnostic_summaries()
2628 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2629 })
2630 }
2631
2632 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2633 self.language_servers_with_diagnostics_running += 1;
2634 if self.language_servers_with_diagnostics_running == 1 {
2635 cx.emit(Event::DiskBasedDiagnosticsStarted);
2636 }
2637 }
2638
2639 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2640 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2641 self.language_servers_with_diagnostics_running -= 1;
2642 if self.language_servers_with_diagnostics_running == 0 {
2643 cx.emit(Event::DiskBasedDiagnosticsFinished);
2644 }
2645 }
2646
2647 pub fn active_entry(&self) -> Option<ProjectEntry> {
2648 self.active_entry
2649 }
2650
2651 // RPC message handlers
2652
2653 async fn handle_unshare_project(
2654 this: ModelHandle<Self>,
2655 _: TypedEnvelope<proto::UnshareProject>,
2656 _: Arc<Client>,
2657 mut cx: AsyncAppContext,
2658 ) -> Result<()> {
2659 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2660 Ok(())
2661 }
2662
2663 async fn handle_add_collaborator(
2664 this: ModelHandle<Self>,
2665 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2666 _: Arc<Client>,
2667 mut cx: AsyncAppContext,
2668 ) -> Result<()> {
2669 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2670 let collaborator = envelope
2671 .payload
2672 .collaborator
2673 .take()
2674 .ok_or_else(|| anyhow!("empty collaborator"))?;
2675
2676 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2677 this.update(&mut cx, |this, cx| {
2678 this.collaborators
2679 .insert(collaborator.peer_id, collaborator);
2680 cx.notify();
2681 });
2682
2683 Ok(())
2684 }
2685
2686 async fn handle_remove_collaborator(
2687 this: ModelHandle<Self>,
2688 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2689 _: Arc<Client>,
2690 mut cx: AsyncAppContext,
2691 ) -> Result<()> {
2692 this.update(&mut cx, |this, cx| {
2693 let peer_id = PeerId(envelope.payload.peer_id);
2694 let replica_id = this
2695 .collaborators
2696 .remove(&peer_id)
2697 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2698 .replica_id;
2699 for (_, buffer) in &this.opened_buffers {
2700 if let Some(buffer) = buffer.upgrade(cx) {
2701 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2702 }
2703 }
2704 cx.notify();
2705 Ok(())
2706 })
2707 }
2708
2709 async fn handle_register_worktree(
2710 this: ModelHandle<Self>,
2711 envelope: TypedEnvelope<proto::RegisterWorktree>,
2712 client: Arc<Client>,
2713 mut cx: AsyncAppContext,
2714 ) -> Result<()> {
2715 this.update(&mut cx, |this, cx| {
2716 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2717 let replica_id = this.replica_id();
2718 let worktree = proto::Worktree {
2719 id: envelope.payload.worktree_id,
2720 root_name: envelope.payload.root_name,
2721 entries: Default::default(),
2722 diagnostic_summaries: Default::default(),
2723 visible: envelope.payload.visible,
2724 };
2725 let (worktree, load_task) =
2726 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2727 this.add_worktree(&worktree, cx);
2728 load_task.detach();
2729 Ok(())
2730 })
2731 }
2732
2733 async fn handle_unregister_worktree(
2734 this: ModelHandle<Self>,
2735 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2736 _: Arc<Client>,
2737 mut cx: AsyncAppContext,
2738 ) -> Result<()> {
2739 this.update(&mut cx, |this, cx| {
2740 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2741 this.remove_worktree(worktree_id, cx);
2742 Ok(())
2743 })
2744 }
2745
2746 async fn handle_update_worktree(
2747 this: ModelHandle<Self>,
2748 envelope: TypedEnvelope<proto::UpdateWorktree>,
2749 _: Arc<Client>,
2750 mut cx: AsyncAppContext,
2751 ) -> Result<()> {
2752 this.update(&mut cx, |this, cx| {
2753 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2754 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2755 worktree.update(cx, |worktree, _| {
2756 let worktree = worktree.as_remote_mut().unwrap();
2757 worktree.update_from_remote(envelope)
2758 })?;
2759 }
2760 Ok(())
2761 })
2762 }
2763
2764 async fn handle_update_diagnostic_summary(
2765 this: ModelHandle<Self>,
2766 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2767 _: Arc<Client>,
2768 mut cx: AsyncAppContext,
2769 ) -> Result<()> {
2770 this.update(&mut cx, |this, cx| {
2771 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2772 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2773 if let Some(summary) = envelope.payload.summary {
2774 let project_path = ProjectPath {
2775 worktree_id,
2776 path: Path::new(&summary.path).into(),
2777 };
2778 worktree.update(cx, |worktree, _| {
2779 worktree
2780 .as_remote_mut()
2781 .unwrap()
2782 .update_diagnostic_summary(project_path.path.clone(), &summary);
2783 });
2784 cx.emit(Event::DiagnosticsUpdated(project_path));
2785 }
2786 }
2787 Ok(())
2788 })
2789 }
2790
2791 async fn handle_disk_based_diagnostics_updating(
2792 this: ModelHandle<Self>,
2793 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2794 _: Arc<Client>,
2795 mut cx: AsyncAppContext,
2796 ) -> Result<()> {
2797 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2798 Ok(())
2799 }
2800
2801 async fn handle_disk_based_diagnostics_updated(
2802 this: ModelHandle<Self>,
2803 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2804 _: Arc<Client>,
2805 mut cx: AsyncAppContext,
2806 ) -> Result<()> {
2807 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2808 Ok(())
2809 }
2810
2811 async fn handle_update_buffer(
2812 this: ModelHandle<Self>,
2813 envelope: TypedEnvelope<proto::UpdateBuffer>,
2814 _: Arc<Client>,
2815 mut cx: AsyncAppContext,
2816 ) -> Result<()> {
2817 this.update(&mut cx, |this, cx| {
2818 let payload = envelope.payload.clone();
2819 let buffer_id = payload.buffer_id;
2820 let ops = payload
2821 .operations
2822 .into_iter()
2823 .map(|op| language::proto::deserialize_operation(op))
2824 .collect::<Result<Vec<_>, _>>()?;
2825 match this.opened_buffers.entry(buffer_id) {
2826 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2827 OpenBuffer::Strong(buffer) => {
2828 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2829 }
2830 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2831 OpenBuffer::Weak(_) => {}
2832 },
2833 hash_map::Entry::Vacant(e) => {
2834 e.insert(OpenBuffer::Loading(ops));
2835 }
2836 }
2837 Ok(())
2838 })
2839 }
2840
2841 async fn handle_update_buffer_file(
2842 this: ModelHandle<Self>,
2843 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2844 _: Arc<Client>,
2845 mut cx: AsyncAppContext,
2846 ) -> Result<()> {
2847 this.update(&mut cx, |this, cx| {
2848 let payload = envelope.payload.clone();
2849 let buffer_id = payload.buffer_id;
2850 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2851 let worktree = this
2852 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2853 .ok_or_else(|| anyhow!("no such worktree"))?;
2854 let file = File::from_proto(file, worktree.clone(), cx)?;
2855 let buffer = this
2856 .opened_buffers
2857 .get_mut(&buffer_id)
2858 .and_then(|b| b.upgrade(cx))
2859 .ok_or_else(|| anyhow!("no such buffer"))?;
2860 buffer.update(cx, |buffer, cx| {
2861 buffer.file_updated(Box::new(file), cx).detach();
2862 });
2863 Ok(())
2864 })
2865 }
2866
2867 async fn handle_save_buffer(
2868 this: ModelHandle<Self>,
2869 envelope: TypedEnvelope<proto::SaveBuffer>,
2870 _: Arc<Client>,
2871 mut cx: AsyncAppContext,
2872 ) -> Result<proto::BufferSaved> {
2873 let buffer_id = envelope.payload.buffer_id;
2874 let requested_version = envelope.payload.version.try_into()?;
2875
2876 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2877 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2878 let buffer = this
2879 .opened_buffers
2880 .get(&buffer_id)
2881 .map(|buffer| buffer.upgrade(cx).unwrap())
2882 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2883 Ok::<_, anyhow::Error>((project_id, buffer))
2884 })?;
2885 buffer
2886 .update(&mut cx, |buffer, _| {
2887 buffer.wait_for_version(requested_version)
2888 })
2889 .await;
2890
2891 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2892 Ok(proto::BufferSaved {
2893 project_id,
2894 buffer_id,
2895 version: (&saved_version).into(),
2896 mtime: Some(mtime.into()),
2897 })
2898 }
2899
2900 async fn handle_format_buffers(
2901 this: ModelHandle<Self>,
2902 envelope: TypedEnvelope<proto::FormatBuffers>,
2903 _: Arc<Client>,
2904 mut cx: AsyncAppContext,
2905 ) -> Result<proto::FormatBuffersResponse> {
2906 let sender_id = envelope.original_sender_id()?;
2907 let format = this.update(&mut cx, |this, cx| {
2908 let mut buffers = HashSet::default();
2909 for buffer_id in &envelope.payload.buffer_ids {
2910 buffers.insert(
2911 this.opened_buffers
2912 .get(buffer_id)
2913 .map(|buffer| buffer.upgrade(cx).unwrap())
2914 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2915 );
2916 }
2917 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2918 })?;
2919
2920 let project_transaction = format.await?;
2921 let project_transaction = this.update(&mut cx, |this, cx| {
2922 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2923 });
2924 Ok(proto::FormatBuffersResponse {
2925 transaction: Some(project_transaction),
2926 })
2927 }
2928
2929 async fn handle_get_completions(
2930 this: ModelHandle<Self>,
2931 envelope: TypedEnvelope<proto::GetCompletions>,
2932 _: Arc<Client>,
2933 mut cx: AsyncAppContext,
2934 ) -> Result<proto::GetCompletionsResponse> {
2935 let position = envelope
2936 .payload
2937 .position
2938 .and_then(language::proto::deserialize_anchor)
2939 .ok_or_else(|| anyhow!("invalid position"))?;
2940 let version = clock::Global::from(envelope.payload.version);
2941 let buffer = this.read_with(&cx, |this, cx| {
2942 this.opened_buffers
2943 .get(&envelope.payload.buffer_id)
2944 .map(|buffer| buffer.upgrade(cx).unwrap())
2945 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2946 })?;
2947 buffer
2948 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2949 .await;
2950 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2951 let completions = this
2952 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2953 .await?;
2954
2955 Ok(proto::GetCompletionsResponse {
2956 completions: completions
2957 .iter()
2958 .map(language::proto::serialize_completion)
2959 .collect(),
2960 version: (&version).into(),
2961 })
2962 }
2963
2964 async fn handle_apply_additional_edits_for_completion(
2965 this: ModelHandle<Self>,
2966 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2967 _: Arc<Client>,
2968 mut cx: AsyncAppContext,
2969 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2970 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2971 let buffer = this
2972 .opened_buffers
2973 .get(&envelope.payload.buffer_id)
2974 .map(|buffer| buffer.upgrade(cx).unwrap())
2975 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2976 let language = buffer.read(cx).language();
2977 let completion = language::proto::deserialize_completion(
2978 envelope
2979 .payload
2980 .completion
2981 .ok_or_else(|| anyhow!("invalid completion"))?,
2982 language,
2983 )?;
2984 Ok::<_, anyhow::Error>(
2985 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2986 )
2987 })?;
2988
2989 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2990 transaction: apply_additional_edits
2991 .await?
2992 .as_ref()
2993 .map(language::proto::serialize_transaction),
2994 })
2995 }
2996
2997 async fn handle_get_code_actions(
2998 this: ModelHandle<Self>,
2999 envelope: TypedEnvelope<proto::GetCodeActions>,
3000 _: Arc<Client>,
3001 mut cx: AsyncAppContext,
3002 ) -> Result<proto::GetCodeActionsResponse> {
3003 let start = envelope
3004 .payload
3005 .start
3006 .and_then(language::proto::deserialize_anchor)
3007 .ok_or_else(|| anyhow!("invalid start"))?;
3008 let end = envelope
3009 .payload
3010 .end
3011 .and_then(language::proto::deserialize_anchor)
3012 .ok_or_else(|| anyhow!("invalid end"))?;
3013 let buffer = this.update(&mut cx, |this, cx| {
3014 this.opened_buffers
3015 .get(&envelope.payload.buffer_id)
3016 .map(|buffer| buffer.upgrade(cx).unwrap())
3017 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3018 })?;
3019 buffer
3020 .update(&mut cx, |buffer, _| {
3021 buffer.wait_for_version(envelope.payload.version.into())
3022 })
3023 .await;
3024
3025 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3026 let code_actions = this.update(&mut cx, |this, cx| {
3027 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3028 })?;
3029
3030 Ok(proto::GetCodeActionsResponse {
3031 actions: code_actions
3032 .await?
3033 .iter()
3034 .map(language::proto::serialize_code_action)
3035 .collect(),
3036 version: (&version).into(),
3037 })
3038 }
3039
3040 async fn handle_apply_code_action(
3041 this: ModelHandle<Self>,
3042 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3043 _: Arc<Client>,
3044 mut cx: AsyncAppContext,
3045 ) -> Result<proto::ApplyCodeActionResponse> {
3046 let sender_id = envelope.original_sender_id()?;
3047 let action = language::proto::deserialize_code_action(
3048 envelope
3049 .payload
3050 .action
3051 .ok_or_else(|| anyhow!("invalid action"))?,
3052 )?;
3053 let apply_code_action = this.update(&mut cx, |this, cx| {
3054 let buffer = this
3055 .opened_buffers
3056 .get(&envelope.payload.buffer_id)
3057 .map(|buffer| buffer.upgrade(cx).unwrap())
3058 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3059 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3060 })?;
3061
3062 let project_transaction = apply_code_action.await?;
3063 let project_transaction = this.update(&mut cx, |this, cx| {
3064 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3065 });
3066 Ok(proto::ApplyCodeActionResponse {
3067 transaction: Some(project_transaction),
3068 })
3069 }
3070
3071 async fn handle_lsp_command<T: LspCommand>(
3072 this: ModelHandle<Self>,
3073 envelope: TypedEnvelope<T::ProtoRequest>,
3074 _: Arc<Client>,
3075 mut cx: AsyncAppContext,
3076 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3077 where
3078 <T::LspRequest as lsp::request::Request>::Result: Send,
3079 {
3080 let sender_id = envelope.original_sender_id()?;
3081 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3082 let buffer_handle = this.read_with(&cx, |this, _| {
3083 this.opened_buffers
3084 .get(&buffer_id)
3085 .map(|buffer| buffer.upgrade(&cx).unwrap())
3086 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3087 })?;
3088 let request = T::from_proto(
3089 envelope.payload,
3090 this.clone(),
3091 buffer_handle.clone(),
3092 cx.clone(),
3093 )
3094 .await?;
3095 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3096 let response = this
3097 .update(&mut cx, |this, cx| {
3098 this.request_lsp(buffer_handle, request, cx)
3099 })
3100 .await?;
3101 this.update(&mut cx, |this, cx| {
3102 Ok(T::response_to_proto(
3103 response,
3104 this,
3105 sender_id,
3106 &buffer_version,
3107 cx,
3108 ))
3109 })
3110 }
3111
3112 async fn handle_get_project_symbols(
3113 this: ModelHandle<Self>,
3114 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3115 _: Arc<Client>,
3116 mut cx: AsyncAppContext,
3117 ) -> Result<proto::GetProjectSymbolsResponse> {
3118 let symbols = this
3119 .update(&mut cx, |this, cx| {
3120 this.symbols(&envelope.payload.query, cx)
3121 })
3122 .await?;
3123
3124 Ok(proto::GetProjectSymbolsResponse {
3125 symbols: symbols.iter().map(serialize_symbol).collect(),
3126 })
3127 }
3128
3129 async fn handle_search_project(
3130 this: ModelHandle<Self>,
3131 envelope: TypedEnvelope<proto::SearchProject>,
3132 _: Arc<Client>,
3133 mut cx: AsyncAppContext,
3134 ) -> Result<proto::SearchProjectResponse> {
3135 let peer_id = envelope.original_sender_id()?;
3136 let query = SearchQuery::from_proto(envelope.payload)?;
3137 let result = this
3138 .update(&mut cx, |this, cx| this.search(query, cx))
3139 .await?;
3140
3141 this.update(&mut cx, |this, cx| {
3142 let mut locations = Vec::new();
3143 for (buffer, ranges) in result {
3144 for range in ranges {
3145 let start = serialize_anchor(&range.start);
3146 let end = serialize_anchor(&range.end);
3147 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3148 locations.push(proto::Location {
3149 buffer: Some(buffer),
3150 start: Some(start),
3151 end: Some(end),
3152 });
3153 }
3154 }
3155 Ok(proto::SearchProjectResponse { locations })
3156 })
3157 }
3158
3159 async fn handle_open_buffer_for_symbol(
3160 this: ModelHandle<Self>,
3161 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3162 _: Arc<Client>,
3163 mut cx: AsyncAppContext,
3164 ) -> Result<proto::OpenBufferForSymbolResponse> {
3165 let peer_id = envelope.original_sender_id()?;
3166 let symbol = envelope
3167 .payload
3168 .symbol
3169 .ok_or_else(|| anyhow!("invalid symbol"))?;
3170 let symbol = this.read_with(&cx, |this, _| {
3171 let symbol = this.deserialize_symbol(symbol)?;
3172 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3173 if signature == symbol.signature {
3174 Ok(symbol)
3175 } else {
3176 Err(anyhow!("invalid symbol signature"))
3177 }
3178 })?;
3179 let buffer = this
3180 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3181 .await?;
3182
3183 Ok(proto::OpenBufferForSymbolResponse {
3184 buffer: Some(this.update(&mut cx, |this, cx| {
3185 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3186 })),
3187 })
3188 }
3189
3190 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3191 let mut hasher = Sha256::new();
3192 hasher.update(worktree_id.to_proto().to_be_bytes());
3193 hasher.update(path.to_string_lossy().as_bytes());
3194 hasher.update(self.nonce.to_be_bytes());
3195 hasher.finalize().as_slice().try_into().unwrap()
3196 }
3197
3198 async fn handle_open_buffer(
3199 this: ModelHandle<Self>,
3200 envelope: TypedEnvelope<proto::OpenBuffer>,
3201 _: Arc<Client>,
3202 mut cx: AsyncAppContext,
3203 ) -> Result<proto::OpenBufferResponse> {
3204 let peer_id = envelope.original_sender_id()?;
3205 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3206 let open_buffer = this.update(&mut cx, |this, cx| {
3207 this.open_buffer(
3208 ProjectPath {
3209 worktree_id,
3210 path: PathBuf::from(envelope.payload.path).into(),
3211 },
3212 cx,
3213 )
3214 });
3215
3216 let buffer = open_buffer.await?;
3217 this.update(&mut cx, |this, cx| {
3218 Ok(proto::OpenBufferResponse {
3219 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3220 })
3221 })
3222 }
3223
3224 fn serialize_project_transaction_for_peer(
3225 &mut self,
3226 project_transaction: ProjectTransaction,
3227 peer_id: PeerId,
3228 cx: &AppContext,
3229 ) -> proto::ProjectTransaction {
3230 let mut serialized_transaction = proto::ProjectTransaction {
3231 buffers: Default::default(),
3232 transactions: Default::default(),
3233 };
3234 for (buffer, transaction) in project_transaction.0 {
3235 serialized_transaction
3236 .buffers
3237 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3238 serialized_transaction
3239 .transactions
3240 .push(language::proto::serialize_transaction(&transaction));
3241 }
3242 serialized_transaction
3243 }
3244
3245 fn deserialize_project_transaction(
3246 &mut self,
3247 message: proto::ProjectTransaction,
3248 push_to_history: bool,
3249 cx: &mut ModelContext<Self>,
3250 ) -> Task<Result<ProjectTransaction>> {
3251 cx.spawn(|this, mut cx| async move {
3252 let mut project_transaction = ProjectTransaction::default();
3253 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3254 let buffer = this
3255 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3256 .await?;
3257 let transaction = language::proto::deserialize_transaction(transaction)?;
3258 project_transaction.0.insert(buffer, transaction);
3259 }
3260
3261 for (buffer, transaction) in &project_transaction.0 {
3262 buffer
3263 .update(&mut cx, |buffer, _| {
3264 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3265 })
3266 .await;
3267
3268 if push_to_history {
3269 buffer.update(&mut cx, |buffer, _| {
3270 buffer.push_transaction(transaction.clone(), Instant::now());
3271 });
3272 }
3273 }
3274
3275 Ok(project_transaction)
3276 })
3277 }
3278
3279 fn serialize_buffer_for_peer(
3280 &mut self,
3281 buffer: &ModelHandle<Buffer>,
3282 peer_id: PeerId,
3283 cx: &AppContext,
3284 ) -> proto::Buffer {
3285 let buffer_id = buffer.read(cx).remote_id();
3286 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3287 if shared_buffers.insert(buffer_id) {
3288 proto::Buffer {
3289 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3290 }
3291 } else {
3292 proto::Buffer {
3293 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3294 }
3295 }
3296 }
3297
3298 fn deserialize_buffer(
3299 &mut self,
3300 buffer: proto::Buffer,
3301 cx: &mut ModelContext<Self>,
3302 ) -> Task<Result<ModelHandle<Buffer>>> {
3303 let replica_id = self.replica_id();
3304
3305 let opened_buffer_tx = self.opened_buffer.0.clone();
3306 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3307 cx.spawn(|this, mut cx| async move {
3308 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3309 proto::buffer::Variant::Id(id) => {
3310 let buffer = loop {
3311 let buffer = this.read_with(&cx, |this, cx| {
3312 this.opened_buffers
3313 .get(&id)
3314 .and_then(|buffer| buffer.upgrade(cx))
3315 });
3316 if let Some(buffer) = buffer {
3317 break buffer;
3318 }
3319 opened_buffer_rx
3320 .next()
3321 .await
3322 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3323 };
3324 Ok(buffer)
3325 }
3326 proto::buffer::Variant::State(mut buffer) => {
3327 let mut buffer_worktree = None;
3328 let mut buffer_file = None;
3329 if let Some(file) = buffer.file.take() {
3330 this.read_with(&cx, |this, cx| {
3331 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3332 let worktree =
3333 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3334 anyhow!("no worktree found for id {}", file.worktree_id)
3335 })?;
3336 buffer_file =
3337 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3338 as Box<dyn language::File>);
3339 buffer_worktree = Some(worktree);
3340 Ok::<_, anyhow::Error>(())
3341 })?;
3342 }
3343
3344 let buffer = cx.add_model(|cx| {
3345 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3346 });
3347
3348 this.update(&mut cx, |this, cx| {
3349 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3350 })?;
3351
3352 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3353 Ok(buffer)
3354 }
3355 }
3356 })
3357 }
3358
3359 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3360 let language = self
3361 .languages
3362 .get_language(&serialized_symbol.language_name);
3363 let start = serialized_symbol
3364 .start
3365 .ok_or_else(|| anyhow!("invalid start"))?;
3366 let end = serialized_symbol
3367 .end
3368 .ok_or_else(|| anyhow!("invalid end"))?;
3369 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3370 Ok(Symbol {
3371 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3372 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3373 language_name: serialized_symbol.language_name.clone(),
3374 label: language
3375 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3376 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3377 name: serialized_symbol.name,
3378 path: PathBuf::from(serialized_symbol.path),
3379 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3380 kind,
3381 signature: serialized_symbol
3382 .signature
3383 .try_into()
3384 .map_err(|_| anyhow!("invalid signature"))?,
3385 })
3386 }
3387
3388 async fn handle_close_buffer(
3389 _: ModelHandle<Self>,
3390 _: TypedEnvelope<proto::CloseBuffer>,
3391 _: Arc<Client>,
3392 _: AsyncAppContext,
3393 ) -> Result<()> {
3394 // TODO: use this for following
3395 Ok(())
3396 }
3397
3398 async fn handle_buffer_saved(
3399 this: ModelHandle<Self>,
3400 envelope: TypedEnvelope<proto::BufferSaved>,
3401 _: Arc<Client>,
3402 mut cx: AsyncAppContext,
3403 ) -> Result<()> {
3404 let version = envelope.payload.version.try_into()?;
3405 let mtime = envelope
3406 .payload
3407 .mtime
3408 .ok_or_else(|| anyhow!("missing mtime"))?
3409 .into();
3410
3411 this.update(&mut cx, |this, cx| {
3412 let buffer = this
3413 .opened_buffers
3414 .get(&envelope.payload.buffer_id)
3415 .and_then(|buffer| buffer.upgrade(cx));
3416 if let Some(buffer) = buffer {
3417 buffer.update(cx, |buffer, cx| {
3418 buffer.did_save(version, mtime, None, cx);
3419 });
3420 }
3421 Ok(())
3422 })
3423 }
3424
3425 async fn handle_buffer_reloaded(
3426 this: ModelHandle<Self>,
3427 envelope: TypedEnvelope<proto::BufferReloaded>,
3428 _: Arc<Client>,
3429 mut cx: AsyncAppContext,
3430 ) -> Result<()> {
3431 let payload = envelope.payload.clone();
3432 let version = payload.version.try_into()?;
3433 let mtime = payload
3434 .mtime
3435 .ok_or_else(|| anyhow!("missing mtime"))?
3436 .into();
3437 this.update(&mut cx, |this, cx| {
3438 let buffer = this
3439 .opened_buffers
3440 .get(&payload.buffer_id)
3441 .and_then(|buffer| buffer.upgrade(cx));
3442 if let Some(buffer) = buffer {
3443 buffer.update(cx, |buffer, cx| {
3444 buffer.did_reload(version, mtime, cx);
3445 });
3446 }
3447 Ok(())
3448 })
3449 }
3450
3451 pub fn match_paths<'a>(
3452 &self,
3453 query: &'a str,
3454 include_ignored: bool,
3455 smart_case: bool,
3456 max_results: usize,
3457 cancel_flag: &'a AtomicBool,
3458 cx: &AppContext,
3459 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3460 let worktrees = self
3461 .worktrees(cx)
3462 .filter(|worktree| worktree.read(cx).is_visible())
3463 .collect::<Vec<_>>();
3464 let include_root_name = worktrees.len() > 1;
3465 let candidate_sets = worktrees
3466 .into_iter()
3467 .map(|worktree| CandidateSet {
3468 snapshot: worktree.read(cx).snapshot(),
3469 include_ignored,
3470 include_root_name,
3471 })
3472 .collect::<Vec<_>>();
3473
3474 let background = cx.background().clone();
3475 async move {
3476 fuzzy::match_paths(
3477 candidate_sets.as_slice(),
3478 query,
3479 smart_case,
3480 max_results,
3481 cancel_flag,
3482 background,
3483 )
3484 .await
3485 }
3486 }
3487}
3488
3489impl WorktreeHandle {
3490 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3491 match self {
3492 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3493 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3494 }
3495 }
3496}
3497
3498impl OpenBuffer {
3499 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3500 match self {
3501 OpenBuffer::Strong(handle) => Some(handle.clone()),
3502 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3503 OpenBuffer::Loading(_) => None,
3504 }
3505 }
3506}
3507
3508struct CandidateSet {
3509 snapshot: Snapshot,
3510 include_ignored: bool,
3511 include_root_name: bool,
3512}
3513
3514impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3515 type Candidates = CandidateSetIter<'a>;
3516
3517 fn id(&self) -> usize {
3518 self.snapshot.id().to_usize()
3519 }
3520
3521 fn len(&self) -> usize {
3522 if self.include_ignored {
3523 self.snapshot.file_count()
3524 } else {
3525 self.snapshot.visible_file_count()
3526 }
3527 }
3528
3529 fn prefix(&self) -> Arc<str> {
3530 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3531 self.snapshot.root_name().into()
3532 } else if self.include_root_name {
3533 format!("{}/", self.snapshot.root_name()).into()
3534 } else {
3535 "".into()
3536 }
3537 }
3538
3539 fn candidates(&'a self, start: usize) -> Self::Candidates {
3540 CandidateSetIter {
3541 traversal: self.snapshot.files(self.include_ignored, start),
3542 }
3543 }
3544}
3545
3546struct CandidateSetIter<'a> {
3547 traversal: Traversal<'a>,
3548}
3549
3550impl<'a> Iterator for CandidateSetIter<'a> {
3551 type Item = PathMatchCandidate<'a>;
3552
3553 fn next(&mut self) -> Option<Self::Item> {
3554 self.traversal.next().map(|entry| {
3555 if let EntryKind::File(char_bag) = entry.kind {
3556 PathMatchCandidate {
3557 path: &entry.path,
3558 char_bag,
3559 }
3560 } else {
3561 unreachable!()
3562 }
3563 })
3564 }
3565}
3566
3567impl Entity for Project {
3568 type Event = Event;
3569
3570 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3571 match &self.client_state {
3572 ProjectClientState::Local { remote_id_rx, .. } => {
3573 if let Some(project_id) = *remote_id_rx.borrow() {
3574 self.client
3575 .send(proto::UnregisterProject { project_id })
3576 .log_err();
3577 }
3578 }
3579 ProjectClientState::Remote { remote_id, .. } => {
3580 self.client
3581 .send(proto::LeaveProject {
3582 project_id: *remote_id,
3583 })
3584 .log_err();
3585 }
3586 }
3587 }
3588
3589 fn app_will_quit(
3590 &mut self,
3591 _: &mut MutableAppContext,
3592 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3593 let shutdown_futures = self
3594 .language_servers
3595 .drain()
3596 .filter_map(|(_, server)| server.shutdown())
3597 .collect::<Vec<_>>();
3598 Some(
3599 async move {
3600 futures::future::join_all(shutdown_futures).await;
3601 }
3602 .boxed(),
3603 )
3604 }
3605}
3606
3607impl Collaborator {
3608 fn from_proto(
3609 message: proto::Collaborator,
3610 user_store: &ModelHandle<UserStore>,
3611 cx: &mut AsyncAppContext,
3612 ) -> impl Future<Output = Result<Self>> {
3613 let user = user_store.update(cx, |user_store, cx| {
3614 user_store.fetch_user(message.user_id, cx)
3615 });
3616
3617 async move {
3618 Ok(Self {
3619 peer_id: PeerId(message.peer_id),
3620 user: user.await?,
3621 replica_id: message.replica_id as ReplicaId,
3622 })
3623 }
3624 }
3625}
3626
3627impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3628 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3629 Self {
3630 worktree_id,
3631 path: path.as_ref().into(),
3632 }
3633 }
3634}
3635
3636impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3637 fn from(options: lsp::CreateFileOptions) -> Self {
3638 Self {
3639 overwrite: options.overwrite.unwrap_or(false),
3640 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3641 }
3642 }
3643}
3644
3645impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3646 fn from(options: lsp::RenameFileOptions) -> Self {
3647 Self {
3648 overwrite: options.overwrite.unwrap_or(false),
3649 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3650 }
3651 }
3652}
3653
3654impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3655 fn from(options: lsp::DeleteFileOptions) -> Self {
3656 Self {
3657 recursive: options.recursive.unwrap_or(false),
3658 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3659 }
3660 }
3661}
3662
3663fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3664 proto::Symbol {
3665 source_worktree_id: symbol.source_worktree_id.to_proto(),
3666 worktree_id: symbol.worktree_id.to_proto(),
3667 language_name: symbol.language_name.clone(),
3668 name: symbol.name.clone(),
3669 kind: unsafe { mem::transmute(symbol.kind) },
3670 path: symbol.path.to_string_lossy().to_string(),
3671 start: Some(proto::Point {
3672 row: symbol.range.start.row,
3673 column: symbol.range.start.column,
3674 }),
3675 end: Some(proto::Point {
3676 row: symbol.range.end.row,
3677 column: symbol.range.end.column,
3678 }),
3679 signature: symbol.signature.to_vec(),
3680 }
3681}
3682
3683fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3684 let mut path_components = path.components();
3685 let mut base_components = base.components();
3686 let mut components: Vec<Component> = Vec::new();
3687 loop {
3688 match (path_components.next(), base_components.next()) {
3689 (None, None) => break,
3690 (Some(a), None) => {
3691 components.push(a);
3692 components.extend(path_components.by_ref());
3693 break;
3694 }
3695 (None, _) => components.push(Component::ParentDir),
3696 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3697 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3698 (Some(a), Some(_)) => {
3699 components.push(Component::ParentDir);
3700 for _ in base_components {
3701 components.push(Component::ParentDir);
3702 }
3703 components.push(a);
3704 components.extend(path_components.by_ref());
3705 break;
3706 }
3707 }
3708 }
3709 components.iter().map(|c| c.as_os_str()).collect()
3710}
3711
3712#[cfg(test)]
3713mod tests {
3714 use super::{Event, *};
3715 use fs::RealFs;
3716 use futures::StreamExt;
3717 use gpui::test::subscribe;
3718 use language::{
3719 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3720 };
3721 use lsp::Url;
3722 use serde_json::json;
3723 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3724 use unindent::Unindent as _;
3725 use util::test::temp_tree;
3726 use worktree::WorktreeHandle as _;
3727
3728 #[gpui::test]
3729 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3730 let dir = temp_tree(json!({
3731 "root": {
3732 "apple": "",
3733 "banana": {
3734 "carrot": {
3735 "date": "",
3736 "endive": "",
3737 }
3738 },
3739 "fennel": {
3740 "grape": "",
3741 }
3742 }
3743 }));
3744
3745 let root_link_path = dir.path().join("root_link");
3746 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3747 unix::fs::symlink(
3748 &dir.path().join("root/fennel"),
3749 &dir.path().join("root/finnochio"),
3750 )
3751 .unwrap();
3752
3753 let project = Project::test(Arc::new(RealFs), cx);
3754
3755 let (tree, _) = project
3756 .update(cx, |project, cx| {
3757 project.find_or_create_local_worktree(&root_link_path, true, cx)
3758 })
3759 .await
3760 .unwrap();
3761
3762 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3763 .await;
3764 cx.read(|cx| {
3765 let tree = tree.read(cx);
3766 assert_eq!(tree.file_count(), 5);
3767 assert_eq!(
3768 tree.inode_for_path("fennel/grape"),
3769 tree.inode_for_path("finnochio/grape")
3770 );
3771 });
3772
3773 let cancel_flag = Default::default();
3774 let results = project
3775 .read_with(cx, |project, cx| {
3776 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3777 })
3778 .await;
3779 assert_eq!(
3780 results
3781 .into_iter()
3782 .map(|result| result.path)
3783 .collect::<Vec<Arc<Path>>>(),
3784 vec![
3785 PathBuf::from("banana/carrot/date").into(),
3786 PathBuf::from("banana/carrot/endive").into(),
3787 ]
3788 );
3789 }
3790
3791 #[gpui::test]
3792 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3793 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3794 let progress_token = language_server_config
3795 .disk_based_diagnostics_progress_token
3796 .clone()
3797 .unwrap();
3798
3799 let language = Arc::new(Language::new(
3800 LanguageConfig {
3801 name: "Rust".into(),
3802 path_suffixes: vec!["rs".to_string()],
3803 language_server: Some(language_server_config),
3804 ..Default::default()
3805 },
3806 Some(tree_sitter_rust::language()),
3807 ));
3808
3809 let fs = FakeFs::new(cx.background());
3810 fs.insert_tree(
3811 "/dir",
3812 json!({
3813 "a.rs": "fn a() { A }",
3814 "b.rs": "const y: i32 = 1",
3815 }),
3816 )
3817 .await;
3818
3819 let project = Project::test(fs, cx);
3820 project.update(cx, |project, _| {
3821 Arc::get_mut(&mut project.languages).unwrap().add(language);
3822 });
3823
3824 let (tree, _) = project
3825 .update(cx, |project, cx| {
3826 project.find_or_create_local_worktree("/dir", true, cx)
3827 })
3828 .await
3829 .unwrap();
3830 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3831
3832 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3833 .await;
3834
3835 // Cause worktree to start the fake language server
3836 let _buffer = project
3837 .update(cx, |project, cx| {
3838 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3839 })
3840 .await
3841 .unwrap();
3842
3843 let mut events = subscribe(&project, cx);
3844
3845 let mut fake_server = fake_servers.next().await.unwrap();
3846 fake_server.start_progress(&progress_token).await;
3847 assert_eq!(
3848 events.next().await.unwrap(),
3849 Event::DiskBasedDiagnosticsStarted
3850 );
3851
3852 fake_server.start_progress(&progress_token).await;
3853 fake_server.end_progress(&progress_token).await;
3854 fake_server.start_progress(&progress_token).await;
3855
3856 fake_server
3857 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3858 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3859 version: None,
3860 diagnostics: vec![lsp::Diagnostic {
3861 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3862 severity: Some(lsp::DiagnosticSeverity::ERROR),
3863 message: "undefined variable 'A'".to_string(),
3864 ..Default::default()
3865 }],
3866 })
3867 .await;
3868 assert_eq!(
3869 events.next().await.unwrap(),
3870 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3871 );
3872
3873 fake_server.end_progress(&progress_token).await;
3874 fake_server.end_progress(&progress_token).await;
3875 assert_eq!(
3876 events.next().await.unwrap(),
3877 Event::DiskBasedDiagnosticsUpdated
3878 );
3879 assert_eq!(
3880 events.next().await.unwrap(),
3881 Event::DiskBasedDiagnosticsFinished
3882 );
3883
3884 let buffer = project
3885 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3886 .await
3887 .unwrap();
3888
3889 buffer.read_with(cx, |buffer, _| {
3890 let snapshot = buffer.snapshot();
3891 let diagnostics = snapshot
3892 .diagnostics_in_range::<_, Point>(0..buffer.len())
3893 .collect::<Vec<_>>();
3894 assert_eq!(
3895 diagnostics,
3896 &[DiagnosticEntry {
3897 range: Point::new(0, 9)..Point::new(0, 10),
3898 diagnostic: Diagnostic {
3899 severity: lsp::DiagnosticSeverity::ERROR,
3900 message: "undefined variable 'A'".to_string(),
3901 group_id: 0,
3902 is_primary: true,
3903 ..Default::default()
3904 }
3905 }]
3906 )
3907 });
3908 }
3909
3910 #[gpui::test]
3911 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3912 let dir = temp_tree(json!({
3913 "root": {
3914 "dir1": {},
3915 "dir2": {
3916 "dir3": {}
3917 }
3918 }
3919 }));
3920
3921 let project = Project::test(Arc::new(RealFs), cx);
3922 let (tree, _) = project
3923 .update(cx, |project, cx| {
3924 project.find_or_create_local_worktree(&dir.path(), true, cx)
3925 })
3926 .await
3927 .unwrap();
3928
3929 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3930 .await;
3931
3932 let cancel_flag = Default::default();
3933 let results = project
3934 .read_with(cx, |project, cx| {
3935 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3936 })
3937 .await;
3938
3939 assert!(results.is_empty());
3940 }
3941
3942 #[gpui::test]
3943 async fn test_definition(cx: &mut gpui::TestAppContext) {
3944 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3945 let language = Arc::new(Language::new(
3946 LanguageConfig {
3947 name: "Rust".into(),
3948 path_suffixes: vec!["rs".to_string()],
3949 language_server: Some(language_server_config),
3950 ..Default::default()
3951 },
3952 Some(tree_sitter_rust::language()),
3953 ));
3954
3955 let fs = FakeFs::new(cx.background());
3956 fs.insert_tree(
3957 "/dir",
3958 json!({
3959 "a.rs": "const fn a() { A }",
3960 "b.rs": "const y: i32 = crate::a()",
3961 }),
3962 )
3963 .await;
3964
3965 let project = Project::test(fs, cx);
3966 project.update(cx, |project, _| {
3967 Arc::get_mut(&mut project.languages).unwrap().add(language);
3968 });
3969
3970 let (tree, _) = project
3971 .update(cx, |project, cx| {
3972 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
3973 })
3974 .await
3975 .unwrap();
3976 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3977 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3978 .await;
3979
3980 let buffer = project
3981 .update(cx, |project, cx| {
3982 project.open_buffer(
3983 ProjectPath {
3984 worktree_id,
3985 path: Path::new("").into(),
3986 },
3987 cx,
3988 )
3989 })
3990 .await
3991 .unwrap();
3992
3993 let mut fake_server = fake_servers.next().await.unwrap();
3994 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3995 let params = params.text_document_position_params;
3996 assert_eq!(
3997 params.text_document.uri.to_file_path().unwrap(),
3998 Path::new("/dir/b.rs"),
3999 );
4000 assert_eq!(params.position, lsp::Position::new(0, 22));
4001
4002 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
4003 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4004 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4005 )))
4006 });
4007
4008 let mut definitions = project
4009 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
4010 .await
4011 .unwrap();
4012
4013 assert_eq!(definitions.len(), 1);
4014 let definition = definitions.pop().unwrap();
4015 cx.update(|cx| {
4016 let target_buffer = definition.buffer.read(cx);
4017 assert_eq!(
4018 target_buffer
4019 .file()
4020 .unwrap()
4021 .as_local()
4022 .unwrap()
4023 .abs_path(cx),
4024 Path::new("/dir/a.rs"),
4025 );
4026 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
4027 assert_eq!(
4028 list_worktrees(&project, cx),
4029 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4030 );
4031
4032 drop(definition);
4033 });
4034 cx.read(|cx| {
4035 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4036 });
4037
4038 fn list_worktrees<'a>(
4039 project: &'a ModelHandle<Project>,
4040 cx: &'a AppContext,
4041 ) -> Vec<(&'a Path, bool)> {
4042 project
4043 .read(cx)
4044 .worktrees(cx)
4045 .map(|worktree| {
4046 let worktree = worktree.read(cx);
4047 (
4048 worktree.as_local().unwrap().abs_path().as_ref(),
4049 worktree.is_visible(),
4050 )
4051 })
4052 .collect::<Vec<_>>()
4053 }
4054 }
4055
4056 #[gpui::test]
4057 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4058 let fs = FakeFs::new(cx.background());
4059 fs.insert_tree(
4060 "/dir",
4061 json!({
4062 "file1": "the old contents",
4063 }),
4064 )
4065 .await;
4066
4067 let project = Project::test(fs.clone(), cx);
4068 let worktree_id = project
4069 .update(cx, |p, cx| {
4070 p.find_or_create_local_worktree("/dir", true, cx)
4071 })
4072 .await
4073 .unwrap()
4074 .0
4075 .read_with(cx, |tree, _| tree.id());
4076
4077 let buffer = project
4078 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4079 .await
4080 .unwrap();
4081 buffer
4082 .update(cx, |buffer, cx| {
4083 assert_eq!(buffer.text(), "the old contents");
4084 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4085 buffer.save(cx)
4086 })
4087 .await
4088 .unwrap();
4089
4090 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4091 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4092 }
4093
4094 #[gpui::test]
4095 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4096 let fs = FakeFs::new(cx.background());
4097 fs.insert_tree(
4098 "/dir",
4099 json!({
4100 "file1": "the old contents",
4101 }),
4102 )
4103 .await;
4104
4105 let project = Project::test(fs.clone(), cx);
4106 let worktree_id = project
4107 .update(cx, |p, cx| {
4108 p.find_or_create_local_worktree("/dir/file1", true, cx)
4109 })
4110 .await
4111 .unwrap()
4112 .0
4113 .read_with(cx, |tree, _| tree.id());
4114
4115 let buffer = project
4116 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4117 .await
4118 .unwrap();
4119 buffer
4120 .update(cx, |buffer, cx| {
4121 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4122 buffer.save(cx)
4123 })
4124 .await
4125 .unwrap();
4126
4127 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4128 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4129 }
4130
4131 #[gpui::test(retries = 5)]
4132 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4133 let dir = temp_tree(json!({
4134 "a": {
4135 "file1": "",
4136 "file2": "",
4137 "file3": "",
4138 },
4139 "b": {
4140 "c": {
4141 "file4": "",
4142 "file5": "",
4143 }
4144 }
4145 }));
4146
4147 let project = Project::test(Arc::new(RealFs), cx);
4148 let rpc = project.read_with(cx, |p, _| p.client.clone());
4149
4150 let (tree, _) = project
4151 .update(cx, |p, cx| {
4152 p.find_or_create_local_worktree(dir.path(), true, cx)
4153 })
4154 .await
4155 .unwrap();
4156 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4157
4158 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4159 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4160 async move { buffer.await.unwrap() }
4161 };
4162 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4163 tree.read_with(cx, |tree, _| {
4164 tree.entry_for_path(path)
4165 .expect(&format!("no entry for path {}", path))
4166 .id
4167 })
4168 };
4169
4170 let buffer2 = buffer_for_path("a/file2", cx).await;
4171 let buffer3 = buffer_for_path("a/file3", cx).await;
4172 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4173 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4174
4175 let file2_id = id_for_path("a/file2", &cx);
4176 let file3_id = id_for_path("a/file3", &cx);
4177 let file4_id = id_for_path("b/c/file4", &cx);
4178
4179 // Wait for the initial scan.
4180 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4181 .await;
4182
4183 // Create a remote copy of this worktree.
4184 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4185 let (remote, load_task) = cx.update(|cx| {
4186 Worktree::remote(
4187 1,
4188 1,
4189 initial_snapshot.to_proto(&Default::default(), true),
4190 rpc.clone(),
4191 cx,
4192 )
4193 });
4194 load_task.await;
4195
4196 cx.read(|cx| {
4197 assert!(!buffer2.read(cx).is_dirty());
4198 assert!(!buffer3.read(cx).is_dirty());
4199 assert!(!buffer4.read(cx).is_dirty());
4200 assert!(!buffer5.read(cx).is_dirty());
4201 });
4202
4203 // Rename and delete files and directories.
4204 tree.flush_fs_events(&cx).await;
4205 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4206 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4207 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4208 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4209 tree.flush_fs_events(&cx).await;
4210
4211 let expected_paths = vec![
4212 "a",
4213 "a/file1",
4214 "a/file2.new",
4215 "b",
4216 "d",
4217 "d/file3",
4218 "d/file4",
4219 ];
4220
4221 cx.read(|app| {
4222 assert_eq!(
4223 tree.read(app)
4224 .paths()
4225 .map(|p| p.to_str().unwrap())
4226 .collect::<Vec<_>>(),
4227 expected_paths
4228 );
4229
4230 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4231 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4232 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4233
4234 assert_eq!(
4235 buffer2.read(app).file().unwrap().path().as_ref(),
4236 Path::new("a/file2.new")
4237 );
4238 assert_eq!(
4239 buffer3.read(app).file().unwrap().path().as_ref(),
4240 Path::new("d/file3")
4241 );
4242 assert_eq!(
4243 buffer4.read(app).file().unwrap().path().as_ref(),
4244 Path::new("d/file4")
4245 );
4246 assert_eq!(
4247 buffer5.read(app).file().unwrap().path().as_ref(),
4248 Path::new("b/c/file5")
4249 );
4250
4251 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4252 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4253 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4254 assert!(buffer5.read(app).file().unwrap().is_deleted());
4255 });
4256
4257 // Update the remote worktree. Check that it becomes consistent with the
4258 // local worktree.
4259 remote.update(cx, |remote, cx| {
4260 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4261 &initial_snapshot,
4262 1,
4263 1,
4264 true,
4265 );
4266 remote
4267 .as_remote_mut()
4268 .unwrap()
4269 .snapshot
4270 .apply_remote_update(update_message)
4271 .unwrap();
4272
4273 assert_eq!(
4274 remote
4275 .paths()
4276 .map(|p| p.to_str().unwrap())
4277 .collect::<Vec<_>>(),
4278 expected_paths
4279 );
4280 });
4281 }
4282
4283 #[gpui::test]
4284 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4285 let fs = FakeFs::new(cx.background());
4286 fs.insert_tree(
4287 "/the-dir",
4288 json!({
4289 "a.txt": "a-contents",
4290 "b.txt": "b-contents",
4291 }),
4292 )
4293 .await;
4294
4295 let project = Project::test(fs.clone(), cx);
4296 let worktree_id = project
4297 .update(cx, |p, cx| {
4298 p.find_or_create_local_worktree("/the-dir", true, cx)
4299 })
4300 .await
4301 .unwrap()
4302 .0
4303 .read_with(cx, |tree, _| tree.id());
4304
4305 // Spawn multiple tasks to open paths, repeating some paths.
4306 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4307 (
4308 p.open_buffer((worktree_id, "a.txt"), cx),
4309 p.open_buffer((worktree_id, "b.txt"), cx),
4310 p.open_buffer((worktree_id, "a.txt"), cx),
4311 )
4312 });
4313
4314 let buffer_a_1 = buffer_a_1.await.unwrap();
4315 let buffer_a_2 = buffer_a_2.await.unwrap();
4316 let buffer_b = buffer_b.await.unwrap();
4317 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4318 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4319
4320 // There is only one buffer per path.
4321 let buffer_a_id = buffer_a_1.id();
4322 assert_eq!(buffer_a_2.id(), buffer_a_id);
4323
4324 // Open the same path again while it is still open.
4325 drop(buffer_a_1);
4326 let buffer_a_3 = project
4327 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4328 .await
4329 .unwrap();
4330
4331 // There's still only one buffer per path.
4332 assert_eq!(buffer_a_3.id(), buffer_a_id);
4333 }
4334
4335 #[gpui::test]
4336 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4337 use std::fs;
4338
4339 let dir = temp_tree(json!({
4340 "file1": "abc",
4341 "file2": "def",
4342 "file3": "ghi",
4343 }));
4344
4345 let project = Project::test(Arc::new(RealFs), cx);
4346 let (worktree, _) = project
4347 .update(cx, |p, cx| {
4348 p.find_or_create_local_worktree(dir.path(), true, cx)
4349 })
4350 .await
4351 .unwrap();
4352 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4353
4354 worktree.flush_fs_events(&cx).await;
4355 worktree
4356 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4357 .await;
4358
4359 let buffer1 = project
4360 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4361 .await
4362 .unwrap();
4363 let events = Rc::new(RefCell::new(Vec::new()));
4364
4365 // initially, the buffer isn't dirty.
4366 buffer1.update(cx, |buffer, cx| {
4367 cx.subscribe(&buffer1, {
4368 let events = events.clone();
4369 move |_, _, event, _| events.borrow_mut().push(event.clone())
4370 })
4371 .detach();
4372
4373 assert!(!buffer.is_dirty());
4374 assert!(events.borrow().is_empty());
4375
4376 buffer.edit(vec![1..2], "", cx);
4377 });
4378
4379 // after the first edit, the buffer is dirty, and emits a dirtied event.
4380 buffer1.update(cx, |buffer, cx| {
4381 assert!(buffer.text() == "ac");
4382 assert!(buffer.is_dirty());
4383 assert_eq!(
4384 *events.borrow(),
4385 &[language::Event::Edited, language::Event::Dirtied]
4386 );
4387 events.borrow_mut().clear();
4388 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4389 });
4390
4391 // after saving, the buffer is not dirty, and emits a saved event.
4392 buffer1.update(cx, |buffer, cx| {
4393 assert!(!buffer.is_dirty());
4394 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4395 events.borrow_mut().clear();
4396
4397 buffer.edit(vec![1..1], "B", cx);
4398 buffer.edit(vec![2..2], "D", cx);
4399 });
4400
4401 // after editing again, the buffer is dirty, and emits another dirty event.
4402 buffer1.update(cx, |buffer, cx| {
4403 assert!(buffer.text() == "aBDc");
4404 assert!(buffer.is_dirty());
4405 assert_eq!(
4406 *events.borrow(),
4407 &[
4408 language::Event::Edited,
4409 language::Event::Dirtied,
4410 language::Event::Edited,
4411 ],
4412 );
4413 events.borrow_mut().clear();
4414
4415 // TODO - currently, after restoring the buffer to its
4416 // previously-saved state, the is still considered dirty.
4417 buffer.edit([1..3], "", cx);
4418 assert!(buffer.text() == "ac");
4419 assert!(buffer.is_dirty());
4420 });
4421
4422 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4423
4424 // When a file is deleted, the buffer is considered dirty.
4425 let events = Rc::new(RefCell::new(Vec::new()));
4426 let buffer2 = project
4427 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4428 .await
4429 .unwrap();
4430 buffer2.update(cx, |_, cx| {
4431 cx.subscribe(&buffer2, {
4432 let events = events.clone();
4433 move |_, _, event, _| events.borrow_mut().push(event.clone())
4434 })
4435 .detach();
4436 });
4437
4438 fs::remove_file(dir.path().join("file2")).unwrap();
4439 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4440 assert_eq!(
4441 *events.borrow(),
4442 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4443 );
4444
4445 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4446 let events = Rc::new(RefCell::new(Vec::new()));
4447 let buffer3 = project
4448 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4449 .await
4450 .unwrap();
4451 buffer3.update(cx, |_, cx| {
4452 cx.subscribe(&buffer3, {
4453 let events = events.clone();
4454 move |_, _, event, _| events.borrow_mut().push(event.clone())
4455 })
4456 .detach();
4457 });
4458
4459 worktree.flush_fs_events(&cx).await;
4460 buffer3.update(cx, |buffer, cx| {
4461 buffer.edit(Some(0..0), "x", cx);
4462 });
4463 events.borrow_mut().clear();
4464 fs::remove_file(dir.path().join("file3")).unwrap();
4465 buffer3
4466 .condition(&cx, |_, _| !events.borrow().is_empty())
4467 .await;
4468 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4469 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4470 }
4471
4472 #[gpui::test]
4473 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4474 use std::fs;
4475
4476 let initial_contents = "aaa\nbbbbb\nc\n";
4477 let dir = temp_tree(json!({ "the-file": initial_contents }));
4478
4479 let project = Project::test(Arc::new(RealFs), cx);
4480 let (worktree, _) = project
4481 .update(cx, |p, cx| {
4482 p.find_or_create_local_worktree(dir.path(), true, cx)
4483 })
4484 .await
4485 .unwrap();
4486 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4487
4488 worktree
4489 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4490 .await;
4491
4492 let abs_path = dir.path().join("the-file");
4493 let buffer = project
4494 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4495 .await
4496 .unwrap();
4497
4498 // TODO
4499 // Add a cursor on each row.
4500 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4501 // assert!(!buffer.is_dirty());
4502 // buffer.add_selection_set(
4503 // &(0..3)
4504 // .map(|row| Selection {
4505 // id: row as usize,
4506 // start: Point::new(row, 1),
4507 // end: Point::new(row, 1),
4508 // reversed: false,
4509 // goal: SelectionGoal::None,
4510 // })
4511 // .collect::<Vec<_>>(),
4512 // cx,
4513 // )
4514 // });
4515
4516 // Change the file on disk, adding two new lines of text, and removing
4517 // one line.
4518 buffer.read_with(cx, |buffer, _| {
4519 assert!(!buffer.is_dirty());
4520 assert!(!buffer.has_conflict());
4521 });
4522 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4523 fs::write(&abs_path, new_contents).unwrap();
4524
4525 // Because the buffer was not modified, it is reloaded from disk. Its
4526 // contents are edited according to the diff between the old and new
4527 // file contents.
4528 buffer
4529 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4530 .await;
4531
4532 buffer.update(cx, |buffer, _| {
4533 assert_eq!(buffer.text(), new_contents);
4534 assert!(!buffer.is_dirty());
4535 assert!(!buffer.has_conflict());
4536
4537 // TODO
4538 // let cursor_positions = buffer
4539 // .selection_set(selection_set_id)
4540 // .unwrap()
4541 // .selections::<Point>(&*buffer)
4542 // .map(|selection| {
4543 // assert_eq!(selection.start, selection.end);
4544 // selection.start
4545 // })
4546 // .collect::<Vec<_>>();
4547 // assert_eq!(
4548 // cursor_positions,
4549 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4550 // );
4551 });
4552
4553 // Modify the buffer
4554 buffer.update(cx, |buffer, cx| {
4555 buffer.edit(vec![0..0], " ", cx);
4556 assert!(buffer.is_dirty());
4557 assert!(!buffer.has_conflict());
4558 });
4559
4560 // Change the file on disk again, adding blank lines to the beginning.
4561 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4562
4563 // Because the buffer is modified, it doesn't reload from disk, but is
4564 // marked as having a conflict.
4565 buffer
4566 .condition(&cx, |buffer, _| buffer.has_conflict())
4567 .await;
4568 }
4569
4570 #[gpui::test]
4571 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4572 let fs = FakeFs::new(cx.background());
4573 fs.insert_tree(
4574 "/the-dir",
4575 json!({
4576 "a.rs": "
4577 fn foo(mut v: Vec<usize>) {
4578 for x in &v {
4579 v.push(1);
4580 }
4581 }
4582 "
4583 .unindent(),
4584 }),
4585 )
4586 .await;
4587
4588 let project = Project::test(fs.clone(), cx);
4589 let (worktree, _) = project
4590 .update(cx, |p, cx| {
4591 p.find_or_create_local_worktree("/the-dir", true, cx)
4592 })
4593 .await
4594 .unwrap();
4595 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4596
4597 let buffer = project
4598 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4599 .await
4600 .unwrap();
4601
4602 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4603 let message = lsp::PublishDiagnosticsParams {
4604 uri: buffer_uri.clone(),
4605 diagnostics: vec![
4606 lsp::Diagnostic {
4607 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4608 severity: Some(DiagnosticSeverity::WARNING),
4609 message: "error 1".to_string(),
4610 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4611 location: lsp::Location {
4612 uri: buffer_uri.clone(),
4613 range: lsp::Range::new(
4614 lsp::Position::new(1, 8),
4615 lsp::Position::new(1, 9),
4616 ),
4617 },
4618 message: "error 1 hint 1".to_string(),
4619 }]),
4620 ..Default::default()
4621 },
4622 lsp::Diagnostic {
4623 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4624 severity: Some(DiagnosticSeverity::HINT),
4625 message: "error 1 hint 1".to_string(),
4626 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4627 location: lsp::Location {
4628 uri: buffer_uri.clone(),
4629 range: lsp::Range::new(
4630 lsp::Position::new(1, 8),
4631 lsp::Position::new(1, 9),
4632 ),
4633 },
4634 message: "original diagnostic".to_string(),
4635 }]),
4636 ..Default::default()
4637 },
4638 lsp::Diagnostic {
4639 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4640 severity: Some(DiagnosticSeverity::ERROR),
4641 message: "error 2".to_string(),
4642 related_information: Some(vec![
4643 lsp::DiagnosticRelatedInformation {
4644 location: lsp::Location {
4645 uri: buffer_uri.clone(),
4646 range: lsp::Range::new(
4647 lsp::Position::new(1, 13),
4648 lsp::Position::new(1, 15),
4649 ),
4650 },
4651 message: "error 2 hint 1".to_string(),
4652 },
4653 lsp::DiagnosticRelatedInformation {
4654 location: lsp::Location {
4655 uri: buffer_uri.clone(),
4656 range: lsp::Range::new(
4657 lsp::Position::new(1, 13),
4658 lsp::Position::new(1, 15),
4659 ),
4660 },
4661 message: "error 2 hint 2".to_string(),
4662 },
4663 ]),
4664 ..Default::default()
4665 },
4666 lsp::Diagnostic {
4667 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4668 severity: Some(DiagnosticSeverity::HINT),
4669 message: "error 2 hint 1".to_string(),
4670 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4671 location: lsp::Location {
4672 uri: buffer_uri.clone(),
4673 range: lsp::Range::new(
4674 lsp::Position::new(2, 8),
4675 lsp::Position::new(2, 17),
4676 ),
4677 },
4678 message: "original diagnostic".to_string(),
4679 }]),
4680 ..Default::default()
4681 },
4682 lsp::Diagnostic {
4683 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4684 severity: Some(DiagnosticSeverity::HINT),
4685 message: "error 2 hint 2".to_string(),
4686 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4687 location: lsp::Location {
4688 uri: buffer_uri.clone(),
4689 range: lsp::Range::new(
4690 lsp::Position::new(2, 8),
4691 lsp::Position::new(2, 17),
4692 ),
4693 },
4694 message: "original diagnostic".to_string(),
4695 }]),
4696 ..Default::default()
4697 },
4698 ],
4699 version: None,
4700 };
4701
4702 project
4703 .update(cx, |p, cx| {
4704 p.update_diagnostics(message, &Default::default(), cx)
4705 })
4706 .unwrap();
4707 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4708
4709 assert_eq!(
4710 buffer
4711 .diagnostics_in_range::<_, Point>(0..buffer.len())
4712 .collect::<Vec<_>>(),
4713 &[
4714 DiagnosticEntry {
4715 range: Point::new(1, 8)..Point::new(1, 9),
4716 diagnostic: Diagnostic {
4717 severity: DiagnosticSeverity::WARNING,
4718 message: "error 1".to_string(),
4719 group_id: 0,
4720 is_primary: true,
4721 ..Default::default()
4722 }
4723 },
4724 DiagnosticEntry {
4725 range: Point::new(1, 8)..Point::new(1, 9),
4726 diagnostic: Diagnostic {
4727 severity: DiagnosticSeverity::HINT,
4728 message: "error 1 hint 1".to_string(),
4729 group_id: 0,
4730 is_primary: false,
4731 ..Default::default()
4732 }
4733 },
4734 DiagnosticEntry {
4735 range: Point::new(1, 13)..Point::new(1, 15),
4736 diagnostic: Diagnostic {
4737 severity: DiagnosticSeverity::HINT,
4738 message: "error 2 hint 1".to_string(),
4739 group_id: 1,
4740 is_primary: false,
4741 ..Default::default()
4742 }
4743 },
4744 DiagnosticEntry {
4745 range: Point::new(1, 13)..Point::new(1, 15),
4746 diagnostic: Diagnostic {
4747 severity: DiagnosticSeverity::HINT,
4748 message: "error 2 hint 2".to_string(),
4749 group_id: 1,
4750 is_primary: false,
4751 ..Default::default()
4752 }
4753 },
4754 DiagnosticEntry {
4755 range: Point::new(2, 8)..Point::new(2, 17),
4756 diagnostic: Diagnostic {
4757 severity: DiagnosticSeverity::ERROR,
4758 message: "error 2".to_string(),
4759 group_id: 1,
4760 is_primary: true,
4761 ..Default::default()
4762 }
4763 }
4764 ]
4765 );
4766
4767 assert_eq!(
4768 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4769 &[
4770 DiagnosticEntry {
4771 range: Point::new(1, 8)..Point::new(1, 9),
4772 diagnostic: Diagnostic {
4773 severity: DiagnosticSeverity::WARNING,
4774 message: "error 1".to_string(),
4775 group_id: 0,
4776 is_primary: true,
4777 ..Default::default()
4778 }
4779 },
4780 DiagnosticEntry {
4781 range: Point::new(1, 8)..Point::new(1, 9),
4782 diagnostic: Diagnostic {
4783 severity: DiagnosticSeverity::HINT,
4784 message: "error 1 hint 1".to_string(),
4785 group_id: 0,
4786 is_primary: false,
4787 ..Default::default()
4788 }
4789 },
4790 ]
4791 );
4792 assert_eq!(
4793 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4794 &[
4795 DiagnosticEntry {
4796 range: Point::new(1, 13)..Point::new(1, 15),
4797 diagnostic: Diagnostic {
4798 severity: DiagnosticSeverity::HINT,
4799 message: "error 2 hint 1".to_string(),
4800 group_id: 1,
4801 is_primary: false,
4802 ..Default::default()
4803 }
4804 },
4805 DiagnosticEntry {
4806 range: Point::new(1, 13)..Point::new(1, 15),
4807 diagnostic: Diagnostic {
4808 severity: DiagnosticSeverity::HINT,
4809 message: "error 2 hint 2".to_string(),
4810 group_id: 1,
4811 is_primary: false,
4812 ..Default::default()
4813 }
4814 },
4815 DiagnosticEntry {
4816 range: Point::new(2, 8)..Point::new(2, 17),
4817 diagnostic: Diagnostic {
4818 severity: DiagnosticSeverity::ERROR,
4819 message: "error 2".to_string(),
4820 group_id: 1,
4821 is_primary: true,
4822 ..Default::default()
4823 }
4824 }
4825 ]
4826 );
4827 }
4828
4829 #[gpui::test]
4830 async fn test_rename(cx: &mut gpui::TestAppContext) {
4831 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4832 let language = Arc::new(Language::new(
4833 LanguageConfig {
4834 name: "Rust".into(),
4835 path_suffixes: vec!["rs".to_string()],
4836 language_server: Some(language_server_config),
4837 ..Default::default()
4838 },
4839 Some(tree_sitter_rust::language()),
4840 ));
4841
4842 let fs = FakeFs::new(cx.background());
4843 fs.insert_tree(
4844 "/dir",
4845 json!({
4846 "one.rs": "const ONE: usize = 1;",
4847 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4848 }),
4849 )
4850 .await;
4851
4852 let project = Project::test(fs.clone(), cx);
4853 project.update(cx, |project, _| {
4854 Arc::get_mut(&mut project.languages).unwrap().add(language);
4855 });
4856
4857 let (tree, _) = project
4858 .update(cx, |project, cx| {
4859 project.find_or_create_local_worktree("/dir", true, cx)
4860 })
4861 .await
4862 .unwrap();
4863 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4864 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4865 .await;
4866
4867 let buffer = project
4868 .update(cx, |project, cx| {
4869 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4870 })
4871 .await
4872 .unwrap();
4873
4874 let mut fake_server = fake_servers.next().await.unwrap();
4875
4876 let response = project.update(cx, |project, cx| {
4877 project.prepare_rename(buffer.clone(), 7, cx)
4878 });
4879 fake_server
4880 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4881 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4882 assert_eq!(params.position, lsp::Position::new(0, 7));
4883 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4884 lsp::Position::new(0, 6),
4885 lsp::Position::new(0, 9),
4886 )))
4887 })
4888 .next()
4889 .await
4890 .unwrap();
4891 let range = response.await.unwrap().unwrap();
4892 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4893 assert_eq!(range, 6..9);
4894
4895 let response = project.update(cx, |project, cx| {
4896 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4897 });
4898 fake_server
4899 .handle_request::<lsp::request::Rename, _>(|params, _| {
4900 assert_eq!(
4901 params.text_document_position.text_document.uri.as_str(),
4902 "file:///dir/one.rs"
4903 );
4904 assert_eq!(
4905 params.text_document_position.position,
4906 lsp::Position::new(0, 7)
4907 );
4908 assert_eq!(params.new_name, "THREE");
4909 Some(lsp::WorkspaceEdit {
4910 changes: Some(
4911 [
4912 (
4913 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4914 vec![lsp::TextEdit::new(
4915 lsp::Range::new(
4916 lsp::Position::new(0, 6),
4917 lsp::Position::new(0, 9),
4918 ),
4919 "THREE".to_string(),
4920 )],
4921 ),
4922 (
4923 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4924 vec![
4925 lsp::TextEdit::new(
4926 lsp::Range::new(
4927 lsp::Position::new(0, 24),
4928 lsp::Position::new(0, 27),
4929 ),
4930 "THREE".to_string(),
4931 ),
4932 lsp::TextEdit::new(
4933 lsp::Range::new(
4934 lsp::Position::new(0, 35),
4935 lsp::Position::new(0, 38),
4936 ),
4937 "THREE".to_string(),
4938 ),
4939 ],
4940 ),
4941 ]
4942 .into_iter()
4943 .collect(),
4944 ),
4945 ..Default::default()
4946 })
4947 })
4948 .next()
4949 .await
4950 .unwrap();
4951 let mut transaction = response.await.unwrap().0;
4952 assert_eq!(transaction.len(), 2);
4953 assert_eq!(
4954 transaction
4955 .remove_entry(&buffer)
4956 .unwrap()
4957 .0
4958 .read_with(cx, |buffer, _| buffer.text()),
4959 "const THREE: usize = 1;"
4960 );
4961 assert_eq!(
4962 transaction
4963 .into_keys()
4964 .next()
4965 .unwrap()
4966 .read_with(cx, |buffer, _| buffer.text()),
4967 "const TWO: usize = one::THREE + one::THREE;"
4968 );
4969 }
4970
4971 #[gpui::test]
4972 async fn test_search(cx: &mut gpui::TestAppContext) {
4973 let fs = FakeFs::new(cx.background());
4974 fs.insert_tree(
4975 "/dir",
4976 json!({
4977 "one.rs": "const ONE: usize = 1;",
4978 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4979 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4980 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4981 }),
4982 )
4983 .await;
4984 let project = Project::test(fs.clone(), cx);
4985 let (tree, _) = project
4986 .update(cx, |project, cx| {
4987 project.find_or_create_local_worktree("/dir", true, cx)
4988 })
4989 .await
4990 .unwrap();
4991 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4992 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4993 .await;
4994
4995 assert_eq!(
4996 search(&project, SearchQuery::text("TWO", false, true), cx)
4997 .await
4998 .unwrap(),
4999 HashMap::from_iter([
5000 ("two.rs".to_string(), vec![6..9]),
5001 ("three.rs".to_string(), vec![37..40])
5002 ])
5003 );
5004
5005 let buffer_4 = project
5006 .update(cx, |project, cx| {
5007 project.open_buffer((worktree_id, "four.rs"), cx)
5008 })
5009 .await
5010 .unwrap();
5011 buffer_4.update(cx, |buffer, cx| {
5012 buffer.edit([20..28, 31..43], "two::TWO", cx);
5013 });
5014
5015 assert_eq!(
5016 search(&project, SearchQuery::text("TWO", false, true), cx)
5017 .await
5018 .unwrap(),
5019 HashMap::from_iter([
5020 ("two.rs".to_string(), vec![6..9]),
5021 ("three.rs".to_string(), vec![37..40]),
5022 ("four.rs".to_string(), vec![25..28, 36..39])
5023 ])
5024 );
5025
5026 async fn search(
5027 project: &ModelHandle<Project>,
5028 query: SearchQuery,
5029 cx: &mut gpui::TestAppContext,
5030 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5031 let results = project
5032 .update(cx, |project, cx| project.search(query, cx))
5033 .await?;
5034
5035 Ok(results
5036 .into_iter()
5037 .map(|(buffer, ranges)| {
5038 buffer.read_with(cx, |buffer, _| {
5039 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5040 let ranges = ranges
5041 .into_iter()
5042 .map(|range| range.to_offset(buffer))
5043 .collect::<Vec<_>>();
5044 (path, ranges)
5045 })
5046 })
5047 .collect())
5048 }
5049 }
5050}