pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    proto::{deserialize_anchor, serialize_anchor},
    range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use smol::block_on;
use std::{
    cell::RefCell,
    cmp,
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

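/// Top-level state for a single open project: the worktrees being scanned, the
/// buffers that are currently open, any running language servers, and, when the
/// project is shared or joined remotely, the collaborators and RPC subscriptions
/// that keep replicas in sync.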
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    started_language_servers:
        HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    nonce: u128,
}

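/// How the project holds on to an open buffer. Buffers are held strongly while the
/// project is shared or remote, weakly otherwise, and `Loading` accumulates
/// operations that arrive before the buffer itself has finished opening.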
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

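/// Whether this project is the local, authoritative copy or a remote replica, plus
/// the per-mode state: the shared flag and the watch channel publishing the remote
/// id for a local project, or the server-assigned remote id and replica id for a
/// joined one.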
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

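/// Events emitted to observers of the project: active-entry and worktree changes,
/// the lifecycle of disk-based diagnostics, and per-path diagnostic updates.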
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

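/// The buffer transactions produced by a project-wide operation such as a format
/// or a workspace edit, keyed by the buffer each transaction applies to.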
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                opened_buffers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(buffer) => {
                                *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                            }
                            _ => {}
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(worktree) => {
                                if !worktree.read(cx).is_visible() {
                                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                                }
                            }
                            _ => {}
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

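    /// Open the buffer at the given project path, returning an existing buffer when
    /// the path is already open and deduplicating concurrent loads of the same path
    /// through the `loading_buffers` map.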
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

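    /// Track a newly opened buffer: apply any operations that arrived while it was
    /// still loading, reject duplicate registrations, and assign a language (and
    /// language server) based on the buffer's path.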
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }

    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();
                let buffer = buffer.downgrade();
                let language_server =
                    self.start_language_server(worktree_id, worktree_abs_path, language, cx);

                cx.spawn_weak(|_, mut cx| async move {
                    if let Some(language_server) = language_server.await {
                        if let Some(buffer) = buffer.upgrade(&cx) {
                            buffer.update(&mut cx, |buffer, cx| {
                                buffer.set_language_server(Some(language_server), cx);
                            });
                        }
                    }
                })
                .detach();
            }
        }

        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }

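    /// Start a language server for the given worktree and language, or return the
    /// one that is already starting, and spawn a task that turns its diagnostic and
    /// progress notifications into project-level diagnostic updates.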
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let key = (worktree_id, language.name().to_string());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let language_server = self.languages.start_language_server(
                    &language,
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                let rpc = self.client.clone();
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    if let Some(this) = this.upgrade(&cx) {
                        this.update(&mut cx, |this, _| {
                            this.language_servers.insert(key, language_server.clone());
                        });
                    }

                    let disk_based_sources = language
                        .disk_based_diagnostic_sources()
                        .cloned()
                        .unwrap_or_default();
                    let disk_based_diagnostics_progress_token =
                        language.disk_based_diagnostics_progress_token().cloned();
                    let has_disk_based_diagnostic_progress_token =
                        disk_based_diagnostics_progress_token.is_some();
                    let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

                    // Listen for `PublishDiagnostics` notifications.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let diagnostics_tx = diagnostics_tx.clone();
                            move |params| {
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                                block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
                                    .ok();
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                        })
                        .detach();

                    // Listen for `Progress` notifications. Send an event when the language server
                    // transitions between running jobs and not running any jobs.
                    let mut running_jobs_for_this_server: i32 = 0;
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::Number(_) => None,
                                lsp::NumberOrString::String(token) => Some(token),
                            };

                            if token == disk_based_diagnostics_progress_token {
                                match params.value {
                                    lsp::ProgressParamsValue::WorkDone(progress) => {
                                        match progress {
                                            lsp::WorkDoneProgress::Begin(_) => {
                                                running_jobs_for_this_server += 1;
                                                if running_jobs_for_this_server == 1 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsStart),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            lsp::WorkDoneProgress::End(_) => {
                                                running_jobs_for_this_server -= 1;
                                                if running_jobs_for_this_server == 0 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsFinish),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            _ => {}
                                        }
                                    }
                                }
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| async move {
                        while let Ok(message) = diagnostics_rx.recv().await {
                            let this = this.upgrade(&cx)?;
                            match message {
                                LspEvent::DiagnosticsStart => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_started(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdating {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                                LspEvent::DiagnosticsUpdate(mut params) => {
                                    language.process_diagnostics(&mut params);
                                    this.update(&mut cx, |this, cx| {
                                        this.update_diagnostics(params, &disk_based_sources, cx)
                                            .log_err();
                                    });
                                }
                                LspEvent::DiagnosticsFinish => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdated {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                            }
                        }
                        Some(())
                    })
                    .detach();

                    Some(language_server)
                })
                .shared()
            })
            .clone()
    }

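    /// Translate an LSP `PublishDiagnostics` notification into diagnostic entries,
    /// grouping related information with its primary diagnostic and flagging entries
    /// whose source is one of the language's disk-based diagnostic sources.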
1110 pub fn update_diagnostics(
1111 &mut self,
1112 params: lsp::PublishDiagnosticsParams,
1113 disk_based_sources: &HashSet<String>,
1114 cx: &mut ModelContext<Self>,
1115 ) -> Result<()> {
1116 let abs_path = params
1117 .uri
1118 .to_file_path()
1119 .map_err(|_| anyhow!("URI is not a file"))?;
1120 let mut next_group_id = 0;
1121 let mut diagnostics = Vec::default();
1122 let mut primary_diagnostic_group_ids = HashMap::default();
1123 let mut sources_by_group_id = HashMap::default();
1124 let mut supporting_diagnostic_severities = HashMap::default();
1125 for diagnostic in ¶ms.diagnostics {
1126 let source = diagnostic.source.as_ref();
1127 let code = diagnostic.code.as_ref().map(|code| match code {
1128 lsp::NumberOrString::Number(code) => code.to_string(),
1129 lsp::NumberOrString::String(code) => code.clone(),
1130 });
1131 let range = range_from_lsp(diagnostic.range);
1132 let is_supporting = diagnostic
1133 .related_information
1134 .as_ref()
1135 .map_or(false, |infos| {
1136 infos.iter().any(|info| {
1137 primary_diagnostic_group_ids.contains_key(&(
1138 source,
1139 code.clone(),
1140 range_from_lsp(info.location.range),
1141 ))
1142 })
1143 });
1144
1145 if is_supporting {
1146 if let Some(severity) = diagnostic.severity {
1147 supporting_diagnostic_severities
1148 .insert((source, code.clone(), range), severity);
1149 }
1150 } else {
1151 let group_id = post_inc(&mut next_group_id);
1152 let is_disk_based =
1153 source.map_or(false, |source| disk_based_sources.contains(source));
1154
1155 sources_by_group_id.insert(group_id, source);
1156 primary_diagnostic_group_ids
1157 .insert((source, code.clone(), range.clone()), group_id);
1158
1159 diagnostics.push(DiagnosticEntry {
1160 range,
1161 diagnostic: Diagnostic {
1162 code: code.clone(),
1163 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1164 message: diagnostic.message.clone(),
1165 group_id,
1166 is_primary: true,
1167 is_valid: true,
1168 is_disk_based,
1169 },
1170 });
1171 if let Some(infos) = &diagnostic.related_information {
1172 for info in infos {
1173 if info.location.uri == params.uri && !info.message.is_empty() {
1174 let range = range_from_lsp(info.location.range);
1175 diagnostics.push(DiagnosticEntry {
1176 range,
1177 diagnostic: Diagnostic {
1178 code: code.clone(),
1179 severity: DiagnosticSeverity::INFORMATION,
1180 message: info.message.clone(),
1181 group_id,
1182 is_primary: false,
1183 is_valid: true,
1184 is_disk_based,
1185 },
1186 });
1187 }
1188 }
1189 }
1190 }
1191 }
1192
1193 for entry in &mut diagnostics {
1194 let diagnostic = &mut entry.diagnostic;
1195 if !diagnostic.is_primary {
1196 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1197 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1198 source,
1199 diagnostic.code.clone(),
1200 entry.range.clone(),
1201 )) {
1202 diagnostic.severity = severity;
1203 }
1204 }
1205 }
1206
1207 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1208 Ok(())
1209 }
1210
1211 pub fn update_diagnostic_entries(
1212 &mut self,
1213 abs_path: PathBuf,
1214 version: Option<i32>,
1215 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1216 cx: &mut ModelContext<Project>,
1217 ) -> Result<(), anyhow::Error> {
1218 let (worktree, relative_path) = self
1219 .find_local_worktree(&abs_path, cx)
1220 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1221 if !worktree.read(cx).is_visible() {
1222 return Ok(());
1223 }
1224
1225 let project_path = ProjectPath {
1226 worktree_id: worktree.read(cx).id(),
1227 path: relative_path.into(),
1228 };
1229
1230 for buffer in self.opened_buffers.values() {
1231 if let Some(buffer) = buffer.upgrade(cx) {
1232 if buffer
1233 .read(cx)
1234 .file()
1235 .map_or(false, |file| *file.path() == project_path.path)
1236 {
1237 buffer.update(cx, |buffer, cx| {
1238 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1239 })?;
1240 break;
1241 }
1242 }
1243 }
1244 worktree.update(cx, |worktree, cx| {
1245 worktree
1246 .as_local_mut()
1247 .ok_or_else(|| anyhow!("not a local worktree"))?
1248 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1249 })?;
1250 cx.emit(Event::DiagnosticsUpdated(project_path));
1251 Ok(())
1252 }
1253
1254 pub fn format(
1255 &self,
1256 buffers: HashSet<ModelHandle<Buffer>>,
1257 push_to_history: bool,
1258 cx: &mut ModelContext<Project>,
1259 ) -> Task<Result<ProjectTransaction>> {
1260 let mut local_buffers = Vec::new();
1261 let mut remote_buffers = None;
1262 for buffer_handle in buffers {
1263 let buffer = buffer_handle.read(cx);
1264 let worktree;
1265 if let Some(file) = File::from_dyn(buffer.file()) {
1266 worktree = file.worktree.clone();
1267 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1268 let lang_server;
1269 if let Some(lang) = buffer.language() {
1270 if let Some(server) = self
1271 .language_servers
1272 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1273 {
1274 lang_server = server.clone();
1275 } else {
1276 return Task::ready(Ok(Default::default()));
1277 };
1278 } else {
1279 return Task::ready(Ok(Default::default()));
1280 }
1281
1282 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1283 } else {
1284 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1285 }
1286 } else {
1287 return Task::ready(Ok(Default::default()));
1288 }
1289 }
1290
1291 let remote_buffers = self.remote_id().zip(remote_buffers);
1292 let client = self.client.clone();
1293
1294 cx.spawn(|this, mut cx| async move {
1295 let mut project_transaction = ProjectTransaction::default();
1296
1297 if let Some((project_id, remote_buffers)) = remote_buffers {
1298 let response = client
1299 .request(proto::FormatBuffers {
1300 project_id,
1301 buffer_ids: remote_buffers
1302 .iter()
1303 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1304 .collect(),
1305 })
1306 .await?
1307 .transaction
1308 .ok_or_else(|| anyhow!("missing transaction"))?;
1309 project_transaction = this
1310 .update(&mut cx, |this, cx| {
1311 this.deserialize_project_transaction(response, push_to_history, cx)
1312 })
1313 .await?;
1314 }
1315
1316 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1317 let lsp_edits = lang_server
1318 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1319 text_document: lsp::TextDocumentIdentifier::new(
1320 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1321 ),
1322 options: Default::default(),
1323 work_done_progress_params: Default::default(),
1324 })
1325 .await?;
1326
1327 if let Some(lsp_edits) = lsp_edits {
1328 let edits = buffer
1329 .update(&mut cx, |buffer, cx| {
1330 buffer.edits_from_lsp(lsp_edits, None, cx)
1331 })
1332 .await?;
1333 buffer.update(&mut cx, |buffer, cx| {
1334 buffer.finalize_last_transaction();
1335 buffer.start_transaction();
1336 for (range, text) in edits {
1337 buffer.edit([range], text, cx);
1338 }
1339 if buffer.end_transaction(cx).is_some() {
1340 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1341 if !push_to_history {
1342 buffer.forget_transaction(transaction.id);
1343 }
1344 project_transaction.0.insert(cx.handle(), transaction);
1345 }
1346 });
1347 }
1348 }
1349
1350 Ok(project_transaction)
1351 })
1352 }
1353
1354 pub fn definition<T: ToPointUtf16>(
1355 &self,
1356 buffer: &ModelHandle<Buffer>,
1357 position: T,
1358 cx: &mut ModelContext<Self>,
1359 ) -> Task<Result<Vec<Location>>> {
1360 let position = position.to_point_utf16(buffer.read(cx));
1361 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1362 }
1363
1364 pub fn references<T: ToPointUtf16>(
1365 &self,
1366 buffer: &ModelHandle<Buffer>,
1367 position: T,
1368 cx: &mut ModelContext<Self>,
1369 ) -> Task<Result<Vec<Location>>> {
1370 let position = position.to_point_utf16(buffer.read(cx));
1371 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1372 }
1373
1374 pub fn document_highlights<T: ToPointUtf16>(
1375 &self,
1376 buffer: &ModelHandle<Buffer>,
1377 position: T,
1378 cx: &mut ModelContext<Self>,
1379 ) -> Task<Result<Vec<DocumentHighlight>>> {
1380 let position = position.to_point_utf16(buffer.read(cx));
1381
1382 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1383 }
1384
1385 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1386 if self.is_local() {
1387 let mut language_servers = HashMap::default();
1388 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1389 if let Some((worktree, language)) = self
1390 .worktree_for_id(*worktree_id, cx)
1391 .and_then(|worktree| worktree.read(cx).as_local())
1392 .zip(self.languages.get_language(language_name))
1393 {
1394 language_servers
1395 .entry(Arc::as_ptr(language_server))
1396 .or_insert((
1397 language_server.clone(),
1398 *worktree_id,
1399 worktree.abs_path().clone(),
1400 language.clone(),
1401 ));
1402 }
1403 }
1404
1405 let mut requests = Vec::new();
1406 for (language_server, _, _, _) in language_servers.values() {
1407 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1408 lsp::WorkspaceSymbolParams {
1409 query: query.to_string(),
1410 ..Default::default()
1411 },
1412 ));
1413 }
1414
1415 cx.spawn_weak(|this, cx| async move {
1416 let responses = futures::future::try_join_all(requests).await?;
1417
1418 let mut symbols = Vec::new();
1419 if let Some(this) = this.upgrade(&cx) {
1420 this.read_with(&cx, |this, cx| {
1421 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1422 language_servers.into_values().zip(responses)
1423 {
1424 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1425 |lsp_symbol| {
1426 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1427 let mut worktree_id = source_worktree_id;
1428 let path;
1429 if let Some((worktree, rel_path)) =
1430 this.find_local_worktree(&abs_path, cx)
1431 {
1432 worktree_id = worktree.read(cx).id();
1433 path = rel_path;
1434 } else {
1435 path = relativize_path(&worktree_abs_path, &abs_path);
1436 }
1437
1438 let label = language
1439 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1440 .unwrap_or_else(|| {
1441 CodeLabel::plain(lsp_symbol.name.clone(), None)
1442 });
1443 let signature = this.symbol_signature(worktree_id, &path);
1444
1445 Some(Symbol {
1446 source_worktree_id,
1447 worktree_id,
1448 language_name: language.name().to_string(),
1449 name: lsp_symbol.name,
1450 kind: lsp_symbol.kind,
1451 label,
1452 path,
1453 range: range_from_lsp(lsp_symbol.location.range),
1454 signature,
1455 })
1456 },
1457 ));
1458 }
1459 })
1460 }
1461
1462 Ok(symbols)
1463 })
1464 } else if let Some(project_id) = self.remote_id() {
1465 let request = self.client.request(proto::GetProjectSymbols {
1466 project_id,
1467 query: query.to_string(),
1468 });
1469 cx.spawn_weak(|this, cx| async move {
1470 let response = request.await?;
1471 let mut symbols = Vec::new();
1472 if let Some(this) = this.upgrade(&cx) {
1473 this.read_with(&cx, |this, _| {
1474 symbols.extend(
1475 response
1476 .symbols
1477 .into_iter()
1478 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1479 );
1480 })
1481 }
1482 Ok(symbols)
1483 })
1484 } else {
1485 Task::ready(Ok(Default::default()))
1486 }
1487 }
1488
1489 pub fn open_buffer_for_symbol(
1490 &mut self,
1491 symbol: &Symbol,
1492 cx: &mut ModelContext<Self>,
1493 ) -> Task<Result<ModelHandle<Buffer>>> {
1494 if self.is_local() {
1495 let language_server = if let Some(server) = self
1496 .language_servers
1497 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1498 {
1499 server.clone()
1500 } else {
1501 return Task::ready(Err(anyhow!(
1502 "language server for worktree and language not found"
1503 )));
1504 };
1505
1506 let worktree_abs_path = if let Some(worktree_abs_path) = self
1507 .worktree_for_id(symbol.worktree_id, cx)
1508 .and_then(|worktree| worktree.read(cx).as_local())
1509 .map(|local_worktree| local_worktree.abs_path())
1510 {
1511 worktree_abs_path
1512 } else {
1513 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1514 };
1515 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1516 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1517 uri
1518 } else {
1519 return Task::ready(Err(anyhow!("invalid symbol path")));
1520 };
1521
1522 self.open_local_buffer_via_lsp(
1523 symbol_uri,
1524 symbol.language_name.clone(),
1525 language_server,
1526 cx,
1527 )
1528 } else if let Some(project_id) = self.remote_id() {
1529 let request = self.client.request(proto::OpenBufferForSymbol {
1530 project_id,
1531 symbol: Some(serialize_symbol(symbol)),
1532 });
1533 cx.spawn(|this, mut cx| async move {
1534 let response = request.await?;
1535 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1536 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1537 .await
1538 })
1539 } else {
1540 Task::ready(Err(anyhow!("project does not have a remote id")))
1541 }
1542 }
1543
1544 pub fn completions<T: ToPointUtf16>(
1545 &self,
1546 source_buffer_handle: &ModelHandle<Buffer>,
1547 position: T,
1548 cx: &mut ModelContext<Self>,
1549 ) -> Task<Result<Vec<Completion>>> {
1550 let source_buffer_handle = source_buffer_handle.clone();
1551 let source_buffer = source_buffer_handle.read(cx);
1552 let buffer_id = source_buffer.remote_id();
1553 let language = source_buffer.language().cloned();
1554 let worktree;
1555 let buffer_abs_path;
1556 if let Some(file) = File::from_dyn(source_buffer.file()) {
1557 worktree = file.worktree.clone();
1558 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1559 } else {
1560 return Task::ready(Ok(Default::default()));
1561 };
1562
1563 let position = position.to_point_utf16(source_buffer);
1564 let anchor = source_buffer.anchor_after(position);
1565
1566 if worktree.read(cx).as_local().is_some() {
1567 let buffer_abs_path = buffer_abs_path.unwrap();
1568 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1569 server
1570 } else {
1571 return Task::ready(Ok(Default::default()));
1572 };
1573
1574 cx.spawn(|_, cx| async move {
1575 let completions = lang_server
1576 .request::<lsp::request::Completion>(lsp::CompletionParams {
1577 text_document_position: lsp::TextDocumentPositionParams::new(
1578 lsp::TextDocumentIdentifier::new(
1579 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1580 ),
1581 position.to_lsp_position(),
1582 ),
1583 context: Default::default(),
1584 work_done_progress_params: Default::default(),
1585 partial_result_params: Default::default(),
1586 })
1587 .await
1588 .context("lsp completion request failed")?;
1589
1590 let completions = if let Some(completions) = completions {
1591 match completions {
1592 lsp::CompletionResponse::Array(completions) => completions,
1593 lsp::CompletionResponse::List(list) => list.items,
1594 }
1595 } else {
1596 Default::default()
1597 };
1598
1599 source_buffer_handle.read_with(&cx, |this, _| {
1600 Ok(completions
1601 .into_iter()
1602 .filter_map(|lsp_completion| {
1603 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1604 lsp::CompletionTextEdit::Edit(edit) => {
1605 (range_from_lsp(edit.range), edit.new_text.clone())
1606 }
1607 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1608 log::info!("unsupported insert/replace completion");
1609 return None;
1610 }
1611 };
1612
1613 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1614 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1615 if clipped_start == old_range.start && clipped_end == old_range.end {
1616 Some(Completion {
1617 old_range: this.anchor_before(old_range.start)
1618 ..this.anchor_after(old_range.end),
1619 new_text,
1620 label: language
1621 .as_ref()
1622 .and_then(|l| l.label_for_completion(&lsp_completion))
1623 .unwrap_or_else(|| {
1624 CodeLabel::plain(
1625 lsp_completion.label.clone(),
1626 lsp_completion.filter_text.as_deref(),
1627 )
1628 }),
1629 lsp_completion,
1630 })
1631 } else {
1632 None
1633 }
1634 })
1635 .collect())
1636 })
1637 })
1638 } else if let Some(project_id) = self.remote_id() {
1639 let rpc = self.client.clone();
1640 let message = proto::GetCompletions {
1641 project_id,
1642 buffer_id,
1643 position: Some(language::proto::serialize_anchor(&anchor)),
1644 version: (&source_buffer.version()).into(),
1645 };
1646 cx.spawn_weak(|_, mut cx| async move {
1647 let response = rpc.request(message).await?;
1648
1649 source_buffer_handle
1650 .update(&mut cx, |buffer, _| {
1651 buffer.wait_for_version(response.version.into())
1652 })
1653 .await;
1654
1655 response
1656 .completions
1657 .into_iter()
1658 .map(|completion| {
1659 language::proto::deserialize_completion(completion, language.as_ref())
1660 })
1661 .collect()
1662 })
1663 } else {
1664 Task::ready(Ok(Default::default()))
1665 }
1666 }
1667
1668 pub fn apply_additional_edits_for_completion(
1669 &self,
1670 buffer_handle: ModelHandle<Buffer>,
1671 completion: Completion,
1672 push_to_history: bool,
1673 cx: &mut ModelContext<Self>,
1674 ) -> Task<Result<Option<Transaction>>> {
1675 let buffer = buffer_handle.read(cx);
1676 let buffer_id = buffer.remote_id();
1677
1678 if self.is_local() {
1679 let lang_server = if let Some(language_server) = buffer.language_server() {
1680 language_server.clone()
1681 } else {
1682 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1683 };
1684
1685 cx.spawn(|_, mut cx| async move {
1686 let resolved_completion = lang_server
1687 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1688 .await?;
1689 if let Some(edits) = resolved_completion.additional_text_edits {
1690 let edits = buffer_handle
1691 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1692 .await?;
1693 buffer_handle.update(&mut cx, |buffer, cx| {
1694 buffer.finalize_last_transaction();
1695 buffer.start_transaction();
1696 for (range, text) in edits {
1697 buffer.edit([range], text, cx);
1698 }
1699 let transaction = if buffer.end_transaction(cx).is_some() {
1700 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1701 if !push_to_history {
1702 buffer.forget_transaction(transaction.id);
1703 }
1704 Some(transaction)
1705 } else {
1706 None
1707 };
1708 Ok(transaction)
1709 })
1710 } else {
1711 Ok(None)
1712 }
1713 })
1714 } else if let Some(project_id) = self.remote_id() {
1715 let client = self.client.clone();
1716 cx.spawn(|_, mut cx| async move {
1717 let response = client
1718 .request(proto::ApplyCompletionAdditionalEdits {
1719 project_id,
1720 buffer_id,
1721 completion: Some(language::proto::serialize_completion(&completion)),
1722 })
1723 .await?;
1724
1725 if let Some(transaction) = response.transaction {
1726 let transaction = language::proto::deserialize_transaction(transaction)?;
1727 buffer_handle
1728 .update(&mut cx, |buffer, _| {
1729 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1730 })
1731 .await;
1732 if push_to_history {
1733 buffer_handle.update(&mut cx, |buffer, _| {
1734 buffer.push_transaction(transaction.clone(), Instant::now());
1735 });
1736 }
1737 Ok(Some(transaction))
1738 } else {
1739 Ok(None)
1740 }
1741 })
1742 } else {
1743 Task::ready(Err(anyhow!("project does not have a remote id")))
1744 }
1745 }
1746
1747 pub fn code_actions<T: ToOffset>(
1748 &self,
1749 buffer_handle: &ModelHandle<Buffer>,
1750 range: Range<T>,
1751 cx: &mut ModelContext<Self>,
1752 ) -> Task<Result<Vec<CodeAction>>> {
1753 let buffer_handle = buffer_handle.clone();
1754 let buffer = buffer_handle.read(cx);
1755 let buffer_id = buffer.remote_id();
1756 let worktree;
1757 let buffer_abs_path;
1758 if let Some(file) = File::from_dyn(buffer.file()) {
1759 worktree = file.worktree.clone();
1760 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1761 } else {
1762 return Task::ready(Ok(Default::default()));
1763 };
1764 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1765
1766 if worktree.read(cx).as_local().is_some() {
1767 let buffer_abs_path = buffer_abs_path.unwrap();
1768 let lang_name;
1769 let lang_server;
1770 if let Some(lang) = buffer.language() {
1771 lang_name = lang.name().to_string();
1772 if let Some(server) = self
1773 .language_servers
1774 .get(&(worktree.read(cx).id(), lang_name.clone()))
1775 {
1776 lang_server = server.clone();
1777 } else {
1778 return Task::ready(Ok(Default::default()));
1779 };
1780 } else {
1781 return Task::ready(Ok(Default::default()));
1782 }
1783
1784 let lsp_range = lsp::Range::new(
1785 range.start.to_point_utf16(buffer).to_lsp_position(),
1786 range.end.to_point_utf16(buffer).to_lsp_position(),
1787 );
1788 cx.foreground().spawn(async move {
1789 if !lang_server
1790 .capabilities()
1791 .await
1792 .map_or(false, |capabilities| {
1793 capabilities.code_action_provider.is_some()
1794 })
1795 {
1796 return Ok(Default::default());
1797 }
1798
1799 Ok(lang_server
1800 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1801 text_document: lsp::TextDocumentIdentifier::new(
1802 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1803 ),
1804 range: lsp_range,
1805 work_done_progress_params: Default::default(),
1806 partial_result_params: Default::default(),
1807 context: lsp::CodeActionContext {
1808 diagnostics: Default::default(),
1809 only: Some(vec![
1810 lsp::CodeActionKind::QUICKFIX,
1811 lsp::CodeActionKind::REFACTOR,
1812 lsp::CodeActionKind::REFACTOR_EXTRACT,
1813 ]),
1814 },
1815 })
1816 .await?
1817 .unwrap_or_default()
1818 .into_iter()
1819 .filter_map(|entry| {
1820 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1821 Some(CodeAction {
1822 range: range.clone(),
1823 lsp_action,
1824 })
1825 } else {
1826 None
1827 }
1828 })
1829 .collect())
1830 })
1831 } else if let Some(project_id) = self.remote_id() {
1832 let rpc = self.client.clone();
1833 let version = buffer.version();
1834 cx.spawn_weak(|_, mut cx| async move {
1835 let response = rpc
1836 .request(proto::GetCodeActions {
1837 project_id,
1838 buffer_id,
1839 start: Some(language::proto::serialize_anchor(&range.start)),
1840 end: Some(language::proto::serialize_anchor(&range.end)),
1841 version: (&version).into(),
1842 })
1843 .await?;
1844
1845 buffer_handle
1846 .update(&mut cx, |buffer, _| {
1847 buffer.wait_for_version(response.version.into())
1848 })
1849 .await;
1850
1851 response
1852 .actions
1853 .into_iter()
1854 .map(language::proto::deserialize_code_action)
1855 .collect()
1856 })
1857 } else {
1858 Task::ready(Ok(Default::default()))
1859 }
1860 }
1861
1862 pub fn apply_code_action(
1863 &self,
1864 buffer_handle: ModelHandle<Buffer>,
1865 mut action: CodeAction,
1866 push_to_history: bool,
1867 cx: &mut ModelContext<Self>,
1868 ) -> Task<Result<ProjectTransaction>> {
1869 if self.is_local() {
1870 let buffer = buffer_handle.read(cx);
1871 let lang_name = if let Some(lang) = buffer.language() {
1872 lang.name().to_string()
1873 } else {
1874 return Task::ready(Ok(Default::default()));
1875 };
1876 let lang_server = if let Some(language_server) = buffer.language_server() {
1877 language_server.clone()
1878 } else {
1879 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1880 };
1881 let range = action.range.to_point_utf16(buffer);
1882
1883 cx.spawn(|this, mut cx| async move {
1884 if let Some(lsp_range) = action
1885 .lsp_action
1886 .data
1887 .as_mut()
1888 .and_then(|d| d.get_mut("codeActionParams"))
1889 .and_then(|d| d.get_mut("range"))
1890 {
1891 *lsp_range = serde_json::to_value(&lsp::Range::new(
1892 range.start.to_lsp_position(),
1893 range.end.to_lsp_position(),
1894 ))
1895 .unwrap();
1896 action.lsp_action = lang_server
1897 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1898 .await?;
1899 } else {
1900 let actions = this
1901 .update(&mut cx, |this, cx| {
1902 this.code_actions(&buffer_handle, action.range, cx)
1903 })
1904 .await?;
1905 action.lsp_action = actions
1906 .into_iter()
1907 .find(|a| a.lsp_action.title == action.lsp_action.title)
1908 .ok_or_else(|| anyhow!("code action is outdated"))?
1909 .lsp_action;
1910 }
1911
1912 if let Some(edit) = action.lsp_action.edit {
1913 Self::deserialize_workspace_edit(
1914 this,
1915 edit,
1916 push_to_history,
1917 lang_name,
1918 lang_server,
1919 &mut cx,
1920 )
1921 .await
1922 } else {
1923 Ok(ProjectTransaction::default())
1924 }
1925 })
1926 } else if let Some(project_id) = self.remote_id() {
1927 let client = self.client.clone();
1928 let request = proto::ApplyCodeAction {
1929 project_id,
1930 buffer_id: buffer_handle.read(cx).remote_id(),
1931 action: Some(language::proto::serialize_code_action(&action)),
1932 };
1933 cx.spawn(|this, mut cx| async move {
1934 let response = client
1935 .request(request)
1936 .await?
1937 .transaction
1938 .ok_or_else(|| anyhow!("missing transaction"))?;
1939 this.update(&mut cx, |this, cx| {
1940 this.deserialize_project_transaction(response, push_to_history, cx)
1941 })
1942 .await
1943 })
1944 } else {
1945 Task::ready(Err(anyhow!("project does not have a remote id")))
1946 }
1947 }
1948
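    /// Applies an LSP `WorkspaceEdit` to the local project: resource operations
    /// (create, rename, delete) go through the filesystem, while text edits are
    /// applied to their buffers, collecting the resulting transactions into a
    /// `ProjectTransaction`.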
1949 async fn deserialize_workspace_edit(
1950 this: ModelHandle<Self>,
1951 edit: lsp::WorkspaceEdit,
1952 push_to_history: bool,
1953 language_name: String,
1954 language_server: Arc<LanguageServer>,
1955 cx: &mut AsyncAppContext,
1956 ) -> Result<ProjectTransaction> {
1957 let fs = this.read_with(cx, |this, _| this.fs.clone());
1958 let mut operations = Vec::new();
1959 if let Some(document_changes) = edit.document_changes {
1960 match document_changes {
1961 lsp::DocumentChanges::Edits(edits) => {
1962 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1963 }
1964 lsp::DocumentChanges::Operations(ops) => operations = ops,
1965 }
1966 } else if let Some(changes) = edit.changes {
1967 operations.extend(changes.into_iter().map(|(uri, edits)| {
1968 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1969 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1970 uri,
1971 version: None,
1972 },
1973 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1974 })
1975 }));
1976 }
1977
1978 let mut project_transaction = ProjectTransaction::default();
1979 for operation in operations {
1980 match operation {
1981 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1982 let abs_path = op
1983 .uri
1984 .to_file_path()
1985 .map_err(|_| anyhow!("can't convert URI to path"))?;
1986
1987 if let Some(parent_path) = abs_path.parent() {
1988 fs.create_dir(parent_path).await?;
1989 }
                    // `Path::ends_with` compares whole components, so a check against "/"
                    // can never match; inspect the original URI for a trailing slash instead.
                    if op.uri.path().ends_with('/') {
                        fs.create_dir(&abs_path).await?;
                    } else {
                        fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
                            .await?;
                    }
1996 }
1997 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1998 let source_abs_path = op
1999 .old_uri
2000 .to_file_path()
2001 .map_err(|_| anyhow!("can't convert URI to path"))?;
2002 let target_abs_path = op
2003 .new_uri
2004 .to_file_path()
2005 .map_err(|_| anyhow!("can't convert URI to path"))?;
2006 fs.rename(
2007 &source_abs_path,
2008 &target_abs_path,
2009 op.options.map(Into::into).unwrap_or_default(),
2010 )
2011 .await?;
2012 }
2013 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2014 let abs_path = op
2015 .uri
2016 .to_file_path()
2017 .map_err(|_| anyhow!("can't convert URI to path"))?;
2018 let options = op.options.map(Into::into).unwrap_or_default();
                    // As above, check the URI itself for a trailing slash rather than the
                    // converted path, since `Path::ends_with("/")` never matches.
                    if op.uri.path().ends_with('/') {
                        fs.remove_dir(&abs_path, options).await?;
                    } else {
                        fs.remove_file(&abs_path, options).await?;
                    }
2024 }
2025 lsp::DocumentChangeOperation::Edit(op) => {
2026 let buffer_to_edit = this
2027 .update(cx, |this, cx| {
2028 this.open_local_buffer_via_lsp(
2029 op.text_document.uri,
2030 language_name.clone(),
2031 language_server.clone(),
2032 cx,
2033 )
2034 })
2035 .await?;
2036
2037 let edits = buffer_to_edit
2038 .update(cx, |buffer, cx| {
2039 let edits = op.edits.into_iter().map(|edit| match edit {
2040 lsp::OneOf::Left(edit) => edit,
2041 lsp::OneOf::Right(edit) => edit.text_edit,
2042 });
2043 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2044 })
2045 .await?;
2046
2047 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2048 buffer.finalize_last_transaction();
2049 buffer.start_transaction();
2050 for (range, text) in edits {
2051 buffer.edit([range], text, cx);
2052 }
2053 let transaction = if buffer.end_transaction(cx).is_some() {
2054 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2055 if !push_to_history {
2056 buffer.forget_transaction(transaction.id);
2057 }
2058 Some(transaction)
2059 } else {
2060 None
2061 };
2062
2063 transaction
2064 });
2065 if let Some(transaction) = transaction {
2066 project_transaction.0.insert(buffer_to_edit, transaction);
2067 }
2068 }
2069 }
2070 }
2071
2072 Ok(project_transaction)
2073 }
2074
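    /// Returns the range of the symbol at `position` that a rename would affect,
    /// if the language server reports one.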
2075 pub fn prepare_rename<T: ToPointUtf16>(
2076 &self,
2077 buffer: ModelHandle<Buffer>,
2078 position: T,
2079 cx: &mut ModelContext<Self>,
2080 ) -> Task<Result<Option<Range<Anchor>>>> {
2081 let position = position.to_point_utf16(buffer.read(cx));
2082 self.request_lsp(buffer, PrepareRename { position }, cx)
2083 }
2084
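    /// Renames the symbol at `position` to `new_name`, returning the resulting
    /// edits as a `ProjectTransaction`.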
2085 pub fn perform_rename<T: ToPointUtf16>(
2086 &self,
2087 buffer: ModelHandle<Buffer>,
2088 position: T,
2089 new_name: String,
2090 push_to_history: bool,
2091 cx: &mut ModelContext<Self>,
2092 ) -> Task<Result<ProjectTransaction>> {
2093 let position = position.to_point_utf16(buffer.read(cx));
2094 self.request_lsp(
2095 buffer,
2096 PerformRename {
2097 position,
2098 new_name,
2099 push_to_history,
2100 },
2101 cx,
2102 )
2103 }
2104
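    /// Searches the project for `query`. Locally, candidate files are detected on
    /// background threads, then opened (together with already-open buffers) and
    /// searched as buffers; remotely, the query is forwarded to the host and the
    /// returned locations are resolved into local buffer handles.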
2105 pub fn search(
2106 &self,
2107 query: SearchQuery,
2108 cx: &mut ModelContext<Self>,
2109 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2110 if self.is_local() {
2111 let snapshots = self
2112 .visible_worktrees(cx)
2113 .filter_map(|tree| {
2114 let tree = tree.read(cx).as_local()?;
2115 Some(tree.snapshot())
2116 })
2117 .collect::<Vec<_>>();
2118
2119 let background = cx.background().clone();
2120 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2121 if path_count == 0 {
2122 return Task::ready(Ok(Default::default()));
2123 }
2124 let workers = background.num_cpus().min(path_count);
2125 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2126 cx.background()
2127 .spawn({
2128 let fs = self.fs.clone();
2129 let background = cx.background().clone();
2130 let query = query.clone();
2131 async move {
2132 let fs = &fs;
2133 let query = &query;
2134 let matching_paths_tx = &matching_paths_tx;
2135 let paths_per_worker = (path_count + workers - 1) / workers;
2136 let snapshots = &snapshots;
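                    // Split the full list of visible paths into contiguous, roughly
                    // equal chunks, one per worker, spanning worktree boundaries
                    // where necessary.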
2137 background
2138 .scoped(|scope| {
2139 for worker_ix in 0..workers {
2140 let worker_start_ix = worker_ix * paths_per_worker;
2141 let worker_end_ix = worker_start_ix + paths_per_worker;
2142 scope.spawn(async move {
2143 let mut snapshot_start_ix = 0;
2144 let mut abs_path = PathBuf::new();
2145 for snapshot in snapshots {
2146 let snapshot_end_ix =
2147 snapshot_start_ix + snapshot.visible_file_count();
2148 if worker_end_ix <= snapshot_start_ix {
2149 break;
2150 } else if worker_start_ix > snapshot_end_ix {
2151 snapshot_start_ix = snapshot_end_ix;
2152 continue;
2153 } else {
2154 let start_in_snapshot = worker_start_ix
2155 .saturating_sub(snapshot_start_ix);
2156 let end_in_snapshot =
2157 cmp::min(worker_end_ix, snapshot_end_ix)
2158 - snapshot_start_ix;
2159
2160 for entry in snapshot
2161 .files(false, start_in_snapshot)
2162 .take(end_in_snapshot - start_in_snapshot)
2163 {
2164 if matching_paths_tx.is_closed() {
2165 break;
2166 }
2167
2168 abs_path.clear();
2169 abs_path.push(&snapshot.abs_path());
2170 abs_path.push(&entry.path);
2171 let matches = if let Some(file) =
2172 fs.open_sync(&abs_path).await.log_err()
2173 {
2174 query.detect(file).unwrap_or(false)
2175 } else {
2176 false
2177 };
2178
2179 if matches {
2180 let project_path =
2181 (snapshot.id(), entry.path.clone());
2182 if matching_paths_tx
2183 .send(project_path)
2184 .await
2185 .is_err()
2186 {
2187 break;
2188 }
2189 }
2190 }
2191
2192 snapshot_start_ix = snapshot_end_ix;
2193 }
2194 }
2195 });
2196 }
2197 })
2198 .await;
2199 }
2200 })
2201 .detach();
2202
2203 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2204 let open_buffers = self
2205 .opened_buffers
2206 .values()
2207 .filter_map(|b| b.upgrade(cx))
2208 .collect::<HashSet<_>>();
2209 cx.spawn(|this, cx| async move {
2210 for buffer in &open_buffers {
2211 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2212 buffers_tx.send((buffer.clone(), snapshot)).await?;
2213 }
2214
2215 let open_buffers = Rc::new(RefCell::new(open_buffers));
2216 while let Some(project_path) = matching_paths_rx.next().await {
2217 if buffers_tx.is_closed() {
2218 break;
2219 }
2220
2221 let this = this.clone();
2222 let open_buffers = open_buffers.clone();
2223 let buffers_tx = buffers_tx.clone();
2224 cx.spawn(|mut cx| async move {
2225 if let Some(buffer) = this
2226 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2227 .await
2228 .log_err()
2229 {
2230 if open_buffers.borrow_mut().insert(buffer.clone()) {
2231 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2232 buffers_tx.send((buffer, snapshot)).await?;
2233 }
2234 }
2235
2236 Ok::<_, anyhow::Error>(())
2237 })
2238 .detach();
2239 }
2240
2241 Ok::<_, anyhow::Error>(())
2242 })
2243 .detach_and_log_err(cx);
2244
2245 let background = cx.background().clone();
2246 cx.background().spawn(async move {
2247 let query = &query;
2248 let mut matched_buffers = Vec::new();
2249 for _ in 0..workers {
2250 matched_buffers.push(HashMap::default());
2251 }
2252 background
2253 .scoped(|scope| {
2254 for worker_matched_buffers in matched_buffers.iter_mut() {
2255 let mut buffers_rx = buffers_rx.clone();
2256 scope.spawn(async move {
2257 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2258 let buffer_matches = query
2259 .search(snapshot.as_rope())
2260 .await
2261 .iter()
2262 .map(|range| {
2263 snapshot.anchor_before(range.start)
2264 ..snapshot.anchor_after(range.end)
2265 })
2266 .collect::<Vec<_>>();
2267 if !buffer_matches.is_empty() {
2268 worker_matched_buffers
2269 .insert(buffer.clone(), buffer_matches);
2270 }
2271 }
2272 });
2273 }
2274 })
2275 .await;
2276 Ok(matched_buffers.into_iter().flatten().collect())
2277 })
2278 } else if let Some(project_id) = self.remote_id() {
2279 let request = self.client.request(query.to_proto(project_id));
2280 cx.spawn(|this, mut cx| async move {
2281 let response = request.await?;
2282 let mut result = HashMap::default();
2283 for location in response.locations {
2284 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2285 let target_buffer = this
2286 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2287 .await?;
2288 let start = location
2289 .start
2290 .and_then(deserialize_anchor)
2291 .ok_or_else(|| anyhow!("missing target start"))?;
2292 let end = location
2293 .end
2294 .and_then(deserialize_anchor)
2295 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
2300 }
2301 Ok(result)
2302 })
2303 } else {
2304 Task::ready(Ok(Default::default()))
2305 }
2306 }
2307
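    /// Dispatches an `LspCommand` either to the buffer's local language server or,
    /// for remote projects, to the host over RPC. Falls back to a default response
    /// when the buffer has no applicable server or the server doesn't advertise the
    /// required capability.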
2308 fn request_lsp<R: LspCommand>(
2309 &self,
2310 buffer_handle: ModelHandle<Buffer>,
2311 request: R,
2312 cx: &mut ModelContext<Self>,
2313 ) -> Task<Result<R::Response>>
2314 where
2315 <R::LspRequest as lsp::request::Request>::Result: Send,
2316 {
2317 let buffer = buffer_handle.read(cx);
2318 if self.is_local() {
2319 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2320 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2321 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2322 return cx.spawn(|this, cx| async move {
2323 if !language_server
2324 .capabilities()
2325 .await
2326 .map_or(false, |capabilities| {
2327 request.check_capabilities(&capabilities)
2328 })
2329 {
2330 return Ok(Default::default());
2331 }
2332
2333 let response = language_server
2334 .request::<R::LspRequest>(lsp_params)
2335 .await
2336 .context("lsp request failed")?;
2337 request
2338 .response_from_lsp(response, this, buffer_handle, cx)
2339 .await
2340 });
2341 }
2342 } else if let Some(project_id) = self.remote_id() {
2343 let rpc = self.client.clone();
2344 let message = request.to_proto(project_id, buffer);
2345 return cx.spawn(|this, cx| async move {
2346 let response = rpc.request(message).await?;
2347 request
2348 .response_from_proto(response, this, buffer_handle, cx)
2349 .await
2350 });
2351 }
2352 Task::ready(Ok(Default::default()))
2353 }
2354
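    /// Returns the worktree containing `abs_path` along with the path relative to
    /// that worktree's root, creating a new local worktree if none contains it.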
2355 pub fn find_or_create_local_worktree(
2356 &mut self,
2357 abs_path: impl AsRef<Path>,
2358 visible: bool,
2359 cx: &mut ModelContext<Self>,
2360 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2361 let abs_path = abs_path.as_ref();
2362 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2363 Task::ready(Ok((tree.clone(), relative_path.into())))
2364 } else {
2365 let worktree = self.create_local_worktree(abs_path, visible, cx);
2366 cx.foreground()
2367 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2368 }
2369 }
2370
2371 pub fn find_local_worktree(
2372 &self,
2373 abs_path: &Path,
2374 cx: &AppContext,
2375 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2376 for tree in self.worktrees(cx) {
2377 if let Some(relative_path) = tree
2378 .read(cx)
2379 .as_local()
2380 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2381 {
2382 return Some((tree.clone(), relative_path.into()));
2383 }
2384 }
2385 None
2386 }
2387
2388 pub fn is_shared(&self) -> bool {
2389 match &self.client_state {
2390 ProjectClientState::Local { is_shared, .. } => *is_shared,
2391 ProjectClientState::Remote { .. } => false,
2392 }
2393 }
2394
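    /// Starts loading a local worktree for `abs_path`, deduplicating concurrent
    /// requests for the same path and registering or sharing the new worktree with
    /// the server if the project already has a remote id.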
2395 fn create_local_worktree(
2396 &mut self,
2397 abs_path: impl AsRef<Path>,
2398 visible: bool,
2399 cx: &mut ModelContext<Self>,
2400 ) -> Task<Result<ModelHandle<Worktree>>> {
2401 let fs = self.fs.clone();
2402 let client = self.client.clone();
2403 let path: Arc<Path> = abs_path.as_ref().into();
2404 let task = self
2405 .loading_local_worktrees
2406 .entry(path.clone())
2407 .or_insert_with(|| {
2408 cx.spawn(|project, mut cx| {
2409 async move {
2410 let worktree =
2411 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2412 .await;
2413 project.update(&mut cx, |project, _| {
2414 project.loading_local_worktrees.remove(&path);
2415 });
2416 let worktree = worktree?;
2417
2418 let (remote_project_id, is_shared) =
2419 project.update(&mut cx, |project, cx| {
2420 project.add_worktree(&worktree, cx);
2421 (project.remote_id(), project.is_shared())
2422 });
2423
2424 if let Some(project_id) = remote_project_id {
2425 if is_shared {
2426 worktree
2427 .update(&mut cx, |worktree, cx| {
2428 worktree.as_local_mut().unwrap().share(project_id, cx)
2429 })
2430 .await?;
2431 } else {
2432 worktree
2433 .update(&mut cx, |worktree, cx| {
2434 worktree.as_local_mut().unwrap().register(project_id, cx)
2435 })
2436 .await?;
2437 }
2438 }
2439
2440 Ok(worktree)
2441 }
                    .map_err(Arc::new)
2443 })
2444 .shared()
2445 })
2446 .clone();
2447 cx.foreground().spawn(async move {
2448 match task.await {
2449 Ok(worktree) => Ok(worktree),
2450 Err(err) => Err(anyhow!("{}", err)),
2451 }
2452 })
2453 }
2454
2455 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2456 self.worktrees.retain(|worktree| {
2457 worktree
2458 .upgrade(cx)
2459 .map_or(false, |w| w.read(cx).id() != id)
2460 });
2461 cx.notify();
2462 }
2463
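    /// Adds a worktree to the project, observing it for changes. Visible, remote,
    /// or shared worktrees are held strongly; otherwise only a weak handle is kept
    /// so the worktree can be released when no longer referenced elsewhere.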
2464 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2465 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2466 if worktree.read(cx).is_local() {
2467 cx.subscribe(&worktree, |this, worktree, _, cx| {
2468 this.update_local_worktree_buffers(worktree, cx);
2469 })
2470 .detach();
2471 }
2472
2473 let push_strong_handle = {
2474 let worktree = worktree.read(cx);
2475 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2476 };
2477 if push_strong_handle {
2478 self.worktrees
2479 .push(WorktreeHandle::Strong(worktree.clone()));
2480 } else {
2481 cx.observe_release(&worktree, |this, cx| {
2482 this.worktrees
2483 .retain(|worktree| worktree.upgrade(cx).is_some());
2484 cx.notify();
2485 })
2486 .detach();
2487 self.worktrees
2488 .push(WorktreeHandle::Weak(worktree.downgrade()));
2489 }
2490 cx.notify();
2491 }
2492
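    /// Reconciles open buffers with a local worktree's latest snapshot, updating
    /// each buffer's `File` to match the new snapshot and sending
    /// `UpdateBufferFile` messages when the project has a remote id.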
2493 fn update_local_worktree_buffers(
2494 &mut self,
2495 worktree_handle: ModelHandle<Worktree>,
2496 cx: &mut ModelContext<Self>,
2497 ) {
2498 let snapshot = worktree_handle.read(cx).snapshot();
2499 let mut buffers_to_delete = Vec::new();
2500 for (buffer_id, buffer) in &self.opened_buffers {
2501 if let Some(buffer) = buffer.upgrade(cx) {
2502 buffer.update(cx, |buffer, cx| {
2503 if let Some(old_file) = File::from_dyn(buffer.file()) {
2504 if old_file.worktree != worktree_handle {
2505 return;
2506 }
2507
2508 let new_file = if let Some(entry) = old_file
2509 .entry_id
2510 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2511 {
2512 File {
2513 is_local: true,
2514 entry_id: Some(entry.id),
2515 mtime: entry.mtime,
2516 path: entry.path.clone(),
2517 worktree: worktree_handle.clone(),
2518 }
2519 } else if let Some(entry) =
2520 snapshot.entry_for_path(old_file.path().as_ref())
2521 {
2522 File {
2523 is_local: true,
2524 entry_id: Some(entry.id),
2525 mtime: entry.mtime,
2526 path: entry.path.clone(),
2527 worktree: worktree_handle.clone(),
2528 }
2529 } else {
2530 File {
2531 is_local: true,
2532 entry_id: None,
2533 path: old_file.path().clone(),
2534 mtime: old_file.mtime(),
2535 worktree: worktree_handle.clone(),
2536 }
2537 };
2538
2539 if let Some(project_id) = self.remote_id() {
2540 self.client
2541 .send(proto::UpdateBufferFile {
2542 project_id,
2543 buffer_id: *buffer_id as u64,
2544 file: Some(new_file.to_proto()),
2545 })
2546 .log_err();
2547 }
2548 buffer.file_updated(Box::new(new_file), cx).detach();
2549 }
2550 });
2551 } else {
2552 buffers_to_delete.push(*buffer_id);
2553 }
2554 }
2555
2556 for buffer_id in buffers_to_delete {
2557 self.opened_buffers.remove(&buffer_id);
2558 }
2559 }
2560
2561 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2562 let new_active_entry = entry.and_then(|project_path| {
2563 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2564 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2565 Some(ProjectEntry {
2566 worktree_id: project_path.worktree_id,
2567 entry_id: entry.id,
2568 })
2569 });
2570 if new_active_entry != self.active_entry {
2571 self.active_entry = new_active_entry;
2572 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2573 }
2574 }
2575
2576 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2577 self.language_servers_with_diagnostics_running > 0
2578 }
2579
2580 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2581 let mut summary = DiagnosticSummary::default();
2582 for (_, path_summary) in self.diagnostic_summaries(cx) {
2583 summary.error_count += path_summary.error_count;
2584 summary.warning_count += path_summary.warning_count;
2585 summary.info_count += path_summary.info_count;
2586 summary.hint_count += path_summary.hint_count;
2587 }
2588 summary
2589 }
2590
2591 pub fn diagnostic_summaries<'a>(
2592 &'a self,
2593 cx: &'a AppContext,
2594 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2595 self.worktrees(cx).flat_map(move |worktree| {
2596 let worktree = worktree.read(cx);
2597 let worktree_id = worktree.id();
2598 worktree
2599 .diagnostic_summaries()
2600 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2601 })
2602 }
2603
2604 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2605 self.language_servers_with_diagnostics_running += 1;
2606 if self.language_servers_with_diagnostics_running == 1 {
2607 cx.emit(Event::DiskBasedDiagnosticsStarted);
2608 }
2609 }
2610
2611 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2612 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2613 self.language_servers_with_diagnostics_running -= 1;
2614 if self.language_servers_with_diagnostics_running == 0 {
2615 cx.emit(Event::DiskBasedDiagnosticsFinished);
2616 }
2617 }
2618
2619 pub fn active_entry(&self) -> Option<ProjectEntry> {
2620 self.active_entry
2621 }
2622
2623 // RPC message handlers
2624
2625 async fn handle_unshare_project(
2626 this: ModelHandle<Self>,
2627 _: TypedEnvelope<proto::UnshareProject>,
2628 _: Arc<Client>,
2629 mut cx: AsyncAppContext,
2630 ) -> Result<()> {
2631 this.update(&mut cx, |this, cx| {
2632 if let ProjectClientState::Remote {
2633 sharing_has_stopped,
2634 ..
2635 } = &mut this.client_state
2636 {
2637 *sharing_has_stopped = true;
2638 this.collaborators.clear();
2639 cx.notify();
2640 } else {
2641 unreachable!()
2642 }
2643 });
2644
2645 Ok(())
2646 }
2647
2648 async fn handle_add_collaborator(
2649 this: ModelHandle<Self>,
2650 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2651 _: Arc<Client>,
2652 mut cx: AsyncAppContext,
2653 ) -> Result<()> {
2654 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2655 let collaborator = envelope
2656 .payload
2657 .collaborator
2658 .take()
2659 .ok_or_else(|| anyhow!("empty collaborator"))?;
2660
2661 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2662 this.update(&mut cx, |this, cx| {
2663 this.collaborators
2664 .insert(collaborator.peer_id, collaborator);
2665 cx.notify();
2666 });
2667
2668 Ok(())
2669 }
2670
2671 async fn handle_remove_collaborator(
2672 this: ModelHandle<Self>,
2673 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2674 _: Arc<Client>,
2675 mut cx: AsyncAppContext,
2676 ) -> Result<()> {
2677 this.update(&mut cx, |this, cx| {
2678 let peer_id = PeerId(envelope.payload.peer_id);
2679 let replica_id = this
2680 .collaborators
2681 .remove(&peer_id)
2682 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2683 .replica_id;
2684 for (_, buffer) in &this.opened_buffers {
2685 if let Some(buffer) = buffer.upgrade(cx) {
2686 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2687 }
2688 }
2689 cx.notify();
2690 Ok(())
2691 })
2692 }
2693
2694 async fn handle_register_worktree(
2695 this: ModelHandle<Self>,
2696 envelope: TypedEnvelope<proto::RegisterWorktree>,
2697 client: Arc<Client>,
2698 mut cx: AsyncAppContext,
2699 ) -> Result<()> {
2700 this.update(&mut cx, |this, cx| {
2701 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2702 let replica_id = this.replica_id();
2703 let worktree = proto::Worktree {
2704 id: envelope.payload.worktree_id,
2705 root_name: envelope.payload.root_name,
2706 entries: Default::default(),
2707 diagnostic_summaries: Default::default(),
2708 visible: envelope.payload.visible,
2709 };
2710 let (worktree, load_task) =
2711 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2712 this.add_worktree(&worktree, cx);
2713 load_task.detach();
2714 Ok(())
2715 })
2716 }
2717
2718 async fn handle_unregister_worktree(
2719 this: ModelHandle<Self>,
2720 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2721 _: Arc<Client>,
2722 mut cx: AsyncAppContext,
2723 ) -> Result<()> {
2724 this.update(&mut cx, |this, cx| {
2725 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2726 this.remove_worktree(worktree_id, cx);
2727 Ok(())
2728 })
2729 }
2730
2731 async fn handle_update_worktree(
2732 this: ModelHandle<Self>,
2733 envelope: TypedEnvelope<proto::UpdateWorktree>,
2734 _: Arc<Client>,
2735 mut cx: AsyncAppContext,
2736 ) -> Result<()> {
2737 this.update(&mut cx, |this, cx| {
2738 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2739 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2740 worktree.update(cx, |worktree, _| {
2741 let worktree = worktree.as_remote_mut().unwrap();
2742 worktree.update_from_remote(envelope)
2743 })?;
2744 }
2745 Ok(())
2746 })
2747 }
2748
2749 async fn handle_update_diagnostic_summary(
2750 this: ModelHandle<Self>,
2751 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2752 _: Arc<Client>,
2753 mut cx: AsyncAppContext,
2754 ) -> Result<()> {
2755 this.update(&mut cx, |this, cx| {
2756 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2757 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2758 if let Some(summary) = envelope.payload.summary {
2759 let project_path = ProjectPath {
2760 worktree_id,
2761 path: Path::new(&summary.path).into(),
2762 };
2763 worktree.update(cx, |worktree, _| {
2764 worktree
2765 .as_remote_mut()
2766 .unwrap()
2767 .update_diagnostic_summary(project_path.path.clone(), &summary);
2768 });
2769 cx.emit(Event::DiagnosticsUpdated(project_path));
2770 }
2771 }
2772 Ok(())
2773 })
2774 }
2775
2776 async fn handle_disk_based_diagnostics_updating(
2777 this: ModelHandle<Self>,
2778 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2779 _: Arc<Client>,
2780 mut cx: AsyncAppContext,
2781 ) -> Result<()> {
2782 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2783 Ok(())
2784 }
2785
2786 async fn handle_disk_based_diagnostics_updated(
2787 this: ModelHandle<Self>,
2788 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2789 _: Arc<Client>,
2790 mut cx: AsyncAppContext,
2791 ) -> Result<()> {
2792 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2793 Ok(())
2794 }
2795
2796 async fn handle_update_buffer(
2797 this: ModelHandle<Self>,
2798 envelope: TypedEnvelope<proto::UpdateBuffer>,
2799 _: Arc<Client>,
2800 mut cx: AsyncAppContext,
2801 ) -> Result<()> {
2802 this.update(&mut cx, |this, cx| {
2803 let payload = envelope.payload.clone();
2804 let buffer_id = payload.buffer_id;
2805 let ops = payload
2806 .operations
2807 .into_iter()
                .map(language::proto::deserialize_operation)
2809 .collect::<Result<Vec<_>, _>>()?;
2810 match this.opened_buffers.entry(buffer_id) {
2811 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2812 OpenBuffer::Strong(buffer) => {
2813 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2814 }
2815 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2816 OpenBuffer::Weak(_) => {}
2817 },
2818 hash_map::Entry::Vacant(e) => {
2819 e.insert(OpenBuffer::Loading(ops));
2820 }
2821 }
2822 Ok(())
2823 })
2824 }
2825
2826 async fn handle_update_buffer_file(
2827 this: ModelHandle<Self>,
2828 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2829 _: Arc<Client>,
2830 mut cx: AsyncAppContext,
2831 ) -> Result<()> {
2832 this.update(&mut cx, |this, cx| {
2833 let payload = envelope.payload.clone();
2834 let buffer_id = payload.buffer_id;
2835 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2836 let worktree = this
2837 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2838 .ok_or_else(|| anyhow!("no such worktree"))?;
2839 let file = File::from_proto(file, worktree.clone(), cx)?;
2840 let buffer = this
2841 .opened_buffers
2842 .get_mut(&buffer_id)
2843 .and_then(|b| b.upgrade(cx))
2844 .ok_or_else(|| anyhow!("no such buffer"))?;
2845 buffer.update(cx, |buffer, cx| {
2846 buffer.file_updated(Box::new(file), cx).detach();
2847 });
2848 Ok(())
2849 })
2850 }
2851
2852 async fn handle_save_buffer(
2853 this: ModelHandle<Self>,
2854 envelope: TypedEnvelope<proto::SaveBuffer>,
2855 _: Arc<Client>,
2856 mut cx: AsyncAppContext,
2857 ) -> Result<proto::BufferSaved> {
2858 let buffer_id = envelope.payload.buffer_id;
2859 let requested_version = envelope.payload.version.try_into()?;
2860
2861 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2862 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2863 let buffer = this
2864 .opened_buffers
2865 .get(&buffer_id)
2866 .map(|buffer| buffer.upgrade(cx).unwrap())
2867 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2868 Ok::<_, anyhow::Error>((project_id, buffer))
2869 })?;
2870 buffer
2871 .update(&mut cx, |buffer, _| {
2872 buffer.wait_for_version(requested_version)
2873 })
2874 .await;
2875
2876 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2877 Ok(proto::BufferSaved {
2878 project_id,
2879 buffer_id,
2880 version: (&saved_version).into(),
2881 mtime: Some(mtime.into()),
2882 })
2883 }
2884
2885 async fn handle_format_buffers(
2886 this: ModelHandle<Self>,
2887 envelope: TypedEnvelope<proto::FormatBuffers>,
2888 _: Arc<Client>,
2889 mut cx: AsyncAppContext,
2890 ) -> Result<proto::FormatBuffersResponse> {
2891 let sender_id = envelope.original_sender_id()?;
2892 let format = this.update(&mut cx, |this, cx| {
2893 let mut buffers = HashSet::default();
2894 for buffer_id in &envelope.payload.buffer_ids {
2895 buffers.insert(
2896 this.opened_buffers
2897 .get(buffer_id)
2898 .map(|buffer| buffer.upgrade(cx).unwrap())
2899 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2900 );
2901 }
2902 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2903 })?;
2904
2905 let project_transaction = format.await?;
2906 let project_transaction = this.update(&mut cx, |this, cx| {
2907 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2908 });
2909 Ok(proto::FormatBuffersResponse {
2910 transaction: Some(project_transaction),
2911 })
2912 }
2913
2914 async fn handle_get_completions(
2915 this: ModelHandle<Self>,
2916 envelope: TypedEnvelope<proto::GetCompletions>,
2917 _: Arc<Client>,
2918 mut cx: AsyncAppContext,
2919 ) -> Result<proto::GetCompletionsResponse> {
2920 let position = envelope
2921 .payload
2922 .position
2923 .and_then(language::proto::deserialize_anchor)
2924 .ok_or_else(|| anyhow!("invalid position"))?;
2925 let version = clock::Global::from(envelope.payload.version);
2926 let buffer = this.read_with(&cx, |this, cx| {
2927 this.opened_buffers
2928 .get(&envelope.payload.buffer_id)
2929 .map(|buffer| buffer.upgrade(cx).unwrap())
2930 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2931 })?;
2932 buffer
2933 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2934 .await;
2935 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2936 let completions = this
2937 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2938 .await?;
2939
2940 Ok(proto::GetCompletionsResponse {
2941 completions: completions
2942 .iter()
2943 .map(language::proto::serialize_completion)
2944 .collect(),
2945 version: (&version).into(),
2946 })
2947 }
2948
2949 async fn handle_apply_additional_edits_for_completion(
2950 this: ModelHandle<Self>,
2951 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2952 _: Arc<Client>,
2953 mut cx: AsyncAppContext,
2954 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2955 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2956 let buffer = this
2957 .opened_buffers
2958 .get(&envelope.payload.buffer_id)
2959 .map(|buffer| buffer.upgrade(cx).unwrap())
2960 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2961 let language = buffer.read(cx).language();
2962 let completion = language::proto::deserialize_completion(
2963 envelope
2964 .payload
2965 .completion
2966 .ok_or_else(|| anyhow!("invalid completion"))?,
2967 language,
2968 )?;
2969 Ok::<_, anyhow::Error>(
2970 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2971 )
2972 })?;
2973
2974 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2975 transaction: apply_additional_edits
2976 .await?
2977 .as_ref()
2978 .map(language::proto::serialize_transaction),
2979 })
2980 }
2981
2982 async fn handle_get_code_actions(
2983 this: ModelHandle<Self>,
2984 envelope: TypedEnvelope<proto::GetCodeActions>,
2985 _: Arc<Client>,
2986 mut cx: AsyncAppContext,
2987 ) -> Result<proto::GetCodeActionsResponse> {
2988 let start = envelope
2989 .payload
2990 .start
2991 .and_then(language::proto::deserialize_anchor)
2992 .ok_or_else(|| anyhow!("invalid start"))?;
2993 let end = envelope
2994 .payload
2995 .end
2996 .and_then(language::proto::deserialize_anchor)
2997 .ok_or_else(|| anyhow!("invalid end"))?;
2998 let buffer = this.update(&mut cx, |this, cx| {
2999 this.opened_buffers
3000 .get(&envelope.payload.buffer_id)
3001 .map(|buffer| buffer.upgrade(cx).unwrap())
3002 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3003 })?;
3004 buffer
3005 .update(&mut cx, |buffer, _| {
3006 buffer.wait_for_version(envelope.payload.version.into())
3007 })
3008 .await;
3009
3010 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3011 let code_actions = this.update(&mut cx, |this, cx| {
3012 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3013 })?;
3014
3015 Ok(proto::GetCodeActionsResponse {
3016 actions: code_actions
3017 .await?
3018 .iter()
3019 .map(language::proto::serialize_code_action)
3020 .collect(),
3021 version: (&version).into(),
3022 })
3023 }
3024
3025 async fn handle_apply_code_action(
3026 this: ModelHandle<Self>,
3027 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3028 _: Arc<Client>,
3029 mut cx: AsyncAppContext,
3030 ) -> Result<proto::ApplyCodeActionResponse> {
3031 let sender_id = envelope.original_sender_id()?;
3032 let action = language::proto::deserialize_code_action(
3033 envelope
3034 .payload
3035 .action
3036 .ok_or_else(|| anyhow!("invalid action"))?,
3037 )?;
3038 let apply_code_action = this.update(&mut cx, |this, cx| {
3039 let buffer = this
3040 .opened_buffers
3041 .get(&envelope.payload.buffer_id)
3042 .map(|buffer| buffer.upgrade(cx).unwrap())
3043 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3044 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3045 })?;
3046
3047 let project_transaction = apply_code_action.await?;
3048 let project_transaction = this.update(&mut cx, |this, cx| {
3049 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3050 });
3051 Ok(proto::ApplyCodeActionResponse {
3052 transaction: Some(project_transaction),
3053 })
3054 }
3055
3056 async fn handle_lsp_command<T: LspCommand>(
3057 this: ModelHandle<Self>,
3058 envelope: TypedEnvelope<T::ProtoRequest>,
3059 _: Arc<Client>,
3060 mut cx: AsyncAppContext,
3061 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3062 where
3063 <T::LspRequest as lsp::request::Request>::Result: Send,
3064 {
3065 let sender_id = envelope.original_sender_id()?;
3066 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3067 let buffer_handle = this.read_with(&cx, |this, _| {
3068 this.opened_buffers
3069 .get(&buffer_id)
3070 .map(|buffer| buffer.upgrade(&cx).unwrap())
3071 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3072 })?;
3073 let request = T::from_proto(
3074 envelope.payload,
3075 this.clone(),
3076 buffer_handle.clone(),
3077 cx.clone(),
3078 )
3079 .await?;
3080 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3081 let response = this
3082 .update(&mut cx, |this, cx| {
3083 this.request_lsp(buffer_handle, request, cx)
3084 })
3085 .await?;
3086 this.update(&mut cx, |this, cx| {
3087 Ok(T::response_to_proto(
3088 response,
3089 this,
3090 sender_id,
3091 &buffer_version,
3092 cx,
3093 ))
3094 })
3095 }
3096
3097 async fn handle_get_project_symbols(
3098 this: ModelHandle<Self>,
3099 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3100 _: Arc<Client>,
3101 mut cx: AsyncAppContext,
3102 ) -> Result<proto::GetProjectSymbolsResponse> {
3103 let symbols = this
3104 .update(&mut cx, |this, cx| {
3105 this.symbols(&envelope.payload.query, cx)
3106 })
3107 .await?;
3108
3109 Ok(proto::GetProjectSymbolsResponse {
3110 symbols: symbols.iter().map(serialize_symbol).collect(),
3111 })
3112 }
3113
3114 async fn handle_search_project(
3115 this: ModelHandle<Self>,
3116 envelope: TypedEnvelope<proto::SearchProject>,
3117 _: Arc<Client>,
3118 mut cx: AsyncAppContext,
3119 ) -> Result<proto::SearchProjectResponse> {
3120 let peer_id = envelope.original_sender_id()?;
3121 let query = SearchQuery::from_proto(envelope.payload)?;
3122 let result = this
3123 .update(&mut cx, |this, cx| this.search(query, cx))
3124 .await?;
3125
3126 this.update(&mut cx, |this, cx| {
3127 let mut locations = Vec::new();
3128 for (buffer, ranges) in result {
3129 for range in ranges {
3130 let start = serialize_anchor(&range.start);
3131 let end = serialize_anchor(&range.end);
3132 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3133 locations.push(proto::Location {
3134 buffer: Some(buffer),
3135 start: Some(start),
3136 end: Some(end),
3137 });
3138 }
3139 }
3140 Ok(proto::SearchProjectResponse { locations })
3141 })
3142 }
3143
3144 async fn handle_open_buffer_for_symbol(
3145 this: ModelHandle<Self>,
3146 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3147 _: Arc<Client>,
3148 mut cx: AsyncAppContext,
3149 ) -> Result<proto::OpenBufferForSymbolResponse> {
3150 let peer_id = envelope.original_sender_id()?;
3151 let symbol = envelope
3152 .payload
3153 .symbol
3154 .ok_or_else(|| anyhow!("invalid symbol"))?;
3155 let symbol = this.read_with(&cx, |this, _| {
3156 let symbol = this.deserialize_symbol(symbol)?;
3157 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3158 if signature == symbol.signature {
3159 Ok(symbol)
3160 } else {
3161 Err(anyhow!("invalid symbol signature"))
3162 }
3163 })?;
3164 let buffer = this
3165 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3166 .await?;
3167
3168 Ok(proto::OpenBufferForSymbolResponse {
3169 buffer: Some(this.update(&mut cx, |this, cx| {
3170 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3171 })),
3172 })
3173 }
3174
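    /// Hashes the worktree id, the symbol's path, and a per-project nonce;
    /// `handle_open_buffer_for_symbol` uses this to check that a symbol received
    /// over RPC was originally produced by this project.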
3175 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3176 let mut hasher = Sha256::new();
3177 hasher.update(worktree_id.to_proto().to_be_bytes());
3178 hasher.update(path.to_string_lossy().as_bytes());
3179 hasher.update(self.nonce.to_be_bytes());
3180 hasher.finalize().as_slice().try_into().unwrap()
3181 }
3182
3183 async fn handle_open_buffer(
3184 this: ModelHandle<Self>,
3185 envelope: TypedEnvelope<proto::OpenBuffer>,
3186 _: Arc<Client>,
3187 mut cx: AsyncAppContext,
3188 ) -> Result<proto::OpenBufferResponse> {
3189 let peer_id = envelope.original_sender_id()?;
3190 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3191 let open_buffer = this.update(&mut cx, |this, cx| {
3192 this.open_buffer(
3193 ProjectPath {
3194 worktree_id,
3195 path: PathBuf::from(envelope.payload.path).into(),
3196 },
3197 cx,
3198 )
3199 });
3200
3201 let buffer = open_buffer.await?;
3202 this.update(&mut cx, |this, cx| {
3203 Ok(proto::OpenBufferResponse {
3204 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3205 })
3206 })
3207 }
3208
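    /// Serializes a `ProjectTransaction` for a peer, sending full buffer state for
    /// any buffer that hasn't been shared with that peer yet.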
3209 fn serialize_project_transaction_for_peer(
3210 &mut self,
3211 project_transaction: ProjectTransaction,
3212 peer_id: PeerId,
3213 cx: &AppContext,
3214 ) -> proto::ProjectTransaction {
3215 let mut serialized_transaction = proto::ProjectTransaction {
3216 buffers: Default::default(),
3217 transactions: Default::default(),
3218 };
3219 for (buffer, transaction) in project_transaction.0 {
3220 serialized_transaction
3221 .buffers
3222 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3223 serialized_transaction
3224 .transactions
3225 .push(language::proto::serialize_transaction(&transaction));
3226 }
3227 serialized_transaction
3228 }
3229
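    /// Reconstructs a `ProjectTransaction` received over RPC, resolving each buffer,
    /// waiting for its edits to arrive, and optionally pushing the transactions onto
    /// the buffers' undo histories.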
3230 fn deserialize_project_transaction(
3231 &mut self,
3232 message: proto::ProjectTransaction,
3233 push_to_history: bool,
3234 cx: &mut ModelContext<Self>,
3235 ) -> Task<Result<ProjectTransaction>> {
3236 cx.spawn(|this, mut cx| async move {
3237 let mut project_transaction = ProjectTransaction::default();
3238 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3239 let buffer = this
3240 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3241 .await?;
3242 let transaction = language::proto::deserialize_transaction(transaction)?;
3243 project_transaction.0.insert(buffer, transaction);
3244 }
3245
3246 for (buffer, transaction) in &project_transaction.0 {
3247 buffer
3248 .update(&mut cx, |buffer, _| {
3249 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3250 })
3251 .await;
3252
3253 if push_to_history {
3254 buffer.update(&mut cx, |buffer, _| {
3255 buffer.push_transaction(transaction.clone(), Instant::now());
3256 });
3257 }
3258 }
3259
3260 Ok(project_transaction)
3261 })
3262 }
3263
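    /// Serializes a buffer for a peer: the first time a buffer is sent to a given
    /// peer its full state is included; afterwards only its id is sent.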
3264 fn serialize_buffer_for_peer(
3265 &mut self,
3266 buffer: &ModelHandle<Buffer>,
3267 peer_id: PeerId,
3268 cx: &AppContext,
3269 ) -> proto::Buffer {
3270 let buffer_id = buffer.read(cx).remote_id();
3271 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3272 if shared_buffers.insert(buffer_id) {
3273 proto::Buffer {
3274 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3275 }
3276 } else {
3277 proto::Buffer {
3278 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3279 }
3280 }
3281 }
3282
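    /// Resolves a `proto::Buffer` into a local buffer handle, either by waiting for
    /// a buffer with the given id to appear in `opened_buffers` or by constructing a
    /// new buffer from the serialized state and registering it.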
3283 fn deserialize_buffer(
3284 &mut self,
3285 buffer: proto::Buffer,
3286 cx: &mut ModelContext<Self>,
3287 ) -> Task<Result<ModelHandle<Buffer>>> {
3288 let replica_id = self.replica_id();
3289
3290 let opened_buffer_tx = self.opened_buffer.0.clone();
3291 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3292 cx.spawn(|this, mut cx| async move {
3293 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3294 proto::buffer::Variant::Id(id) => {
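                    // The host sent only a buffer id, so wait until the
                    // corresponding buffer state arrives and is registered.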
3295 let buffer = loop {
3296 let buffer = this.read_with(&cx, |this, cx| {
3297 this.opened_buffers
3298 .get(&id)
3299 .and_then(|buffer| buffer.upgrade(cx))
3300 });
3301 if let Some(buffer) = buffer {
3302 break buffer;
3303 }
3304 opened_buffer_rx
3305 .next()
3306 .await
3307 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3308 };
3309 Ok(buffer)
3310 }
3311 proto::buffer::Variant::State(mut buffer) => {
3312 let mut buffer_worktree = None;
3313 let mut buffer_file = None;
3314 if let Some(file) = buffer.file.take() {
3315 this.read_with(&cx, |this, cx| {
3316 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3317 let worktree =
3318 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3319 anyhow!("no worktree found for id {}", file.worktree_id)
3320 })?;
3321 buffer_file =
3322 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3323 as Box<dyn language::File>);
3324 buffer_worktree = Some(worktree);
3325 Ok::<_, anyhow::Error>(())
3326 })?;
3327 }
3328
3329 let buffer = cx.add_model(|cx| {
3330 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3331 });
3332
3333 this.update(&mut cx, |this, cx| {
3334 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3335 })?;
3336
3337 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3338 Ok(buffer)
3339 }
3340 }
3341 })
3342 }
3343
3344 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3345 let language = self
3346 .languages
3347 .get_language(&serialized_symbol.language_name);
3348 let start = serialized_symbol
3349 .start
3350 .ok_or_else(|| anyhow!("invalid start"))?;
3351 let end = serialized_symbol
3352 .end
3353 .ok_or_else(|| anyhow!("invalid end"))?;
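        // The protobuf field carries the raw LSP `SymbolKind` discriminant; this
        // assumes the peer sent a valid value.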
3354 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3355 Ok(Symbol {
3356 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3357 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3358 language_name: serialized_symbol.language_name.clone(),
3359 label: language
3360 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3361 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3362 name: serialized_symbol.name,
3363 path: PathBuf::from(serialized_symbol.path),
3364 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3365 kind,
3366 signature: serialized_symbol
3367 .signature
3368 .try_into()
3369 .map_err(|_| anyhow!("invalid signature"))?,
3370 })
3371 }
3372
3373 async fn handle_close_buffer(
3374 _: ModelHandle<Self>,
3375 _: TypedEnvelope<proto::CloseBuffer>,
3376 _: Arc<Client>,
3377 _: AsyncAppContext,
3378 ) -> Result<()> {
3379 // TODO: use this for following
3380 Ok(())
3381 }
3382
3383 async fn handle_buffer_saved(
3384 this: ModelHandle<Self>,
3385 envelope: TypedEnvelope<proto::BufferSaved>,
3386 _: Arc<Client>,
3387 mut cx: AsyncAppContext,
3388 ) -> Result<()> {
3389 let version = envelope.payload.version.try_into()?;
3390 let mtime = envelope
3391 .payload
3392 .mtime
3393 .ok_or_else(|| anyhow!("missing mtime"))?
3394 .into();
3395
3396 this.update(&mut cx, |this, cx| {
3397 let buffer = this
3398 .opened_buffers
3399 .get(&envelope.payload.buffer_id)
3400 .and_then(|buffer| buffer.upgrade(cx));
3401 if let Some(buffer) = buffer {
3402 buffer.update(cx, |buffer, cx| {
3403 buffer.did_save(version, mtime, None, cx);
3404 });
3405 }
3406 Ok(())
3407 })
3408 }
3409
3410 async fn handle_buffer_reloaded(
3411 this: ModelHandle<Self>,
3412 envelope: TypedEnvelope<proto::BufferReloaded>,
3413 _: Arc<Client>,
3414 mut cx: AsyncAppContext,
3415 ) -> Result<()> {
3416 let payload = envelope.payload.clone();
3417 let version = payload.version.try_into()?;
3418 let mtime = payload
3419 .mtime
3420 .ok_or_else(|| anyhow!("missing mtime"))?
3421 .into();
3422 this.update(&mut cx, |this, cx| {
3423 let buffer = this
3424 .opened_buffers
3425 .get(&payload.buffer_id)
3426 .and_then(|buffer| buffer.upgrade(cx));
3427 if let Some(buffer) = buffer {
3428 buffer.update(cx, |buffer, cx| {
3429 buffer.did_reload(version, mtime, cx);
3430 });
3431 }
3432 Ok(())
3433 })
3434 }
3435
3436 pub fn match_paths<'a>(
3437 &self,
3438 query: &'a str,
3439 include_ignored: bool,
3440 smart_case: bool,
3441 max_results: usize,
3442 cancel_flag: &'a AtomicBool,
3443 cx: &AppContext,
3444 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3445 let worktrees = self
3446 .worktrees(cx)
3447 .filter(|worktree| worktree.read(cx).is_visible())
3448 .collect::<Vec<_>>();
3449 let include_root_name = worktrees.len() > 1;
3450 let candidate_sets = worktrees
3451 .into_iter()
3452 .map(|worktree| CandidateSet {
3453 snapshot: worktree.read(cx).snapshot(),
3454 include_ignored,
3455 include_root_name,
3456 })
3457 .collect::<Vec<_>>();
3458
3459 let background = cx.background().clone();
3460 async move {
3461 fuzzy::match_paths(
3462 candidate_sets.as_slice(),
3463 query,
3464 smart_case,
3465 max_results,
3466 cancel_flag,
3467 background,
3468 )
3469 .await
3470 }
3471 }
3472}
3473
3474impl WorktreeHandle {
3475 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3476 match self {
3477 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3478 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3479 }
3480 }
3481}
3482
3483impl OpenBuffer {
3484 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3485 match self {
3486 OpenBuffer::Strong(handle) => Some(handle.clone()),
3487 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3488 OpenBuffer::Loading(_) => None,
3489 }
3490 }
3491}
3492
3493struct CandidateSet {
3494 snapshot: Snapshot,
3495 include_ignored: bool,
3496 include_root_name: bool,
3497}
3498
3499impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3500 type Candidates = CandidateSetIter<'a>;
3501
3502 fn id(&self) -> usize {
3503 self.snapshot.id().to_usize()
3504 }
3505
3506 fn len(&self) -> usize {
3507 if self.include_ignored {
3508 self.snapshot.file_count()
3509 } else {
3510 self.snapshot.visible_file_count()
3511 }
3512 }
3513
3514 fn prefix(&self) -> Arc<str> {
3515 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3516 self.snapshot.root_name().into()
3517 } else if self.include_root_name {
3518 format!("{}/", self.snapshot.root_name()).into()
3519 } else {
3520 "".into()
3521 }
3522 }
3523
3524 fn candidates(&'a self, start: usize) -> Self::Candidates {
3525 CandidateSetIter {
3526 traversal: self.snapshot.files(self.include_ignored, start),
3527 }
3528 }
3529}
3530
3531struct CandidateSetIter<'a> {
3532 traversal: Traversal<'a>,
3533}
3534
3535impl<'a> Iterator for CandidateSetIter<'a> {
3536 type Item = PathMatchCandidate<'a>;
3537
3538 fn next(&mut self) -> Option<Self::Item> {
3539 self.traversal.next().map(|entry| {
3540 if let EntryKind::File(char_bag) = entry.kind {
3541 PathMatchCandidate {
3542 path: &entry.path,
3543 char_bag,
3544 }
3545 } else {
3546 unreachable!()
3547 }
3548 })
3549 }
3550}
3551
3552impl Entity for Project {
3553 type Event = Event;
3554
3555 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3556 match &self.client_state {
3557 ProjectClientState::Local { remote_id_rx, .. } => {
3558 if let Some(project_id) = *remote_id_rx.borrow() {
3559 self.client
3560 .send(proto::UnregisterProject { project_id })
3561 .log_err();
3562 }
3563 }
3564 ProjectClientState::Remote { remote_id, .. } => {
3565 self.client
3566 .send(proto::LeaveProject {
3567 project_id: *remote_id,
3568 })
3569 .log_err();
3570 }
3571 }
3572 }
3573
3574 fn app_will_quit(
3575 &mut self,
3576 _: &mut MutableAppContext,
3577 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3578 let shutdown_futures = self
3579 .language_servers
3580 .drain()
3581 .filter_map(|(_, server)| server.shutdown())
3582 .collect::<Vec<_>>();
3583 Some(
3584 async move {
3585 futures::future::join_all(shutdown_futures).await;
3586 }
3587 .boxed(),
3588 )
3589 }
3590}
3591
3592impl Collaborator {
3593 fn from_proto(
3594 message: proto::Collaborator,
3595 user_store: &ModelHandle<UserStore>,
3596 cx: &mut AsyncAppContext,
3597 ) -> impl Future<Output = Result<Self>> {
3598 let user = user_store.update(cx, |user_store, cx| {
3599 user_store.fetch_user(message.user_id, cx)
3600 });
3601
3602 async move {
3603 Ok(Self {
3604 peer_id: PeerId(message.peer_id),
3605 user: user.await?,
3606 replica_id: message.replica_id as ReplicaId,
3607 })
3608 }
3609 }
3610}
3611
3612impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3613 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3614 Self {
3615 worktree_id,
3616 path: path.as_ref().into(),
3617 }
3618 }
3619}
3620
3621impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3622 fn from(options: lsp::CreateFileOptions) -> Self {
3623 Self {
3624 overwrite: options.overwrite.unwrap_or(false),
3625 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3626 }
3627 }
3628}
3629
3630impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3631 fn from(options: lsp::RenameFileOptions) -> Self {
3632 Self {
3633 overwrite: options.overwrite.unwrap_or(false),
3634 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3635 }
3636 }
3637}
3638
3639impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3640 fn from(options: lsp::DeleteFileOptions) -> Self {
3641 Self {
3642 recursive: options.recursive.unwrap_or(false),
3643 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3644 }
3645 }
3646}
3647
3648fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3649 proto::Symbol {
3650 source_worktree_id: symbol.source_worktree_id.to_proto(),
3651 worktree_id: symbol.worktree_id.to_proto(),
3652 language_name: symbol.language_name.clone(),
3653 name: symbol.name.clone(),
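        // Reverse of the transmute in `deserialize_symbol`: send the raw
        // `SymbolKind` discriminant over the wire.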
3654 kind: unsafe { mem::transmute(symbol.kind) },
3655 path: symbol.path.to_string_lossy().to_string(),
3656 start: Some(proto::Point {
3657 row: symbol.range.start.row,
3658 column: symbol.range.start.column,
3659 }),
3660 end: Some(proto::Point {
3661 row: symbol.range.end.row,
3662 column: symbol.range.end.column,
3663 }),
3664 signature: symbol.signature.to_vec(),
3665 }
3666}
3667
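/// Computes `path` relative to `base`, inserting `..` components where the two
/// paths diverge.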
3668fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3669 let mut path_components = path.components();
3670 let mut base_components = base.components();
3671 let mut components: Vec<Component> = Vec::new();
3672 loop {
3673 match (path_components.next(), base_components.next()) {
3674 (None, None) => break,
3675 (Some(a), None) => {
3676 components.push(a);
3677 components.extend(path_components.by_ref());
3678 break;
3679 }
3680 (None, _) => components.push(Component::ParentDir),
3681 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3682 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3683 (Some(a), Some(_)) => {
3684 components.push(Component::ParentDir);
3685 for _ in base_components {
3686 components.push(Component::ParentDir);
3687 }
3688 components.push(a);
3689 components.extend(path_components.by_ref());
3690 break;
3691 }
3692 }
3693 }
3694 components.iter().map(|c| c.as_os_str()).collect()
3695}
3696
3697#[cfg(test)]
3698mod tests {
3699 use super::{Event, *};
3700 use fs::RealFs;
3701 use futures::StreamExt;
3702 use gpui::test::subscribe;
3703 use language::{
3704 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3705 };
3706 use lsp::Url;
3707 use serde_json::json;
3708 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3709 use unindent::Unindent as _;
3710 use util::test::temp_tree;
3711 use worktree::WorktreeHandle as _;
3712
3713 #[gpui::test]
3714 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3715 let dir = temp_tree(json!({
3716 "root": {
3717 "apple": "",
3718 "banana": {
3719 "carrot": {
3720 "date": "",
3721 "endive": "",
3722 }
3723 },
3724 "fennel": {
3725 "grape": "",
3726 }
3727 }
3728 }));
3729
3730 let root_link_path = dir.path().join("root_link");
3731 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3732 unix::fs::symlink(
3733 &dir.path().join("root/fennel"),
3734 &dir.path().join("root/finnochio"),
3735 )
3736 .unwrap();
3737
3738 let project = Project::test(Arc::new(RealFs), cx);
3739
3740 let (tree, _) = project
3741 .update(cx, |project, cx| {
3742 project.find_or_create_local_worktree(&root_link_path, true, cx)
3743 })
3744 .await
3745 .unwrap();
3746
3747 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3748 .await;
3749 cx.read(|cx| {
3750 let tree = tree.read(cx);
3751 assert_eq!(tree.file_count(), 5);
3752 assert_eq!(
3753 tree.inode_for_path("fennel/grape"),
3754 tree.inode_for_path("finnochio/grape")
3755 );
3756 });
3757
3758 let cancel_flag = Default::default();
3759 let results = project
3760 .read_with(cx, |project, cx| {
3761 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3762 })
3763 .await;
3764 assert_eq!(
3765 results
3766 .into_iter()
3767 .map(|result| result.path)
3768 .collect::<Vec<Arc<Path>>>(),
3769 vec![
3770 PathBuf::from("banana/carrot/date").into(),
3771 PathBuf::from("banana/carrot/endive").into(),
3772 ]
3773 );
3774 }
3775
3776 #[gpui::test]
3777 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3778 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3779 let progress_token = language_server_config
3780 .disk_based_diagnostics_progress_token
3781 .clone()
3782 .unwrap();
3783
3784 let language = Arc::new(Language::new(
3785 LanguageConfig {
3786 name: "Rust".into(),
3787 path_suffixes: vec!["rs".to_string()],
3788 language_server: Some(language_server_config),
3789 ..Default::default()
3790 },
3791 Some(tree_sitter_rust::language()),
3792 ));
3793
3794 let fs = FakeFs::new(cx.background());
3795 fs.insert_tree(
3796 "/dir",
3797 json!({
3798 "a.rs": "fn a() { A }",
3799 "b.rs": "const y: i32 = 1",
3800 }),
3801 )
3802 .await;
3803
3804 let project = Project::test(fs, cx);
3805 project.update(cx, |project, _| {
3806 Arc::get_mut(&mut project.languages).unwrap().add(language);
3807 });
3808
3809 let (tree, _) = project
3810 .update(cx, |project, cx| {
3811 project.find_or_create_local_worktree("/dir", true, cx)
3812 })
3813 .await
3814 .unwrap();
3815 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3816
3817 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3818 .await;
3819
3820 // Cause worktree to start the fake language server
3821 let _buffer = project
3822 .update(cx, |project, cx| {
3823 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3824 })
3825 .await
3826 .unwrap();
3827
3828 let mut events = subscribe(&project, cx);
3829
3830 let mut fake_server = fake_servers.next().await.unwrap();
3831 fake_server.start_progress(&progress_token).await;
3832 assert_eq!(
3833 events.next().await.unwrap(),
3834 Event::DiskBasedDiagnosticsStarted
3835 );
3836
3837 fake_server.start_progress(&progress_token).await;
3838 fake_server.end_progress(&progress_token).await;
3839 fake_server.start_progress(&progress_token).await;
3840
3841 fake_server
3842 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3843 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3844 version: None,
3845 diagnostics: vec![lsp::Diagnostic {
3846 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3847 severity: Some(lsp::DiagnosticSeverity::ERROR),
3848 message: "undefined variable 'A'".to_string(),
3849 ..Default::default()
3850 }],
3851 })
3852 .await;
3853 assert_eq!(
3854 events.next().await.unwrap(),
3855 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3856 );
3857
3858 fake_server.end_progress(&progress_token).await;
3859 fake_server.end_progress(&progress_token).await;
3860 assert_eq!(
3861 events.next().await.unwrap(),
3862 Event::DiskBasedDiagnosticsUpdated
3863 );
3864 assert_eq!(
3865 events.next().await.unwrap(),
3866 Event::DiskBasedDiagnosticsFinished
3867 );
3868
3869 let buffer = project
3870 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3871 .await
3872 .unwrap();
3873
3874 buffer.read_with(cx, |buffer, _| {
3875 let snapshot = buffer.snapshot();
3876 let diagnostics = snapshot
3877 .diagnostics_in_range::<_, Point>(0..buffer.len())
3878 .collect::<Vec<_>>();
3879 assert_eq!(
3880 diagnostics,
3881 &[DiagnosticEntry {
3882 range: Point::new(0, 9)..Point::new(0, 10),
3883 diagnostic: Diagnostic {
3884 severity: lsp::DiagnosticSeverity::ERROR,
3885 message: "undefined variable 'A'".to_string(),
3886 group_id: 0,
3887 is_primary: true,
3888 ..Default::default()
3889 }
3890 }]
3891 )
3892 });
3893 }
3894
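    // A worktree containing only empty directories yields no path matches.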
3895 #[gpui::test]
3896 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3897 let dir = temp_tree(json!({
3898 "root": {
3899 "dir1": {},
3900 "dir2": {
3901 "dir3": {}
3902 }
3903 }
3904 }));
3905
3906 let project = Project::test(Arc::new(RealFs), cx);
3907 let (tree, _) = project
3908 .update(cx, |project, cx| {
3909 project.find_or_create_local_worktree(&dir.path(), true, cx)
3910 })
3911 .await
3912 .unwrap();
3913
3914 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3915 .await;
3916
3917 let cancel_flag = Default::default();
3918 let results = project
3919 .read_with(cx, |project, cx| {
3920 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3921 })
3922 .await;
3923
3924 assert!(results.is_empty());
3925 }
3926
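    // Requests go-to-definition through a fake language server and verifies that
    // the target file is opened in a new, invisible worktree, which is released
    // once the definition is dropped.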
3927 #[gpui::test]
3928 async fn test_definition(cx: &mut gpui::TestAppContext) {
3929 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3930 let language = Arc::new(Language::new(
3931 LanguageConfig {
3932 name: "Rust".into(),
3933 path_suffixes: vec!["rs".to_string()],
3934 language_server: Some(language_server_config),
3935 ..Default::default()
3936 },
3937 Some(tree_sitter_rust::language()),
3938 ));
3939
3940 let fs = FakeFs::new(cx.background());
3941 fs.insert_tree(
3942 "/dir",
3943 json!({
3944 "a.rs": "const fn a() { A }",
3945 "b.rs": "const y: i32 = crate::a()",
3946 }),
3947 )
3948 .await;
3949
3950 let project = Project::test(fs, cx);
3951 project.update(cx, |project, _| {
3952 Arc::get_mut(&mut project.languages).unwrap().add(language);
3953 });
3954
3955 let (tree, _) = project
3956 .update(cx, |project, cx| {
3957 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
3958 })
3959 .await
3960 .unwrap();
3961 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3962 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3963 .await;
3964
3965 let buffer = project
3966 .update(cx, |project, cx| {
3967 project.open_buffer(
3968 ProjectPath {
3969 worktree_id,
3970 path: Path::new("").into(),
3971 },
3972 cx,
3973 )
3974 })
3975 .await
3976 .unwrap();
3977
3978 let mut fake_server = fake_servers.next().await.unwrap();
3979 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3980 let params = params.text_document_position_params;
3981 assert_eq!(
3982 params.text_document.uri.to_file_path().unwrap(),
3983 Path::new("/dir/b.rs"),
3984 );
3985 assert_eq!(params.position, lsp::Position::new(0, 22));
3986
3987 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3988 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3989 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3990 )))
3991 });
3992
3993 let mut definitions = project
3994 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
3995 .await
3996 .unwrap();
3997
3998 assert_eq!(definitions.len(), 1);
3999 let definition = definitions.pop().unwrap();
4000 cx.update(|cx| {
4001 let target_buffer = definition.buffer.read(cx);
4002 assert_eq!(
4003 target_buffer
4004 .file()
4005 .unwrap()
4006 .as_local()
4007 .unwrap()
4008 .abs_path(cx),
4009 Path::new("/dir/a.rs"),
4010 );
4011 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
4012 assert_eq!(
4013 list_worktrees(&project, cx),
4014 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4015 );
4016
4017 drop(definition);
4018 });
4019 cx.read(|cx| {
4020 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4021 });
4022
4023 fn list_worktrees<'a>(
4024 project: &'a ModelHandle<Project>,
4025 cx: &'a AppContext,
4026 ) -> Vec<(&'a Path, bool)> {
4027 project
4028 .read(cx)
4029 .worktrees(cx)
4030 .map(|worktree| {
4031 let worktree = worktree.read(cx);
4032 (
4033 worktree.as_local().unwrap().abs_path().as_ref(),
4034 worktree.is_visible(),
4035 )
4036 })
4037 .collect::<Vec<_>>()
4038 }
4039 }
4040
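    // Edits a buffer and saves it, verifying that the new contents reach the
    // underlying file system.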
4041 #[gpui::test]
4042 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4043 let fs = FakeFs::new(cx.background());
4044 fs.insert_tree(
4045 "/dir",
4046 json!({
4047 "file1": "the old contents",
4048 }),
4049 )
4050 .await;
4051
4052 let project = Project::test(fs.clone(), cx);
4053 let worktree_id = project
4054 .update(cx, |p, cx| {
4055 p.find_or_create_local_worktree("/dir", true, cx)
4056 })
4057 .await
4058 .unwrap()
4059 .0
4060 .read_with(cx, |tree, _| tree.id());
4061
4062 let buffer = project
4063 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4064 .await
4065 .unwrap();
4066 buffer
4067 .update(cx, |buffer, cx| {
4068 assert_eq!(buffer.text(), "the old contents");
4069 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4070 buffer.save(cx)
4071 })
4072 .await
4073 .unwrap();
4074
4075 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4076 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4077 }
4078
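    // Like test_save_file, but for a worktree rooted at a single file rather
    // than a directory.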
4079 #[gpui::test]
4080 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4081 let fs = FakeFs::new(cx.background());
4082 fs.insert_tree(
4083 "/dir",
4084 json!({
4085 "file1": "the old contents",
4086 }),
4087 )
4088 .await;
4089
4090 let project = Project::test(fs.clone(), cx);
4091 let worktree_id = project
4092 .update(cx, |p, cx| {
4093 p.find_or_create_local_worktree("/dir/file1", true, cx)
4094 })
4095 .await
4096 .unwrap()
4097 .0
4098 .read_with(cx, |tree, _| tree.id());
4099
4100 let buffer = project
4101 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4102 .await
4103 .unwrap();
4104 buffer
4105 .update(cx, |buffer, cx| {
4106 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4107 buffer.save(cx)
4108 })
4109 .await
4110 .unwrap();
4111
4112 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4113 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4114 }
4115
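    // Renames and deletes files and directories on disk, then checks that entry
    // ids, open buffers, and a remote replica of the worktree all converge on
    // the new state.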
4116 #[gpui::test(retries = 5)]
4117 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4118 let dir = temp_tree(json!({
4119 "a": {
4120 "file1": "",
4121 "file2": "",
4122 "file3": "",
4123 },
4124 "b": {
4125 "c": {
4126 "file4": "",
4127 "file5": "",
4128 }
4129 }
4130 }));
4131
4132 let project = Project::test(Arc::new(RealFs), cx);
4133 let rpc = project.read_with(cx, |p, _| p.client.clone());
4134
4135 let (tree, _) = project
4136 .update(cx, |p, cx| {
4137 p.find_or_create_local_worktree(dir.path(), true, cx)
4138 })
4139 .await
4140 .unwrap();
4141 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4142
4143 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4144 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4145 async move { buffer.await.unwrap() }
4146 };
4147 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4148 tree.read_with(cx, |tree, _| {
4149 tree.entry_for_path(path)
                    .unwrap_or_else(|| panic!("no entry for path {}", path))
4151 .id
4152 })
4153 };
4154
4155 let buffer2 = buffer_for_path("a/file2", cx).await;
4156 let buffer3 = buffer_for_path("a/file3", cx).await;
4157 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4158 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4159
4160 let file2_id = id_for_path("a/file2", &cx);
4161 let file3_id = id_for_path("a/file3", &cx);
4162 let file4_id = id_for_path("b/c/file4", &cx);
4163
4164 // Wait for the initial scan.
4165 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4166 .await;
4167
4168 // Create a remote copy of this worktree.
4169 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4170 let (remote, load_task) = cx.update(|cx| {
4171 Worktree::remote(
4172 1,
4173 1,
4174 initial_snapshot.to_proto(&Default::default(), true),
4175 rpc.clone(),
4176 cx,
4177 )
4178 });
4179 load_task.await;
4180
4181 cx.read(|cx| {
4182 assert!(!buffer2.read(cx).is_dirty());
4183 assert!(!buffer3.read(cx).is_dirty());
4184 assert!(!buffer4.read(cx).is_dirty());
4185 assert!(!buffer5.read(cx).is_dirty());
4186 });
4187
4188 // Rename and delete files and directories.
4189 tree.flush_fs_events(&cx).await;
4190 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4191 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4192 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4193 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4194 tree.flush_fs_events(&cx).await;
4195
4196 let expected_paths = vec![
4197 "a",
4198 "a/file1",
4199 "a/file2.new",
4200 "b",
4201 "d",
4202 "d/file3",
4203 "d/file4",
4204 ];
4205
4206 cx.read(|app| {
4207 assert_eq!(
4208 tree.read(app)
4209 .paths()
4210 .map(|p| p.to_str().unwrap())
4211 .collect::<Vec<_>>(),
4212 expected_paths
4213 );
4214
4215 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4216 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4217 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4218
4219 assert_eq!(
4220 buffer2.read(app).file().unwrap().path().as_ref(),
4221 Path::new("a/file2.new")
4222 );
4223 assert_eq!(
4224 buffer3.read(app).file().unwrap().path().as_ref(),
4225 Path::new("d/file3")
4226 );
4227 assert_eq!(
4228 buffer4.read(app).file().unwrap().path().as_ref(),
4229 Path::new("d/file4")
4230 );
4231 assert_eq!(
4232 buffer5.read(app).file().unwrap().path().as_ref(),
4233 Path::new("b/c/file5")
4234 );
4235
4236 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4237 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4238 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4239 assert!(buffer5.read(app).file().unwrap().is_deleted());
4240 });
4241
4242 // Update the remote worktree. Check that it becomes consistent with the
4243 // local worktree.
4244 remote.update(cx, |remote, cx| {
4245 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4246 &initial_snapshot,
4247 1,
4248 1,
4249 true,
4250 );
4251 remote
4252 .as_remote_mut()
4253 .unwrap()
4254 .snapshot
4255 .apply_remote_update(update_message)
4256 .unwrap();
4257
4258 assert_eq!(
4259 remote
4260 .paths()
4261 .map(|p| p.to_str().unwrap())
4262 .collect::<Vec<_>>(),
4263 expected_paths
4264 );
4265 });
4266 }
4267
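    // Opening the same path multiple times, even concurrently, always yields the
    // same buffer.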
4268 #[gpui::test]
4269 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4270 let fs = FakeFs::new(cx.background());
4271 fs.insert_tree(
4272 "/the-dir",
4273 json!({
4274 "a.txt": "a-contents",
4275 "b.txt": "b-contents",
4276 }),
4277 )
4278 .await;
4279
4280 let project = Project::test(fs.clone(), cx);
4281 let worktree_id = project
4282 .update(cx, |p, cx| {
4283 p.find_or_create_local_worktree("/the-dir", true, cx)
4284 })
4285 .await
4286 .unwrap()
4287 .0
4288 .read_with(cx, |tree, _| tree.id());
4289
4290 // Spawn multiple tasks to open paths, repeating some paths.
4291 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4292 (
4293 p.open_buffer((worktree_id, "a.txt"), cx),
4294 p.open_buffer((worktree_id, "b.txt"), cx),
4295 p.open_buffer((worktree_id, "a.txt"), cx),
4296 )
4297 });
4298
4299 let buffer_a_1 = buffer_a_1.await.unwrap();
4300 let buffer_a_2 = buffer_a_2.await.unwrap();
4301 let buffer_b = buffer_b.await.unwrap();
4302 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4303 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4304
4305 // There is only one buffer per path.
4306 let buffer_a_id = buffer_a_1.id();
4307 assert_eq!(buffer_a_2.id(), buffer_a_id);
4308
4309 // Open the same path again while it is still open.
4310 drop(buffer_a_1);
4311 let buffer_a_3 = project
4312 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4313 .await
4314 .unwrap();
4315
4316 // There's still only one buffer per path.
4317 assert_eq!(buffer_a_3.id(), buffer_a_id);
4318 }
4319
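    // Exercises the buffer's dirty state and its Edited, Dirtied, Saved, and
    // FileHandleChanged events across edits, saves, and on-disk deletions.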
4320 #[gpui::test]
4321 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4322 use std::fs;
4323
4324 let dir = temp_tree(json!({
4325 "file1": "abc",
4326 "file2": "def",
4327 "file3": "ghi",
4328 }));
4329
4330 let project = Project::test(Arc::new(RealFs), cx);
4331 let (worktree, _) = project
4332 .update(cx, |p, cx| {
4333 p.find_or_create_local_worktree(dir.path(), true, cx)
4334 })
4335 .await
4336 .unwrap();
4337 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4338
4339 worktree.flush_fs_events(&cx).await;
4340 worktree
4341 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4342 .await;
4343
4344 let buffer1 = project
4345 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4346 .await
4347 .unwrap();
4348 let events = Rc::new(RefCell::new(Vec::new()));
4349
4350 // initially, the buffer isn't dirty.
4351 buffer1.update(cx, |buffer, cx| {
4352 cx.subscribe(&buffer1, {
4353 let events = events.clone();
4354 move |_, _, event, _| events.borrow_mut().push(event.clone())
4355 })
4356 .detach();
4357
4358 assert!(!buffer.is_dirty());
4359 assert!(events.borrow().is_empty());
4360
4361 buffer.edit(vec![1..2], "", cx);
4362 });
4363
4364 // after the first edit, the buffer is dirty, and emits a dirtied event.
4365 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
4367 assert!(buffer.is_dirty());
4368 assert_eq!(
4369 *events.borrow(),
4370 &[language::Event::Edited, language::Event::Dirtied]
4371 );
4372 events.borrow_mut().clear();
4373 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4374 });
4375
4376 // after saving, the buffer is not dirty, and emits a saved event.
4377 buffer1.update(cx, |buffer, cx| {
4378 assert!(!buffer.is_dirty());
4379 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4380 events.borrow_mut().clear();
4381
4382 buffer.edit(vec![1..1], "B", cx);
4383 buffer.edit(vec![2..2], "D", cx);
4384 });
4385
        // after editing again, the buffer is dirty, and emits another dirtied event.
4387 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
4389 assert!(buffer.is_dirty());
4390 assert_eq!(
4391 *events.borrow(),
4392 &[
4393 language::Event::Edited,
4394 language::Event::Dirtied,
4395 language::Event::Edited,
4396 ],
4397 );
4398 events.borrow_mut().clear();
4399
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4402 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
4404 assert!(buffer.is_dirty());
4405 });
4406
4407 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4408
4409 // When a file is deleted, the buffer is considered dirty.
4410 let events = Rc::new(RefCell::new(Vec::new()));
4411 let buffer2 = project
4412 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4413 .await
4414 .unwrap();
4415 buffer2.update(cx, |_, cx| {
4416 cx.subscribe(&buffer2, {
4417 let events = events.clone();
4418 move |_, _, event, _| events.borrow_mut().push(event.clone())
4419 })
4420 .detach();
4421 });
4422
4423 fs::remove_file(dir.path().join("file2")).unwrap();
4424 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4425 assert_eq!(
4426 *events.borrow(),
4427 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4428 );
4429
4430 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4431 let events = Rc::new(RefCell::new(Vec::new()));
4432 let buffer3 = project
4433 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4434 .await
4435 .unwrap();
4436 buffer3.update(cx, |_, cx| {
4437 cx.subscribe(&buffer3, {
4438 let events = events.clone();
4439 move |_, _, event, _| events.borrow_mut().push(event.clone())
4440 })
4441 .detach();
4442 });
4443
4444 worktree.flush_fs_events(&cx).await;
4445 buffer3.update(cx, |buffer, cx| {
4446 buffer.edit(Some(0..0), "x", cx);
4447 });
4448 events.borrow_mut().clear();
4449 fs::remove_file(dir.path().join("file3")).unwrap();
4450 buffer3
4451 .condition(&cx, |_, _| !events.borrow().is_empty())
4452 .await;
4453 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4454 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4455 }
4456
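    // When the file changes on disk, an unmodified buffer is reloaded in place,
    // while a modified buffer keeps its edits and is marked as conflicted.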
4457 #[gpui::test]
4458 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4459 use std::fs;
4460
4461 let initial_contents = "aaa\nbbbbb\nc\n";
4462 let dir = temp_tree(json!({ "the-file": initial_contents }));
4463
4464 let project = Project::test(Arc::new(RealFs), cx);
4465 let (worktree, _) = project
4466 .update(cx, |p, cx| {
4467 p.find_or_create_local_worktree(dir.path(), true, cx)
4468 })
4469 .await
4470 .unwrap();
4471 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4472
4473 worktree
4474 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4475 .await;
4476
4477 let abs_path = dir.path().join("the-file");
4478 let buffer = project
4479 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4480 .await
4481 .unwrap();
4482
4483 // TODO
4484 // Add a cursor on each row.
4485 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4486 // assert!(!buffer.is_dirty());
4487 // buffer.add_selection_set(
4488 // &(0..3)
4489 // .map(|row| Selection {
4490 // id: row as usize,
4491 // start: Point::new(row, 1),
4492 // end: Point::new(row, 1),
4493 // reversed: false,
4494 // goal: SelectionGoal::None,
4495 // })
4496 // .collect::<Vec<_>>(),
4497 // cx,
4498 // )
4499 // });
4500
4501 // Change the file on disk, adding two new lines of text, and removing
4502 // one line.
4503 buffer.read_with(cx, |buffer, _| {
4504 assert!(!buffer.is_dirty());
4505 assert!(!buffer.has_conflict());
4506 });
4507 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4508 fs::write(&abs_path, new_contents).unwrap();
4509
4510 // Because the buffer was not modified, it is reloaded from disk. Its
4511 // contents are edited according to the diff between the old and new
4512 // file contents.
4513 buffer
4514 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4515 .await;
4516
4517 buffer.update(cx, |buffer, _| {
4518 assert_eq!(buffer.text(), new_contents);
4519 assert!(!buffer.is_dirty());
4520 assert!(!buffer.has_conflict());
4521
4522 // TODO
4523 // let cursor_positions = buffer
4524 // .selection_set(selection_set_id)
4525 // .unwrap()
4526 // .selections::<Point>(&*buffer)
4527 // .map(|selection| {
4528 // assert_eq!(selection.start, selection.end);
4529 // selection.start
4530 // })
4531 // .collect::<Vec<_>>();
4532 // assert_eq!(
4533 // cursor_positions,
4534 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4535 // );
4536 });
4537
4538 // Modify the buffer
4539 buffer.update(cx, |buffer, cx| {
4540 buffer.edit(vec![0..0], " ", cx);
4541 assert!(buffer.is_dirty());
4542 assert!(!buffer.has_conflict());
4543 });
4544
4545 // Change the file on disk again, adding blank lines to the beginning.
4546 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4547
4548 // Because the buffer is modified, it doesn't reload from disk, but is
4549 // marked as having a conflict.
4550 buffer
4551 .condition(&cx, |buffer, _| buffer.has_conflict())
4552 .await;
4553 }
4554
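    // Publishes diagnostics with related information and verifies that each
    // primary diagnostic is grouped with its hints under a shared group id.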
4555 #[gpui::test]
4556 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4557 let fs = FakeFs::new(cx.background());
4558 fs.insert_tree(
4559 "/the-dir",
4560 json!({
4561 "a.rs": "
4562 fn foo(mut v: Vec<usize>) {
4563 for x in &v {
4564 v.push(1);
4565 }
4566 }
4567 "
4568 .unindent(),
4569 }),
4570 )
4571 .await;
4572
4573 let project = Project::test(fs.clone(), cx);
4574 let (worktree, _) = project
4575 .update(cx, |p, cx| {
4576 p.find_or_create_local_worktree("/the-dir", true, cx)
4577 })
4578 .await
4579 .unwrap();
4580 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4581
4582 let buffer = project
4583 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4584 .await
4585 .unwrap();
4586
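        // Two diagnostic groups: a warning with one related hint, and an error
        // with two related hints. Each hint is also published as a standalone
        // HINT diagnostic pointing back at its primary diagnostic.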
4587 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4588 let message = lsp::PublishDiagnosticsParams {
4589 uri: buffer_uri.clone(),
4590 diagnostics: vec![
4591 lsp::Diagnostic {
4592 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4593 severity: Some(DiagnosticSeverity::WARNING),
4594 message: "error 1".to_string(),
4595 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4596 location: lsp::Location {
4597 uri: buffer_uri.clone(),
4598 range: lsp::Range::new(
4599 lsp::Position::new(1, 8),
4600 lsp::Position::new(1, 9),
4601 ),
4602 },
4603 message: "error 1 hint 1".to_string(),
4604 }]),
4605 ..Default::default()
4606 },
4607 lsp::Diagnostic {
4608 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4609 severity: Some(DiagnosticSeverity::HINT),
4610 message: "error 1 hint 1".to_string(),
4611 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4612 location: lsp::Location {
4613 uri: buffer_uri.clone(),
4614 range: lsp::Range::new(
4615 lsp::Position::new(1, 8),
4616 lsp::Position::new(1, 9),
4617 ),
4618 },
4619 message: "original diagnostic".to_string(),
4620 }]),
4621 ..Default::default()
4622 },
4623 lsp::Diagnostic {
4624 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4625 severity: Some(DiagnosticSeverity::ERROR),
4626 message: "error 2".to_string(),
4627 related_information: Some(vec![
4628 lsp::DiagnosticRelatedInformation {
4629 location: lsp::Location {
4630 uri: buffer_uri.clone(),
4631 range: lsp::Range::new(
4632 lsp::Position::new(1, 13),
4633 lsp::Position::new(1, 15),
4634 ),
4635 },
4636 message: "error 2 hint 1".to_string(),
4637 },
4638 lsp::DiagnosticRelatedInformation {
4639 location: lsp::Location {
4640 uri: buffer_uri.clone(),
4641 range: lsp::Range::new(
4642 lsp::Position::new(1, 13),
4643 lsp::Position::new(1, 15),
4644 ),
4645 },
4646 message: "error 2 hint 2".to_string(),
4647 },
4648 ]),
4649 ..Default::default()
4650 },
4651 lsp::Diagnostic {
4652 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4653 severity: Some(DiagnosticSeverity::HINT),
4654 message: "error 2 hint 1".to_string(),
4655 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4656 location: lsp::Location {
4657 uri: buffer_uri.clone(),
4658 range: lsp::Range::new(
4659 lsp::Position::new(2, 8),
4660 lsp::Position::new(2, 17),
4661 ),
4662 },
4663 message: "original diagnostic".to_string(),
4664 }]),
4665 ..Default::default()
4666 },
4667 lsp::Diagnostic {
4668 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4669 severity: Some(DiagnosticSeverity::HINT),
4670 message: "error 2 hint 2".to_string(),
4671 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4672 location: lsp::Location {
4673 uri: buffer_uri.clone(),
4674 range: lsp::Range::new(
4675 lsp::Position::new(2, 8),
4676 lsp::Position::new(2, 17),
4677 ),
4678 },
4679 message: "original diagnostic".to_string(),
4680 }]),
4681 ..Default::default()
4682 },
4683 ],
4684 version: None,
4685 };
4686
4687 project
4688 .update(cx, |p, cx| {
4689 p.update_diagnostics(message, &Default::default(), cx)
4690 })
4691 .unwrap();
4692 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4693
4694 assert_eq!(
4695 buffer
4696 .diagnostics_in_range::<_, Point>(0..buffer.len())
4697 .collect::<Vec<_>>(),
4698 &[
4699 DiagnosticEntry {
4700 range: Point::new(1, 8)..Point::new(1, 9),
4701 diagnostic: Diagnostic {
4702 severity: DiagnosticSeverity::WARNING,
4703 message: "error 1".to_string(),
4704 group_id: 0,
4705 is_primary: true,
4706 ..Default::default()
4707 }
4708 },
4709 DiagnosticEntry {
4710 range: Point::new(1, 8)..Point::new(1, 9),
4711 diagnostic: Diagnostic {
4712 severity: DiagnosticSeverity::HINT,
4713 message: "error 1 hint 1".to_string(),
4714 group_id: 0,
4715 is_primary: false,
4716 ..Default::default()
4717 }
4718 },
4719 DiagnosticEntry {
4720 range: Point::new(1, 13)..Point::new(1, 15),
4721 diagnostic: Diagnostic {
4722 severity: DiagnosticSeverity::HINT,
4723 message: "error 2 hint 1".to_string(),
4724 group_id: 1,
4725 is_primary: false,
4726 ..Default::default()
4727 }
4728 },
4729 DiagnosticEntry {
4730 range: Point::new(1, 13)..Point::new(1, 15),
4731 diagnostic: Diagnostic {
4732 severity: DiagnosticSeverity::HINT,
4733 message: "error 2 hint 2".to_string(),
4734 group_id: 1,
4735 is_primary: false,
4736 ..Default::default()
4737 }
4738 },
4739 DiagnosticEntry {
4740 range: Point::new(2, 8)..Point::new(2, 17),
4741 diagnostic: Diagnostic {
4742 severity: DiagnosticSeverity::ERROR,
4743 message: "error 2".to_string(),
4744 group_id: 1,
4745 is_primary: true,
4746 ..Default::default()
4747 }
4748 }
4749 ]
4750 );
4751
4752 assert_eq!(
4753 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4754 &[
4755 DiagnosticEntry {
4756 range: Point::new(1, 8)..Point::new(1, 9),
4757 diagnostic: Diagnostic {
4758 severity: DiagnosticSeverity::WARNING,
4759 message: "error 1".to_string(),
4760 group_id: 0,
4761 is_primary: true,
4762 ..Default::default()
4763 }
4764 },
4765 DiagnosticEntry {
4766 range: Point::new(1, 8)..Point::new(1, 9),
4767 diagnostic: Diagnostic {
4768 severity: DiagnosticSeverity::HINT,
4769 message: "error 1 hint 1".to_string(),
4770 group_id: 0,
4771 is_primary: false,
4772 ..Default::default()
4773 }
4774 },
4775 ]
4776 );
4777 assert_eq!(
4778 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4779 &[
4780 DiagnosticEntry {
4781 range: Point::new(1, 13)..Point::new(1, 15),
4782 diagnostic: Diagnostic {
4783 severity: DiagnosticSeverity::HINT,
4784 message: "error 2 hint 1".to_string(),
4785 group_id: 1,
4786 is_primary: false,
4787 ..Default::default()
4788 }
4789 },
4790 DiagnosticEntry {
4791 range: Point::new(1, 13)..Point::new(1, 15),
4792 diagnostic: Diagnostic {
4793 severity: DiagnosticSeverity::HINT,
4794 message: "error 2 hint 2".to_string(),
4795 group_id: 1,
4796 is_primary: false,
4797 ..Default::default()
4798 }
4799 },
4800 DiagnosticEntry {
4801 range: Point::new(2, 8)..Point::new(2, 17),
4802 diagnostic: Diagnostic {
4803 severity: DiagnosticSeverity::ERROR,
4804 message: "error 2".to_string(),
4805 group_id: 1,
4806 is_primary: true,
4807 ..Default::default()
4808 }
4809 }
4810 ]
4811 );
4812 }
4813
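    // Prepares and performs a rename through a fake language server, checking
    // the range returned by prepare-rename and the workspace edit applied to
    // both affected buffers.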
4814 #[gpui::test]
4815 async fn test_rename(cx: &mut gpui::TestAppContext) {
4816 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4817 let language = Arc::new(Language::new(
4818 LanguageConfig {
4819 name: "Rust".into(),
4820 path_suffixes: vec!["rs".to_string()],
4821 language_server: Some(language_server_config),
4822 ..Default::default()
4823 },
4824 Some(tree_sitter_rust::language()),
4825 ));
4826
4827 let fs = FakeFs::new(cx.background());
4828 fs.insert_tree(
4829 "/dir",
4830 json!({
4831 "one.rs": "const ONE: usize = 1;",
4832 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4833 }),
4834 )
4835 .await;
4836
4837 let project = Project::test(fs.clone(), cx);
4838 project.update(cx, |project, _| {
4839 Arc::get_mut(&mut project.languages).unwrap().add(language);
4840 });
4841
4842 let (tree, _) = project
4843 .update(cx, |project, cx| {
4844 project.find_or_create_local_worktree("/dir", true, cx)
4845 })
4846 .await
4847 .unwrap();
4848 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4849 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4850 .await;
4851
4852 let buffer = project
4853 .update(cx, |project, cx| {
4854 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4855 })
4856 .await
4857 .unwrap();
4858
4859 let mut fake_server = fake_servers.next().await.unwrap();
4860
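        // The prepare-rename request resolves to the range of the symbol under
        // the cursor.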
4861 let response = project.update(cx, |project, cx| {
4862 project.prepare_rename(buffer.clone(), 7, cx)
4863 });
4864 fake_server
4865 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4866 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4867 assert_eq!(params.position, lsp::Position::new(0, 7));
4868 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4869 lsp::Position::new(0, 6),
4870 lsp::Position::new(0, 9),
4871 )))
4872 })
4873 .next()
4874 .await
4875 .unwrap();
4876 let range = response.await.unwrap().unwrap();
4877 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4878 assert_eq!(range, 6..9);
4879
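        // Performing the rename returns a workspace edit spanning both files.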
4880 let response = project.update(cx, |project, cx| {
4881 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4882 });
4883 fake_server
4884 .handle_request::<lsp::request::Rename, _>(|params, _| {
4885 assert_eq!(
4886 params.text_document_position.text_document.uri.as_str(),
4887 "file:///dir/one.rs"
4888 );
4889 assert_eq!(
4890 params.text_document_position.position,
4891 lsp::Position::new(0, 7)
4892 );
4893 assert_eq!(params.new_name, "THREE");
4894 Some(lsp::WorkspaceEdit {
4895 changes: Some(
4896 [
4897 (
4898 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4899 vec![lsp::TextEdit::new(
4900 lsp::Range::new(
4901 lsp::Position::new(0, 6),
4902 lsp::Position::new(0, 9),
4903 ),
4904 "THREE".to_string(),
4905 )],
4906 ),
4907 (
4908 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4909 vec![
4910 lsp::TextEdit::new(
4911 lsp::Range::new(
4912 lsp::Position::new(0, 24),
4913 lsp::Position::new(0, 27),
4914 ),
4915 "THREE".to_string(),
4916 ),
4917 lsp::TextEdit::new(
4918 lsp::Range::new(
4919 lsp::Position::new(0, 35),
4920 lsp::Position::new(0, 38),
4921 ),
4922 "THREE".to_string(),
4923 ),
4924 ],
4925 ),
4926 ]
4927 .into_iter()
4928 .collect(),
4929 ),
4930 ..Default::default()
4931 })
4932 })
4933 .next()
4934 .await
4935 .unwrap();
4936 let mut transaction = response.await.unwrap().0;
4937 assert_eq!(transaction.len(), 2);
4938 assert_eq!(
4939 transaction
4940 .remove_entry(&buffer)
4941 .unwrap()
4942 .0
4943 .read_with(cx, |buffer, _| buffer.text()),
4944 "const THREE: usize = 1;"
4945 );
4946 assert_eq!(
4947 transaction
4948 .into_keys()
4949 .next()
4950 .unwrap()
4951 .read_with(cx, |buffer, _| buffer.text()),
4952 "const TWO: usize = one::THREE + one::THREE;"
4953 );
4954 }
4955
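    // Searches the project for a string, first against on-disk contents and
    // then against unsaved edits in an open buffer.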
4956 #[gpui::test]
4957 async fn test_search(cx: &mut gpui::TestAppContext) {
4958 let fs = FakeFs::new(cx.background());
4959 fs.insert_tree(
4960 "/dir",
4961 json!({
4962 "one.rs": "const ONE: usize = 1;",
4963 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4964 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4965 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4966 }),
4967 )
4968 .await;
4969 let project = Project::test(fs.clone(), cx);
4970 let (tree, _) = project
4971 .update(cx, |project, cx| {
4972 project.find_or_create_local_worktree("/dir", true, cx)
4973 })
4974 .await
4975 .unwrap();
4976 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4977 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4978 .await;
4979
4980 assert_eq!(
4981 search(&project, SearchQuery::text("TWO", false, true), cx)
4982 .await
4983 .unwrap(),
4984 HashMap::from_iter([
4985 ("two.rs".to_string(), vec![6..9]),
4986 ("three.rs".to_string(), vec![37..40])
4987 ])
4988 );
4989
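        // Edit the open buffer without saving; the next search reflects the
        // buffer's current contents rather than what is on disk.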
4990 let buffer_4 = project
4991 .update(cx, |project, cx| {
4992 project.open_buffer((worktree_id, "four.rs"), cx)
4993 })
4994 .await
4995 .unwrap();
4996 buffer_4.update(cx, |buffer, cx| {
4997 buffer.edit([20..28, 31..43], "two::TWO", cx);
4998 });
4999
5000 assert_eq!(
5001 search(&project, SearchQuery::text("TWO", false, true), cx)
5002 .await
5003 .unwrap(),
5004 HashMap::from_iter([
5005 ("two.rs".to_string(), vec![6..9]),
5006 ("three.rs".to_string(), vec![37..40]),
5007 ("four.rs".to_string(), vec![25..28, 36..39])
5008 ])
5009 );
5010
5011 async fn search(
5012 project: &ModelHandle<Project>,
5013 query: SearchQuery,
5014 cx: &mut gpui::TestAppContext,
5015 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5016 let results = project
5017 .update(cx, |project, cx| project.search(query, cx))
5018 .await?;
5019
5020 Ok(results
5021 .into_iter()
5022 .map(|(buffer, ranges)| {
5023 buffer.read_with(cx, |buffer, _| {
5024 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5025 let ranges = ranges
5026 .into_iter()
5027 .map(|range| range.to_offset(buffer))
5028 .collect::<Vec<_>>();
5029 (path, ranges)
5030 })
5031 })
5032 .collect())
5033 }
5034 }
5035}