1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
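/// The in-memory state of a project: its worktrees, the buffers that are
/// currently open, the language servers started for those worktrees, and the
/// collaboration state (host or guest) used to keep everything in sync over RPC.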
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 loading_local_worktrees:
68 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
69 opened_buffers: HashMap<u64, OpenBuffer>,
70 nonce: u128,
71}
72
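/// How the project retains an open buffer: strongly (kept alive while the
/// project is shared or remote), weakly (allowed to drop when nothing else
/// holds it), or as a queue of operations received before the buffer itself
/// finished loading.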
73enum OpenBuffer {
74 Strong(ModelHandle<Buffer>),
75 Weak(WeakModelHandle<Buffer>),
76 Loading(Vec<Operation>),
77}
78
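/// Worktree handles follow the same pattern as `OpenBuffer`: strong while the
/// project is shared or the worktree is visible, weak otherwise.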
79enum WorktreeHandle {
80 Strong(ModelHandle<Worktree>),
81 Weak(WeakModelHandle<Worktree>),
82}
83
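/// Distinguishes the host side of a project (local files, which may or may not
/// currently be shared and registered with the server) from a guest that has
/// joined someone else's project by id.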
84enum ProjectClientState {
85 Local {
86 is_shared: bool,
87 remote_id_tx: watch::Sender<Option<u64>>,
88 remote_id_rx: watch::Receiver<Option<u64>>,
89 _maintain_remote_id_task: Task<Option<()>>,
90 },
91 Remote {
92 sharing_has_stopped: bool,
93 remote_id: u64,
94 replica_id: ReplicaId,
95 },
96}
97
98#[derive(Clone, Debug)]
99pub struct Collaborator {
100 pub user: Arc<User>,
101 pub peer_id: PeerId,
102 pub replica_id: ReplicaId,
103}
104
105#[derive(Clone, Debug, PartialEq)]
106pub enum Event {
107 ActiveEntryChanged(Option<ProjectEntry>),
108 WorktreeRemoved(WorktreeId),
109 DiskBasedDiagnosticsStarted,
110 DiskBasedDiagnosticsUpdated,
111 DiskBasedDiagnosticsFinished,
112 DiagnosticsUpdated(ProjectPath),
113}
114
115#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
116pub struct ProjectPath {
117 pub worktree_id: WorktreeId,
118 pub path: Arc<Path>,
119}
120
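/// Per-path counts of primary diagnostics, grouped by severity.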
121#[derive(Clone, Debug, Default, PartialEq)]
122pub struct DiagnosticSummary {
123 pub error_count: usize,
124 pub warning_count: usize,
125 pub info_count: usize,
126 pub hint_count: usize,
127}
128
129#[derive(Debug)]
130pub struct Location {
131 pub buffer: ModelHandle<Buffer>,
132 pub range: Range<language::Anchor>,
133}
134
135#[derive(Debug)]
136pub struct DocumentHighlight {
137 pub range: Range<language::Anchor>,
138 pub kind: DocumentHighlightKind,
139}
140
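/// A symbol produced by a workspace-symbol query. `source_worktree_id` is the
/// worktree whose language server returned the result, while `worktree_id` and
/// `path` locate the file that actually contains the symbol; `signature` is a
/// hash derived from that location (see `symbol_signature`).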
141#[derive(Clone, Debug)]
142pub struct Symbol {
143 pub source_worktree_id: WorktreeId,
144 pub worktree_id: WorktreeId,
145 pub language_name: String,
146 pub path: PathBuf,
147 pub label: CodeLabel,
148 pub name: String,
149 pub kind: lsp::SymbolKind,
150 pub range: Range<PointUtf16>,
151 pub signature: [u8; 32],
152}
153
154#[derive(Default)]
155pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
156
157impl DiagnosticSummary {
158 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
159 let mut this = Self {
160 error_count: 0,
161 warning_count: 0,
162 info_count: 0,
163 hint_count: 0,
164 };
165
166 for entry in diagnostics {
167 if entry.diagnostic.is_primary {
168 match entry.diagnostic.severity {
169 DiagnosticSeverity::ERROR => this.error_count += 1,
170 DiagnosticSeverity::WARNING => this.warning_count += 1,
171 DiagnosticSeverity::INFORMATION => this.info_count += 1,
172 DiagnosticSeverity::HINT => this.hint_count += 1,
173 _ => {}
174 }
175 }
176 }
177
178 this
179 }
180
181 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
182 proto::DiagnosticSummary {
183 path: path.to_string_lossy().to_string(),
184 error_count: self.error_count as u32,
185 warning_count: self.warning_count as u32,
186 info_count: self.info_count as u32,
187 hint_count: self.hint_count as u32,
188 }
189 }
190}
191
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
193pub struct ProjectEntry {
194 pub worktree_id: WorktreeId,
195 pub entry_id: usize,
196}
197
198impl Project {
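    /// Register the RPC message and request handlers that let a project
    /// participate in collaboration.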
199 pub fn init(client: &Arc<Client>) {
200 client.add_entity_message_handler(Self::handle_add_collaborator);
201 client.add_entity_message_handler(Self::handle_buffer_reloaded);
202 client.add_entity_message_handler(Self::handle_buffer_saved);
203 client.add_entity_message_handler(Self::handle_close_buffer);
204 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
205 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
206 client.add_entity_message_handler(Self::handle_remove_collaborator);
207 client.add_entity_message_handler(Self::handle_register_worktree);
208 client.add_entity_message_handler(Self::handle_unregister_worktree);
209 client.add_entity_message_handler(Self::handle_unshare_project);
210 client.add_entity_message_handler(Self::handle_update_buffer_file);
211 client.add_entity_message_handler(Self::handle_update_buffer);
212 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
213 client.add_entity_message_handler(Self::handle_update_worktree);
214 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
215 client.add_entity_request_handler(Self::handle_apply_code_action);
216 client.add_entity_request_handler(Self::handle_format_buffers);
217 client.add_entity_request_handler(Self::handle_get_code_actions);
218 client.add_entity_request_handler(Self::handle_get_completions);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
224 client.add_entity_request_handler(Self::handle_search_project);
225 client.add_entity_request_handler(Self::handle_get_project_symbols);
226 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
227 client.add_entity_request_handler(Self::handle_open_buffer);
228 client.add_entity_request_handler(Self::handle_save_buffer);
229 }
230
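    /// Create a project for editing local files. A background task watches the
    /// client's connection status and, whenever a connection is established,
    /// registers the project and its worktrees with the server.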
231 pub fn local(
232 client: Arc<Client>,
233 user_store: ModelHandle<UserStore>,
234 languages: Arc<LanguageRegistry>,
235 fs: Arc<dyn Fs>,
236 cx: &mut MutableAppContext,
237 ) -> ModelHandle<Self> {
238 cx.add_model(|cx: &mut ModelContext<Self>| {
239 let (remote_id_tx, remote_id_rx) = watch::channel();
240 let _maintain_remote_id_task = cx.spawn_weak({
241 let rpc = client.clone();
242 move |this, mut cx| {
243 async move {
244 let mut status = rpc.status();
245 while let Some(status) = status.next().await {
246 if let Some(this) = this.upgrade(&cx) {
247 let remote_id = if let client::Status::Connected { .. } = status {
248 let response = rpc.request(proto::RegisterProject {}).await?;
249 Some(response.project_id)
250 } else {
251 None
252 };
253
254 if let Some(project_id) = remote_id {
255 let mut registrations = Vec::new();
256 this.update(&mut cx, |this, cx| {
257 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
258 registrations.push(worktree.update(
259 cx,
260 |worktree, cx| {
261 let worktree = worktree.as_local_mut().unwrap();
262 worktree.register(project_id, cx)
263 },
264 ));
265 }
266 });
267 for registration in registrations {
268 registration.await?;
269 }
270 }
271 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
272 }
273 }
274 Ok(())
275 }
276 .log_err()
277 }
278 });
279
280 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
281 Self {
282 worktrees: Default::default(),
283 collaborators: Default::default(),
284 opened_buffers: Default::default(),
285 shared_buffers: Default::default(),
286 loading_buffers: Default::default(),
287 loading_local_worktrees: Default::default(),
288 client_state: ProjectClientState::Local {
289 is_shared: false,
290 remote_id_tx,
291 remote_id_rx,
292 _maintain_remote_id_task,
293 },
294 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
295 subscriptions: Vec::new(),
296 active_entry: None,
297 languages,
298 client,
299 user_store,
300 fs,
301 language_servers_with_diagnostics_running: 0,
302 language_servers: Default::default(),
303 started_language_servers: Default::default(),
304 nonce: StdRng::from_entropy().gen(),
305 }
306 })
307 }
308
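    /// Join a project hosted by another user: authenticate and connect, send
    /// `JoinProject`, build remote worktrees from the response, and record the
    /// current set of collaborators.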
309 pub async fn remote(
310 remote_id: u64,
311 client: Arc<Client>,
312 user_store: ModelHandle<UserStore>,
313 languages: Arc<LanguageRegistry>,
314 fs: Arc<dyn Fs>,
315 cx: &mut AsyncAppContext,
316 ) -> Result<ModelHandle<Self>> {
317 client.authenticate_and_connect(&cx).await?;
318
319 let response = client
320 .request(proto::JoinProject {
321 project_id: remote_id,
322 })
323 .await?;
324
325 let replica_id = response.replica_id as ReplicaId;
326
327 let mut worktrees = Vec::new();
328 for worktree in response.worktrees {
329 let (worktree, load_task) = cx
330 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
331 worktrees.push(worktree);
332 load_task.detach();
333 }
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 let this = cx.add_model(|cx| {
337 let mut this = Self {
338 worktrees: Vec::new(),
339 loading_buffers: Default::default(),
340 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
341 shared_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 active_entry: None,
344 collaborators: Default::default(),
345 languages,
346 user_store: user_store.clone(),
347 fs,
348 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
349 client,
350 client_state: ProjectClientState::Remote {
351 sharing_has_stopped: false,
352 remote_id,
353 replica_id,
354 },
355 language_servers_with_diagnostics_running: 0,
356 language_servers: Default::default(),
357 started_language_servers: Default::default(),
358 opened_buffers: Default::default(),
359 nonce: StdRng::from_entropy().gen(),
360 };
361 for worktree in worktrees {
362 this.add_worktree(&worktree, cx);
363 }
364 this
365 });
366
367 let user_ids = response
368 .collaborators
369 .iter()
370 .map(|peer| peer.user_id)
371 .collect();
372 user_store
373 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
374 .await?;
375 let mut collaborators = HashMap::default();
376 for message in response.collaborators {
377 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
378 collaborators.insert(collaborator.peer_id, collaborator);
379 }
380
381 this.update(cx, |this, _| {
382 this.collaborators = collaborators;
383 });
384
385 Ok(this)
386 }
387
388 #[cfg(any(test, feature = "test-support"))]
389 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
390 let languages = Arc::new(LanguageRegistry::new());
391 let http_client = client::test::FakeHttpClient::with_404_response();
392 let client = client::Client::new(http_client.clone());
393 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
394 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
395 }
396
397 #[cfg(any(test, feature = "test-support"))]
398 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
399 self.opened_buffers
400 .get(&remote_id)
401 .and_then(|buffer| buffer.upgrade(cx))
402 }
403
404 #[cfg(any(test, feature = "test-support"))]
405 pub fn languages(&self) -> &Arc<LanguageRegistry> {
406 &self.languages
407 }
408
409 #[cfg(any(test, feature = "test-support"))]
410 pub fn check_invariants(&self, cx: &AppContext) {
411 if self.is_local() {
412 let mut worktree_root_paths = HashMap::default();
413 for worktree in self.worktrees(cx) {
414 let worktree = worktree.read(cx);
415 let abs_path = worktree.as_local().unwrap().abs_path().clone();
416 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
417 assert_eq!(
418 prev_worktree_id,
419 None,
420 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
421 abs_path,
422 worktree.id(),
423 prev_worktree_id
424 )
425 }
426 } else {
427 let replica_id = self.replica_id();
428 for buffer in self.opened_buffers.values() {
429 if let Some(buffer) = buffer.upgrade(cx) {
430 let buffer = buffer.read(cx);
431 assert_eq!(
432 buffer.deferred_ops_len(),
433 0,
434 "replica {}, buffer {} has deferred operations",
435 replica_id,
436 buffer.remote_id()
437 );
438 }
439 }
440 }
441 }
442
443 #[cfg(any(test, feature = "test-support"))]
444 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
445 let path = path.into();
446 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
447 self.opened_buffers.iter().any(|(_, buffer)| {
448 if let Some(buffer) = buffer.upgrade(cx) {
449 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
450 if file.worktree == worktree && file.path() == &path.path {
451 return true;
452 }
453 }
454 }
455 false
456 })
457 } else {
458 false
459 }
460 }
461
462 pub fn fs(&self) -> &Arc<dyn Fs> {
463 &self.fs
464 }
465
466 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
467 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
468 *remote_id_tx.borrow_mut() = remote_id;
469 }
470
471 self.subscriptions.clear();
472 if let Some(remote_id) = remote_id {
473 self.subscriptions
474 .push(self.client.add_model_for_remote_entity(remote_id, cx));
475 }
476 }
477
478 pub fn remote_id(&self) -> Option<u64> {
479 match &self.client_state {
480 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
481 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
482 }
483 }
484
485 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
486 let mut id = None;
487 let mut watch = None;
488 match &self.client_state {
489 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
490 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
491 }
492
493 async move {
494 if let Some(id) = id {
495 return id;
496 }
497 let mut watch = watch.unwrap();
498 loop {
499 let id = *watch.borrow();
500 if let Some(id) = id {
501 return id;
502 }
503 watch.next().await;
504 }
505 }
506 }
507
508 pub fn replica_id(&self) -> ReplicaId {
509 match &self.client_state {
510 ProjectClientState::Local { .. } => 0,
511 ProjectClientState::Remote { replica_id, .. } => *replica_id,
512 }
513 }
514
515 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
516 &self.collaborators
517 }
518
519 pub fn worktrees<'a>(
520 &'a self,
521 cx: &'a AppContext,
522 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
523 self.worktrees
524 .iter()
525 .filter_map(move |worktree| worktree.upgrade(cx))
526 }
527
528 pub fn visible_worktrees<'a>(
529 &'a self,
530 cx: &'a AppContext,
531 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
532 self.worktrees.iter().filter_map(|worktree| {
533 worktree.upgrade(cx).and_then(|worktree| {
534 if worktree.read(cx).is_visible() {
535 Some(worktree)
536 } else {
537 None
538 }
539 })
540 })
541 }
542
543 pub fn worktree_for_id(
544 &self,
545 id: WorktreeId,
546 cx: &AppContext,
547 ) -> Option<ModelHandle<Worktree>> {
548 self.worktrees(cx)
549 .find(|worktree| worktree.read(cx).id() == id)
550 }
551
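    /// Begin sharing this local project: upgrade buffer and worktree handles to
    /// strong references so they stay alive while guests are connected, then
    /// send `ShareProject` and share each worktree.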
552 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
553 let rpc = self.client.clone();
554 cx.spawn(|this, mut cx| async move {
555 let project_id = this.update(&mut cx, |this, cx| {
556 if let ProjectClientState::Local {
557 is_shared,
558 remote_id_rx,
559 ..
560 } = &mut this.client_state
561 {
562 *is_shared = true;
563
564 for open_buffer in this.opened_buffers.values_mut() {
565 match open_buffer {
566 OpenBuffer::Strong(_) => {}
567 OpenBuffer::Weak(buffer) => {
568 if let Some(buffer) = buffer.upgrade(cx) {
569 *open_buffer = OpenBuffer::Strong(buffer);
570 }
571 }
572 OpenBuffer::Loading(_) => unreachable!(),
573 }
574 }
575
576 for worktree_handle in this.worktrees.iter_mut() {
577 match worktree_handle {
578 WorktreeHandle::Strong(_) => {}
579 WorktreeHandle::Weak(worktree) => {
580 if let Some(worktree) = worktree.upgrade(cx) {
581 *worktree_handle = WorktreeHandle::Strong(worktree);
582 }
583 }
584 }
585 }
586
587 remote_id_rx
588 .borrow()
589 .ok_or_else(|| anyhow!("no project id"))
590 } else {
591 Err(anyhow!("can't share a remote project"))
592 }
593 })?;
594
595 rpc.request(proto::ShareProject { project_id }).await?;
596
597 let mut tasks = Vec::new();
598 this.update(&mut cx, |this, cx| {
599 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
600 worktree.update(cx, |worktree, cx| {
601 let worktree = worktree.as_local_mut().unwrap();
602 tasks.push(worktree.share(project_id, cx));
603 });
604 }
605 });
606 for task in tasks {
607 task.await?;
608 }
609 this.update(&mut cx, |_, cx| cx.notify());
610 Ok(())
611 })
612 }
613
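    /// Stop sharing this local project: downgrade buffer handles (and any
    /// non-visible worktrees) back to weak references, clear collaborators and
    /// shared buffers, and notify the server with `UnshareProject`.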
614 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
615 let rpc = self.client.clone();
616 cx.spawn(|this, mut cx| async move {
617 let project_id = this.update(&mut cx, |this, cx| {
618 if let ProjectClientState::Local {
619 is_shared,
620 remote_id_rx,
621 ..
622 } = &mut this.client_state
623 {
624 *is_shared = false;
625
626 for open_buffer in this.opened_buffers.values_mut() {
627 match open_buffer {
628 OpenBuffer::Strong(buffer) => {
629 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
630 }
631 _ => {}
632 }
633 }
634
635 for worktree_handle in this.worktrees.iter_mut() {
636 match worktree_handle {
637 WorktreeHandle::Strong(worktree) => {
638 if !worktree.read(cx).is_visible() {
639 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
640 }
641 }
642 _ => {}
643 }
644 }
645
646 remote_id_rx
647 .borrow()
648 .ok_or_else(|| anyhow!("no project id"))
649 } else {
650 Err(anyhow!("can't share a remote project"))
651 }
652 })?;
653
654 rpc.send(proto::UnshareProject { project_id })?;
655 this.update(&mut cx, |this, cx| {
656 this.collaborators.clear();
657 this.shared_buffers.clear();
658 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
659 worktree.update(cx, |worktree, _| {
660 worktree.as_local_mut().unwrap().unshare();
661 });
662 }
663 cx.notify()
664 });
665 Ok(())
666 })
667 }
668
669 pub fn is_read_only(&self) -> bool {
670 match &self.client_state {
671 ProjectClientState::Local { .. } => false,
672 ProjectClientState::Remote {
673 sharing_has_stopped,
674 ..
675 } => *sharing_has_stopped,
676 }
677 }
678
679 pub fn is_local(&self) -> bool {
680 match &self.client_state {
681 ProjectClientState::Local { .. } => true,
682 ProjectClientState::Remote { .. } => false,
683 }
684 }
685
686 pub fn is_remote(&self) -> bool {
687 !self.is_local()
688 }
689
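    /// Open the buffer for the given path. An already-open buffer is returned
    /// directly, and concurrent requests for the same path are de-duplicated
    /// through `loading_buffers` so they all resolve to the same `Buffer`.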
690 pub fn open_buffer(
691 &mut self,
692 path: impl Into<ProjectPath>,
693 cx: &mut ModelContext<Self>,
694 ) -> Task<Result<ModelHandle<Buffer>>> {
695 let project_path = path.into();
696 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
697 worktree
698 } else {
699 return Task::ready(Err(anyhow!("no such worktree")));
700 };
701
702 // If there is already a buffer for the given path, then return it.
703 let existing_buffer = self.get_open_buffer(&project_path, cx);
704 if let Some(existing_buffer) = existing_buffer {
705 return Task::ready(Ok(existing_buffer));
706 }
707
708 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
709 // If the given path is already being loaded, then wait for that existing
710 // task to complete and return the same buffer.
711 hash_map::Entry::Occupied(e) => e.get().clone(),
712
713 // Otherwise, record the fact that this path is now being loaded.
714 hash_map::Entry::Vacant(entry) => {
715 let (mut tx, rx) = postage::watch::channel();
716 entry.insert(rx.clone());
717
718 let load_buffer = if worktree.read(cx).is_local() {
719 self.open_local_buffer(&project_path.path, &worktree, cx)
720 } else {
721 self.open_remote_buffer(&project_path.path, &worktree, cx)
722 };
723
724 cx.spawn(move |this, mut cx| async move {
725 let load_result = load_buffer.await;
726 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
727 // Record the fact that the buffer is no longer loading.
728 this.loading_buffers.remove(&project_path);
729 let buffer = load_result.map_err(Arc::new)?;
730 Ok(buffer)
731 }));
732 })
733 .detach();
734 rx
735 }
736 };
737
738 cx.foreground().spawn(async move {
739 loop {
740 if let Some(result) = loading_watch.borrow().as_ref() {
741 match result {
742 Ok(buffer) => return Ok(buffer.clone()),
743 Err(error) => return Err(anyhow!("{}", error)),
744 }
745 }
746 loading_watch.next().await;
747 }
748 })
749 }
750
751 fn open_local_buffer(
752 &mut self,
753 path: &Arc<Path>,
754 worktree: &ModelHandle<Worktree>,
755 cx: &mut ModelContext<Self>,
756 ) -> Task<Result<ModelHandle<Buffer>>> {
757 let load_buffer = worktree.update(cx, |worktree, cx| {
758 let worktree = worktree.as_local_mut().unwrap();
759 worktree.load_buffer(path, cx)
760 });
761 let worktree = worktree.downgrade();
762 cx.spawn(|this, mut cx| async move {
763 let buffer = load_buffer.await?;
764 let worktree = worktree
765 .upgrade(&cx)
766 .ok_or_else(|| anyhow!("worktree was removed"))?;
767 this.update(&mut cx, |this, cx| {
768 this.register_buffer(&buffer, Some(&worktree), cx)
769 })?;
770 Ok(buffer)
771 })
772 }
773
774 fn open_remote_buffer(
775 &mut self,
776 path: &Arc<Path>,
777 worktree: &ModelHandle<Worktree>,
778 cx: &mut ModelContext<Self>,
779 ) -> Task<Result<ModelHandle<Buffer>>> {
780 let rpc = self.client.clone();
781 let project_id = self.remote_id().unwrap();
782 let remote_worktree_id = worktree.read(cx).id();
783 let path = path.clone();
784 let path_string = path.to_string_lossy().to_string();
785 cx.spawn(|this, mut cx| async move {
786 let response = rpc
787 .request(proto::OpenBuffer {
788 project_id,
789 worktree_id: remote_worktree_id.to_proto(),
790 path: path_string,
791 })
792 .await?;
793 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
794 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
795 .await
796 })
797 }
798
799 fn open_local_buffer_via_lsp(
800 &mut self,
801 abs_path: lsp::Url,
802 lang_name: String,
803 lang_server: Arc<LanguageServer>,
804 cx: &mut ModelContext<Self>,
805 ) -> Task<Result<ModelHandle<Buffer>>> {
806 cx.spawn(|this, mut cx| async move {
807 let abs_path = abs_path
808 .to_file_path()
809 .map_err(|_| anyhow!("can't convert URI to path"))?;
810 let (worktree, relative_path) = if let Some(result) =
811 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
812 {
813 result
814 } else {
815 let worktree = this
816 .update(&mut cx, |this, cx| {
817 this.create_local_worktree(&abs_path, false, cx)
818 })
819 .await?;
820 this.update(&mut cx, |this, cx| {
821 this.language_servers
822 .insert((worktree.read(cx).id(), lang_name), lang_server);
823 });
824 (worktree, PathBuf::new())
825 };
826
827 let project_path = ProjectPath {
828 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
829 path: relative_path.into(),
830 };
831 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
832 .await
833 })
834 }
835
836 pub fn save_buffer_as(
837 &mut self,
838 buffer: ModelHandle<Buffer>,
839 abs_path: PathBuf,
840 cx: &mut ModelContext<Project>,
841 ) -> Task<Result<()>> {
842 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
843 cx.spawn(|this, mut cx| async move {
844 let (worktree, path) = worktree_task.await?;
845 worktree
846 .update(&mut cx, |worktree, cx| {
847 worktree
848 .as_local_mut()
849 .unwrap()
850 .save_buffer_as(buffer.clone(), path, cx)
851 })
852 .await?;
853 this.update(&mut cx, |this, cx| {
854 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
855 });
856 Ok(())
857 })
858 }
859
860 pub fn get_open_buffer(
861 &mut self,
862 path: &ProjectPath,
863 cx: &mut ModelContext<Self>,
864 ) -> Option<ModelHandle<Buffer>> {
865 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
866 self.opened_buffers.values().find_map(|buffer| {
867 let buffer = buffer.upgrade(cx)?;
868 let file = File::from_dyn(buffer.read(cx).file())?;
869 if file.worktree == worktree && file.path() == &path.path {
870 Some(buffer)
871 } else {
872 None
873 }
874 })
875 }
876
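    /// Record a newly opened buffer under its remote id, replaying any
    /// operations that arrived while it was still loading, then try to assign
    /// it a language and language server.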
877 fn register_buffer(
878 &mut self,
879 buffer: &ModelHandle<Buffer>,
880 worktree: Option<&ModelHandle<Worktree>>,
881 cx: &mut ModelContext<Self>,
882 ) -> Result<()> {
883 let remote_id = buffer.read(cx).remote_id();
884 let open_buffer = if self.is_remote() || self.is_shared() {
885 OpenBuffer::Strong(buffer.clone())
886 } else {
887 OpenBuffer::Weak(buffer.downgrade())
888 };
889
890 match self.opened_buffers.insert(remote_id, open_buffer) {
891 None => {}
892 Some(OpenBuffer::Loading(operations)) => {
893 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
894 }
895 Some(OpenBuffer::Weak(existing_handle)) => {
896 if existing_handle.upgrade(cx).is_some() {
897 Err(anyhow!(
898 "already registered buffer with remote id {}",
899 remote_id
900 ))?
901 }
902 }
903 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
904 "already registered buffer with remote id {}",
905 remote_id
906 ))?,
907 }
908 self.assign_language_to_buffer(&buffer, worktree, cx);
909 Ok(())
910 }
911
912 fn assign_language_to_buffer(
913 &mut self,
914 buffer: &ModelHandle<Buffer>,
915 worktree: Option<&ModelHandle<Worktree>>,
916 cx: &mut ModelContext<Self>,
917 ) -> Option<()> {
918 let (path, full_path) = {
919 let file = buffer.read(cx).file()?;
920 (file.path().clone(), file.full_path(cx))
921 };
922
923 // If the buffer has a language, set it and start/assign the language server
924 if let Some(language) = self.languages.select_language(&full_path) {
925 buffer.update(cx, |buffer, cx| {
926 buffer.set_language(Some(language.clone()), cx);
927 });
928
929 // For local worktrees, start a language server if needed.
930 // Also assign the language server and any previously stored diagnostics to the buffer.
931 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
932 let worktree_id = local_worktree.id();
933 let worktree_abs_path = local_worktree.abs_path().clone();
934 let buffer = buffer.downgrade();
935 let language_server =
936 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
937
938 cx.spawn_weak(|_, mut cx| async move {
939 if let Some(language_server) = language_server.await {
940 if let Some(buffer) = buffer.upgrade(&cx) {
941 buffer.update(&mut cx, |buffer, cx| {
942 buffer.set_language_server(Some(language_server), cx);
943 });
944 }
945 }
946 })
947 .detach();
948 }
949 }
950
951 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
952 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
953 buffer.update(cx, |buffer, cx| {
954 buffer.update_diagnostics(diagnostics, None, cx).log_err();
955 });
956 }
957 }
958
959 None
960 }
961
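    /// Start (or reuse) the language server for a worktree/language pair. The
    /// startup task is stored in `started_language_servers` and shared, so
    /// concurrent callers wait on the same server; its diagnostics and progress
    /// notifications are forwarded back into the project as `LspEvent`s.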
962 fn start_language_server(
963 &mut self,
964 worktree_id: WorktreeId,
965 worktree_path: Arc<Path>,
966 language: Arc<Language>,
967 cx: &mut ModelContext<Self>,
968 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
969 enum LspEvent {
970 DiagnosticsStart,
971 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
972 DiagnosticsFinish,
973 }
974
975 let key = (worktree_id, language.name().to_string());
976 self.started_language_servers
977 .entry(key.clone())
978 .or_insert_with(|| {
979 let language_server = self.languages.start_language_server(
980 &language,
981 worktree_path,
982 self.client.http_client(),
983 cx,
984 );
985 let rpc = self.client.clone();
986 cx.spawn_weak(|this, mut cx| async move {
987 let language_server = language_server?.await.log_err()?;
988 if let Some(this) = this.upgrade(&cx) {
989 this.update(&mut cx, |this, _| {
990 this.language_servers.insert(key, language_server.clone());
991 });
992 }
993
994 let disk_based_sources = language
995 .disk_based_diagnostic_sources()
996 .cloned()
997 .unwrap_or_default();
998 let disk_based_diagnostics_progress_token =
999 language.disk_based_diagnostics_progress_token().cloned();
1000 let has_disk_based_diagnostic_progress_token =
1001 disk_based_diagnostics_progress_token.is_some();
1002 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1003
1004 // Listen for `PublishDiagnostics` notifications.
1005 language_server
1006 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1007 let diagnostics_tx = diagnostics_tx.clone();
1008 move |params| {
1009 if !has_disk_based_diagnostic_progress_token {
1010 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1011 }
1012 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1013 .ok();
1014 if !has_disk_based_diagnostic_progress_token {
1015 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1016 }
1017 }
1018 })
1019 .detach();
1020
1021 // Listen for `Progress` notifications. Send an event when the language server
1022 // transitions between running jobs and not running any jobs.
1023 let mut running_jobs_for_this_server: i32 = 0;
1024 language_server
1025 .on_notification::<lsp::notification::Progress, _>(move |params| {
1026 let token = match params.token {
1027 lsp::NumberOrString::Number(_) => None,
1028 lsp::NumberOrString::String(token) => Some(token),
1029 };
1030
1031 if token == disk_based_diagnostics_progress_token {
1032 match params.value {
1033 lsp::ProgressParamsValue::WorkDone(progress) => {
1034 match progress {
1035 lsp::WorkDoneProgress::Begin(_) => {
1036 running_jobs_for_this_server += 1;
1037 if running_jobs_for_this_server == 1 {
1038 block_on(
1039 diagnostics_tx
1040 .send(LspEvent::DiagnosticsStart),
1041 )
1042 .ok();
1043 }
1044 }
1045 lsp::WorkDoneProgress::End(_) => {
1046 running_jobs_for_this_server -= 1;
1047 if running_jobs_for_this_server == 0 {
1048 block_on(
1049 diagnostics_tx
1050 .send(LspEvent::DiagnosticsFinish),
1051 )
1052 .ok();
1053 }
1054 }
1055 _ => {}
1056 }
1057 }
1058 }
1059 }
1060 })
1061 .detach();
1062
1063 // Process all the LSP events.
1064 cx.spawn(|mut cx| async move {
1065 while let Ok(message) = diagnostics_rx.recv().await {
1066 let this = this.upgrade(&cx)?;
1067 match message {
1068 LspEvent::DiagnosticsStart => {
1069 this.update(&mut cx, |this, cx| {
1070 this.disk_based_diagnostics_started(cx);
1071 if let Some(project_id) = this.remote_id() {
1072 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1073 project_id,
1074 })
1075 .log_err();
1076 }
1077 });
1078 }
1079 LspEvent::DiagnosticsUpdate(mut params) => {
1080 language.process_diagnostics(&mut params);
1081 this.update(&mut cx, |this, cx| {
1082 this.update_diagnostics(params, &disk_based_sources, cx)
1083 .log_err();
1084 });
1085 }
1086 LspEvent::DiagnosticsFinish => {
1087 this.update(&mut cx, |this, cx| {
1088 this.disk_based_diagnostics_finished(cx);
1089 if let Some(project_id) = this.remote_id() {
1090 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1091 project_id,
1092 })
1093 .log_err();
1094 }
1095 });
1096 }
1097 }
1098 }
1099 Some(())
1100 })
1101 .detach();
1102
1103 Some(language_server)
1104 })
1105 .shared()
1106 })
1107 .clone()
1108 }
1109
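    /// Translate an LSP `PublishDiagnostics` notification into diagnostic
    /// entries: each primary diagnostic gets its own group id, and
    /// related-information entries that refer back to a known primary are
    /// folded into that group as supporting diagnostics.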
1110 pub fn update_diagnostics(
1111 &mut self,
1112 params: lsp::PublishDiagnosticsParams,
1113 disk_based_sources: &HashSet<String>,
1114 cx: &mut ModelContext<Self>,
1115 ) -> Result<()> {
1116 let abs_path = params
1117 .uri
1118 .to_file_path()
1119 .map_err(|_| anyhow!("URI is not a file"))?;
1120 let mut next_group_id = 0;
1121 let mut diagnostics = Vec::default();
1122 let mut primary_diagnostic_group_ids = HashMap::default();
1123 let mut sources_by_group_id = HashMap::default();
1124 let mut supporting_diagnostic_severities = HashMap::default();
1125 for diagnostic in &params.diagnostics {
1126 let source = diagnostic.source.as_ref();
1127 let code = diagnostic.code.as_ref().map(|code| match code {
1128 lsp::NumberOrString::Number(code) => code.to_string(),
1129 lsp::NumberOrString::String(code) => code.clone(),
1130 });
1131 let range = range_from_lsp(diagnostic.range);
1132 let is_supporting = diagnostic
1133 .related_information
1134 .as_ref()
1135 .map_or(false, |infos| {
1136 infos.iter().any(|info| {
1137 primary_diagnostic_group_ids.contains_key(&(
1138 source,
1139 code.clone(),
1140 range_from_lsp(info.location.range),
1141 ))
1142 })
1143 });
1144
1145 if is_supporting {
1146 if let Some(severity) = diagnostic.severity {
1147 supporting_diagnostic_severities
1148 .insert((source, code.clone(), range), severity);
1149 }
1150 } else {
1151 let group_id = post_inc(&mut next_group_id);
1152 let is_disk_based =
1153 source.map_or(false, |source| disk_based_sources.contains(source));
1154
1155 sources_by_group_id.insert(group_id, source);
1156 primary_diagnostic_group_ids
1157 .insert((source, code.clone(), range.clone()), group_id);
1158
1159 diagnostics.push(DiagnosticEntry {
1160 range,
1161 diagnostic: Diagnostic {
1162 code: code.clone(),
1163 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1164 message: diagnostic.message.clone(),
1165 group_id,
1166 is_primary: true,
1167 is_valid: true,
1168 is_disk_based,
1169 },
1170 });
1171 if let Some(infos) = &diagnostic.related_information {
1172 for info in infos {
1173 if info.location.uri == params.uri && !info.message.is_empty() {
1174 let range = range_from_lsp(info.location.range);
1175 diagnostics.push(DiagnosticEntry {
1176 range,
1177 diagnostic: Diagnostic {
1178 code: code.clone(),
1179 severity: DiagnosticSeverity::INFORMATION,
1180 message: info.message.clone(),
1181 group_id,
1182 is_primary: false,
1183 is_valid: true,
1184 is_disk_based,
1185 },
1186 });
1187 }
1188 }
1189 }
1190 }
1191 }
1192
1193 for entry in &mut diagnostics {
1194 let diagnostic = &mut entry.diagnostic;
1195 if !diagnostic.is_primary {
1196 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1197 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1198 source,
1199 diagnostic.code.clone(),
1200 entry.range.clone(),
1201 )) {
1202 diagnostic.severity = severity;
1203 }
1204 }
1205 }
1206
1207 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1208 Ok(())
1209 }
1210
1211 pub fn update_diagnostic_entries(
1212 &mut self,
1213 abs_path: PathBuf,
1214 version: Option<i32>,
1215 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1216 cx: &mut ModelContext<Project>,
1217 ) -> Result<(), anyhow::Error> {
1218 let (worktree, relative_path) = self
1219 .find_local_worktree(&abs_path, cx)
1220 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1221 let project_path = ProjectPath {
1222 worktree_id: worktree.read(cx).id(),
1223 path: relative_path.into(),
1224 };
1225
1226 for buffer in self.opened_buffers.values() {
1227 if let Some(buffer) = buffer.upgrade(cx) {
1228 if buffer
1229 .read(cx)
1230 .file()
1231 .map_or(false, |file| *file.path() == project_path.path)
1232 {
1233 buffer.update(cx, |buffer, cx| {
1234 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1235 })?;
1236 break;
1237 }
1238 }
1239 }
1240 worktree.update(cx, |worktree, cx| {
1241 worktree
1242 .as_local_mut()
1243 .ok_or_else(|| anyhow!("not a local worktree"))?
1244 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1245 })?;
1246 cx.emit(Event::DiagnosticsUpdated(project_path));
1247 Ok(())
1248 }
1249
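    /// Format a set of buffers. Local buffers are formatted by sending a
    /// formatting request to their language server and applying the returned
    /// edits; buffers that live on the host are formatted via a `FormatBuffers`
    /// request, with the resulting transactions replayed locally.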
1250 pub fn format(
1251 &self,
1252 buffers: HashSet<ModelHandle<Buffer>>,
1253 push_to_history: bool,
1254 cx: &mut ModelContext<Project>,
1255 ) -> Task<Result<ProjectTransaction>> {
1256 let mut local_buffers = Vec::new();
1257 let mut remote_buffers = None;
1258 for buffer_handle in buffers {
1259 let buffer = buffer_handle.read(cx);
1260 let worktree;
1261 if let Some(file) = File::from_dyn(buffer.file()) {
1262 worktree = file.worktree.clone();
1263 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1264 let lang_server;
1265 if let Some(lang) = buffer.language() {
1266 if let Some(server) = self
1267 .language_servers
1268 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1269 {
1270 lang_server = server.clone();
1271 } else {
1272 return Task::ready(Ok(Default::default()));
1273 };
1274 } else {
1275 return Task::ready(Ok(Default::default()));
1276 }
1277
1278 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1279 } else {
1280 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1281 }
1282 } else {
1283 return Task::ready(Ok(Default::default()));
1284 }
1285 }
1286
1287 let remote_buffers = self.remote_id().zip(remote_buffers);
1288 let client = self.client.clone();
1289
1290 cx.spawn(|this, mut cx| async move {
1291 let mut project_transaction = ProjectTransaction::default();
1292
1293 if let Some((project_id, remote_buffers)) = remote_buffers {
1294 let response = client
1295 .request(proto::FormatBuffers {
1296 project_id,
1297 buffer_ids: remote_buffers
1298 .iter()
1299 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1300 .collect(),
1301 })
1302 .await?
1303 .transaction
1304 .ok_or_else(|| anyhow!("missing transaction"))?;
1305 project_transaction = this
1306 .update(&mut cx, |this, cx| {
1307 this.deserialize_project_transaction(response, push_to_history, cx)
1308 })
1309 .await?;
1310 }
1311
1312 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1313 let lsp_edits = lang_server
1314 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1315 text_document: lsp::TextDocumentIdentifier::new(
1316 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1317 ),
1318 options: Default::default(),
1319 work_done_progress_params: Default::default(),
1320 })
1321 .await?;
1322
1323 if let Some(lsp_edits) = lsp_edits {
1324 let edits = buffer
1325 .update(&mut cx, |buffer, cx| {
1326 buffer.edits_from_lsp(lsp_edits, None, cx)
1327 })
1328 .await?;
1329 buffer.update(&mut cx, |buffer, cx| {
1330 buffer.finalize_last_transaction();
1331 buffer.start_transaction();
1332 for (range, text) in edits {
1333 buffer.edit([range], text, cx);
1334 }
1335 if buffer.end_transaction(cx).is_some() {
1336 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1337 if !push_to_history {
1338 buffer.forget_transaction(transaction.id);
1339 }
1340 project_transaction.0.insert(cx.handle(), transaction);
1341 }
1342 });
1343 }
1344 }
1345
1346 Ok(project_transaction)
1347 })
1348 }
1349
1350 pub fn definition<T: ToPointUtf16>(
1351 &self,
1352 buffer: &ModelHandle<Buffer>,
1353 position: T,
1354 cx: &mut ModelContext<Self>,
1355 ) -> Task<Result<Vec<Location>>> {
1356 let position = position.to_point_utf16(buffer.read(cx));
1357 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1358 }
1359
1360 pub fn references<T: ToPointUtf16>(
1361 &self,
1362 buffer: &ModelHandle<Buffer>,
1363 position: T,
1364 cx: &mut ModelContext<Self>,
1365 ) -> Task<Result<Vec<Location>>> {
1366 let position = position.to_point_utf16(buffer.read(cx));
1367 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1368 }
1369
1370 pub fn document_highlights<T: ToPointUtf16>(
1371 &self,
1372 buffer: &ModelHandle<Buffer>,
1373 position: T,
1374 cx: &mut ModelContext<Self>,
1375 ) -> Task<Result<Vec<DocumentHighlight>>> {
1376 let position = position.to_point_utf16(buffer.read(cx));
1377 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1378 }
1379
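    /// Query workspace symbols. Locally this fans a `workspace/symbol` request
    /// out to each running language server and maps the results back onto
    /// worktree-relative paths; on a guest it proxies the query to the host via
    /// `GetProjectSymbols`.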
1380 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1381 if self.is_local() {
1382 let mut language_servers = HashMap::default();
1383 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1384 if let Some((worktree, language)) = self
1385 .worktree_for_id(*worktree_id, cx)
1386 .and_then(|worktree| worktree.read(cx).as_local())
1387 .zip(self.languages.get_language(language_name))
1388 {
1389 language_servers
1390 .entry(Arc::as_ptr(language_server))
1391 .or_insert((
1392 language_server.clone(),
1393 *worktree_id,
1394 worktree.abs_path().clone(),
1395 language.clone(),
1396 ));
1397 }
1398 }
1399
1400 let mut requests = Vec::new();
1401 for (language_server, _, _, _) in language_servers.values() {
1402 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1403 lsp::WorkspaceSymbolParams {
1404 query: query.to_string(),
1405 ..Default::default()
1406 },
1407 ));
1408 }
1409
1410 cx.spawn_weak(|this, cx| async move {
1411 let responses = futures::future::try_join_all(requests).await?;
1412
1413 let mut symbols = Vec::new();
1414 if let Some(this) = this.upgrade(&cx) {
1415 this.read_with(&cx, |this, cx| {
1416 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1417 language_servers.into_values().zip(responses)
1418 {
1419 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1420 |lsp_symbol| {
1421 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1422 let mut worktree_id = source_worktree_id;
1423 let path;
1424 if let Some((worktree, rel_path)) =
1425 this.find_local_worktree(&abs_path, cx)
1426 {
1427 worktree_id = worktree.read(cx).id();
1428 path = rel_path;
1429 } else {
1430 path = relativize_path(&worktree_abs_path, &abs_path);
1431 }
1432
1433 let label = language
1434 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1435 .unwrap_or_else(|| {
1436 CodeLabel::plain(lsp_symbol.name.clone(), None)
1437 });
1438 let signature = this.symbol_signature(worktree_id, &path);
1439
1440 Some(Symbol {
1441 source_worktree_id,
1442 worktree_id,
1443 language_name: language.name().to_string(),
1444 name: lsp_symbol.name,
1445 kind: lsp_symbol.kind,
1446 label,
1447 path,
1448 range: range_from_lsp(lsp_symbol.location.range),
1449 signature,
1450 })
1451 },
1452 ));
1453 }
1454 })
1455 }
1456
1457 Ok(symbols)
1458 })
1459 } else if let Some(project_id) = self.remote_id() {
1460 let request = self.client.request(proto::GetProjectSymbols {
1461 project_id,
1462 query: query.to_string(),
1463 });
1464 cx.spawn_weak(|this, cx| async move {
1465 let response = request.await?;
1466 let mut symbols = Vec::new();
1467 if let Some(this) = this.upgrade(&cx) {
1468 this.read_with(&cx, |this, _| {
1469 symbols.extend(
1470 response
1471 .symbols
1472 .into_iter()
1473 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1474 );
1475 })
1476 }
1477 Ok(symbols)
1478 })
1479 } else {
1480 Task::ready(Ok(Default::default()))
1481 }
1482 }
1483
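    /// Open the buffer containing a symbol returned by `symbols`, going through
    /// the language server that produced it so files outside the existing
    /// worktrees can still be opened (a new worktree is created when needed).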
1484 pub fn open_buffer_for_symbol(
1485 &mut self,
1486 symbol: &Symbol,
1487 cx: &mut ModelContext<Self>,
1488 ) -> Task<Result<ModelHandle<Buffer>>> {
1489 if self.is_local() {
1490 let language_server = if let Some(server) = self
1491 .language_servers
1492 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1493 {
1494 server.clone()
1495 } else {
1496 return Task::ready(Err(anyhow!(
1497 "language server for worktree and language not found"
1498 )));
1499 };
1500
1501 let worktree_abs_path = if let Some(worktree_abs_path) = self
1502 .worktree_for_id(symbol.worktree_id, cx)
1503 .and_then(|worktree| worktree.read(cx).as_local())
1504 .map(|local_worktree| local_worktree.abs_path())
1505 {
1506 worktree_abs_path
1507 } else {
1508 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1509 };
1510 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1511 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1512 uri
1513 } else {
1514 return Task::ready(Err(anyhow!("invalid symbol path")));
1515 };
1516
1517 self.open_local_buffer_via_lsp(
1518 symbol_uri,
1519 symbol.language_name.clone(),
1520 language_server,
1521 cx,
1522 )
1523 } else if let Some(project_id) = self.remote_id() {
1524 let request = self.client.request(proto::OpenBufferForSymbol {
1525 project_id,
1526 symbol: Some(serialize_symbol(symbol)),
1527 });
1528 cx.spawn(|this, mut cx| async move {
1529 let response = request.await?;
1530 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1531 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1532 .await
1533 })
1534 } else {
1535 Task::ready(Err(anyhow!("project does not have a remote id")))
1536 }
1537 }
1538
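    /// Request completions at the given position. For local buffers this issues
    /// an LSP completion request and converts each returned text edit into an
    /// anchored range (skipping completions whose ranges don't clip cleanly);
    /// on a guest it asks the host via `GetCompletions`.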
1539 pub fn completions<T: ToPointUtf16>(
1540 &self,
1541 source_buffer_handle: &ModelHandle<Buffer>,
1542 position: T,
1543 cx: &mut ModelContext<Self>,
1544 ) -> Task<Result<Vec<Completion>>> {
1545 let source_buffer_handle = source_buffer_handle.clone();
1546 let source_buffer = source_buffer_handle.read(cx);
1547 let buffer_id = source_buffer.remote_id();
1548 let language = source_buffer.language().cloned();
1549 let worktree;
1550 let buffer_abs_path;
1551 if let Some(file) = File::from_dyn(source_buffer.file()) {
1552 worktree = file.worktree.clone();
1553 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1554 } else {
1555 return Task::ready(Ok(Default::default()));
1556 };
1557
1558 let position = position.to_point_utf16(source_buffer);
1559 let anchor = source_buffer.anchor_after(position);
1560
1561 if worktree.read(cx).as_local().is_some() {
1562 let buffer_abs_path = buffer_abs_path.unwrap();
1563 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1564 server
1565 } else {
1566 return Task::ready(Ok(Default::default()));
1567 };
1568
1569 cx.spawn(|_, cx| async move {
1570 let completions = lang_server
1571 .request::<lsp::request::Completion>(lsp::CompletionParams {
1572 text_document_position: lsp::TextDocumentPositionParams::new(
1573 lsp::TextDocumentIdentifier::new(
1574 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1575 ),
1576 position.to_lsp_position(),
1577 ),
1578 context: Default::default(),
1579 work_done_progress_params: Default::default(),
1580 partial_result_params: Default::default(),
1581 })
1582 .await
1583 .context("lsp completion request failed")?;
1584
1585 let completions = if let Some(completions) = completions {
1586 match completions {
1587 lsp::CompletionResponse::Array(completions) => completions,
1588 lsp::CompletionResponse::List(list) => list.items,
1589 }
1590 } else {
1591 Default::default()
1592 };
1593
1594 source_buffer_handle.read_with(&cx, |this, _| {
1595 Ok(completions
1596 .into_iter()
1597 .filter_map(|lsp_completion| {
1598 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1599 lsp::CompletionTextEdit::Edit(edit) => {
1600 (range_from_lsp(edit.range), edit.new_text.clone())
1601 }
1602 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1603 log::info!("unsupported insert/replace completion");
1604 return None;
1605 }
1606 };
1607
1608 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1609 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1610 if clipped_start == old_range.start && clipped_end == old_range.end {
1611 Some(Completion {
1612 old_range: this.anchor_before(old_range.start)
1613 ..this.anchor_after(old_range.end),
1614 new_text,
1615 label: language
1616 .as_ref()
1617 .and_then(|l| l.label_for_completion(&lsp_completion))
1618 .unwrap_or_else(|| {
1619 CodeLabel::plain(
1620 lsp_completion.label.clone(),
1621 lsp_completion.filter_text.as_deref(),
1622 )
1623 }),
1624 lsp_completion,
1625 })
1626 } else {
1627 None
1628 }
1629 })
1630 .collect())
1631 })
1632 })
1633 } else if let Some(project_id) = self.remote_id() {
1634 let rpc = self.client.clone();
1635 let message = proto::GetCompletions {
1636 project_id,
1637 buffer_id,
1638 position: Some(language::proto::serialize_anchor(&anchor)),
1639 version: (&source_buffer.version()).into(),
1640 };
1641 cx.spawn_weak(|_, mut cx| async move {
1642 let response = rpc.request(message).await?;
1643
1644 source_buffer_handle
1645 .update(&mut cx, |buffer, _| {
1646 buffer.wait_for_version(response.version.into())
1647 })
1648 .await;
1649
1650 response
1651 .completions
1652 .into_iter()
1653 .map(|completion| {
1654 language::proto::deserialize_completion(completion, language.as_ref())
1655 })
1656 .collect()
1657 })
1658 } else {
1659 Task::ready(Ok(Default::default()))
1660 }
1661 }
1662
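    /// Resolve a completion item and apply any additional text edits it carries
    /// (imports, for example), returning the resulting transaction so the
    /// caller can decide whether it joins the undo history.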
1663 pub fn apply_additional_edits_for_completion(
1664 &self,
1665 buffer_handle: ModelHandle<Buffer>,
1666 completion: Completion,
1667 push_to_history: bool,
1668 cx: &mut ModelContext<Self>,
1669 ) -> Task<Result<Option<Transaction>>> {
1670 let buffer = buffer_handle.read(cx);
1671 let buffer_id = buffer.remote_id();
1672
1673 if self.is_local() {
1674 let lang_server = if let Some(language_server) = buffer.language_server() {
1675 language_server.clone()
1676 } else {
1677 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1678 };
1679
1680 cx.spawn(|_, mut cx| async move {
1681 let resolved_completion = lang_server
1682 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1683 .await?;
1684 if let Some(edits) = resolved_completion.additional_text_edits {
1685 let edits = buffer_handle
1686 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1687 .await?;
1688 buffer_handle.update(&mut cx, |buffer, cx| {
1689 buffer.finalize_last_transaction();
1690 buffer.start_transaction();
1691 for (range, text) in edits {
1692 buffer.edit([range], text, cx);
1693 }
1694 let transaction = if buffer.end_transaction(cx).is_some() {
1695 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1696 if !push_to_history {
1697 buffer.forget_transaction(transaction.id);
1698 }
1699 Some(transaction)
1700 } else {
1701 None
1702 };
1703 Ok(transaction)
1704 })
1705 } else {
1706 Ok(None)
1707 }
1708 })
1709 } else if let Some(project_id) = self.remote_id() {
1710 let client = self.client.clone();
1711 cx.spawn(|_, mut cx| async move {
1712 let response = client
1713 .request(proto::ApplyCompletionAdditionalEdits {
1714 project_id,
1715 buffer_id,
1716 completion: Some(language::proto::serialize_completion(&completion)),
1717 })
1718 .await?;
1719
1720 if let Some(transaction) = response.transaction {
1721 let transaction = language::proto::deserialize_transaction(transaction)?;
1722 buffer_handle
1723 .update(&mut cx, |buffer, _| {
1724 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1725 })
1726 .await;
1727 if push_to_history {
1728 buffer_handle.update(&mut cx, |buffer, _| {
1729 buffer.push_transaction(transaction.clone(), Instant::now());
1730 });
1731 }
1732 Ok(Some(transaction))
1733 } else {
1734 Ok(None)
1735 }
1736 })
1737 } else {
1738 Task::ready(Err(anyhow!("project does not have a remote id")))
1739 }
1740 }
1741
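    /// Fetch the code actions (quickfix and refactor kinds) available for the
    /// given range, either directly from the buffer's language server or from
    /// the host project.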
1742 pub fn code_actions<T: ToOffset>(
1743 &self,
1744 buffer_handle: &ModelHandle<Buffer>,
1745 range: Range<T>,
1746 cx: &mut ModelContext<Self>,
1747 ) -> Task<Result<Vec<CodeAction>>> {
1748 let buffer_handle = buffer_handle.clone();
1749 let buffer = buffer_handle.read(cx);
1750 let buffer_id = buffer.remote_id();
1751 let worktree;
1752 let buffer_abs_path;
1753 if let Some(file) = File::from_dyn(buffer.file()) {
1754 worktree = file.worktree.clone();
1755 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1756 } else {
1757 return Task::ready(Ok(Default::default()));
1758 };
1759 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1760
1761 if worktree.read(cx).as_local().is_some() {
1762 let buffer_abs_path = buffer_abs_path.unwrap();
1763 let lang_name;
1764 let lang_server;
1765 if let Some(lang) = buffer.language() {
1766 lang_name = lang.name().to_string();
1767 if let Some(server) = self
1768 .language_servers
1769 .get(&(worktree.read(cx).id(), lang_name.clone()))
1770 {
1771 lang_server = server.clone();
1772 } else {
1773 return Task::ready(Ok(Default::default()));
1774 };
1775 } else {
1776 return Task::ready(Ok(Default::default()));
1777 }
1778
1779 let lsp_range = lsp::Range::new(
1780 range.start.to_point_utf16(buffer).to_lsp_position(),
1781 range.end.to_point_utf16(buffer).to_lsp_position(),
1782 );
1783 cx.foreground().spawn(async move {
1784 Ok(lang_server
1785 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1786 text_document: lsp::TextDocumentIdentifier::new(
1787 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1788 ),
1789 range: lsp_range,
1790 work_done_progress_params: Default::default(),
1791 partial_result_params: Default::default(),
1792 context: lsp::CodeActionContext {
1793 diagnostics: Default::default(),
1794 only: Some(vec![
1795 lsp::CodeActionKind::QUICKFIX,
1796 lsp::CodeActionKind::REFACTOR,
1797 lsp::CodeActionKind::REFACTOR_EXTRACT,
1798 ]),
1799 },
1800 })
1801 .await?
1802 .unwrap_or_default()
1803 .into_iter()
1804 .filter_map(|entry| {
1805 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1806 Some(CodeAction {
1807 range: range.clone(),
1808 lsp_action,
1809 })
1810 } else {
1811 None
1812 }
1813 })
1814 .collect())
1815 })
1816 } else if let Some(project_id) = self.remote_id() {
1817 let rpc = self.client.clone();
1818 cx.spawn_weak(|_, mut cx| async move {
1819 let response = rpc
1820 .request(proto::GetCodeActions {
1821 project_id,
1822 buffer_id,
1823 start: Some(language::proto::serialize_anchor(&range.start)),
1824 end: Some(language::proto::serialize_anchor(&range.end)),
1825 })
1826 .await?;
1827
1828 buffer_handle
1829 .update(&mut cx, |buffer, _| {
1830 buffer.wait_for_version(response.version.into())
1831 })
1832 .await;
1833
1834 response
1835 .actions
1836 .into_iter()
1837 .map(language::proto::deserialize_code_action)
1838 .collect()
1839 })
1840 } else {
1841 Task::ready(Ok(Default::default()))
1842 }
1843 }
1844
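    /// Apply a code action. The action is re-resolved against the current
    /// buffer contents first; the resulting workspace edit may touch several
    /// buffers, so the changes come back as a `ProjectTransaction`.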
1845 pub fn apply_code_action(
1846 &self,
1847 buffer_handle: ModelHandle<Buffer>,
1848 mut action: CodeAction,
1849 push_to_history: bool,
1850 cx: &mut ModelContext<Self>,
1851 ) -> Task<Result<ProjectTransaction>> {
1852 if self.is_local() {
1853 let buffer = buffer_handle.read(cx);
1854 let lang_name = if let Some(lang) = buffer.language() {
1855 lang.name().to_string()
1856 } else {
1857 return Task::ready(Ok(Default::default()));
1858 };
1859 let lang_server = if let Some(language_server) = buffer.language_server() {
1860 language_server.clone()
1861 } else {
1862 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1863 };
1864 let range = action.range.to_point_utf16(buffer);
1865
1866 cx.spawn(|this, mut cx| async move {
1867 if let Some(lsp_range) = action
1868 .lsp_action
1869 .data
1870 .as_mut()
1871 .and_then(|d| d.get_mut("codeActionParams"))
1872 .and_then(|d| d.get_mut("range"))
1873 {
1874 *lsp_range = serde_json::to_value(&lsp::Range::new(
1875 range.start.to_lsp_position(),
1876 range.end.to_lsp_position(),
1877 ))
1878 .unwrap();
1879 action.lsp_action = lang_server
1880 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1881 .await?;
1882 } else {
1883 let actions = this
1884 .update(&mut cx, |this, cx| {
1885 this.code_actions(&buffer_handle, action.range, cx)
1886 })
1887 .await?;
1888 action.lsp_action = actions
1889 .into_iter()
1890 .find(|a| a.lsp_action.title == action.lsp_action.title)
1891 .ok_or_else(|| anyhow!("code action is outdated"))?
1892 .lsp_action;
1893 }
1894
1895 if let Some(edit) = action.lsp_action.edit {
1896 Self::deserialize_workspace_edit(
1897 this,
1898 edit,
1899 push_to_history,
1900 lang_name,
1901 lang_server,
1902 &mut cx,
1903 )
1904 .await
1905 } else {
1906 Ok(ProjectTransaction::default())
1907 }
1908 })
1909 } else if let Some(project_id) = self.remote_id() {
1910 let client = self.client.clone();
1911 let request = proto::ApplyCodeAction {
1912 project_id,
1913 buffer_id: buffer_handle.read(cx).remote_id(),
1914 action: Some(language::proto::serialize_code_action(&action)),
1915 };
1916 cx.spawn(|this, mut cx| async move {
1917 let response = client
1918 .request(request)
1919 .await?
1920 .transaction
1921 .ok_or_else(|| anyhow!("missing transaction"))?;
1922 this.update(&mut cx, |this, cx| {
1923 this.deserialize_project_transaction(response, push_to_history, cx)
1924 })
1925 .await
1926 })
1927 } else {
1928 Task::ready(Err(anyhow!("project does not have a remote id")))
1929 }
1930 }
1931
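    // Applies an LSP `WorkspaceEdit` to the local project: resource operations
    // (create, rename, delete) go through the `Fs`, while text document edits are
    // applied to buffers opened via `open_local_buffer_via_lsp`, each as a single
    // transaction recorded in the returned `ProjectTransaction`.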
1932 async fn deserialize_workspace_edit(
1933 this: ModelHandle<Self>,
1934 edit: lsp::WorkspaceEdit,
1935 push_to_history: bool,
1936 language_name: String,
1937 language_server: Arc<LanguageServer>,
1938 cx: &mut AsyncAppContext,
1939 ) -> Result<ProjectTransaction> {
1940 let fs = this.read_with(cx, |this, _| this.fs.clone());
1941 let mut operations = Vec::new();
1942 if let Some(document_changes) = edit.document_changes {
1943 match document_changes {
1944 lsp::DocumentChanges::Edits(edits) => {
1945 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1946 }
1947 lsp::DocumentChanges::Operations(ops) => operations = ops,
1948 }
1949 } else if let Some(changes) = edit.changes {
1950 operations.extend(changes.into_iter().map(|(uri, edits)| {
1951 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1952 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1953 uri,
1954 version: None,
1955 },
1956 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1957 })
1958 }));
1959 }
1960
1961 let mut project_transaction = ProjectTransaction::default();
1962 for operation in operations {
1963 match operation {
1964 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1965 let abs_path = op
1966 .uri
1967 .to_file_path()
1968 .map_err(|_| anyhow!("can't convert URI to path"))?;
1969
1970 if let Some(parent_path) = abs_path.parent() {
1971 fs.create_dir(parent_path).await?;
1972 }
1973                    // A trailing slash in the URI denotes a directory. `Path::ends_with("/")`
                    // compares whole components and would never match, so check the string form.
                    if abs_path.to_string_lossy().ends_with('/') {
1974 fs.create_dir(&abs_path).await?;
1975 } else {
1976 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1977 .await?;
1978 }
1979 }
1980 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1981 let source_abs_path = op
1982 .old_uri
1983 .to_file_path()
1984 .map_err(|_| anyhow!("can't convert URI to path"))?;
1985 let target_abs_path = op
1986 .new_uri
1987 .to_file_path()
1988 .map_err(|_| anyhow!("can't convert URI to path"))?;
1989 fs.rename(
1990 &source_abs_path,
1991 &target_abs_path,
1992 op.options.map(Into::into).unwrap_or_default(),
1993 )
1994 .await?;
1995 }
1996 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1997 let abs_path = op
1998 .uri
1999 .to_file_path()
2000 .map_err(|_| anyhow!("can't convert URI to path"))?;
2001 let options = op.options.map(Into::into).unwrap_or_default();
2002                    // As above, detect directory URIs by a trailing slash in the string form
                    // rather than `Path::ends_with("/")`, which compares whole components.
                    if abs_path.to_string_lossy().ends_with('/') {
2003 fs.remove_dir(&abs_path, options).await?;
2004 } else {
2005 fs.remove_file(&abs_path, options).await?;
2006 }
2007 }
2008 lsp::DocumentChangeOperation::Edit(op) => {
2009 let buffer_to_edit = this
2010 .update(cx, |this, cx| {
2011 this.open_local_buffer_via_lsp(
2012 op.text_document.uri,
2013 language_name.clone(),
2014 language_server.clone(),
2015 cx,
2016 )
2017 })
2018 .await?;
2019
2020 let edits = buffer_to_edit
2021 .update(cx, |buffer, cx| {
2022 let edits = op.edits.into_iter().map(|edit| match edit {
2023 lsp::OneOf::Left(edit) => edit,
2024 lsp::OneOf::Right(edit) => edit.text_edit,
2025 });
2026 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2027 })
2028 .await?;
2029
2030 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2031 buffer.finalize_last_transaction();
2032 buffer.start_transaction();
2033 for (range, text) in edits {
2034 buffer.edit([range], text, cx);
2035 }
2036 let transaction = if buffer.end_transaction(cx).is_some() {
2037 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2038 if !push_to_history {
2039 buffer.forget_transaction(transaction.id);
2040 }
2041 Some(transaction)
2042 } else {
2043 None
2044 };
2045
2046 transaction
2047 });
2048 if let Some(transaction) = transaction {
2049 project_transaction.0.insert(buffer_to_edit, transaction);
2050 }
2051 }
2052 }
2053 }
2054
2055 Ok(project_transaction)
2056 }
2057
2058 pub fn prepare_rename<T: ToPointUtf16>(
2059 &self,
2060 buffer: ModelHandle<Buffer>,
2061 position: T,
2062 cx: &mut ModelContext<Self>,
2063 ) -> Task<Result<Option<Range<Anchor>>>> {
2064 let position = position.to_point_utf16(buffer.read(cx));
2065 self.request_lsp(buffer, PrepareRename { position }, cx)
2066 }
2067
2068 pub fn perform_rename<T: ToPointUtf16>(
2069 &self,
2070 buffer: ModelHandle<Buffer>,
2071 position: T,
2072 new_name: String,
2073 push_to_history: bool,
2074 cx: &mut ModelContext<Self>,
2075 ) -> Task<Result<ProjectTransaction>> {
2076 let position = position.to_point_utf16(buffer.read(cx));
2077 self.request_lsp(
2078 buffer,
2079 PerformRename {
2080 position,
2081 new_name,
2082 push_to_history,
2083 },
2084 cx,
2085 )
2086 }
2087
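    // Project-wide text search. Locally this runs in three stages: background
    // workers scan the visible files of each worktree snapshot for candidate paths,
    // matching files are opened as buffers on the foreground, and a second pool of
    // workers searches each buffer snapshot, collecting matches as anchor ranges.
    // On guests the query is forwarded to the host over RPC.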
2088 pub fn search(
2089 &self,
2090 query: SearchQuery,
2091 cx: &mut ModelContext<Self>,
2092 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2093 if self.is_local() {
2094 let snapshots = self
2095 .visible_worktrees(cx)
2096 .filter_map(|tree| {
2097 let tree = tree.read(cx).as_local()?;
2098 Some(tree.snapshot())
2099 })
2100 .collect::<Vec<_>>();
2101
2102 let background = cx.background().clone();
2103 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2104 if path_count == 0 {
2105 return Task::ready(Ok(Default::default()));
2106 }
2107 let workers = background.num_cpus().min(path_count);
2108 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2109 cx.background()
2110 .spawn({
2111 let fs = self.fs.clone();
2112 let background = cx.background().clone();
2113 let query = query.clone();
2114 async move {
2115 let fs = &fs;
2116 let query = &query;
2117 let matching_paths_tx = &matching_paths_tx;
2118 let paths_per_worker = (path_count + workers - 1) / workers;
2119 let snapshots = &snapshots;
2120 background
2121 .scoped(|scope| {
2122 for worker_ix in 0..workers {
2123 let worker_start_ix = worker_ix * paths_per_worker;
2124 let worker_end_ix = worker_start_ix + paths_per_worker;
2125 scope.spawn(async move {
2126 let mut snapshot_start_ix = 0;
2127 let mut abs_path = PathBuf::new();
2128 for snapshot in snapshots {
2129 let snapshot_end_ix =
2130 snapshot_start_ix + snapshot.visible_file_count();
2131 if worker_end_ix <= snapshot_start_ix {
2132 break;
2133 } else if worker_start_ix > snapshot_end_ix {
2134 snapshot_start_ix = snapshot_end_ix;
2135 continue;
2136 } else {
2137 let start_in_snapshot = worker_start_ix
2138 .saturating_sub(snapshot_start_ix);
2139 let end_in_snapshot =
2140 cmp::min(worker_end_ix, snapshot_end_ix)
2141 - snapshot_start_ix;
2142
2143 for entry in snapshot
2144 .files(false, start_in_snapshot)
2145 .take(end_in_snapshot - start_in_snapshot)
2146 {
2147 if matching_paths_tx.is_closed() {
2148 break;
2149 }
2150
2151 abs_path.clear();
2152 abs_path.push(&snapshot.abs_path());
2153 abs_path.push(&entry.path);
2154 let matches = if let Some(file) =
2155 fs.open_sync(&abs_path).await.log_err()
2156 {
2157 query.detect(file).unwrap_or(false)
2158 } else {
2159 false
2160 };
2161
2162 if matches {
2163 let project_path =
2164 (snapshot.id(), entry.path.clone());
2165 if matching_paths_tx
2166 .send(project_path)
2167 .await
2168 .is_err()
2169 {
2170 break;
2171 }
2172 }
2173 }
2174
2175 snapshot_start_ix = snapshot_end_ix;
2176 }
2177 }
2178 });
2179 }
2180 })
2181 .await;
2182 }
2183 })
2184 .detach();
2185
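            // Feed the already-open buffers into the search channel first, then open
            // a buffer for each matching path reported by the scan above. The
            // `open_buffers` set prevents the same buffer from being queued twice.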
2186 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2187 let open_buffers = self
2188 .opened_buffers
2189 .values()
2190 .filter_map(|b| b.upgrade(cx))
2191 .collect::<HashSet<_>>();
2192 cx.spawn(|this, cx| async move {
2193 for buffer in &open_buffers {
2194 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2195 buffers_tx.send((buffer.clone(), snapshot)).await?;
2196 }
2197
2198 let open_buffers = Rc::new(RefCell::new(open_buffers));
2199 while let Some(project_path) = matching_paths_rx.next().await {
2200 if buffers_tx.is_closed() {
2201 break;
2202 }
2203
2204 let this = this.clone();
2205 let open_buffers = open_buffers.clone();
2206 let buffers_tx = buffers_tx.clone();
2207 cx.spawn(|mut cx| async move {
2208 if let Some(buffer) = this
2209 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2210 .await
2211 .log_err()
2212 {
2213 if open_buffers.borrow_mut().insert(buffer.clone()) {
2214 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2215 buffers_tx.send((buffer, snapshot)).await?;
2216 }
2217 }
2218
2219 Ok::<_, anyhow::Error>(())
2220 })
2221 .detach();
2222 }
2223
2224 Ok::<_, anyhow::Error>(())
2225 })
2226 .detach_and_log_err(cx);
2227
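            // Drain the channel on background workers, running the query against each
            // buffer snapshot's rope and collecting the matches as anchor ranges keyed
            // by buffer.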
2228 let background = cx.background().clone();
2229 cx.background().spawn(async move {
2230 let query = &query;
2231 let mut matched_buffers = Vec::new();
2232 for _ in 0..workers {
2233 matched_buffers.push(HashMap::default());
2234 }
2235 background
2236 .scoped(|scope| {
2237 for worker_matched_buffers in matched_buffers.iter_mut() {
2238 let mut buffers_rx = buffers_rx.clone();
2239 scope.spawn(async move {
2240 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2241 let buffer_matches = query
2242 .search(snapshot.as_rope())
2243 .await
2244 .iter()
2245 .map(|range| {
2246 snapshot.anchor_before(range.start)
2247 ..snapshot.anchor_after(range.end)
2248 })
2249 .collect::<Vec<_>>();
2250 if !buffer_matches.is_empty() {
2251 worker_matched_buffers
2252 .insert(buffer.clone(), buffer_matches);
2253 }
2254 }
2255 });
2256 }
2257 })
2258 .await;
2259 Ok(matched_buffers.into_iter().flatten().collect())
2260 })
2261 } else if let Some(project_id) = self.remote_id() {
2262 let request = self.client.request(query.to_proto(project_id));
2263 cx.spawn(|this, mut cx| async move {
2264 let response = request.await?;
2265 let mut result = HashMap::default();
2266 for location in response.locations {
2267 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2268 let target_buffer = this
2269 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2270 .await?;
2271 let start = location
2272 .start
2273 .and_then(deserialize_anchor)
2274 .ok_or_else(|| anyhow!("missing target start"))?;
2275 let end = location
2276 .end
2277 .and_then(deserialize_anchor)
2278 .ok_or_else(|| anyhow!("missing target end"))?;
2279 result
2280 .entry(target_buffer)
2281 .or_insert(Vec::new())
2282 .push(start..end)
2283 }
2284 Ok(result)
2285 })
2286 } else {
2287 Task::ready(Ok(Default::default()))
2288 }
2289 }
2290
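    // Dispatches an `LspCommand`: for local projects the request is sent directly to
    // the buffer's language server and converted back with `response_from_lsp`; for
    // remote projects it is encoded as a proto message and sent to the host. Buffers
    // without a language server resolve to a default (empty) response.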
2291 fn request_lsp<R: LspCommand>(
2292 &self,
2293 buffer_handle: ModelHandle<Buffer>,
2294 request: R,
2295 cx: &mut ModelContext<Self>,
2296 ) -> Task<Result<R::Response>>
2297 where
2298 <R::LspRequest as lsp::request::Request>::Result: Send,
2299 {
2300 let buffer = buffer_handle.read(cx);
2301 if self.is_local() {
2302 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2303 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2304 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2305 return cx.spawn(|this, cx| async move {
2306 let response = language_server
2307 .request::<R::LspRequest>(lsp_params)
2308 .await
2309 .context("lsp request failed")?;
2310 request
2311 .response_from_lsp(response, this, buffer_handle, cx)
2312 .await
2313 });
2314 }
2315 } else if let Some(project_id) = self.remote_id() {
2316 let rpc = self.client.clone();
2317 let message = request.to_proto(project_id, buffer);
2318 return cx.spawn(|this, cx| async move {
2319 let response = rpc.request(message).await?;
2320 request
2321 .response_from_proto(response, this, buffer_handle, cx)
2322 .await
2323 });
2324 }
2325 Task::ready(Ok(Default::default()))
2326 }
2327
2328 pub fn find_or_create_local_worktree(
2329 &mut self,
2330 abs_path: impl AsRef<Path>,
2331 visible: bool,
2332 cx: &mut ModelContext<Self>,
2333 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2334 let abs_path = abs_path.as_ref();
2335 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2336 Task::ready(Ok((tree.clone(), relative_path.into())))
2337 } else {
2338 let worktree = self.create_local_worktree(abs_path, visible, cx);
2339 cx.foreground()
2340 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2341 }
2342 }
2343
2344 pub fn find_local_worktree(
2345 &self,
2346 abs_path: &Path,
2347 cx: &AppContext,
2348 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2349 for tree in self.worktrees(cx) {
2350 if let Some(relative_path) = tree
2351 .read(cx)
2352 .as_local()
2353 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2354 {
2355 return Some((tree.clone(), relative_path.into()));
2356 }
2357 }
2358 None
2359 }
2360
2361 pub fn is_shared(&self) -> bool {
2362 match &self.client_state {
2363 ProjectClientState::Local { is_shared, .. } => *is_shared,
2364 ProjectClientState::Remote { .. } => false,
2365 }
2366 }
2367
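    // Creates (or reuses an in-flight) local worktree for `abs_path`. Concurrent
    // requests for the same path share a single task via `loading_local_worktrees`;
    // once built, the worktree is added to the project and, if the project has a
    // remote id, registered with or shared to the server.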
2368 fn create_local_worktree(
2369 &mut self,
2370 abs_path: impl AsRef<Path>,
2371 visible: bool,
2372 cx: &mut ModelContext<Self>,
2373 ) -> Task<Result<ModelHandle<Worktree>>> {
2374 let fs = self.fs.clone();
2375 let client = self.client.clone();
2376 let path: Arc<Path> = abs_path.as_ref().into();
2377 let task = self
2378 .loading_local_worktrees
2379 .entry(path.clone())
2380 .or_insert_with(|| {
2381 cx.spawn(|project, mut cx| {
2382 async move {
2383 let worktree =
2384 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2385 .await;
2386 project.update(&mut cx, |project, _| {
2387 project.loading_local_worktrees.remove(&path);
2388 });
2389 let worktree = worktree?;
2390
2391 let (remote_project_id, is_shared) =
2392 project.update(&mut cx, |project, cx| {
2393 project.add_worktree(&worktree, cx);
2394 (project.remote_id(), project.is_shared())
2395 });
2396
2397 if let Some(project_id) = remote_project_id {
2398 if is_shared {
2399 worktree
2400 .update(&mut cx, |worktree, cx| {
2401 worktree.as_local_mut().unwrap().share(project_id, cx)
2402 })
2403 .await?;
2404 } else {
2405 worktree
2406 .update(&mut cx, |worktree, cx| {
2407 worktree.as_local_mut().unwrap().register(project_id, cx)
2408 })
2409 .await?;
2410 }
2411 }
2412
2413 Ok(worktree)
2414 }
2415                .map_err(Arc::new)
2416 })
2417 .shared()
2418 })
2419 .clone();
2420 cx.foreground().spawn(async move {
2421 match task.await {
2422 Ok(worktree) => Ok(worktree),
2423 Err(err) => Err(anyhow!("{}", err)),
2424 }
2425 })
2426 }
2427
2428 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2429 self.worktrees.retain(|worktree| {
2430 worktree
2431 .upgrade(cx)
2432 .map_or(false, |w| w.read(cx).id() != id)
2433 });
2434 cx.notify();
2435 }
2436
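    // Registers a worktree with the project. The handle is held strongly when the
    // project is shared or the worktree is visible or remote; otherwise it is held
    // weakly and pruned from the list once the worktree is released.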
2437 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2438 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2439 if worktree.read(cx).is_local() {
2440 cx.subscribe(&worktree, |this, worktree, _, cx| {
2441 this.update_local_worktree_buffers(worktree, cx);
2442 })
2443 .detach();
2444 }
2445
2446 let push_strong_handle = {
2447 let worktree = worktree.read(cx);
2448 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2449 };
2450 if push_strong_handle {
2451 self.worktrees
2452 .push(WorktreeHandle::Strong(worktree.clone()));
2453 } else {
2454 cx.observe_release(&worktree, |this, cx| {
2455 this.worktrees
2456 .retain(|worktree| worktree.upgrade(cx).is_some());
2457 cx.notify();
2458 })
2459 .detach();
2460 self.worktrees
2461 .push(WorktreeHandle::Weak(worktree.downgrade()));
2462 }
2463 cx.notify();
2464 }
2465
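    // After a local worktree rescans, re-resolve the file for every open buffer in
    // that worktree: prefer the entry with the same id, fall back to the same path,
    // and otherwise keep the old path with no entry id (the file was deleted). Peers
    // are notified via `UpdateBufferFile` when the project has a remote id.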
2466 fn update_local_worktree_buffers(
2467 &mut self,
2468 worktree_handle: ModelHandle<Worktree>,
2469 cx: &mut ModelContext<Self>,
2470 ) {
2471 let snapshot = worktree_handle.read(cx).snapshot();
2472 let mut buffers_to_delete = Vec::new();
2473 for (buffer_id, buffer) in &self.opened_buffers {
2474 if let Some(buffer) = buffer.upgrade(cx) {
2475 buffer.update(cx, |buffer, cx| {
2476 if let Some(old_file) = File::from_dyn(buffer.file()) {
2477 if old_file.worktree != worktree_handle {
2478 return;
2479 }
2480
2481 let new_file = if let Some(entry) = old_file
2482 .entry_id
2483 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2484 {
2485 File {
2486 is_local: true,
2487 entry_id: Some(entry.id),
2488 mtime: entry.mtime,
2489 path: entry.path.clone(),
2490 worktree: worktree_handle.clone(),
2491 }
2492 } else if let Some(entry) =
2493 snapshot.entry_for_path(old_file.path().as_ref())
2494 {
2495 File {
2496 is_local: true,
2497 entry_id: Some(entry.id),
2498 mtime: entry.mtime,
2499 path: entry.path.clone(),
2500 worktree: worktree_handle.clone(),
2501 }
2502 } else {
2503 File {
2504 is_local: true,
2505 entry_id: None,
2506 path: old_file.path().clone(),
2507 mtime: old_file.mtime(),
2508 worktree: worktree_handle.clone(),
2509 }
2510 };
2511
2512 if let Some(project_id) = self.remote_id() {
2513 self.client
2514 .send(proto::UpdateBufferFile {
2515 project_id,
2516 buffer_id: *buffer_id as u64,
2517 file: Some(new_file.to_proto()),
2518 })
2519 .log_err();
2520 }
2521 buffer.file_updated(Box::new(new_file), cx).detach();
2522 }
2523 });
2524 } else {
2525 buffers_to_delete.push(*buffer_id);
2526 }
2527 }
2528
2529 for buffer_id in buffers_to_delete {
2530 self.opened_buffers.remove(&buffer_id);
2531 }
2532 }
2533
2534 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2535 let new_active_entry = entry.and_then(|project_path| {
2536 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2537 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2538 Some(ProjectEntry {
2539 worktree_id: project_path.worktree_id,
2540 entry_id: entry.id,
2541 })
2542 });
2543 if new_active_entry != self.active_entry {
2544 self.active_entry = new_active_entry;
2545 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2546 }
2547 }
2548
2549 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2550 self.language_servers_with_diagnostics_running > 0
2551 }
2552
2553 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2554 let mut summary = DiagnosticSummary::default();
2555 for (_, path_summary) in self.diagnostic_summaries(cx) {
2556 summary.error_count += path_summary.error_count;
2557 summary.warning_count += path_summary.warning_count;
2558 summary.info_count += path_summary.info_count;
2559 summary.hint_count += path_summary.hint_count;
2560 }
2561 summary
2562 }
2563
2564 pub fn diagnostic_summaries<'a>(
2565 &'a self,
2566 cx: &'a AppContext,
2567 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2568 self.worktrees(cx).flat_map(move |worktree| {
2569 let worktree = worktree.read(cx);
2570 let worktree_id = worktree.id();
2571 worktree
2572 .diagnostic_summaries()
2573 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2574 })
2575 }
2576
2577 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2578 self.language_servers_with_diagnostics_running += 1;
2579 if self.language_servers_with_diagnostics_running == 1 {
2580 cx.emit(Event::DiskBasedDiagnosticsStarted);
2581 }
2582 }
2583
2584 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2585 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2586 self.language_servers_with_diagnostics_running -= 1;
2587 if self.language_servers_with_diagnostics_running == 0 {
2588 cx.emit(Event::DiskBasedDiagnosticsFinished);
2589 }
2590 }
2591
2592 pub fn active_entry(&self) -> Option<ProjectEntry> {
2593 self.active_entry
2594 }
2595
2596 // RPC message handlers
2597
2598 async fn handle_unshare_project(
2599 this: ModelHandle<Self>,
2600 _: TypedEnvelope<proto::UnshareProject>,
2601 _: Arc<Client>,
2602 mut cx: AsyncAppContext,
2603 ) -> Result<()> {
2604 this.update(&mut cx, |this, cx| {
2605 if let ProjectClientState::Remote {
2606 sharing_has_stopped,
2607 ..
2608 } = &mut this.client_state
2609 {
2610 *sharing_has_stopped = true;
2611 this.collaborators.clear();
2612 cx.notify();
2613 } else {
2614 unreachable!()
2615 }
2616 });
2617
2618 Ok(())
2619 }
2620
2621 async fn handle_add_collaborator(
2622 this: ModelHandle<Self>,
2623 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2624 _: Arc<Client>,
2625 mut cx: AsyncAppContext,
2626 ) -> Result<()> {
2627 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2628 let collaborator = envelope
2629 .payload
2630 .collaborator
2631 .take()
2632 .ok_or_else(|| anyhow!("empty collaborator"))?;
2633
2634 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2635 this.update(&mut cx, |this, cx| {
2636 this.collaborators
2637 .insert(collaborator.peer_id, collaborator);
2638 cx.notify();
2639 });
2640
2641 Ok(())
2642 }
2643
2644 async fn handle_remove_collaborator(
2645 this: ModelHandle<Self>,
2646 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2647 _: Arc<Client>,
2648 mut cx: AsyncAppContext,
2649 ) -> Result<()> {
2650 this.update(&mut cx, |this, cx| {
2651 let peer_id = PeerId(envelope.payload.peer_id);
2652 let replica_id = this
2653 .collaborators
2654 .remove(&peer_id)
2655 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2656 .replica_id;
2657 for (_, buffer) in &this.opened_buffers {
2658 if let Some(buffer) = buffer.upgrade(cx) {
2659 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2660 }
2661 }
2662 cx.notify();
2663 Ok(())
2664 })
2665 }
2666
2667 async fn handle_register_worktree(
2668 this: ModelHandle<Self>,
2669 envelope: TypedEnvelope<proto::RegisterWorktree>,
2670 client: Arc<Client>,
2671 mut cx: AsyncAppContext,
2672 ) -> Result<()> {
2673 this.update(&mut cx, |this, cx| {
2674 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2675 let replica_id = this.replica_id();
2676 let worktree = proto::Worktree {
2677 id: envelope.payload.worktree_id,
2678 root_name: envelope.payload.root_name,
2679 entries: Default::default(),
2680 diagnostic_summaries: Default::default(),
2681 visible: envelope.payload.visible,
2682 };
2683 let (worktree, load_task) =
2684 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2685 this.add_worktree(&worktree, cx);
2686 load_task.detach();
2687 Ok(())
2688 })
2689 }
2690
2691 async fn handle_unregister_worktree(
2692 this: ModelHandle<Self>,
2693 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2694 _: Arc<Client>,
2695 mut cx: AsyncAppContext,
2696 ) -> Result<()> {
2697 this.update(&mut cx, |this, cx| {
2698 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2699 this.remove_worktree(worktree_id, cx);
2700 Ok(())
2701 })
2702 }
2703
2704 async fn handle_update_worktree(
2705 this: ModelHandle<Self>,
2706 envelope: TypedEnvelope<proto::UpdateWorktree>,
2707 _: Arc<Client>,
2708 mut cx: AsyncAppContext,
2709 ) -> Result<()> {
2710 this.update(&mut cx, |this, cx| {
2711 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2712 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2713 worktree.update(cx, |worktree, _| {
2714 let worktree = worktree.as_remote_mut().unwrap();
2715 worktree.update_from_remote(envelope)
2716 })?;
2717 }
2718 Ok(())
2719 })
2720 }
2721
2722 async fn handle_update_diagnostic_summary(
2723 this: ModelHandle<Self>,
2724 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2725 _: Arc<Client>,
2726 mut cx: AsyncAppContext,
2727 ) -> Result<()> {
2728 this.update(&mut cx, |this, cx| {
2729 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2730 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2731 if let Some(summary) = envelope.payload.summary {
2732 let project_path = ProjectPath {
2733 worktree_id,
2734 path: Path::new(&summary.path).into(),
2735 };
2736 worktree.update(cx, |worktree, _| {
2737 worktree
2738 .as_remote_mut()
2739 .unwrap()
2740 .update_diagnostic_summary(project_path.path.clone(), &summary);
2741 });
2742 cx.emit(Event::DiagnosticsUpdated(project_path));
2743 }
2744 }
2745 Ok(())
2746 })
2747 }
2748
2749 async fn handle_disk_based_diagnostics_updating(
2750 this: ModelHandle<Self>,
2751 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2752 _: Arc<Client>,
2753 mut cx: AsyncAppContext,
2754 ) -> Result<()> {
2755 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2756 Ok(())
2757 }
2758
2759 async fn handle_disk_based_diagnostics_updated(
2760 this: ModelHandle<Self>,
2761 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2762 _: Arc<Client>,
2763 mut cx: AsyncAppContext,
2764 ) -> Result<()> {
2765 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2766 Ok(())
2767 }
2768
2769 async fn handle_update_buffer(
2770 this: ModelHandle<Self>,
2771 envelope: TypedEnvelope<proto::UpdateBuffer>,
2772 _: Arc<Client>,
2773 mut cx: AsyncAppContext,
2774 ) -> Result<()> {
2775 this.update(&mut cx, |this, cx| {
2776 let payload = envelope.payload.clone();
2777 let buffer_id = payload.buffer_id;
2778 let ops = payload
2779 .operations
2780 .into_iter()
2781                .map(language::proto::deserialize_operation)
2782 .collect::<Result<Vec<_>, _>>()?;
2783 match this.opened_buffers.entry(buffer_id) {
2784 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2785 OpenBuffer::Strong(buffer) => {
2786 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2787 }
2788 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2789 OpenBuffer::Weak(_) => {}
2790 },
2791 hash_map::Entry::Vacant(e) => {
2792 e.insert(OpenBuffer::Loading(ops));
2793 }
2794 }
2795 Ok(())
2796 })
2797 }
2798
2799 async fn handle_update_buffer_file(
2800 this: ModelHandle<Self>,
2801 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2802 _: Arc<Client>,
2803 mut cx: AsyncAppContext,
2804 ) -> Result<()> {
2805 this.update(&mut cx, |this, cx| {
2806 let payload = envelope.payload.clone();
2807 let buffer_id = payload.buffer_id;
2808 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2809 let worktree = this
2810 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2811 .ok_or_else(|| anyhow!("no such worktree"))?;
2812 let file = File::from_proto(file, worktree.clone(), cx)?;
2813 let buffer = this
2814 .opened_buffers
2815 .get_mut(&buffer_id)
2816 .and_then(|b| b.upgrade(cx))
2817 .ok_or_else(|| anyhow!("no such buffer"))?;
2818 buffer.update(cx, |buffer, cx| {
2819 buffer.file_updated(Box::new(file), cx).detach();
2820 });
2821 Ok(())
2822 })
2823 }
2824
2825 async fn handle_save_buffer(
2826 this: ModelHandle<Self>,
2827 envelope: TypedEnvelope<proto::SaveBuffer>,
2828 _: Arc<Client>,
2829 mut cx: AsyncAppContext,
2830 ) -> Result<proto::BufferSaved> {
2831 let buffer_id = envelope.payload.buffer_id;
2832 let requested_version = envelope.payload.version.try_into()?;
2833
2834 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2835 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2836 let buffer = this
2837 .opened_buffers
2838 .get(&buffer_id)
2839 .map(|buffer| buffer.upgrade(cx).unwrap())
2840 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2841 Ok::<_, anyhow::Error>((project_id, buffer))
2842 })?;
2843
2844 if !buffer
2845 .read_with(&cx, |buffer, _| buffer.version())
2846 .observed_all(&requested_version)
2847 {
2848 Err(anyhow!("save request depends on unreceived edits"))?;
2849 }
2850
2851 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2852 Ok(proto::BufferSaved {
2853 project_id,
2854 buffer_id,
2855 version: (&saved_version).into(),
2856 mtime: Some(mtime.into()),
2857 })
2858 }
2859
2860 async fn handle_format_buffers(
2861 this: ModelHandle<Self>,
2862 envelope: TypedEnvelope<proto::FormatBuffers>,
2863 _: Arc<Client>,
2864 mut cx: AsyncAppContext,
2865 ) -> Result<proto::FormatBuffersResponse> {
2866 let sender_id = envelope.original_sender_id()?;
2867 let format = this.update(&mut cx, |this, cx| {
2868 let mut buffers = HashSet::default();
2869 for buffer_id in &envelope.payload.buffer_ids {
2870 buffers.insert(
2871 this.opened_buffers
2872 .get(buffer_id)
2873 .map(|buffer| buffer.upgrade(cx).unwrap())
2874 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2875 );
2876 }
2877 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2878 })?;
2879
2880 let project_transaction = format.await?;
2881 let project_transaction = this.update(&mut cx, |this, cx| {
2882 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2883 });
2884 Ok(proto::FormatBuffersResponse {
2885 transaction: Some(project_transaction),
2886 })
2887 }
2888
2889 async fn handle_get_completions(
2890 this: ModelHandle<Self>,
2891 envelope: TypedEnvelope<proto::GetCompletions>,
2892 _: Arc<Client>,
2893 mut cx: AsyncAppContext,
2894 ) -> Result<proto::GetCompletionsResponse> {
2895 let position = envelope
2896 .payload
2897 .position
2898 .and_then(language::proto::deserialize_anchor)
2899 .ok_or_else(|| anyhow!("invalid position"))?;
2900 let version = clock::Global::from(envelope.payload.version);
2901 let buffer = this.read_with(&cx, |this, cx| {
2902 this.opened_buffers
2903 .get(&envelope.payload.buffer_id)
2904 .map(|buffer| buffer.upgrade(cx).unwrap())
2905 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2906 })?;
2907 if !buffer
2908 .read_with(&cx, |buffer, _| buffer.version())
2909 .observed_all(&version)
2910 {
2911 Err(anyhow!("completion request depends on unreceived edits"))?;
2912 }
2913 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2914 let completions = this
2915 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2916 .await?;
2917
2918 Ok(proto::GetCompletionsResponse {
2919 completions: completions
2920 .iter()
2921 .map(language::proto::serialize_completion)
2922 .collect(),
2923 version: (&version).into(),
2924 })
2925 }
2926
2927 async fn handle_apply_additional_edits_for_completion(
2928 this: ModelHandle<Self>,
2929 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2930 _: Arc<Client>,
2931 mut cx: AsyncAppContext,
2932 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2933 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2934 let buffer = this
2935 .opened_buffers
2936 .get(&envelope.payload.buffer_id)
2937 .map(|buffer| buffer.upgrade(cx).unwrap())
2938 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2939 let language = buffer.read(cx).language();
2940 let completion = language::proto::deserialize_completion(
2941 envelope
2942 .payload
2943 .completion
2944 .ok_or_else(|| anyhow!("invalid completion"))?,
2945 language,
2946 )?;
2947 Ok::<_, anyhow::Error>(
2948 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2949 )
2950 })?;
2951
2952 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2953 transaction: apply_additional_edits
2954 .await?
2955 .as_ref()
2956 .map(language::proto::serialize_transaction),
2957 })
2958 }
2959
2960 async fn handle_get_code_actions(
2961 this: ModelHandle<Self>,
2962 envelope: TypedEnvelope<proto::GetCodeActions>,
2963 _: Arc<Client>,
2964 mut cx: AsyncAppContext,
2965 ) -> Result<proto::GetCodeActionsResponse> {
2966 let start = envelope
2967 .payload
2968 .start
2969 .and_then(language::proto::deserialize_anchor)
2970 .ok_or_else(|| anyhow!("invalid start"))?;
2971 let end = envelope
2972 .payload
2973 .end
2974 .and_then(language::proto::deserialize_anchor)
2975 .ok_or_else(|| anyhow!("invalid end"))?;
2976 let buffer = this.update(&mut cx, |this, cx| {
2977 this.opened_buffers
2978 .get(&envelope.payload.buffer_id)
2979 .map(|buffer| buffer.upgrade(cx).unwrap())
2980 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2981 })?;
2982 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2983 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2984 Err(anyhow!("code action request references unreceived edits"))?;
2985 }
2986 let code_actions = this.update(&mut cx, |this, cx| {
2987 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2988 })?;
2989
2990 Ok(proto::GetCodeActionsResponse {
2991 actions: code_actions
2992 .await?
2993 .iter()
2994 .map(language::proto::serialize_code_action)
2995 .collect(),
2996 version: (&version).into(),
2997 })
2998 }
2999
3000 async fn handle_apply_code_action(
3001 this: ModelHandle<Self>,
3002 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3003 _: Arc<Client>,
3004 mut cx: AsyncAppContext,
3005 ) -> Result<proto::ApplyCodeActionResponse> {
3006 let sender_id = envelope.original_sender_id()?;
3007 let action = language::proto::deserialize_code_action(
3008 envelope
3009 .payload
3010 .action
3011 .ok_or_else(|| anyhow!("invalid action"))?,
3012 )?;
3013 let apply_code_action = this.update(&mut cx, |this, cx| {
3014 let buffer = this
3015 .opened_buffers
3016 .get(&envelope.payload.buffer_id)
3017 .map(|buffer| buffer.upgrade(cx).unwrap())
3018 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3019 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3020 })?;
3021
3022 let project_transaction = apply_code_action.await?;
3023 let project_transaction = this.update(&mut cx, |this, cx| {
3024 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3025 });
3026 Ok(proto::ApplyCodeActionResponse {
3027 transaction: Some(project_transaction),
3028 })
3029 }
3030
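    // Generic host-side handler for LSP-backed requests: it reconstructs the
    // `LspCommand` from the proto payload, runs it through `request_lsp`, and
    // serializes the response against the buffer version captured when the request
    // was received.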
3031 async fn handle_lsp_command<T: LspCommand>(
3032 this: ModelHandle<Self>,
3033 envelope: TypedEnvelope<T::ProtoRequest>,
3034 _: Arc<Client>,
3035 mut cx: AsyncAppContext,
3036 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3037 where
3038 <T::LspRequest as lsp::request::Request>::Result: Send,
3039 {
3040 let sender_id = envelope.original_sender_id()?;
3041 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
3042 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3043 let buffer_handle = this
3044 .opened_buffers
3045 .get(&buffer_id)
3046 .map(|buffer| buffer.upgrade(cx).unwrap())
3047 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3048 let buffer = buffer_handle.read(cx);
3049 let buffer_version = buffer.version();
3050 let request = T::from_proto(envelope.payload, this, buffer)?;
3051 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
3052 })?;
3053 let response = request.await?;
3054 this.update(&mut cx, |this, cx| {
3055 Ok(T::response_to_proto(
3056 response,
3057 this,
3058 sender_id,
3059 &buffer_version,
3060 cx,
3061 ))
3062 })
3063 }
3064
3065 async fn handle_get_project_symbols(
3066 this: ModelHandle<Self>,
3067 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3068 _: Arc<Client>,
3069 mut cx: AsyncAppContext,
3070 ) -> Result<proto::GetProjectSymbolsResponse> {
3071 let symbols = this
3072 .update(&mut cx, |this, cx| {
3073 this.symbols(&envelope.payload.query, cx)
3074 })
3075 .await?;
3076
3077 Ok(proto::GetProjectSymbolsResponse {
3078 symbols: symbols.iter().map(serialize_symbol).collect(),
3079 })
3080 }
3081
3082 async fn handle_search_project(
3083 this: ModelHandle<Self>,
3084 envelope: TypedEnvelope<proto::SearchProject>,
3085 _: Arc<Client>,
3086 mut cx: AsyncAppContext,
3087 ) -> Result<proto::SearchProjectResponse> {
3088 let peer_id = envelope.original_sender_id()?;
3089 let query = SearchQuery::from_proto(envelope.payload)?;
3090 let result = this
3091 .update(&mut cx, |this, cx| this.search(query, cx))
3092 .await?;
3093
3094 this.update(&mut cx, |this, cx| {
3095 let mut locations = Vec::new();
3096 for (buffer, ranges) in result {
3097 for range in ranges {
3098 let start = serialize_anchor(&range.start);
3099 let end = serialize_anchor(&range.end);
3100 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3101 locations.push(proto::Location {
3102 buffer: Some(buffer),
3103 start: Some(start),
3104 end: Some(end),
3105 });
3106 }
3107 }
3108 Ok(proto::SearchProjectResponse { locations })
3109 })
3110 }
3111
3112 async fn handle_open_buffer_for_symbol(
3113 this: ModelHandle<Self>,
3114 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3115 _: Arc<Client>,
3116 mut cx: AsyncAppContext,
3117 ) -> Result<proto::OpenBufferForSymbolResponse> {
3118 let peer_id = envelope.original_sender_id()?;
3119 let symbol = envelope
3120 .payload
3121 .symbol
3122 .ok_or_else(|| anyhow!("invalid symbol"))?;
3123 let symbol = this.read_with(&cx, |this, _| {
3124 let symbol = this.deserialize_symbol(symbol)?;
3125 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3126 if signature == symbol.signature {
3127 Ok(symbol)
3128 } else {
3129 Err(anyhow!("invalid symbol signature"))
3130 }
3131 })?;
3132 let buffer = this
3133 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3134 .await?;
3135
3136 Ok(proto::OpenBufferForSymbolResponse {
3137 buffer: Some(this.update(&mut cx, |this, cx| {
3138 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3139 })),
3140 })
3141 }
3142
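    // Signs a (worktree id, path) pair with this project's random nonce. Guests echo
    // the signature back when requesting a symbol's buffer, and
    // `handle_open_buffer_for_symbol` rejects symbols whose signature doesn't match,
    // so peers can't request buffers for arbitrary paths.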
3143 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3144 let mut hasher = Sha256::new();
3145 hasher.update(worktree_id.to_proto().to_be_bytes());
3146 hasher.update(path.to_string_lossy().as_bytes());
3147 hasher.update(self.nonce.to_be_bytes());
3148 hasher.finalize().as_slice().try_into().unwrap()
3149 }
3150
3151 async fn handle_open_buffer(
3152 this: ModelHandle<Self>,
3153 envelope: TypedEnvelope<proto::OpenBuffer>,
3154 _: Arc<Client>,
3155 mut cx: AsyncAppContext,
3156 ) -> Result<proto::OpenBufferResponse> {
3157 let peer_id = envelope.original_sender_id()?;
3158 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3159 let open_buffer = this.update(&mut cx, |this, cx| {
3160 this.open_buffer(
3161 ProjectPath {
3162 worktree_id,
3163 path: PathBuf::from(envelope.payload.path).into(),
3164 },
3165 cx,
3166 )
3167 });
3168
3169 let buffer = open_buffer.await?;
3170 this.update(&mut cx, |this, cx| {
3171 Ok(proto::OpenBufferResponse {
3172 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3173 })
3174 })
3175 }
3176
3177 fn serialize_project_transaction_for_peer(
3178 &mut self,
3179 project_transaction: ProjectTransaction,
3180 peer_id: PeerId,
3181 cx: &AppContext,
3182 ) -> proto::ProjectTransaction {
3183 let mut serialized_transaction = proto::ProjectTransaction {
3184 buffers: Default::default(),
3185 transactions: Default::default(),
3186 };
3187 for (buffer, transaction) in project_transaction.0 {
3188 serialized_transaction
3189 .buffers
3190 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3191 serialized_transaction
3192 .transactions
3193 .push(language::proto::serialize_transaction(&transaction));
3194 }
3195 serialized_transaction
3196 }
3197
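    // Rebuilds a `ProjectTransaction` received over RPC: each buffer is resolved via
    // `deserialize_buffer`, the transaction's edits are awaited, and the transaction
    // is optionally pushed onto the buffer's undo history.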
3198 fn deserialize_project_transaction(
3199 &mut self,
3200 message: proto::ProjectTransaction,
3201 push_to_history: bool,
3202 cx: &mut ModelContext<Self>,
3203 ) -> Task<Result<ProjectTransaction>> {
3204 cx.spawn(|this, mut cx| async move {
3205 let mut project_transaction = ProjectTransaction::default();
3206 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3207 let buffer = this
3208 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3209 .await?;
3210 let transaction = language::proto::deserialize_transaction(transaction)?;
3211 project_transaction.0.insert(buffer, transaction);
3212 }
3213
3214 for (buffer, transaction) in &project_transaction.0 {
3215 buffer
3216 .update(&mut cx, |buffer, _| {
3217 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3218 })
3219 .await;
3220
3221 if push_to_history {
3222 buffer.update(&mut cx, |buffer, _| {
3223 buffer.push_transaction(transaction.clone(), Instant::now());
3224 });
3225 }
3226 }
3227
3228 Ok(project_transaction)
3229 })
3230 }
3231
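    // Serializes a buffer for a given peer: the first time a buffer is sent to a peer
    // its full state is included, afterwards only its remote id. `shared_buffers`
    // tracks which peers already hold which buffers.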
3232 fn serialize_buffer_for_peer(
3233 &mut self,
3234 buffer: &ModelHandle<Buffer>,
3235 peer_id: PeerId,
3236 cx: &AppContext,
3237 ) -> proto::Buffer {
3238 let buffer_id = buffer.read(cx).remote_id();
3239 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3240 if shared_buffers.insert(buffer_id) {
3241 proto::Buffer {
3242 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3243 }
3244 } else {
3245 proto::Buffer {
3246 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3247 }
3248 }
3249 }
3250
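    // Resolves a proto buffer received from the host. An `Id` variant waits (via the
    // `opened_buffer` watch) until the corresponding buffer has been opened locally;
    // a `State` variant constructs the buffer, attaches its `File` and worktree, and
    // registers it with the project.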
3251 fn deserialize_buffer(
3252 &mut self,
3253 buffer: proto::Buffer,
3254 cx: &mut ModelContext<Self>,
3255 ) -> Task<Result<ModelHandle<Buffer>>> {
3256 let replica_id = self.replica_id();
3257
3258 let opened_buffer_tx = self.opened_buffer.0.clone();
3259 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3260 cx.spawn(|this, mut cx| async move {
3261 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3262 proto::buffer::Variant::Id(id) => {
3263 let buffer = loop {
3264 let buffer = this.read_with(&cx, |this, cx| {
3265 this.opened_buffers
3266 .get(&id)
3267 .and_then(|buffer| buffer.upgrade(cx))
3268 });
3269 if let Some(buffer) = buffer {
3270 break buffer;
3271 }
3272 opened_buffer_rx
3273 .next()
3274 .await
3275 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3276 };
3277 Ok(buffer)
3278 }
3279 proto::buffer::Variant::State(mut buffer) => {
3280 let mut buffer_worktree = None;
3281 let mut buffer_file = None;
3282 if let Some(file) = buffer.file.take() {
3283 this.read_with(&cx, |this, cx| {
3284 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3285 let worktree =
3286 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3287 anyhow!("no worktree found for id {}", file.worktree_id)
3288 })?;
3289 buffer_file =
3290 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3291 as Box<dyn language::File>);
3292 buffer_worktree = Some(worktree);
3293 Ok::<_, anyhow::Error>(())
3294 })?;
3295 }
3296
3297 let buffer = cx.add_model(|cx| {
3298 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3299 });
3300
3301 this.update(&mut cx, |this, cx| {
3302 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3303 })?;
3304
3305 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3306 Ok(buffer)
3307 }
3308 }
3309 })
3310 }
3311
3312 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3313 let language = self
3314 .languages
3315 .get_language(&serialized_symbol.language_name);
3316 let start = serialized_symbol
3317 .start
3318 .ok_or_else(|| anyhow!("invalid start"))?;
3319 let end = serialized_symbol
3320 .end
3321 .ok_or_else(|| anyhow!("invalid end"))?;
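        // The proto field stores the raw `lsp::SymbolKind` discriminant; transmuting
        // it back assumes the sender produced it with `serialize_symbol` below.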
3322 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3323 Ok(Symbol {
3324 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3325 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3326 language_name: serialized_symbol.language_name.clone(),
3327 label: language
3328 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3329 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3330 name: serialized_symbol.name,
3331 path: PathBuf::from(serialized_symbol.path),
3332 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3333 kind,
3334 signature: serialized_symbol
3335 .signature
3336 .try_into()
3337 .map_err(|_| anyhow!("invalid signature"))?,
3338 })
3339 }
3340
3341 async fn handle_close_buffer(
3342 _: ModelHandle<Self>,
3343 _: TypedEnvelope<proto::CloseBuffer>,
3344 _: Arc<Client>,
3345 _: AsyncAppContext,
3346 ) -> Result<()> {
3347 // TODO: use this for following
3348 Ok(())
3349 }
3350
3351 async fn handle_buffer_saved(
3352 this: ModelHandle<Self>,
3353 envelope: TypedEnvelope<proto::BufferSaved>,
3354 _: Arc<Client>,
3355 mut cx: AsyncAppContext,
3356 ) -> Result<()> {
3357 let version = envelope.payload.version.try_into()?;
3358 let mtime = envelope
3359 .payload
3360 .mtime
3361 .ok_or_else(|| anyhow!("missing mtime"))?
3362 .into();
3363
3364 this.update(&mut cx, |this, cx| {
3365 let buffer = this
3366 .opened_buffers
3367 .get(&envelope.payload.buffer_id)
3368 .and_then(|buffer| buffer.upgrade(cx));
3369 if let Some(buffer) = buffer {
3370 buffer.update(cx, |buffer, cx| {
3371 buffer.did_save(version, mtime, None, cx);
3372 });
3373 }
3374 Ok(())
3375 })
3376 }
3377
3378 async fn handle_buffer_reloaded(
3379 this: ModelHandle<Self>,
3380 envelope: TypedEnvelope<proto::BufferReloaded>,
3381 _: Arc<Client>,
3382 mut cx: AsyncAppContext,
3383 ) -> Result<()> {
3384 let payload = envelope.payload.clone();
3385 let version = payload.version.try_into()?;
3386 let mtime = payload
3387 .mtime
3388 .ok_or_else(|| anyhow!("missing mtime"))?
3389 .into();
3390 this.update(&mut cx, |this, cx| {
3391 let buffer = this
3392 .opened_buffers
3393 .get(&payload.buffer_id)
3394 .and_then(|buffer| buffer.upgrade(cx));
3395 if let Some(buffer) = buffer {
3396 buffer.update(cx, |buffer, cx| {
3397 buffer.did_reload(version, mtime, cx);
3398 });
3399 }
3400 Ok(())
3401 })
3402 }
3403
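    // Fuzzy-matches `query` against the paths of all visible worktrees, prefixing
    // candidates with the worktree root name when more than one worktree is open.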
3404 pub fn match_paths<'a>(
3405 &self,
3406 query: &'a str,
3407 include_ignored: bool,
3408 smart_case: bool,
3409 max_results: usize,
3410 cancel_flag: &'a AtomicBool,
3411 cx: &AppContext,
3412 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3413 let worktrees = self
3414 .worktrees(cx)
3415 .filter(|worktree| worktree.read(cx).is_visible())
3416 .collect::<Vec<_>>();
3417 let include_root_name = worktrees.len() > 1;
3418 let candidate_sets = worktrees
3419 .into_iter()
3420 .map(|worktree| CandidateSet {
3421 snapshot: worktree.read(cx).snapshot(),
3422 include_ignored,
3423 include_root_name,
3424 })
3425 .collect::<Vec<_>>();
3426
3427 let background = cx.background().clone();
3428 async move {
3429 fuzzy::match_paths(
3430 candidate_sets.as_slice(),
3431 query,
3432 smart_case,
3433 max_results,
3434 cancel_flag,
3435 background,
3436 )
3437 .await
3438 }
3439 }
3440}
3441
3442impl WorktreeHandle {
3443 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3444 match self {
3445 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3446 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3447 }
3448 }
3449}
3450
3451impl OpenBuffer {
3452 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3453 match self {
3454 OpenBuffer::Strong(handle) => Some(handle.clone()),
3455 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3456 OpenBuffer::Loading(_) => None,
3457 }
3458 }
3459}
3460
3461struct CandidateSet {
3462 snapshot: Snapshot,
3463 include_ignored: bool,
3464 include_root_name: bool,
3465}
3466
3467impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3468 type Candidates = CandidateSetIter<'a>;
3469
3470 fn id(&self) -> usize {
3471 self.snapshot.id().to_usize()
3472 }
3473
3474 fn len(&self) -> usize {
3475 if self.include_ignored {
3476 self.snapshot.file_count()
3477 } else {
3478 self.snapshot.visible_file_count()
3479 }
3480 }
3481
3482 fn prefix(&self) -> Arc<str> {
3483 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3484 self.snapshot.root_name().into()
3485 } else if self.include_root_name {
3486 format!("{}/", self.snapshot.root_name()).into()
3487 } else {
3488 "".into()
3489 }
3490 }
3491
3492 fn candidates(&'a self, start: usize) -> Self::Candidates {
3493 CandidateSetIter {
3494 traversal: self.snapshot.files(self.include_ignored, start),
3495 }
3496 }
3497}
3498
3499struct CandidateSetIter<'a> {
3500 traversal: Traversal<'a>,
3501}
3502
3503impl<'a> Iterator for CandidateSetIter<'a> {
3504 type Item = PathMatchCandidate<'a>;
3505
3506 fn next(&mut self) -> Option<Self::Item> {
3507 self.traversal.next().map(|entry| {
3508 if let EntryKind::File(char_bag) = entry.kind {
3509 PathMatchCandidate {
3510 path: &entry.path,
3511 char_bag,
3512 }
3513 } else {
3514 unreachable!()
3515 }
3516 })
3517 }
3518}
3519
3520impl Entity for Project {
3521 type Event = Event;
3522
3523 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3524 match &self.client_state {
3525 ProjectClientState::Local { remote_id_rx, .. } => {
3526 if let Some(project_id) = *remote_id_rx.borrow() {
3527 self.client
3528 .send(proto::UnregisterProject { project_id })
3529 .log_err();
3530 }
3531 }
3532 ProjectClientState::Remote { remote_id, .. } => {
3533 self.client
3534 .send(proto::LeaveProject {
3535 project_id: *remote_id,
3536 })
3537 .log_err();
3538 }
3539 }
3540 }
3541
3542 fn app_will_quit(
3543 &mut self,
3544 _: &mut MutableAppContext,
3545 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3546 let shutdown_futures = self
3547 .language_servers
3548 .drain()
3549 .filter_map(|(_, server)| server.shutdown())
3550 .collect::<Vec<_>>();
3551 Some(
3552 async move {
3553 futures::future::join_all(shutdown_futures).await;
3554 }
3555 .boxed(),
3556 )
3557 }
3558}
3559
3560impl Collaborator {
3561 fn from_proto(
3562 message: proto::Collaborator,
3563 user_store: &ModelHandle<UserStore>,
3564 cx: &mut AsyncAppContext,
3565 ) -> impl Future<Output = Result<Self>> {
3566 let user = user_store.update(cx, |user_store, cx| {
3567 user_store.fetch_user(message.user_id, cx)
3568 });
3569
3570 async move {
3571 Ok(Self {
3572 peer_id: PeerId(message.peer_id),
3573 user: user.await?,
3574 replica_id: message.replica_id as ReplicaId,
3575 })
3576 }
3577 }
3578}
3579
3580impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3581 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3582 Self {
3583 worktree_id,
3584 path: path.as_ref().into(),
3585 }
3586 }
3587}
3588
3589impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3590 fn from(options: lsp::CreateFileOptions) -> Self {
3591 Self {
3592 overwrite: options.overwrite.unwrap_or(false),
3593 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3594 }
3595 }
3596}
3597
3598impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3599 fn from(options: lsp::RenameFileOptions) -> Self {
3600 Self {
3601 overwrite: options.overwrite.unwrap_or(false),
3602 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3603 }
3604 }
3605}
3606
3607impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3608 fn from(options: lsp::DeleteFileOptions) -> Self {
3609 Self {
3610 recursive: options.recursive.unwrap_or(false),
3611 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3612 }
3613 }
3614}
3615
3616fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3617 proto::Symbol {
3618 source_worktree_id: symbol.source_worktree_id.to_proto(),
3619 worktree_id: symbol.worktree_id.to_proto(),
3620 language_name: symbol.language_name.clone(),
3621 name: symbol.name.clone(),
3622 kind: unsafe { mem::transmute(symbol.kind) },
3623 path: symbol.path.to_string_lossy().to_string(),
3624 start: Some(proto::Point {
3625 row: symbol.range.start.row,
3626 column: symbol.range.start.column,
3627 }),
3628 end: Some(proto::Point {
3629 row: symbol.range.end.row,
3630 column: symbol.range.end.column,
3631 }),
3632 signature: symbol.signature.to_vec(),
3633 }
3634}
3635
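// Computes a relative path from `base` to `path` by skipping their shared prefix and
// emitting a `..` component for each remaining component of `base`, e.g.
// `relativize_path(Path::new("/a/b"), Path::new("/a/c/d"))` yields `../c/d`.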
3636fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3637 let mut path_components = path.components();
3638 let mut base_components = base.components();
3639 let mut components: Vec<Component> = Vec::new();
3640 loop {
3641 match (path_components.next(), base_components.next()) {
3642 (None, None) => break,
3643 (Some(a), None) => {
3644 components.push(a);
3645 components.extend(path_components.by_ref());
3646 break;
3647 }
3648 (None, _) => components.push(Component::ParentDir),
3649 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3650 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3651 (Some(a), Some(_)) => {
3652 components.push(Component::ParentDir);
3653 for _ in base_components {
3654 components.push(Component::ParentDir);
3655 }
3656 components.push(a);
3657 components.extend(path_components.by_ref());
3658 break;
3659 }
3660 }
3661 }
3662 components.iter().map(|c| c.as_os_str()).collect()
3663}
3664
3665#[cfg(test)]
3666mod tests {
3667 use super::{Event, *};
3668 use fs::RealFs;
3669 use futures::StreamExt;
3670 use gpui::test::subscribe;
3671 use language::{
3672 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3673 };
3674 use lsp::Url;
3675 use serde_json::json;
3676 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3677 use unindent::Unindent as _;
3678 use util::test::temp_tree;
3679 use worktree::WorktreeHandle as _;
3680
3681 #[gpui::test]
3682 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3683 let dir = temp_tree(json!({
3684 "root": {
3685 "apple": "",
3686 "banana": {
3687 "carrot": {
3688 "date": "",
3689 "endive": "",
3690 }
3691 },
3692 "fennel": {
3693 "grape": "",
3694 }
3695 }
3696 }));
3697
3698 let root_link_path = dir.path().join("root_link");
3699 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3700 unix::fs::symlink(
3701 &dir.path().join("root/fennel"),
3702 &dir.path().join("root/finnochio"),
3703 )
3704 .unwrap();
3705
3706 let project = Project::test(Arc::new(RealFs), cx);
3707
3708 let (tree, _) = project
3709 .update(cx, |project, cx| {
3710 project.find_or_create_local_worktree(&root_link_path, true, cx)
3711 })
3712 .await
3713 .unwrap();
3714
3715 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3716 .await;
3717 cx.read(|cx| {
3718 let tree = tree.read(cx);
3719 assert_eq!(tree.file_count(), 5);
3720 assert_eq!(
3721 tree.inode_for_path("fennel/grape"),
3722 tree.inode_for_path("finnochio/grape")
3723 );
3724 });
3725
3726 let cancel_flag = Default::default();
3727 let results = project
3728 .read_with(cx, |project, cx| {
3729 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3730 })
3731 .await;
3732 assert_eq!(
3733 results
3734 .into_iter()
3735 .map(|result| result.path)
3736 .collect::<Vec<Arc<Path>>>(),
3737 vec![
3738 PathBuf::from("banana/carrot/date").into(),
3739 PathBuf::from("banana/carrot/endive").into(),
3740 ]
3741 );
3742 }
3743
3744 #[gpui::test]
3745 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3746 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3747 let progress_token = language_server_config
3748 .disk_based_diagnostics_progress_token
3749 .clone()
3750 .unwrap();
3751
3752 let language = Arc::new(Language::new(
3753 LanguageConfig {
3754 name: "Rust".into(),
3755 path_suffixes: vec!["rs".to_string()],
3756 language_server: Some(language_server_config),
3757 ..Default::default()
3758 },
3759 Some(tree_sitter_rust::language()),
3760 ));
3761
3762 let fs = FakeFs::new(cx.background());
3763 fs.insert_tree(
3764 "/dir",
3765 json!({
3766 "a.rs": "fn a() { A }",
3767 "b.rs": "const y: i32 = 1",
3768 }),
3769 )
3770 .await;
3771
3772 let project = Project::test(fs, cx);
3773 project.update(cx, |project, _| {
3774 Arc::get_mut(&mut project.languages).unwrap().add(language);
3775 });
3776
3777 let (tree, _) = project
3778 .update(cx, |project, cx| {
3779 project.find_or_create_local_worktree("/dir", true, cx)
3780 })
3781 .await
3782 .unwrap();
3783 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3784
3785 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3786 .await;
3787
3788 // Cause worktree to start the fake language server
3789 let _buffer = project
3790 .update(cx, |project, cx| {
3791 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3792 })
3793 .await
3794 .unwrap();
3795
3796 let mut events = subscribe(&project, cx);
3797
3798 let mut fake_server = fake_servers.next().await.unwrap();
3799 fake_server.start_progress(&progress_token).await;
3800 assert_eq!(
3801 events.next().await.unwrap(),
3802 Event::DiskBasedDiagnosticsStarted
3803 );
3804
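        // Simulate overlapping progress cycles; nested cycles should not emit
        // additional `DiskBasedDiagnosticsStarted` events.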
3805 fake_server.start_progress(&progress_token).await;
3806 fake_server.end_progress(&progress_token).await;
3807 fake_server.start_progress(&progress_token).await;
3808
3809 fake_server
3810 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3811 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3812 version: None,
3813 diagnostics: vec![lsp::Diagnostic {
3814 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3815 severity: Some(lsp::DiagnosticSeverity::ERROR),
3816 message: "undefined variable 'A'".to_string(),
3817 ..Default::default()
3818 }],
3819 })
3820 .await;
3821 assert_eq!(
3822 events.next().await.unwrap(),
3823 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3824 );
3825
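        // Once the last outstanding progress cycle ends, the disk-based
        // diagnostics are reported as updated and finished.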
3826 fake_server.end_progress(&progress_token).await;
3827 fake_server.end_progress(&progress_token).await;
3828 assert_eq!(
3829 events.next().await.unwrap(),
3830 Event::DiskBasedDiagnosticsUpdated
3831 );
3832 assert_eq!(
3833 events.next().await.unwrap(),
3834 Event::DiskBasedDiagnosticsFinished
3835 );
3836
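        // Open the file that received diagnostics and verify that the
        // diagnostics were attached to the buffer.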
3837 let buffer = project
3838 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3839 .await
3840 .unwrap();
3841
3842 buffer.read_with(cx, |buffer, _| {
3843 let snapshot = buffer.snapshot();
3844 let diagnostics = snapshot
3845 .diagnostics_in_range::<_, Point>(0..buffer.len())
3846 .collect::<Vec<_>>();
3847 assert_eq!(
3848 diagnostics,
3849 &[DiagnosticEntry {
3850 range: Point::new(0, 9)..Point::new(0, 10),
3851 diagnostic: Diagnostic {
3852 severity: lsp::DiagnosticSeverity::ERROR,
3853 message: "undefined variable 'A'".to_string(),
3854 group_id: 0,
3855 is_primary: true,
3856 ..Default::default()
3857 }
3858 }]
3859 )
3860 });
3861 }
3862
3863 #[gpui::test]
3864 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3865 let dir = temp_tree(json!({
3866 "root": {
3867 "dir1": {},
3868 "dir2": {
3869 "dir3": {}
3870 }
3871 }
3872 }));
3873
3874 let project = Project::test(Arc::new(RealFs), cx);
3875 let (tree, _) = project
3876 .update(cx, |project, cx| {
3877 project.find_or_create_local_worktree(&dir.path(), true, cx)
3878 })
3879 .await
3880 .unwrap();
3881
3882 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3883 .await;
3884
3885 let cancel_flag = Default::default();
3886 let results = project
3887 .read_with(cx, |project, cx| {
3888 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3889 })
3890 .await;
3891
3892 assert!(results.is_empty());
3893 }
3894
3895 #[gpui::test]
3896 async fn test_definition(cx: &mut gpui::TestAppContext) {
3897 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3898 let language = Arc::new(Language::new(
3899 LanguageConfig {
3900 name: "Rust".into(),
3901 path_suffixes: vec!["rs".to_string()],
3902 language_server: Some(language_server_config),
3903 ..Default::default()
3904 },
3905 Some(tree_sitter_rust::language()),
3906 ));
3907
3908 let fs = FakeFs::new(cx.background());
3909 fs.insert_tree(
3910 "/dir",
3911 json!({
3912 "a.rs": "const fn a() { A }",
3913 "b.rs": "const y: i32 = crate::a()",
3914 }),
3915 )
3916 .await;
3917
3918 let project = Project::test(fs, cx);
3919 project.update(cx, |project, _| {
3920 Arc::get_mut(&mut project.languages).unwrap().add(language);
3921 });
3922
3923 let (tree, _) = project
3924 .update(cx, |project, cx| {
3925 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
3926 })
3927 .await
3928 .unwrap();
3929 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3930 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3931 .await;
3932
3933 let buffer = project
3934 .update(cx, |project, cx| {
3935 project.open_buffer(
3936 ProjectPath {
3937 worktree_id,
3938 path: Path::new("").into(),
3939 },
3940 cx,
3941 )
3942 })
3943 .await
3944 .unwrap();
3945
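        // Respond to the definition request with a location in `a.rs`, a file
        // that does not belong to any existing worktree.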
3946 let mut fake_server = fake_servers.next().await.unwrap();
3947 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3948 let params = params.text_document_position_params;
3949 assert_eq!(
3950 params.text_document.uri.to_file_path().unwrap(),
3951 Path::new("/dir/b.rs"),
3952 );
3953 assert_eq!(params.position, lsp::Position::new(0, 22));
3954
3955 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3956 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3957 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3958 )))
3959 });
3960
3961 let mut definitions = project
3962 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
3963 .await
3964 .unwrap();
3965
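        // The definition resolves to a buffer for `a.rs`, which is added to the
        // project as an invisible worktree.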
3966 assert_eq!(definitions.len(), 1);
3967 let definition = definitions.pop().unwrap();
3968 cx.update(|cx| {
3969 let target_buffer = definition.buffer.read(cx);
3970 assert_eq!(
3971 target_buffer
3972 .file()
3973 .unwrap()
3974 .as_local()
3975 .unwrap()
3976 .abs_path(cx),
3977 Path::new("/dir/a.rs"),
3978 );
3979 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
3980 assert_eq!(
3981 list_worktrees(&project, cx),
3982 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
3983 );
3984
3985 drop(definition);
3986 });
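        // After the definition is dropped, the worktree that was added for `a.rs` is released.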
3987 cx.read(|cx| {
3988 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
3989 });
3990
3991 fn list_worktrees<'a>(
3992 project: &'a ModelHandle<Project>,
3993 cx: &'a AppContext,
3994 ) -> Vec<(&'a Path, bool)> {
3995 project
3996 .read(cx)
3997 .worktrees(cx)
3998 .map(|worktree| {
3999 let worktree = worktree.read(cx);
4000 (
4001 worktree.as_local().unwrap().abs_path().as_ref(),
4002 worktree.is_visible(),
4003 )
4004 })
4005 .collect::<Vec<_>>()
4006 }
4007 }
4008
4009 #[gpui::test]
4010 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4011 let fs = FakeFs::new(cx.background());
4012 fs.insert_tree(
4013 "/dir",
4014 json!({
4015 "file1": "the old contents",
4016 }),
4017 )
4018 .await;
4019
4020 let project = Project::test(fs.clone(), cx);
4021 let worktree_id = project
4022 .update(cx, |p, cx| {
4023 p.find_or_create_local_worktree("/dir", true, cx)
4024 })
4025 .await
4026 .unwrap()
4027 .0
4028 .read_with(cx, |tree, _| tree.id());
4029
4030 let buffer = project
4031 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4032 .await
4033 .unwrap();
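        // Insert a large amount of text and save the buffer back to disk.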
4034 buffer
4035 .update(cx, |buffer, cx| {
4036 assert_eq!(buffer.text(), "the old contents");
4037 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4038 buffer.save(cx)
4039 })
4040 .await
4041 .unwrap();
4042
4043 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4044 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4045 }
4046
4047 #[gpui::test]
4048 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4049 let fs = FakeFs::new(cx.background());
4050 fs.insert_tree(
4051 "/dir",
4052 json!({
4053 "file1": "the old contents",
4054 }),
4055 )
4056 .await;
4057
4058 let project = Project::test(fs.clone(), cx);
4059 let worktree_id = project
4060 .update(cx, |p, cx| {
4061 p.find_or_create_local_worktree("/dir/file1", true, cx)
4062 })
4063 .await
4064 .unwrap()
4065 .0
4066 .read_with(cx, |tree, _| tree.id());
4067
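        // In a single-file worktree, the file is addressed by the empty relative path.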
4068 let buffer = project
4069 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4070 .await
4071 .unwrap();
4072 buffer
4073 .update(cx, |buffer, cx| {
4074 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4075 buffer.save(cx)
4076 })
4077 .await
4078 .unwrap();
4079
4080 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4081 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4082 }
4083
4084 #[gpui::test(retries = 5)]
4085 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4086 let dir = temp_tree(json!({
4087 "a": {
4088 "file1": "",
4089 "file2": "",
4090 "file3": "",
4091 },
4092 "b": {
4093 "c": {
4094 "file4": "",
4095 "file5": "",
4096 }
4097 }
4098 }));
4099
4100 let project = Project::test(Arc::new(RealFs), cx);
4101 let rpc = project.read_with(cx, |p, _| p.client.clone());
4102
4103 let (tree, _) = project
4104 .update(cx, |p, cx| {
4105 p.find_or_create_local_worktree(dir.path(), true, cx)
4106 })
4107 .await
4108 .unwrap();
4109 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4110
4111 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4112 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4113 async move { buffer.await.unwrap() }
4114 };
4115 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4116 tree.read_with(cx, |tree, _| {
4117 tree.entry_for_path(path)
4118 .expect(&format!("no entry for path {}", path))
4119 .id
4120 })
4121 };
4122
4123 let buffer2 = buffer_for_path("a/file2", cx).await;
4124 let buffer3 = buffer_for_path("a/file3", cx).await;
4125 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4126 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4127
4128 let file2_id = id_for_path("a/file2", &cx);
4129 let file3_id = id_for_path("a/file3", &cx);
4130 let file4_id = id_for_path("b/c/file4", &cx);
4131
4132 // Wait for the initial scan.
4133 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4134 .await;
4135
4136 // Create a remote copy of this worktree.
4137 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4138 let (remote, load_task) = cx.update(|cx| {
4139 Worktree::remote(
4140 1,
4141 1,
4142 initial_snapshot.to_proto(&Default::default(), true),
4143 rpc.clone(),
4144 cx,
4145 )
4146 });
4147 load_task.await;
4148
4149 cx.read(|cx| {
4150 assert!(!buffer2.read(cx).is_dirty());
4151 assert!(!buffer3.read(cx).is_dirty());
4152 assert!(!buffer4.read(cx).is_dirty());
4153 assert!(!buffer5.read(cx).is_dirty());
4154 });
4155
4156 // Rename and delete files and directories.
4157 tree.flush_fs_events(&cx).await;
4158 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4159 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4160 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4161 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4162 tree.flush_fs_events(&cx).await;
4163
4164 let expected_paths = vec![
4165 "a",
4166 "a/file1",
4167 "a/file2.new",
4168 "b",
4169 "d",
4170 "d/file3",
4171 "d/file4",
4172 ];
4173
4174 cx.read(|app| {
4175 assert_eq!(
4176 tree.read(app)
4177 .paths()
4178 .map(|p| p.to_str().unwrap())
4179 .collect::<Vec<_>>(),
4180 expected_paths
4181 );
4182
4183 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4184 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4185 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4186
4187 assert_eq!(
4188 buffer2.read(app).file().unwrap().path().as_ref(),
4189 Path::new("a/file2.new")
4190 );
4191 assert_eq!(
4192 buffer3.read(app).file().unwrap().path().as_ref(),
4193 Path::new("d/file3")
4194 );
4195 assert_eq!(
4196 buffer4.read(app).file().unwrap().path().as_ref(),
4197 Path::new("d/file4")
4198 );
4199 assert_eq!(
4200 buffer5.read(app).file().unwrap().path().as_ref(),
4201 Path::new("b/c/file5")
4202 );
4203
4204 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4205 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4206 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4207 assert!(buffer5.read(app).file().unwrap().is_deleted());
4208 });
4209
4210 // Update the remote worktree. Check that it becomes consistent with the
4211 // local worktree.
4212 remote.update(cx, |remote, cx| {
4213 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4214 &initial_snapshot,
4215 1,
4216 1,
4217 true,
4218 );
4219 remote
4220 .as_remote_mut()
4221 .unwrap()
4222 .snapshot
4223 .apply_remote_update(update_message)
4224 .unwrap();
4225
4226 assert_eq!(
4227 remote
4228 .paths()
4229 .map(|p| p.to_str().unwrap())
4230 .collect::<Vec<_>>(),
4231 expected_paths
4232 );
4233 });
4234 }
4235
4236 #[gpui::test]
4237 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4238 let fs = FakeFs::new(cx.background());
4239 fs.insert_tree(
4240 "/the-dir",
4241 json!({
4242 "a.txt": "a-contents",
4243 "b.txt": "b-contents",
4244 }),
4245 )
4246 .await;
4247
4248 let project = Project::test(fs.clone(), cx);
4249 let worktree_id = project
4250 .update(cx, |p, cx| {
4251 p.find_or_create_local_worktree("/the-dir", true, cx)
4252 })
4253 .await
4254 .unwrap()
4255 .0
4256 .read_with(cx, |tree, _| tree.id());
4257
4258 // Spawn multiple tasks to open paths, repeating some paths.
4259 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4260 (
4261 p.open_buffer((worktree_id, "a.txt"), cx),
4262 p.open_buffer((worktree_id, "b.txt"), cx),
4263 p.open_buffer((worktree_id, "a.txt"), cx),
4264 )
4265 });
4266
4267 let buffer_a_1 = buffer_a_1.await.unwrap();
4268 let buffer_a_2 = buffer_a_2.await.unwrap();
4269 let buffer_b = buffer_b.await.unwrap();
4270 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4271 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4272
4273 // There is only one buffer per path.
4274 let buffer_a_id = buffer_a_1.id();
4275 assert_eq!(buffer_a_2.id(), buffer_a_id);
4276
        // Drop one handle and open the same path again while another handle is still open.
4278 drop(buffer_a_1);
4279 let buffer_a_3 = project
4280 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4281 .await
4282 .unwrap();
4283
4284 // There's still only one buffer per path.
4285 assert_eq!(buffer_a_3.id(), buffer_a_id);
4286 }
4287
4288 #[gpui::test]
4289 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4290 use std::fs;
4291
4292 let dir = temp_tree(json!({
4293 "file1": "abc",
4294 "file2": "def",
4295 "file3": "ghi",
4296 }));
4297
4298 let project = Project::test(Arc::new(RealFs), cx);
4299 let (worktree, _) = project
4300 .update(cx, |p, cx| {
4301 p.find_or_create_local_worktree(dir.path(), true, cx)
4302 })
4303 .await
4304 .unwrap();
4305 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4306
4307 worktree.flush_fs_events(&cx).await;
4308 worktree
4309 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4310 .await;
4311
4312 let buffer1 = project
4313 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4314 .await
4315 .unwrap();
4316 let events = Rc::new(RefCell::new(Vec::new()));
4317
4318 // initially, the buffer isn't dirty.
4319 buffer1.update(cx, |buffer, cx| {
4320 cx.subscribe(&buffer1, {
4321 let events = events.clone();
4322 move |_, _, event, _| events.borrow_mut().push(event.clone())
4323 })
4324 .detach();
4325
4326 assert!(!buffer.is_dirty());
4327 assert!(events.borrow().is_empty());
4328
4329 buffer.edit(vec![1..2], "", cx);
4330 });
4331
4332 // after the first edit, the buffer is dirty, and emits a dirtied event.
4333 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
4335 assert!(buffer.is_dirty());
4336 assert_eq!(
4337 *events.borrow(),
4338 &[language::Event::Edited, language::Event::Dirtied]
4339 );
4340 events.borrow_mut().clear();
4341 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4342 });
4343
4344 // after saving, the buffer is not dirty, and emits a saved event.
4345 buffer1.update(cx, |buffer, cx| {
4346 assert!(!buffer.is_dirty());
4347 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4348 events.borrow_mut().clear();
4349
4350 buffer.edit(vec![1..1], "B", cx);
4351 buffer.edit(vec![2..2], "D", cx);
4352 });
4353
        // after editing again, the buffer is dirty and emits another dirtied event.
4355 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
4357 assert!(buffer.is_dirty());
4358 assert_eq!(
4359 *events.borrow(),
4360 &[
4361 language::Event::Edited,
4362 language::Event::Dirtied,
4363 language::Event::Edited,
4364 ],
4365 );
4366 events.borrow_mut().clear();
4367
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4370 buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
4372 assert!(buffer.is_dirty());
4373 });
4374
4375 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4376
4377 // When a file is deleted, the buffer is considered dirty.
4378 let events = Rc::new(RefCell::new(Vec::new()));
4379 let buffer2 = project
4380 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4381 .await
4382 .unwrap();
4383 buffer2.update(cx, |_, cx| {
4384 cx.subscribe(&buffer2, {
4385 let events = events.clone();
4386 move |_, _, event, _| events.borrow_mut().push(event.clone())
4387 })
4388 .detach();
4389 });
4390
4391 fs::remove_file(dir.path().join("file2")).unwrap();
4392 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4393 assert_eq!(
4394 *events.borrow(),
4395 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4396 );
4397
        // When a file that is already dirty is deleted, no Dirtied event is emitted.
4399 let events = Rc::new(RefCell::new(Vec::new()));
4400 let buffer3 = project
4401 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4402 .await
4403 .unwrap();
4404 buffer3.update(cx, |_, cx| {
4405 cx.subscribe(&buffer3, {
4406 let events = events.clone();
4407 move |_, _, event, _| events.borrow_mut().push(event.clone())
4408 })
4409 .detach();
4410 });
4411
4412 worktree.flush_fs_events(&cx).await;
4413 buffer3.update(cx, |buffer, cx| {
4414 buffer.edit(Some(0..0), "x", cx);
4415 });
4416 events.borrow_mut().clear();
4417 fs::remove_file(dir.path().join("file3")).unwrap();
4418 buffer3
4419 .condition(&cx, |_, _| !events.borrow().is_empty())
4420 .await;
4421 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4422 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4423 }
4424
4425 #[gpui::test]
4426 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4427 use std::fs;
4428
4429 let initial_contents = "aaa\nbbbbb\nc\n";
4430 let dir = temp_tree(json!({ "the-file": initial_contents }));
4431
4432 let project = Project::test(Arc::new(RealFs), cx);
4433 let (worktree, _) = project
4434 .update(cx, |p, cx| {
4435 p.find_or_create_local_worktree(dir.path(), true, cx)
4436 })
4437 .await
4438 .unwrap();
4439 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4440
4441 worktree
4442 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4443 .await;
4444
4445 let abs_path = dir.path().join("the-file");
4446 let buffer = project
4447 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4448 .await
4449 .unwrap();
4450
4451 // TODO
4452 // Add a cursor on each row.
4453 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4454 // assert!(!buffer.is_dirty());
4455 // buffer.add_selection_set(
4456 // &(0..3)
4457 // .map(|row| Selection {
4458 // id: row as usize,
4459 // start: Point::new(row, 1),
4460 // end: Point::new(row, 1),
4461 // reversed: false,
4462 // goal: SelectionGoal::None,
4463 // })
4464 // .collect::<Vec<_>>(),
4465 // cx,
4466 // )
4467 // });
4468
4469 // Change the file on disk, adding two new lines of text, and removing
4470 // one line.
4471 buffer.read_with(cx, |buffer, _| {
4472 assert!(!buffer.is_dirty());
4473 assert!(!buffer.has_conflict());
4474 });
4475 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4476 fs::write(&abs_path, new_contents).unwrap();
4477
4478 // Because the buffer was not modified, it is reloaded from disk. Its
4479 // contents are edited according to the diff between the old and new
4480 // file contents.
4481 buffer
4482 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4483 .await;
4484
4485 buffer.update(cx, |buffer, _| {
4486 assert_eq!(buffer.text(), new_contents);
4487 assert!(!buffer.is_dirty());
4488 assert!(!buffer.has_conflict());
4489
4490 // TODO
4491 // let cursor_positions = buffer
4492 // .selection_set(selection_set_id)
4493 // .unwrap()
4494 // .selections::<Point>(&*buffer)
4495 // .map(|selection| {
4496 // assert_eq!(selection.start, selection.end);
4497 // selection.start
4498 // })
4499 // .collect::<Vec<_>>();
4500 // assert_eq!(
4501 // cursor_positions,
4502 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4503 // );
4504 });
4505
        // Modify the buffer.
4507 buffer.update(cx, |buffer, cx| {
4508 buffer.edit(vec![0..0], " ", cx);
4509 assert!(buffer.is_dirty());
4510 assert!(!buffer.has_conflict());
4511 });
4512
4513 // Change the file on disk again, adding blank lines to the beginning.
4514 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4515
4516 // Because the buffer is modified, it doesn't reload from disk, but is
4517 // marked as having a conflict.
4518 buffer
4519 .condition(&cx, |buffer, _| buffer.has_conflict())
4520 .await;
4521 }
4522
4523 #[gpui::test]
4524 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4525 let fs = FakeFs::new(cx.background());
4526 fs.insert_tree(
4527 "/the-dir",
4528 json!({
4529 "a.rs": "
4530 fn foo(mut v: Vec<usize>) {
4531 for x in &v {
4532 v.push(1);
4533 }
4534 }
4535 "
4536 .unindent(),
4537 }),
4538 )
4539 .await;
4540
4541 let project = Project::test(fs.clone(), cx);
4542 let (worktree, _) = project
4543 .update(cx, |p, cx| {
4544 p.find_or_create_local_worktree("/the-dir", true, cx)
4545 })
4546 .await
4547 .unwrap();
4548 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4549
4550 let buffer = project
4551 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4552 .await
4553 .unwrap();
4554
4555 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4556 let message = lsp::PublishDiagnosticsParams {
4557 uri: buffer_uri.clone(),
4558 diagnostics: vec![
4559 lsp::Diagnostic {
4560 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4561 severity: Some(DiagnosticSeverity::WARNING),
4562 message: "error 1".to_string(),
4563 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4564 location: lsp::Location {
4565 uri: buffer_uri.clone(),
4566 range: lsp::Range::new(
4567 lsp::Position::new(1, 8),
4568 lsp::Position::new(1, 9),
4569 ),
4570 },
4571 message: "error 1 hint 1".to_string(),
4572 }]),
4573 ..Default::default()
4574 },
4575 lsp::Diagnostic {
4576 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4577 severity: Some(DiagnosticSeverity::HINT),
4578 message: "error 1 hint 1".to_string(),
4579 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4580 location: lsp::Location {
4581 uri: buffer_uri.clone(),
4582 range: lsp::Range::new(
4583 lsp::Position::new(1, 8),
4584 lsp::Position::new(1, 9),
4585 ),
4586 },
4587 message: "original diagnostic".to_string(),
4588 }]),
4589 ..Default::default()
4590 },
4591 lsp::Diagnostic {
4592 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4593 severity: Some(DiagnosticSeverity::ERROR),
4594 message: "error 2".to_string(),
4595 related_information: Some(vec![
4596 lsp::DiagnosticRelatedInformation {
4597 location: lsp::Location {
4598 uri: buffer_uri.clone(),
4599 range: lsp::Range::new(
4600 lsp::Position::new(1, 13),
4601 lsp::Position::new(1, 15),
4602 ),
4603 },
4604 message: "error 2 hint 1".to_string(),
4605 },
4606 lsp::DiagnosticRelatedInformation {
4607 location: lsp::Location {
4608 uri: buffer_uri.clone(),
4609 range: lsp::Range::new(
4610 lsp::Position::new(1, 13),
4611 lsp::Position::new(1, 15),
4612 ),
4613 },
4614 message: "error 2 hint 2".to_string(),
4615 },
4616 ]),
4617 ..Default::default()
4618 },
4619 lsp::Diagnostic {
4620 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4621 severity: Some(DiagnosticSeverity::HINT),
4622 message: "error 2 hint 1".to_string(),
4623 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4624 location: lsp::Location {
4625 uri: buffer_uri.clone(),
4626 range: lsp::Range::new(
4627 lsp::Position::new(2, 8),
4628 lsp::Position::new(2, 17),
4629 ),
4630 },
4631 message: "original diagnostic".to_string(),
4632 }]),
4633 ..Default::default()
4634 },
4635 lsp::Diagnostic {
4636 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4637 severity: Some(DiagnosticSeverity::HINT),
4638 message: "error 2 hint 2".to_string(),
4639 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4640 location: lsp::Location {
4641 uri: buffer_uri.clone(),
4642 range: lsp::Range::new(
4643 lsp::Position::new(2, 8),
4644 lsp::Position::new(2, 17),
4645 ),
4646 },
4647 message: "original diagnostic".to_string(),
4648 }]),
4649 ..Default::default()
4650 },
4651 ],
4652 version: None,
4653 };
4654
4655 project
4656 .update(cx, |p, cx| {
4657 p.update_diagnostics(message, &Default::default(), cx)
4658 })
4659 .unwrap();
4660 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4661
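        // Diagnostics listed as related information of another diagnostic are
        // grouped with it and marked as non-primary.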
4662 assert_eq!(
4663 buffer
4664 .diagnostics_in_range::<_, Point>(0..buffer.len())
4665 .collect::<Vec<_>>(),
4666 &[
4667 DiagnosticEntry {
4668 range: Point::new(1, 8)..Point::new(1, 9),
4669 diagnostic: Diagnostic {
4670 severity: DiagnosticSeverity::WARNING,
4671 message: "error 1".to_string(),
4672 group_id: 0,
4673 is_primary: true,
4674 ..Default::default()
4675 }
4676 },
4677 DiagnosticEntry {
4678 range: Point::new(1, 8)..Point::new(1, 9),
4679 diagnostic: Diagnostic {
4680 severity: DiagnosticSeverity::HINT,
4681 message: "error 1 hint 1".to_string(),
4682 group_id: 0,
4683 is_primary: false,
4684 ..Default::default()
4685 }
4686 },
4687 DiagnosticEntry {
4688 range: Point::new(1, 13)..Point::new(1, 15),
4689 diagnostic: Diagnostic {
4690 severity: DiagnosticSeverity::HINT,
4691 message: "error 2 hint 1".to_string(),
4692 group_id: 1,
4693 is_primary: false,
4694 ..Default::default()
4695 }
4696 },
4697 DiagnosticEntry {
4698 range: Point::new(1, 13)..Point::new(1, 15),
4699 diagnostic: Diagnostic {
4700 severity: DiagnosticSeverity::HINT,
4701 message: "error 2 hint 2".to_string(),
4702 group_id: 1,
4703 is_primary: false,
4704 ..Default::default()
4705 }
4706 },
4707 DiagnosticEntry {
4708 range: Point::new(2, 8)..Point::new(2, 17),
4709 diagnostic: Diagnostic {
4710 severity: DiagnosticSeverity::ERROR,
4711 message: "error 2".to_string(),
4712 group_id: 1,
4713 is_primary: true,
4714 ..Default::default()
4715 }
4716 }
4717 ]
4718 );
4719
4720 assert_eq!(
4721 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4722 &[
4723 DiagnosticEntry {
4724 range: Point::new(1, 8)..Point::new(1, 9),
4725 diagnostic: Diagnostic {
4726 severity: DiagnosticSeverity::WARNING,
4727 message: "error 1".to_string(),
4728 group_id: 0,
4729 is_primary: true,
4730 ..Default::default()
4731 }
4732 },
4733 DiagnosticEntry {
4734 range: Point::new(1, 8)..Point::new(1, 9),
4735 diagnostic: Diagnostic {
4736 severity: DiagnosticSeverity::HINT,
4737 message: "error 1 hint 1".to_string(),
4738 group_id: 0,
4739 is_primary: false,
4740 ..Default::default()
4741 }
4742 },
4743 ]
4744 );
4745 assert_eq!(
4746 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4747 &[
4748 DiagnosticEntry {
4749 range: Point::new(1, 13)..Point::new(1, 15),
4750 diagnostic: Diagnostic {
4751 severity: DiagnosticSeverity::HINT,
4752 message: "error 2 hint 1".to_string(),
4753 group_id: 1,
4754 is_primary: false,
4755 ..Default::default()
4756 }
4757 },
4758 DiagnosticEntry {
4759 range: Point::new(1, 13)..Point::new(1, 15),
4760 diagnostic: Diagnostic {
4761 severity: DiagnosticSeverity::HINT,
4762 message: "error 2 hint 2".to_string(),
4763 group_id: 1,
4764 is_primary: false,
4765 ..Default::default()
4766 }
4767 },
4768 DiagnosticEntry {
4769 range: Point::new(2, 8)..Point::new(2, 17),
4770 diagnostic: Diagnostic {
4771 severity: DiagnosticSeverity::ERROR,
4772 message: "error 2".to_string(),
4773 group_id: 1,
4774 is_primary: true,
4775 ..Default::default()
4776 }
4777 }
4778 ]
4779 );
4780 }
4781
4782 #[gpui::test]
4783 async fn test_rename(cx: &mut gpui::TestAppContext) {
4784 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4785 let language = Arc::new(Language::new(
4786 LanguageConfig {
4787 name: "Rust".into(),
4788 path_suffixes: vec!["rs".to_string()],
4789 language_server: Some(language_server_config),
4790 ..Default::default()
4791 },
4792 Some(tree_sitter_rust::language()),
4793 ));
4794
4795 let fs = FakeFs::new(cx.background());
4796 fs.insert_tree(
4797 "/dir",
4798 json!({
4799 "one.rs": "const ONE: usize = 1;",
4800 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4801 }),
4802 )
4803 .await;
4804
4805 let project = Project::test(fs.clone(), cx);
4806 project.update(cx, |project, _| {
4807 Arc::get_mut(&mut project.languages).unwrap().add(language);
4808 });
4809
4810 let (tree, _) = project
4811 .update(cx, |project, cx| {
4812 project.find_or_create_local_worktree("/dir", true, cx)
4813 })
4814 .await
4815 .unwrap();
4816 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4817 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4818 .await;
4819
4820 let buffer = project
4821 .update(cx, |project, cx| {
4822 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4823 })
4824 .await
4825 .unwrap();
4826
4827 let mut fake_server = fake_servers.next().await.unwrap();
4828
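        // Prepare the rename: the fake language server reports the range of the
        // symbol at the requested position.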
4829 let response = project.update(cx, |project, cx| {
4830 project.prepare_rename(buffer.clone(), 7, cx)
4831 });
4832 fake_server
4833 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4834 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4835 assert_eq!(params.position, lsp::Position::new(0, 7));
4836 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4837 lsp::Position::new(0, 6),
4838 lsp::Position::new(0, 9),
4839 )))
4840 })
4841 .next()
4842 .await
4843 .unwrap();
4844 let range = response.await.unwrap().unwrap();
4845 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4846 assert_eq!(range, 6..9);
4847
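        // Perform the rename: the fake language server responds with a workspace
        // edit that touches both `one.rs` and `two.rs`.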
4848 let response = project.update(cx, |project, cx| {
4849 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4850 });
4851 fake_server
4852 .handle_request::<lsp::request::Rename, _>(|params, _| {
4853 assert_eq!(
4854 params.text_document_position.text_document.uri.as_str(),
4855 "file:///dir/one.rs"
4856 );
4857 assert_eq!(
4858 params.text_document_position.position,
4859 lsp::Position::new(0, 7)
4860 );
4861 assert_eq!(params.new_name, "THREE");
4862 Some(lsp::WorkspaceEdit {
4863 changes: Some(
4864 [
4865 (
4866 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4867 vec![lsp::TextEdit::new(
4868 lsp::Range::new(
4869 lsp::Position::new(0, 6),
4870 lsp::Position::new(0, 9),
4871 ),
4872 "THREE".to_string(),
4873 )],
4874 ),
4875 (
4876 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4877 vec![
4878 lsp::TextEdit::new(
4879 lsp::Range::new(
4880 lsp::Position::new(0, 24),
4881 lsp::Position::new(0, 27),
4882 ),
4883 "THREE".to_string(),
4884 ),
4885 lsp::TextEdit::new(
4886 lsp::Range::new(
4887 lsp::Position::new(0, 35),
4888 lsp::Position::new(0, 38),
4889 ),
4890 "THREE".to_string(),
4891 ),
4892 ],
4893 ),
4894 ]
4895 .into_iter()
4896 .collect(),
4897 ),
4898 ..Default::default()
4899 })
4900 })
4901 .next()
4902 .await
4903 .unwrap();
4904 let mut transaction = response.await.unwrap().0;
4905 assert_eq!(transaction.len(), 2);
4906 assert_eq!(
4907 transaction
4908 .remove_entry(&buffer)
4909 .unwrap()
4910 .0
4911 .read_with(cx, |buffer, _| buffer.text()),
4912 "const THREE: usize = 1;"
4913 );
4914 assert_eq!(
4915 transaction
4916 .into_keys()
4917 .next()
4918 .unwrap()
4919 .read_with(cx, |buffer, _| buffer.text()),
4920 "const TWO: usize = one::THREE + one::THREE;"
4921 );
4922 }
4923
4924 #[gpui::test]
4925 async fn test_search(cx: &mut gpui::TestAppContext) {
4926 let fs = FakeFs::new(cx.background());
4927 fs.insert_tree(
4928 "/dir",
4929 json!({
4930 "one.rs": "const ONE: usize = 1;",
4931 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4932 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4933 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4934 }),
4935 )
4936 .await;
4937 let project = Project::test(fs.clone(), cx);
4938 let (tree, _) = project
4939 .update(cx, |project, cx| {
4940 project.find_or_create_local_worktree("/dir", true, cx)
4941 })
4942 .await
4943 .unwrap();
4944 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4945 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4946 .await;
4947
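        // Search across all files in the worktree.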
4948 assert_eq!(
4949 search(&project, SearchQuery::text("TWO", false, true), cx)
4950 .await
4951 .unwrap(),
4952 HashMap::from_iter([
4953 ("two.rs".to_string(), vec![6..9]),
4954 ("three.rs".to_string(), vec![37..40])
4955 ])
4956 );
4957
4958 let buffer_4 = project
4959 .update(cx, |project, cx| {
4960 project.open_buffer((worktree_id, "four.rs"), cx)
4961 })
4962 .await
4963 .unwrap();
4964 buffer_4.update(cx, |buffer, cx| {
4965 buffer.edit([20..28, 31..43], "two::TWO", cx);
4966 });
4967
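        // The search also reflects edits made to an open buffer that have not yet
        // been saved.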
4968 assert_eq!(
4969 search(&project, SearchQuery::text("TWO", false, true), cx)
4970 .await
4971 .unwrap(),
4972 HashMap::from_iter([
4973 ("two.rs".to_string(), vec![6..9]),
4974 ("three.rs".to_string(), vec![37..40]),
4975 ("four.rs".to_string(), vec![25..28, 36..39])
4976 ])
4977 );
4978
4979 async fn search(
4980 project: &ModelHandle<Project>,
4981 query: SearchQuery,
4982 cx: &mut gpui::TestAppContext,
4983 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
4984 let results = project
4985 .update(cx, |project, cx| project.search(query, cx))
4986 .await?;
4987
4988 Ok(results
4989 .into_iter()
4990 .map(|(buffer, ranges)| {
4991 buffer.read_with(cx, |buffer, _| {
4992 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4993 let ranges = ranges
4994 .into_iter()
4995 .map(|range| range.to_offset(buffer))
4996 .collect::<Vec<_>>();
4997 (path, ranges)
4998 })
4999 })
5000 .collect())
5001 }
5002 }
5003}