pub mod fs;
mod ignore;
mod lsp_command;
pub mod search;
pub mod worktree;

use anyhow::{anyhow, Context, Result};
use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
use clock::ReplicaId;
use collections::{hash_map, HashMap, HashSet};
use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
use gpui::{
    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
    UpgradeModelHandle, WeakModelHandle,
};
use language::{
    proto::{deserialize_anchor, serialize_anchor},
    range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
    Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
    ToLspPosition, ToOffset, ToPointUtf16, Transaction,
};
use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
use lsp_command::*;
use postage::watch;
use rand::prelude::*;
use search::SearchQuery;
use sha2::{Digest, Sha256};
use smol::block_on;
use std::{
    cell::RefCell,
    cmp,
    convert::TryInto,
    hash::Hash,
    mem,
    ops::Range,
    path::{Component, Path, PathBuf},
    rc::Rc,
    sync::{atomic::AtomicBool, Arc},
    time::Instant,
};
use util::{post_inc, ResultExt, TryFutureExt as _};

pub use fs::*;
pub use worktree::*;

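/// A project groups one or more worktrees together with the state needed to
/// collaborate on them: open buffers, language servers, diagnostics, and the
/// RPC client used to share the project with remote peers.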
pub struct Project {
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    started_language_servers:
        HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    language_servers_with_diagnostics_running: isize,
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    loading_local_worktrees:
        HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
    opened_buffers: HashMap<u64, OpenBuffer>,
    nonce: u128,
}

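/// A buffer registered with the project. Buffers are held strongly while the
/// project is shared or remote (see `share`/`unshare`), weakly otherwise, and
/// `Loading` queues operations that arrive before the buffer finishes opening.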
enum OpenBuffer {
    Strong(ModelHandle<Buffer>),
    Weak(WeakModelHandle<Buffer>),
    Loading(Vec<Operation>),
}

enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}

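/// Whether this project lives on the local machine or mirrors a project that
/// another peer is sharing over RPC.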
enum ProjectClientState {
    Local {
        is_shared: bool,
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}

#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}

#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}

#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}

#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}

#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}

#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}

#[derive(Clone, Debug)]
pub struct Symbol {
    pub source_worktree_id: WorktreeId,
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    pub signature: [u8; 32],
}

#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);

impl DiagnosticSummary {
    fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
        let mut this = Self {
            error_count: 0,
            warning_count: 0,
            info_count: 0,
            hint_count: 0,
        };

        for entry in diagnostics {
            if entry.diagnostic.is_primary {
                match entry.diagnostic.severity {
                    DiagnosticSeverity::ERROR => this.error_count += 1,
                    DiagnosticSeverity::WARNING => this.warning_count += 1,
                    DiagnosticSeverity::INFORMATION => this.info_count += 1,
                    DiagnosticSeverity::HINT => this.hint_count += 1,
                    _ => {}
                }
            }
        }

        this
    }

    pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
        proto::DiagnosticSummary {
            path: path.to_string_lossy().to_string(),
            error_count: self.error_count as u32,
            warning_count: self.warning_count as u32,
            info_count: self.info_count as u32,
            hint_count: self.hint_count as u32,
        }
    }
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}

impl Project {
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }

    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }

    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                loading_local_worktrees: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                opened_buffers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
        let languages = Arc::new(LanguageRegistry::new());
        let http_client = client::test::FakeHttpClient::with_404_response();
        let client = client::Client::new(http_client.clone());
        let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
        cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
        self.opened_buffers
            .get(&remote_id)
            .and_then(|buffer| buffer.upgrade(cx))
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn check_invariants(&self, cx: &AppContext) {
        if self.is_local() {
            let mut worktree_root_paths = HashMap::default();
            for worktree in self.worktrees(cx) {
                let worktree = worktree.read(cx);
                let abs_path = worktree.as_local().unwrap().abs_path().clone();
                let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
                assert_eq!(
                    prev_worktree_id,
                    None,
                    "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
                    abs_path,
                    worktree.id(),
                    prev_worktree_id
                )
            }
        } else {
            let replica_id = self.replica_id();
            for buffer in self.opened_buffers.values() {
                if let Some(buffer) = buffer.upgrade(cx) {
                    let buffer = buffer.read(cx);
                    assert_eq!(
                        buffer.deferred_ops_len(),
                        0,
                        "replica {}, buffer {} has deferred operations",
                        replica_id,
                        buffer.remote_id()
                    );
                }
            }
        }
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
        let path = path.into();
        if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
            self.opened_buffers.iter().any(|(_, buffer)| {
                if let Some(buffer) = buffer.upgrade(cx) {
                    if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                        if file.worktree == worktree && file.path() == &path.path {
                            return true;
                        }
                    }
                }
                false
            })
        } else {
            false
        }
    }

    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }

    fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
        if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
            *remote_id_tx.borrow_mut() = remote_id;
        }

        self.subscriptions.clear();
        if let Some(remote_id) = remote_id {
            self.subscriptions
                .push(self.client.add_model_for_remote_entity(remote_id, cx));
        }
    }

    pub fn remote_id(&self) -> Option<u64> {
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
            ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
        }
    }

    pub fn next_remote_id(&self) -> impl Future<Output = u64> {
        let mut id = None;
        let mut watch = None;
        match &self.client_state {
            ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
            ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
        }

        async move {
            if let Some(id) = id {
                return id;
            }
            let mut watch = watch.unwrap();
            loop {
                let id = *watch.borrow();
                if let Some(id) = id {
                    return id;
                }
                watch.next().await;
            }
        }
    }

    pub fn replica_id(&self) -> ReplicaId {
        match &self.client_state {
            ProjectClientState::Local { .. } => 0,
            ProjectClientState::Remote { replica_id, .. } => *replica_id,
        }
    }

    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }

    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }

    pub fn visible_worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees.iter().filter_map(|worktree| {
            worktree.upgrade(cx).and_then(|worktree| {
                if worktree.read(cx).is_visible() {
                    Some(worktree)
                } else {
                    None
                }
            })
        })
    }

    pub fn worktree_for_id(
        &self,
        id: WorktreeId,
        cx: &AppContext,
    ) -> Option<ModelHandle<Worktree>> {
        self.worktrees(cx)
            .find(|worktree| worktree.read(cx).id() == id)
    }

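    /// Publishes this local project to the server so other collaborators can
    /// join it, upgrading buffer and worktree handles to strong references and
    /// sharing every worktree.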
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }

    pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = false;

                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(buffer) => {
                                *open_buffer = OpenBuffer::Weak(buffer.downgrade());
                            }
                            _ => {}
                        }
                    }

                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(worktree) => {
                                if !worktree.read(cx).is_visible() {
                                    *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
                                }
                            }
                            _ => {}
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.send(proto::UnshareProject { project_id })?;
            this.update(&mut cx, |this, cx| {
                this.collaborators.clear();
                this.shared_buffers.clear();
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, _| {
                        worktree.as_local_mut().unwrap().unshare();
                    });
                }
                cx.notify()
            });
            Ok(())
        })
    }

    pub fn is_read_only(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => false,
            ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } => *sharing_has_stopped,
        }
    }

    pub fn is_local(&self) -> bool {
        match &self.client_state {
            ProjectClientState::Local { .. } => true,
            ProjectClientState::Remote { .. } => false,
        }
    }

    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }

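    /// Opens the buffer at the given project path, reusing an already-open
    /// buffer or an in-flight load for the same path when one exists.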
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }

    fn open_local_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let worktree = worktree.as_local_mut().unwrap();
            worktree.load_buffer(path, cx)
        });
        let worktree = worktree.downgrade();
        cx.spawn(|this, mut cx| async move {
            let buffer = load_buffer.await?;
            let worktree = worktree
                .upgrade(&cx)
                .ok_or_else(|| anyhow!("worktree was removed"))?;
            this.update(&mut cx, |this, cx| {
                this.register_buffer(&buffer, Some(&worktree), cx)
            })?;
            Ok(buffer)
        })
    }

    fn open_remote_buffer(
        &mut self,
        path: &Arc<Path>,
        worktree: &ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let rpc = self.client.clone();
        let project_id = self.remote_id().unwrap();
        let remote_worktree_id = worktree.read(cx).id();
        let path = path.clone();
        let path_string = path.to_string_lossy().to_string();
        cx.spawn(|this, mut cx| async move {
            let response = rpc
                .request(proto::OpenBuffer {
                    project_id,
                    worktree_id: remote_worktree_id.to_proto(),
                    path: path_string,
                })
                .await?;
            let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
            this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                .await
        })
    }

    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }

    pub fn save_buffer_as(
        &mut self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }

    pub fn get_open_buffer(
        &mut self,
        path: &ProjectPath,
        cx: &mut ModelContext<Self>,
    ) -> Option<ModelHandle<Buffer>> {
        let worktree = self.worktree_for_id(path.worktree_id, cx)?;
        self.opened_buffers.values().find_map(|buffer| {
            let buffer = buffer.upgrade(cx)?;
            let file = File::from_dyn(buffer.read(cx).file())?;
            if file.worktree == worktree && file.path() == &path.path {
                Some(buffer)
            } else {
                None
            }
        })
    }

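    /// Records a newly opened buffer in `opened_buffers`, applying any
    /// operations that were queued while it was loading, and then assigns a
    /// language to it. Fails if a live buffer with the same remote id is
    /// already registered.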
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }

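    /// Picks a language for the buffer based on its path and, for local
    /// worktrees, starts (or reuses) the matching language server and applies
    /// any diagnostics previously stored for that path.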
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();
                let buffer = buffer.downgrade();
                let language_server =
                    self.start_language_server(worktree_id, worktree_abs_path, language, cx);

                cx.spawn_weak(|_, mut cx| async move {
                    if let Some(language_server) = language_server.await {
                        if let Some(buffer) = buffer.upgrade(&cx) {
                            buffer.update(&mut cx, |buffer, cx| {
                                buffer.set_language_server(Some(language_server), cx);
                            });
                        }
                    }
                })
                .detach();
            }
        }

        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }

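    /// Starts a language server for the given worktree and language, or
    /// returns the task for one that is already starting. The spawned task
    /// also forwards the server's diagnostic notifications to the project.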
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let key = (worktree_id, language.name().to_string());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let language_server = self.languages.start_language_server(
                    &language,
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                let rpc = self.client.clone();
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    if let Some(this) = this.upgrade(&cx) {
                        this.update(&mut cx, |this, _| {
                            this.language_servers.insert(key, language_server.clone());
                        });
                    }

                    let disk_based_sources = language
                        .disk_based_diagnostic_sources()
                        .cloned()
                        .unwrap_or_default();
                    let disk_based_diagnostics_progress_token =
                        language.disk_based_diagnostics_progress_token().cloned();
                    let has_disk_based_diagnostic_progress_token =
                        disk_based_diagnostics_progress_token.is_some();
                    let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

                    // Listen for `PublishDiagnostics` notifications.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let diagnostics_tx = diagnostics_tx.clone();
                            move |params| {
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                                block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
                                    .ok();
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                        })
                        .detach();

                    // Listen for `Progress` notifications. Send an event when the language server
                    // transitions between running jobs and not running any jobs.
                    let mut running_jobs_for_this_server: i32 = 0;
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::Number(_) => None,
                                lsp::NumberOrString::String(token) => Some(token),
                            };

                            if token == disk_based_diagnostics_progress_token {
                                match params.value {
                                    lsp::ProgressParamsValue::WorkDone(progress) => {
                                        match progress {
                                            lsp::WorkDoneProgress::Begin(_) => {
                                                running_jobs_for_this_server += 1;
                                                if running_jobs_for_this_server == 1 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsStart),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            lsp::WorkDoneProgress::End(_) => {
                                                running_jobs_for_this_server -= 1;
                                                if running_jobs_for_this_server == 0 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsFinish),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            _ => {}
                                        }
                                    }
                                }
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| async move {
                        while let Ok(message) = diagnostics_rx.recv().await {
                            let this = this.upgrade(&cx)?;
                            match message {
                                LspEvent::DiagnosticsStart => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_started(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdating {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                                LspEvent::DiagnosticsUpdate(mut params) => {
                                    language.process_diagnostics(&mut params);
                                    this.update(&mut cx, |this, cx| {
                                        this.update_diagnostics(params, &disk_based_sources, cx)
                                            .log_err();
                                    });
                                }
                                LspEvent::DiagnosticsFinish => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdated {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                            }
                        }
                        Some(())
                    })
                    .detach();

                    Some(language_server)
                })
                .shared()
            })
            .clone()
    }

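    /// Converts an LSP `PublishDiagnostics` notification into grouped
    /// diagnostic entries, pairing related information with its primary
    /// diagnostic, and stores them for the affected path.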
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        let mut primary_diagnostic_group_ids = HashMap::default();
        let mut sources_by_group_id = HashMap::default();
        let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            if is_supporting {
                if let Some(severity) = diagnostic.severity {
                    supporting_diagnostic_severities
                        .insert((source, code.clone(), range), severity);
                }
            } else {
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                    },
                });
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                },
                            });
                        }
                    }
                }
            }
        }

        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&severity) = supporting_diagnostic_severities.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    diagnostic.severity = severity;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }

    pub fn update_diagnostic_entries(
        &mut self,
        abs_path: PathBuf,
        version: Option<i32>,
        diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
        cx: &mut ModelContext<Project>,
    ) -> Result<(), anyhow::Error> {
        let (worktree, relative_path) = self
            .find_local_worktree(&abs_path, cx)
            .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
        if !worktree.read(cx).is_visible() {
            return Ok(());
        }

        let project_path = ProjectPath {
            worktree_id: worktree.read(cx).id(),
            path: relative_path.into(),
        };

        for buffer in self.opened_buffers.values() {
            if let Some(buffer) = buffer.upgrade(cx) {
                if buffer
                    .read(cx)
                    .file()
                    .map_or(false, |file| *file.path() == project_path.path)
                {
                    buffer.update(cx, |buffer, cx| {
                        buffer.update_diagnostics(diagnostics.clone(), version, cx)
                    })?;
                    break;
                }
            }
        }
        worktree.update(cx, |worktree, cx| {
            worktree
                .as_local_mut()
                .ok_or_else(|| anyhow!("not a local worktree"))?
                .update_diagnostics(project_path.path.clone(), diagnostics, cx)
        })?;
        cx.emit(Event::DiagnosticsUpdated(project_path));
        Ok(())
    }

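    /// Formats the given buffers, delegating remote buffers to the host over
    /// RPC and formatting local buffers through their language servers'
    /// document or range formatting support.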
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                return Task::ready(Ok(Default::default()));
            }
        }

        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let capabilities = if let Some(capabilities) = lang_server.capabilities().await {
                    capabilities
                } else {
                    continue;
                };

                let text_document = lsp::TextDocumentIdentifier::new(
                    lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                );
                let lsp_edits = if capabilities
                    .document_formatting_provider
                    .map_or(false, |provider| provider != lsp::OneOf::Left(false))
                {
                    lang_server
                        .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                            text_document,
                            options: Default::default(),
                            work_done_progress_params: Default::default(),
                        })
                        .await?
                } else if capabilities
                    .document_range_formatting_provider
                    .map_or(false, |provider| provider != lsp::OneOf::Left(false))
                {
                    let buffer_start = lsp::Position::new(0, 0);
                    let buffer_end = buffer
                        .read_with(&cx, |buffer, _| buffer.max_point_utf16())
                        .to_lsp_position();
                    lang_server
                        .request::<lsp::request::RangeFormatting>(
                            lsp::DocumentRangeFormattingParams {
                                text_document,
                                range: lsp::Range::new(buffer_start, buffer_end),
                                options: Default::default(),
                                work_done_progress_params: Default::default(),
                            },
                        )
                        .await?
                } else {
                    continue;
                };

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }

    pub fn definition<T: ToPointUtf16>(
        &self,
        buffer: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Location>>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
    }

    pub fn references<T: ToPointUtf16>(
        &self,
        buffer: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Location>>> {
        let position = position.to_point_utf16(buffer.read(cx));
        self.request_lsp(buffer.clone(), GetReferences { position }, cx)
    }

    pub fn document_highlights<T: ToPointUtf16>(
        &self,
        buffer: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<DocumentHighlight>>> {
        let position = position.to_point_utf16(buffer.read(cx));

        self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
    }

    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

    pub fn open_buffer_for_symbol(
        &mut self,
        symbol: &Symbol,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        if self.is_local() {
            let language_server = if let Some(server) = self
                .language_servers
                .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
            {
                server.clone()
            } else {
                return Task::ready(Err(anyhow!(
                    "language server for worktree and language not found"
                )));
            };

            let worktree_abs_path = if let Some(worktree_abs_path) = self
                .worktree_for_id(symbol.worktree_id, cx)
                .and_then(|worktree| worktree.read(cx).as_local())
                .map(|local_worktree| local_worktree.abs_path())
            {
                worktree_abs_path
            } else {
                return Task::ready(Err(anyhow!("worktree not found for symbol")));
            };
            let symbol_abs_path = worktree_abs_path.join(&symbol.path);
            let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
                uri
            } else {
                return Task::ready(Err(anyhow!("invalid symbol path")));
            };

            self.open_local_buffer_via_lsp(
                symbol_uri,
                symbol.language_name.clone(),
                language_server,
                cx,
            )
        } else if let Some(project_id) = self.remote_id() {
            let request = self.client.request(proto::OpenBufferForSymbol {
                project_id,
                symbol: Some(serialize_symbol(symbol)),
            });
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
                this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }

    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }

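    /// Resolves a completion item and applies any additional text edits it
    /// carries (such as edits that add missing imports), returning the
    /// resulting transaction, if any.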
1700 pub fn apply_additional_edits_for_completion(
1701 &self,
1702 buffer_handle: ModelHandle<Buffer>,
1703 completion: Completion,
1704 push_to_history: bool,
1705 cx: &mut ModelContext<Self>,
1706 ) -> Task<Result<Option<Transaction>>> {
1707 let buffer = buffer_handle.read(cx);
1708 let buffer_id = buffer.remote_id();
1709
1710 if self.is_local() {
1711 let lang_server = if let Some(language_server) = buffer.language_server() {
1712 language_server.clone()
1713 } else {
1714 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1715 };
1716
1717 cx.spawn(|_, mut cx| async move {
1718 let resolved_completion = lang_server
1719 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1720 .await?;
1721 if let Some(edits) = resolved_completion.additional_text_edits {
1722 let edits = buffer_handle
1723 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1724 .await?;
1725 buffer_handle.update(&mut cx, |buffer, cx| {
1726 buffer.finalize_last_transaction();
1727 buffer.start_transaction();
1728 for (range, text) in edits {
1729 buffer.edit([range], text, cx);
1730 }
1731 let transaction = if buffer.end_transaction(cx).is_some() {
1732 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1733 if !push_to_history {
1734 buffer.forget_transaction(transaction.id);
1735 }
1736 Some(transaction)
1737 } else {
1738 None
1739 };
1740 Ok(transaction)
1741 })
1742 } else {
1743 Ok(None)
1744 }
1745 })
1746 } else if let Some(project_id) = self.remote_id() {
1747 let client = self.client.clone();
1748 cx.spawn(|_, mut cx| async move {
1749 let response = client
1750 .request(proto::ApplyCompletionAdditionalEdits {
1751 project_id,
1752 buffer_id,
1753 completion: Some(language::proto::serialize_completion(&completion)),
1754 })
1755 .await?;
1756
1757 if let Some(transaction) = response.transaction {
1758 let transaction = language::proto::deserialize_transaction(transaction)?;
1759 buffer_handle
1760 .update(&mut cx, |buffer, _| {
1761 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1762 })
1763 .await;
1764 if push_to_history {
1765 buffer_handle.update(&mut cx, |buffer, _| {
1766 buffer.push_transaction(transaction.clone(), Instant::now());
1767 });
1768 }
1769 Ok(Some(transaction))
1770 } else {
1771 Ok(None)
1772 }
1773 })
1774 } else {
1775 Task::ready(Err(anyhow!("project does not have a remote id")))
1776 }
1777 }
1778
1779 pub fn code_actions<T: ToOffset>(
1780 &self,
1781 buffer_handle: &ModelHandle<Buffer>,
1782 range: Range<T>,
1783 cx: &mut ModelContext<Self>,
1784 ) -> Task<Result<Vec<CodeAction>>> {
1785 let buffer_handle = buffer_handle.clone();
1786 let buffer = buffer_handle.read(cx);
1787 let buffer_id = buffer.remote_id();
1788 let worktree;
1789 let buffer_abs_path;
1790 if let Some(file) = File::from_dyn(buffer.file()) {
1791 worktree = file.worktree.clone();
1792 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1793 } else {
1794 return Task::ready(Ok(Default::default()));
1795 };
1796 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1797
1798 if worktree.read(cx).as_local().is_some() {
1799 let buffer_abs_path = buffer_abs_path.unwrap();
1800 let lang_name;
1801 let lang_server;
1802 if let Some(lang) = buffer.language() {
1803 lang_name = lang.name().to_string();
1804 if let Some(server) = self
1805 .language_servers
1806 .get(&(worktree.read(cx).id(), lang_name.clone()))
1807 {
1808 lang_server = server.clone();
1809 } else {
1810 return Task::ready(Ok(Default::default()));
1811 };
1812 } else {
1813 return Task::ready(Ok(Default::default()));
1814 }
1815
1816 let lsp_range = lsp::Range::new(
1817 range.start.to_point_utf16(buffer).to_lsp_position(),
1818 range.end.to_point_utf16(buffer).to_lsp_position(),
1819 );
1820 cx.foreground().spawn(async move {
1821 if !lang_server
1822 .capabilities()
1823 .await
1824 .map_or(false, |capabilities| {
1825 capabilities.code_action_provider.is_some()
1826 })
1827 {
1828 return Ok(Default::default());
1829 }
1830
1831 Ok(lang_server
1832 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1833 text_document: lsp::TextDocumentIdentifier::new(
1834 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1835 ),
1836 range: lsp_range,
1837 work_done_progress_params: Default::default(),
1838 partial_result_params: Default::default(),
1839 context: lsp::CodeActionContext {
1840 diagnostics: Default::default(),
1841 only: Some(vec![
1842 lsp::CodeActionKind::QUICKFIX,
1843 lsp::CodeActionKind::REFACTOR,
1844 lsp::CodeActionKind::REFACTOR_EXTRACT,
1845 ]),
1846 },
1847 })
1848 .await?
1849 .unwrap_or_default()
1850 .into_iter()
1851 .filter_map(|entry| {
1852 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1853 Some(CodeAction {
1854 range: range.clone(),
1855 lsp_action,
1856 })
1857 } else {
1858 None
1859 }
1860 })
1861 .collect())
1862 })
1863 } else if let Some(project_id) = self.remote_id() {
1864 let rpc = self.client.clone();
1865 let version = buffer.version();
1866 cx.spawn_weak(|_, mut cx| async move {
1867 let response = rpc
1868 .request(proto::GetCodeActions {
1869 project_id,
1870 buffer_id,
1871 start: Some(language::proto::serialize_anchor(&range.start)),
1872 end: Some(language::proto::serialize_anchor(&range.end)),
1873 version: (&version).into(),
1874 })
1875 .await?;
1876
1877 buffer_handle
1878 .update(&mut cx, |buffer, _| {
1879 buffer.wait_for_version(response.version.into())
1880 })
1881 .await;
1882
1883 response
1884 .actions
1885 .into_iter()
1886 .map(language::proto::deserialize_code_action)
1887 .collect()
1888 })
1889 } else {
1890 Task::ready(Ok(Default::default()))
1891 }
1892 }
1893
1894 pub fn apply_code_action(
1895 &self,
1896 buffer_handle: ModelHandle<Buffer>,
1897 mut action: CodeAction,
1898 push_to_history: bool,
1899 cx: &mut ModelContext<Self>,
1900 ) -> Task<Result<ProjectTransaction>> {
1901 if self.is_local() {
1902 let buffer = buffer_handle.read(cx);
1903 let lang_name = if let Some(lang) = buffer.language() {
1904 lang.name().to_string()
1905 } else {
1906 return Task::ready(Ok(Default::default()));
1907 };
1908 let lang_server = if let Some(language_server) = buffer.language_server() {
1909 language_server.clone()
1910 } else {
1911 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1912 };
1913 let range = action.range.to_point_utf16(buffer);
1914
1915 cx.spawn(|this, mut cx| async move {
1916 if let Some(lsp_range) = action
1917 .lsp_action
1918 .data
1919 .as_mut()
1920 .and_then(|d| d.get_mut("codeActionParams"))
1921 .and_then(|d| d.get_mut("range"))
1922 {
1923 *lsp_range = serde_json::to_value(&lsp::Range::new(
1924 range.start.to_lsp_position(),
1925 range.end.to_lsp_position(),
1926 ))
1927 .unwrap();
1928 action.lsp_action = lang_server
1929 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1930 .await?;
1931 } else {
1932 let actions = this
1933 .update(&mut cx, |this, cx| {
1934 this.code_actions(&buffer_handle, action.range, cx)
1935 })
1936 .await?;
1937 action.lsp_action = actions
1938 .into_iter()
1939 .find(|a| a.lsp_action.title == action.lsp_action.title)
1940 .ok_or_else(|| anyhow!("code action is outdated"))?
1941 .lsp_action;
1942 }
1943
1944 if let Some(edit) = action.lsp_action.edit {
1945 Self::deserialize_workspace_edit(
1946 this,
1947 edit,
1948 push_to_history,
1949 lang_name,
1950 lang_server,
1951 &mut cx,
1952 )
1953 .await
1954 } else {
1955 Ok(ProjectTransaction::default())
1956 }
1957 })
1958 } else if let Some(project_id) = self.remote_id() {
1959 let client = self.client.clone();
1960 let request = proto::ApplyCodeAction {
1961 project_id,
1962 buffer_id: buffer_handle.read(cx).remote_id(),
1963 action: Some(language::proto::serialize_code_action(&action)),
1964 };
1965 cx.spawn(|this, mut cx| async move {
1966 let response = client
1967 .request(request)
1968 .await?
1969 .transaction
1970 .ok_or_else(|| anyhow!("missing transaction"))?;
1971 this.update(&mut cx, |this, cx| {
1972 this.deserialize_project_transaction(response, push_to_history, cx)
1973 })
1974 .await
1975 })
1976 } else {
1977 Task::ready(Err(anyhow!("project does not have a remote id")))
1978 }
1979 }
1980
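    /// Applies an LSP `WorkspaceEdit` to the project: resource operations create, rename, and
    /// delete files on disk, while text document edits are applied to the corresponding
    /// buffers. Returns the per-buffer transactions as a `ProjectTransaction`.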
1981 async fn deserialize_workspace_edit(
1982 this: ModelHandle<Self>,
1983 edit: lsp::WorkspaceEdit,
1984 push_to_history: bool,
1985 language_name: String,
1986 language_server: Arc<LanguageServer>,
1987 cx: &mut AsyncAppContext,
1988 ) -> Result<ProjectTransaction> {
1989 let fs = this.read_with(cx, |this, _| this.fs.clone());
1990 let mut operations = Vec::new();
1991 if let Some(document_changes) = edit.document_changes {
1992 match document_changes {
1993 lsp::DocumentChanges::Edits(edits) => {
1994 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1995 }
1996 lsp::DocumentChanges::Operations(ops) => operations = ops,
1997 }
1998 } else if let Some(changes) = edit.changes {
1999 operations.extend(changes.into_iter().map(|(uri, edits)| {
2000 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2001 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2002 uri,
2003 version: None,
2004 },
2005 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2006 })
2007 }));
2008 }
2009
2010 let mut project_transaction = ProjectTransaction::default();
2011 for operation in operations {
2012 match operation {
2013 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2014 let abs_path = op
2015 .uri
2016 .to_file_path()
2017 .map_err(|_| anyhow!("can't convert URI to path"))?;
2018
2019 if let Some(parent_path) = abs_path.parent() {
2020 fs.create_dir(parent_path).await?;
2021 }
                    // `Path::ends_with` only matches whole components, so inspect the URI
                    // itself to decide whether the resource denotes a directory.
                    if op.uri.path().ends_with('/') {
2023 fs.create_dir(&abs_path).await?;
2024 } else {
2025 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2026 .await?;
2027 }
2028 }
2029 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2030 let source_abs_path = op
2031 .old_uri
2032 .to_file_path()
2033 .map_err(|_| anyhow!("can't convert URI to path"))?;
2034 let target_abs_path = op
2035 .new_uri
2036 .to_file_path()
2037 .map_err(|_| anyhow!("can't convert URI to path"))?;
2038 fs.rename(
2039 &source_abs_path,
2040 &target_abs_path,
2041 op.options.map(Into::into).unwrap_or_default(),
2042 )
2043 .await?;
2044 }
2045 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2046 let abs_path = op
2047 .uri
2048 .to_file_path()
2049 .map_err(|_| anyhow!("can't convert URI to path"))?;
2050 let options = op.options.map(Into::into).unwrap_or_default();
                    if op.uri.path().ends_with('/') {
2052 fs.remove_dir(&abs_path, options).await?;
2053 } else {
2054 fs.remove_file(&abs_path, options).await?;
2055 }
2056 }
2057 lsp::DocumentChangeOperation::Edit(op) => {
2058 let buffer_to_edit = this
2059 .update(cx, |this, cx| {
2060 this.open_local_buffer_via_lsp(
2061 op.text_document.uri,
2062 language_name.clone(),
2063 language_server.clone(),
2064 cx,
2065 )
2066 })
2067 .await?;
2068
2069 let edits = buffer_to_edit
2070 .update(cx, |buffer, cx| {
2071 let edits = op.edits.into_iter().map(|edit| match edit {
2072 lsp::OneOf::Left(edit) => edit,
2073 lsp::OneOf::Right(edit) => edit.text_edit,
2074 });
2075 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2076 })
2077 .await?;
2078
2079 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2080 buffer.finalize_last_transaction();
2081 buffer.start_transaction();
2082 for (range, text) in edits {
2083 buffer.edit([range], text, cx);
2084 }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        }
                    });
2097 if let Some(transaction) = transaction {
2098 project_transaction.0.insert(buffer_to_edit, transaction);
2099 }
2100 }
2101 }
2102 }
2103
2104 Ok(project_transaction)
2105 }
2106
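    /// Returns the range of the symbol that would be renamed at the given position, or `None`
    /// if renaming isn't available there.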
2107 pub fn prepare_rename<T: ToPointUtf16>(
2108 &self,
2109 buffer: ModelHandle<Buffer>,
2110 position: T,
2111 cx: &mut ModelContext<Self>,
2112 ) -> Task<Result<Option<Range<Anchor>>>> {
2113 let position = position.to_point_utf16(buffer.read(cx));
2114 self.request_lsp(buffer, PrepareRename { position }, cx)
2115 }
2116
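    /// Renames the symbol at the given position to `new_name` across the project, returning
    /// the resulting transaction.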
2117 pub fn perform_rename<T: ToPointUtf16>(
2118 &self,
2119 buffer: ModelHandle<Buffer>,
2120 position: T,
2121 new_name: String,
2122 push_to_history: bool,
2123 cx: &mut ModelContext<Self>,
2124 ) -> Task<Result<ProjectTransaction>> {
2125 let position = position.to_point_utf16(buffer.read(cx));
2126 self.request_lsp(
2127 buffer,
2128 PerformRename {
2129 position,
2130 new_name,
2131 push_to_history,
2132 },
2133 cx,
2134 )
2135 }
2136
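    /// Searches the project for `query`, returning the matching ranges grouped by buffer.
    ///
    /// Locally this fans the candidate files out across background workers, opens a buffer
    /// for every file that appears to match, and then searches the buffers in parallel.
    /// Remote projects forward the query to the host.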
2137 pub fn search(
2138 &self,
2139 query: SearchQuery,
2140 cx: &mut ModelContext<Self>,
2141 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2142 if self.is_local() {
2143 let snapshots = self
2144 .visible_worktrees(cx)
2145 .filter_map(|tree| {
2146 let tree = tree.read(cx).as_local()?;
2147 Some(tree.snapshot())
2148 })
2149 .collect::<Vec<_>>();
2150
2151 let background = cx.background().clone();
2152 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2153 if path_count == 0 {
2154 return Task::ready(Ok(Default::default()));
2155 }
2156 let workers = background.num_cpus().min(path_count);
2157 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2158 cx.background()
2159 .spawn({
2160 let fs = self.fs.clone();
2161 let background = cx.background().clone();
2162 let query = query.clone();
2163 async move {
2164 let fs = &fs;
2165 let query = &query;
2166 let matching_paths_tx = &matching_paths_tx;
2167 let paths_per_worker = (path_count + workers - 1) / workers;
2168 let snapshots = &snapshots;
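                    // Split the candidate paths evenly across the workers; each worker walks
                    // its slice of the concatenated worktree snapshots, reporting files whose
                    // contents may match the query.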
2169 background
2170 .scoped(|scope| {
2171 for worker_ix in 0..workers {
2172 let worker_start_ix = worker_ix * paths_per_worker;
2173 let worker_end_ix = worker_start_ix + paths_per_worker;
2174 scope.spawn(async move {
2175 let mut snapshot_start_ix = 0;
2176 let mut abs_path = PathBuf::new();
2177 for snapshot in snapshots {
2178 let snapshot_end_ix =
2179 snapshot_start_ix + snapshot.visible_file_count();
2180 if worker_end_ix <= snapshot_start_ix {
2181 break;
2182 } else if worker_start_ix > snapshot_end_ix {
2183 snapshot_start_ix = snapshot_end_ix;
2184 continue;
2185 } else {
2186 let start_in_snapshot = worker_start_ix
2187 .saturating_sub(snapshot_start_ix);
2188 let end_in_snapshot =
2189 cmp::min(worker_end_ix, snapshot_end_ix)
2190 - snapshot_start_ix;
2191
2192 for entry in snapshot
2193 .files(false, start_in_snapshot)
2194 .take(end_in_snapshot - start_in_snapshot)
2195 {
2196 if matching_paths_tx.is_closed() {
2197 break;
2198 }
2199
2200 abs_path.clear();
2201 abs_path.push(&snapshot.abs_path());
2202 abs_path.push(&entry.path);
2203 let matches = if let Some(file) =
2204 fs.open_sync(&abs_path).await.log_err()
2205 {
2206 query.detect(file).unwrap_or(false)
2207 } else {
2208 false
2209 };
2210
2211 if matches {
2212 let project_path =
2213 (snapshot.id(), entry.path.clone());
2214 if matching_paths_tx
2215 .send(project_path)
2216 .await
2217 .is_err()
2218 {
2219 break;
2220 }
2221 }
2222 }
2223
2224 snapshot_start_ix = snapshot_end_ix;
2225 }
2226 }
2227 });
2228 }
2229 })
2230 .await;
2231 }
2232 })
2233 .detach();
2234
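            // Feed buffers to the searchers: buffers that are already open go first, and then
            // a buffer is opened for each matching path found by the scan above (skipping any
            // that were already sent).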
2235 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2236 let open_buffers = self
2237 .opened_buffers
2238 .values()
2239 .filter_map(|b| b.upgrade(cx))
2240 .collect::<HashSet<_>>();
2241 cx.spawn(|this, cx| async move {
2242 for buffer in &open_buffers {
2243 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2244 buffers_tx.send((buffer.clone(), snapshot)).await?;
2245 }
2246
2247 let open_buffers = Rc::new(RefCell::new(open_buffers));
2248 while let Some(project_path) = matching_paths_rx.next().await {
2249 if buffers_tx.is_closed() {
2250 break;
2251 }
2252
2253 let this = this.clone();
2254 let open_buffers = open_buffers.clone();
2255 let buffers_tx = buffers_tx.clone();
2256 cx.spawn(|mut cx| async move {
2257 if let Some(buffer) = this
2258 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2259 .await
2260 .log_err()
2261 {
2262 if open_buffers.borrow_mut().insert(buffer.clone()) {
2263 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2264 buffers_tx.send((buffer, snapshot)).await?;
2265 }
2266 }
2267
2268 Ok::<_, anyhow::Error>(())
2269 })
2270 .detach();
2271 }
2272
2273 Ok::<_, anyhow::Error>(())
2274 })
2275 .detach_and_log_err(cx);
2276
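            // Search the incoming buffer snapshots in parallel, collecting the matches for
            // each buffer as anchor ranges.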
2277 let background = cx.background().clone();
2278 cx.background().spawn(async move {
2279 let query = &query;
2280 let mut matched_buffers = Vec::new();
2281 for _ in 0..workers {
2282 matched_buffers.push(HashMap::default());
2283 }
2284 background
2285 .scoped(|scope| {
2286 for worker_matched_buffers in matched_buffers.iter_mut() {
2287 let mut buffers_rx = buffers_rx.clone();
2288 scope.spawn(async move {
2289 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2290 let buffer_matches = query
2291 .search(snapshot.as_rope())
2292 .await
2293 .iter()
2294 .map(|range| {
2295 snapshot.anchor_before(range.start)
2296 ..snapshot.anchor_after(range.end)
2297 })
2298 .collect::<Vec<_>>();
2299 if !buffer_matches.is_empty() {
2300 worker_matched_buffers
2301 .insert(buffer.clone(), buffer_matches);
2302 }
2303 }
2304 });
2305 }
2306 })
2307 .await;
2308 Ok(matched_buffers.into_iter().flatten().collect())
2309 })
2310 } else if let Some(project_id) = self.remote_id() {
2311 let request = self.client.request(query.to_proto(project_id));
2312 cx.spawn(|this, mut cx| async move {
2313 let response = request.await?;
2314 let mut result = HashMap::default();
2315 for location in response.locations {
2316 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2317 let target_buffer = this
2318 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2319 .await?;
2320 let start = location
2321 .start
2322 .and_then(deserialize_anchor)
2323 .ok_or_else(|| anyhow!("missing target start"))?;
2324 let end = location
2325 .end
2326 .and_then(deserialize_anchor)
2327 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert_with(Vec::new)
                        .push(start..end);
2332 }
2333 Ok(result)
2334 })
2335 } else {
2336 Task::ready(Ok(Default::default()))
2337 }
2338 }
2339
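    /// Sends a language server request for the given buffer, either directly to the buffer's
    /// language server (for local projects) or over RPC to the host (for remote projects).
    /// Returns a default response when the request can't be performed, e.g. because the
    /// server doesn't advertise the required capability.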
2340 fn request_lsp<R: LspCommand>(
2341 &self,
2342 buffer_handle: ModelHandle<Buffer>,
2343 request: R,
2344 cx: &mut ModelContext<Self>,
2345 ) -> Task<Result<R::Response>>
2346 where
2347 <R::LspRequest as lsp::request::Request>::Result: Send,
2348 {
2349 let buffer = buffer_handle.read(cx);
2350 if self.is_local() {
2351 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2352 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2353 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2354 return cx.spawn(|this, cx| async move {
2355 if !language_server
2356 .capabilities()
2357 .await
2358 .map_or(false, |capabilities| {
2359 request.check_capabilities(&capabilities)
2360 })
2361 {
2362 return Ok(Default::default());
2363 }
2364
2365 let response = language_server
2366 .request::<R::LspRequest>(lsp_params)
2367 .await
2368 .context("lsp request failed")?;
2369 request
2370 .response_from_lsp(response, this, buffer_handle, cx)
2371 .await
2372 });
2373 }
2374 } else if let Some(project_id) = self.remote_id() {
2375 let rpc = self.client.clone();
2376 let message = request.to_proto(project_id, buffer);
2377 return cx.spawn(|this, cx| async move {
2378 let response = rpc.request(message).await?;
2379 request
2380 .response_from_proto(response, this, buffer_handle, cx)
2381 .await
2382 });
2383 }
2384 Task::ready(Ok(Default::default()))
2385 }
2386
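    /// Returns the worktree containing `abs_path` together with the path relative to the
    /// worktree root, creating a new local worktree if no existing worktree contains it.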
2387 pub fn find_or_create_local_worktree(
2388 &mut self,
2389 abs_path: impl AsRef<Path>,
2390 visible: bool,
2391 cx: &mut ModelContext<Self>,
2392 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2393 let abs_path = abs_path.as_ref();
2394 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2395 Task::ready(Ok((tree.clone(), relative_path.into())))
2396 } else {
2397 let worktree = self.create_local_worktree(abs_path, visible, cx);
2398 cx.foreground()
2399 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2400 }
2401 }
2402
2403 pub fn find_local_worktree(
2404 &self,
2405 abs_path: &Path,
2406 cx: &AppContext,
2407 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2408 for tree in self.worktrees(cx) {
2409 if let Some(relative_path) = tree
2410 .read(cx)
2411 .as_local()
2412 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2413 {
2414 return Some((tree.clone(), relative_path.into()));
2415 }
2416 }
2417 None
2418 }
2419
2420 pub fn is_shared(&self) -> bool {
2421 match &self.client_state {
2422 ProjectClientState::Local { is_shared, .. } => *is_shared,
2423 ProjectClientState::Remote { .. } => false,
2424 }
2425 }
2426
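    /// Creates a local worktree for `abs_path`. Concurrent calls for the same path share a
    /// single loading task, and the new worktree is registered or shared with the remote
    /// project if one is connected.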
2427 fn create_local_worktree(
2428 &mut self,
2429 abs_path: impl AsRef<Path>,
2430 visible: bool,
2431 cx: &mut ModelContext<Self>,
2432 ) -> Task<Result<ModelHandle<Worktree>>> {
2433 let fs = self.fs.clone();
2434 let client = self.client.clone();
2435 let path: Arc<Path> = abs_path.as_ref().into();
2436 let task = self
2437 .loading_local_worktrees
2438 .entry(path.clone())
2439 .or_insert_with(|| {
2440 cx.spawn(|project, mut cx| {
2441 async move {
2442 let worktree =
2443 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2444 .await;
2445 project.update(&mut cx, |project, _| {
2446 project.loading_local_worktrees.remove(&path);
2447 });
2448 let worktree = worktree?;
2449
2450 let (remote_project_id, is_shared) =
2451 project.update(&mut cx, |project, cx| {
2452 project.add_worktree(&worktree, cx);
2453 (project.remote_id(), project.is_shared())
2454 });
2455
2456 if let Some(project_id) = remote_project_id {
2457 if is_shared {
2458 worktree
2459 .update(&mut cx, |worktree, cx| {
2460 worktree.as_local_mut().unwrap().share(project_id, cx)
2461 })
2462 .await?;
2463 } else {
2464 worktree
2465 .update(&mut cx, |worktree, cx| {
2466 worktree.as_local_mut().unwrap().register(project_id, cx)
2467 })
2468 .await?;
2469 }
2470 }
2471
2472 Ok(worktree)
2473 }
                    .map_err(Arc::new)
2475 })
2476 .shared()
2477 })
2478 .clone();
2479 cx.foreground().spawn(async move {
2480 match task.await {
2481 Ok(worktree) => Ok(worktree),
2482 Err(err) => Err(anyhow!("{}", err)),
2483 }
2484 })
2485 }
2486
2487 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2488 self.worktrees.retain(|worktree| {
2489 worktree
2490 .upgrade(cx)
2491 .map_or(false, |w| w.read(cx).id() != id)
2492 });
2493 cx.notify();
2494 }
2495
2496 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2497 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2498 if worktree.read(cx).is_local() {
2499 cx.subscribe(&worktree, |this, worktree, _, cx| {
2500 this.update_local_worktree_buffers(worktree, cx);
2501 })
2502 .detach();
2503 }
2504
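        // Hold a strong handle if the project is shared or the worktree is visible or remote;
        // otherwise hold it weakly and prune the entry once the worktree is released.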
2505 let push_strong_handle = {
2506 let worktree = worktree.read(cx);
2507 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2508 };
2509 if push_strong_handle {
2510 self.worktrees
2511 .push(WorktreeHandle::Strong(worktree.clone()));
2512 } else {
2513 cx.observe_release(&worktree, |this, cx| {
2514 this.worktrees
2515 .retain(|worktree| worktree.upgrade(cx).is_some());
2516 cx.notify();
2517 })
2518 .detach();
2519 self.worktrees
2520 .push(WorktreeHandle::Weak(worktree.downgrade()));
2521 }
2522 cx.notify();
2523 }
2524
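    /// Reconciles open buffers with a local worktree's latest snapshot: each buffer backed by
    /// this worktree gets an updated `File` (which is also sent to the server when the
    /// project has a remote id), and entries for buffers that have been dropped are removed.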
2525 fn update_local_worktree_buffers(
2526 &mut self,
2527 worktree_handle: ModelHandle<Worktree>,
2528 cx: &mut ModelContext<Self>,
2529 ) {
2530 let snapshot = worktree_handle.read(cx).snapshot();
2531 let mut buffers_to_delete = Vec::new();
2532 for (buffer_id, buffer) in &self.opened_buffers {
2533 if let Some(buffer) = buffer.upgrade(cx) {
2534 buffer.update(cx, |buffer, cx| {
2535 if let Some(old_file) = File::from_dyn(buffer.file()) {
2536 if old_file.worktree != worktree_handle {
2537 return;
2538 }
2539
2540 let new_file = if let Some(entry) = old_file
2541 .entry_id
2542 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2543 {
2544 File {
2545 is_local: true,
2546 entry_id: Some(entry.id),
2547 mtime: entry.mtime,
2548 path: entry.path.clone(),
2549 worktree: worktree_handle.clone(),
2550 }
2551 } else if let Some(entry) =
2552 snapshot.entry_for_path(old_file.path().as_ref())
2553 {
2554 File {
2555 is_local: true,
2556 entry_id: Some(entry.id),
2557 mtime: entry.mtime,
2558 path: entry.path.clone(),
2559 worktree: worktree_handle.clone(),
2560 }
2561 } else {
2562 File {
2563 is_local: true,
2564 entry_id: None,
2565 path: old_file.path().clone(),
2566 mtime: old_file.mtime(),
2567 worktree: worktree_handle.clone(),
2568 }
2569 };
2570
2571 if let Some(project_id) = self.remote_id() {
2572 self.client
2573 .send(proto::UpdateBufferFile {
2574 project_id,
2575 buffer_id: *buffer_id as u64,
2576 file: Some(new_file.to_proto()),
2577 })
2578 .log_err();
2579 }
2580 buffer.file_updated(Box::new(new_file), cx).detach();
2581 }
2582 });
2583 } else {
2584 buffers_to_delete.push(*buffer_id);
2585 }
2586 }
2587
2588 for buffer_id in buffers_to_delete {
2589 self.opened_buffers.remove(&buffer_id);
2590 }
2591 }
2592
2593 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2594 let new_active_entry = entry.and_then(|project_path| {
2595 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2596 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2597 Some(ProjectEntry {
2598 worktree_id: project_path.worktree_id,
2599 entry_id: entry.id,
2600 })
2601 });
2602 if new_active_entry != self.active_entry {
2603 self.active_entry = new_active_entry;
2604 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2605 }
2606 }
2607
2608 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2609 self.language_servers_with_diagnostics_running > 0
2610 }
2611
2612 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2613 let mut summary = DiagnosticSummary::default();
2614 for (_, path_summary) in self.diagnostic_summaries(cx) {
2615 summary.error_count += path_summary.error_count;
2616 summary.warning_count += path_summary.warning_count;
2617 summary.info_count += path_summary.info_count;
2618 summary.hint_count += path_summary.hint_count;
2619 }
2620 summary
2621 }
2622
2623 pub fn diagnostic_summaries<'a>(
2624 &'a self,
2625 cx: &'a AppContext,
2626 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2627 self.worktrees(cx).flat_map(move |worktree| {
2628 let worktree = worktree.read(cx);
2629 let worktree_id = worktree.id();
2630 worktree
2631 .diagnostic_summaries()
2632 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2633 })
2634 }
2635
2636 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2637 self.language_servers_with_diagnostics_running += 1;
2638 if self.language_servers_with_diagnostics_running == 1 {
2639 cx.emit(Event::DiskBasedDiagnosticsStarted);
2640 }
2641 }
2642
2643 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2644 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2645 self.language_servers_with_diagnostics_running -= 1;
2646 if self.language_servers_with_diagnostics_running == 0 {
2647 cx.emit(Event::DiskBasedDiagnosticsFinished);
2648 }
2649 }
2650
2651 pub fn active_entry(&self) -> Option<ProjectEntry> {
2652 self.active_entry
2653 }
2654
2655 // RPC message handlers
2656
2657 async fn handle_unshare_project(
2658 this: ModelHandle<Self>,
2659 _: TypedEnvelope<proto::UnshareProject>,
2660 _: Arc<Client>,
2661 mut cx: AsyncAppContext,
2662 ) -> Result<()> {
2663 this.update(&mut cx, |this, cx| {
2664 if let ProjectClientState::Remote {
2665 sharing_has_stopped,
2666 ..
2667 } = &mut this.client_state
2668 {
2669 *sharing_has_stopped = true;
2670 this.collaborators.clear();
2671 cx.notify();
2672 } else {
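                // `UnshareProject` is only sent to guests, so the client state must be `Remote`.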
2673 unreachable!()
2674 }
2675 });
2676
2677 Ok(())
2678 }
2679
2680 async fn handle_add_collaborator(
2681 this: ModelHandle<Self>,
2682 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2683 _: Arc<Client>,
2684 mut cx: AsyncAppContext,
2685 ) -> Result<()> {
2686 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2687 let collaborator = envelope
2688 .payload
2689 .collaborator
2690 .take()
2691 .ok_or_else(|| anyhow!("empty collaborator"))?;
2692
2693 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2694 this.update(&mut cx, |this, cx| {
2695 this.collaborators
2696 .insert(collaborator.peer_id, collaborator);
2697 cx.notify();
2698 });
2699
2700 Ok(())
2701 }
2702
2703 async fn handle_remove_collaborator(
2704 this: ModelHandle<Self>,
2705 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2706 _: Arc<Client>,
2707 mut cx: AsyncAppContext,
2708 ) -> Result<()> {
2709 this.update(&mut cx, |this, cx| {
2710 let peer_id = PeerId(envelope.payload.peer_id);
2711 let replica_id = this
2712 .collaborators
2713 .remove(&peer_id)
2714 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2715 .replica_id;
2716 for (_, buffer) in &this.opened_buffers {
2717 if let Some(buffer) = buffer.upgrade(cx) {
2718 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2719 }
2720 }
2721 cx.notify();
2722 Ok(())
2723 })
2724 }
2725
2726 async fn handle_register_worktree(
2727 this: ModelHandle<Self>,
2728 envelope: TypedEnvelope<proto::RegisterWorktree>,
2729 client: Arc<Client>,
2730 mut cx: AsyncAppContext,
2731 ) -> Result<()> {
2732 this.update(&mut cx, |this, cx| {
2733 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2734 let replica_id = this.replica_id();
2735 let worktree = proto::Worktree {
2736 id: envelope.payload.worktree_id,
2737 root_name: envelope.payload.root_name,
2738 entries: Default::default(),
2739 diagnostic_summaries: Default::default(),
2740 visible: envelope.payload.visible,
2741 };
2742 let (worktree, load_task) =
2743 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2744 this.add_worktree(&worktree, cx);
2745 load_task.detach();
2746 Ok(())
2747 })
2748 }
2749
2750 async fn handle_unregister_worktree(
2751 this: ModelHandle<Self>,
2752 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2753 _: Arc<Client>,
2754 mut cx: AsyncAppContext,
2755 ) -> Result<()> {
2756 this.update(&mut cx, |this, cx| {
2757 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2758 this.remove_worktree(worktree_id, cx);
2759 Ok(())
2760 })
2761 }
2762
2763 async fn handle_update_worktree(
2764 this: ModelHandle<Self>,
2765 envelope: TypedEnvelope<proto::UpdateWorktree>,
2766 _: Arc<Client>,
2767 mut cx: AsyncAppContext,
2768 ) -> Result<()> {
2769 this.update(&mut cx, |this, cx| {
2770 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2771 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2772 worktree.update(cx, |worktree, _| {
2773 let worktree = worktree.as_remote_mut().unwrap();
2774 worktree.update_from_remote(envelope)
2775 })?;
2776 }
2777 Ok(())
2778 })
2779 }
2780
2781 async fn handle_update_diagnostic_summary(
2782 this: ModelHandle<Self>,
2783 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2784 _: Arc<Client>,
2785 mut cx: AsyncAppContext,
2786 ) -> Result<()> {
2787 this.update(&mut cx, |this, cx| {
2788 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2789 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2790 if let Some(summary) = envelope.payload.summary {
2791 let project_path = ProjectPath {
2792 worktree_id,
2793 path: Path::new(&summary.path).into(),
2794 };
2795 worktree.update(cx, |worktree, _| {
2796 worktree
2797 .as_remote_mut()
2798 .unwrap()
2799 .update_diagnostic_summary(project_path.path.clone(), &summary);
2800 });
2801 cx.emit(Event::DiagnosticsUpdated(project_path));
2802 }
2803 }
2804 Ok(())
2805 })
2806 }
2807
2808 async fn handle_disk_based_diagnostics_updating(
2809 this: ModelHandle<Self>,
2810 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2811 _: Arc<Client>,
2812 mut cx: AsyncAppContext,
2813 ) -> Result<()> {
2814 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2815 Ok(())
2816 }
2817
2818 async fn handle_disk_based_diagnostics_updated(
2819 this: ModelHandle<Self>,
2820 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2821 _: Arc<Client>,
2822 mut cx: AsyncAppContext,
2823 ) -> Result<()> {
2824 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2825 Ok(())
2826 }
2827
2828 async fn handle_update_buffer(
2829 this: ModelHandle<Self>,
2830 envelope: TypedEnvelope<proto::UpdateBuffer>,
2831 _: Arc<Client>,
2832 mut cx: AsyncAppContext,
2833 ) -> Result<()> {
2834 this.update(&mut cx, |this, cx| {
2835 let payload = envelope.payload.clone();
2836 let buffer_id = payload.buffer_id;
2837 let ops = payload
2838 .operations
2839 .into_iter()
                .map(language::proto::deserialize_operation)
2841 .collect::<Result<Vec<_>, _>>()?;
2842 match this.opened_buffers.entry(buffer_id) {
2843 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2844 OpenBuffer::Strong(buffer) => {
2845 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2846 }
2847 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2848 OpenBuffer::Weak(_) => {}
2849 },
2850 hash_map::Entry::Vacant(e) => {
2851 e.insert(OpenBuffer::Loading(ops));
2852 }
2853 }
2854 Ok(())
2855 })
2856 }
2857
2858 async fn handle_update_buffer_file(
2859 this: ModelHandle<Self>,
2860 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2861 _: Arc<Client>,
2862 mut cx: AsyncAppContext,
2863 ) -> Result<()> {
2864 this.update(&mut cx, |this, cx| {
2865 let payload = envelope.payload.clone();
2866 let buffer_id = payload.buffer_id;
2867 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2868 let worktree = this
2869 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2870 .ok_or_else(|| anyhow!("no such worktree"))?;
2871 let file = File::from_proto(file, worktree.clone(), cx)?;
2872 let buffer = this
2873 .opened_buffers
2874 .get_mut(&buffer_id)
2875 .and_then(|b| b.upgrade(cx))
2876 .ok_or_else(|| anyhow!("no such buffer"))?;
2877 buffer.update(cx, |buffer, cx| {
2878 buffer.file_updated(Box::new(file), cx).detach();
2879 });
2880 Ok(())
2881 })
2882 }
2883
2884 async fn handle_save_buffer(
2885 this: ModelHandle<Self>,
2886 envelope: TypedEnvelope<proto::SaveBuffer>,
2887 _: Arc<Client>,
2888 mut cx: AsyncAppContext,
2889 ) -> Result<proto::BufferSaved> {
2890 let buffer_id = envelope.payload.buffer_id;
2891 let requested_version = envelope.payload.version.try_into()?;
2892
2893 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2894 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2895 let buffer = this
2896 .opened_buffers
2897 .get(&buffer_id)
2898 .map(|buffer| buffer.upgrade(cx).unwrap())
2899 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2900 Ok::<_, anyhow::Error>((project_id, buffer))
2901 })?;
2902 buffer
2903 .update(&mut cx, |buffer, _| {
2904 buffer.wait_for_version(requested_version)
2905 })
2906 .await;
2907
2908 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2909 Ok(proto::BufferSaved {
2910 project_id,
2911 buffer_id,
2912 version: (&saved_version).into(),
2913 mtime: Some(mtime.into()),
2914 })
2915 }
2916
2917 async fn handle_format_buffers(
2918 this: ModelHandle<Self>,
2919 envelope: TypedEnvelope<proto::FormatBuffers>,
2920 _: Arc<Client>,
2921 mut cx: AsyncAppContext,
2922 ) -> Result<proto::FormatBuffersResponse> {
2923 let sender_id = envelope.original_sender_id()?;
2924 let format = this.update(&mut cx, |this, cx| {
2925 let mut buffers = HashSet::default();
2926 for buffer_id in &envelope.payload.buffer_ids {
2927 buffers.insert(
2928 this.opened_buffers
2929 .get(buffer_id)
2930 .map(|buffer| buffer.upgrade(cx).unwrap())
2931 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2932 );
2933 }
2934 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2935 })?;
2936
2937 let project_transaction = format.await?;
2938 let project_transaction = this.update(&mut cx, |this, cx| {
2939 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2940 });
2941 Ok(proto::FormatBuffersResponse {
2942 transaction: Some(project_transaction),
2943 })
2944 }
2945
2946 async fn handle_get_completions(
2947 this: ModelHandle<Self>,
2948 envelope: TypedEnvelope<proto::GetCompletions>,
2949 _: Arc<Client>,
2950 mut cx: AsyncAppContext,
2951 ) -> Result<proto::GetCompletionsResponse> {
2952 let position = envelope
2953 .payload
2954 .position
2955 .and_then(language::proto::deserialize_anchor)
2956 .ok_or_else(|| anyhow!("invalid position"))?;
2957 let version = clock::Global::from(envelope.payload.version);
2958 let buffer = this.read_with(&cx, |this, cx| {
2959 this.opened_buffers
2960 .get(&envelope.payload.buffer_id)
2961 .map(|buffer| buffer.upgrade(cx).unwrap())
2962 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2963 })?;
2964 buffer
2965 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2966 .await;
2967 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2968 let completions = this
2969 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2970 .await?;
2971
2972 Ok(proto::GetCompletionsResponse {
2973 completions: completions
2974 .iter()
2975 .map(language::proto::serialize_completion)
2976 .collect(),
2977 version: (&version).into(),
2978 })
2979 }
2980
2981 async fn handle_apply_additional_edits_for_completion(
2982 this: ModelHandle<Self>,
2983 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2984 _: Arc<Client>,
2985 mut cx: AsyncAppContext,
2986 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2987 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2988 let buffer = this
2989 .opened_buffers
2990 .get(&envelope.payload.buffer_id)
2991 .map(|buffer| buffer.upgrade(cx).unwrap())
2992 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2993 let language = buffer.read(cx).language();
2994 let completion = language::proto::deserialize_completion(
2995 envelope
2996 .payload
2997 .completion
2998 .ok_or_else(|| anyhow!("invalid completion"))?,
2999 language,
3000 )?;
3001 Ok::<_, anyhow::Error>(
3002 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3003 )
3004 })?;
3005
3006 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3007 transaction: apply_additional_edits
3008 .await?
3009 .as_ref()
3010 .map(language::proto::serialize_transaction),
3011 })
3012 }
3013
3014 async fn handle_get_code_actions(
3015 this: ModelHandle<Self>,
3016 envelope: TypedEnvelope<proto::GetCodeActions>,
3017 _: Arc<Client>,
3018 mut cx: AsyncAppContext,
3019 ) -> Result<proto::GetCodeActionsResponse> {
3020 let start = envelope
3021 .payload
3022 .start
3023 .and_then(language::proto::deserialize_anchor)
3024 .ok_or_else(|| anyhow!("invalid start"))?;
3025 let end = envelope
3026 .payload
3027 .end
3028 .and_then(language::proto::deserialize_anchor)
3029 .ok_or_else(|| anyhow!("invalid end"))?;
3030 let buffer = this.update(&mut cx, |this, cx| {
3031 this.opened_buffers
3032 .get(&envelope.payload.buffer_id)
3033 .map(|buffer| buffer.upgrade(cx).unwrap())
3034 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3035 })?;
3036 buffer
3037 .update(&mut cx, |buffer, _| {
3038 buffer.wait_for_version(envelope.payload.version.into())
3039 })
3040 .await;
3041
3042 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3043 let code_actions = this.update(&mut cx, |this, cx| {
3044 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3045 })?;
3046
3047 Ok(proto::GetCodeActionsResponse {
3048 actions: code_actions
3049 .await?
3050 .iter()
3051 .map(language::proto::serialize_code_action)
3052 .collect(),
3053 version: (&version).into(),
3054 })
3055 }
3056
3057 async fn handle_apply_code_action(
3058 this: ModelHandle<Self>,
3059 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3060 _: Arc<Client>,
3061 mut cx: AsyncAppContext,
3062 ) -> Result<proto::ApplyCodeActionResponse> {
3063 let sender_id = envelope.original_sender_id()?;
3064 let action = language::proto::deserialize_code_action(
3065 envelope
3066 .payload
3067 .action
3068 .ok_or_else(|| anyhow!("invalid action"))?,
3069 )?;
3070 let apply_code_action = this.update(&mut cx, |this, cx| {
3071 let buffer = this
3072 .opened_buffers
3073 .get(&envelope.payload.buffer_id)
3074 .map(|buffer| buffer.upgrade(cx).unwrap())
3075 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3076 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3077 })?;
3078
3079 let project_transaction = apply_code_action.await?;
3080 let project_transaction = this.update(&mut cx, |this, cx| {
3081 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3082 });
3083 Ok(proto::ApplyCodeActionResponse {
3084 transaction: Some(project_transaction),
3085 })
3086 }
3087
3088 async fn handle_lsp_command<T: LspCommand>(
3089 this: ModelHandle<Self>,
3090 envelope: TypedEnvelope<T::ProtoRequest>,
3091 _: Arc<Client>,
3092 mut cx: AsyncAppContext,
3093 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3094 where
3095 <T::LspRequest as lsp::request::Request>::Result: Send,
3096 {
3097 let sender_id = envelope.original_sender_id()?;
3098 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3099 let buffer_handle = this.read_with(&cx, |this, _| {
3100 this.opened_buffers
3101 .get(&buffer_id)
3102 .map(|buffer| buffer.upgrade(&cx).unwrap())
3103 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3104 })?;
3105 let request = T::from_proto(
3106 envelope.payload,
3107 this.clone(),
3108 buffer_handle.clone(),
3109 cx.clone(),
3110 )
3111 .await?;
3112 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3113 let response = this
3114 .update(&mut cx, |this, cx| {
3115 this.request_lsp(buffer_handle, request, cx)
3116 })
3117 .await?;
3118 this.update(&mut cx, |this, cx| {
3119 Ok(T::response_to_proto(
3120 response,
3121 this,
3122 sender_id,
3123 &buffer_version,
3124 cx,
3125 ))
3126 })
3127 }
3128
3129 async fn handle_get_project_symbols(
3130 this: ModelHandle<Self>,
3131 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3132 _: Arc<Client>,
3133 mut cx: AsyncAppContext,
3134 ) -> Result<proto::GetProjectSymbolsResponse> {
3135 let symbols = this
3136 .update(&mut cx, |this, cx| {
3137 this.symbols(&envelope.payload.query, cx)
3138 })
3139 .await?;
3140
3141 Ok(proto::GetProjectSymbolsResponse {
3142 symbols: symbols.iter().map(serialize_symbol).collect(),
3143 })
3144 }
3145
3146 async fn handle_search_project(
3147 this: ModelHandle<Self>,
3148 envelope: TypedEnvelope<proto::SearchProject>,
3149 _: Arc<Client>,
3150 mut cx: AsyncAppContext,
3151 ) -> Result<proto::SearchProjectResponse> {
3152 let peer_id = envelope.original_sender_id()?;
3153 let query = SearchQuery::from_proto(envelope.payload)?;
3154 let result = this
3155 .update(&mut cx, |this, cx| this.search(query, cx))
3156 .await?;
3157
3158 this.update(&mut cx, |this, cx| {
3159 let mut locations = Vec::new();
3160 for (buffer, ranges) in result {
3161 for range in ranges {
3162 let start = serialize_anchor(&range.start);
3163 let end = serialize_anchor(&range.end);
3164 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3165 locations.push(proto::Location {
3166 buffer: Some(buffer),
3167 start: Some(start),
3168 end: Some(end),
3169 });
3170 }
3171 }
3172 Ok(proto::SearchProjectResponse { locations })
3173 })
3174 }
3175
3176 async fn handle_open_buffer_for_symbol(
3177 this: ModelHandle<Self>,
3178 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3179 _: Arc<Client>,
3180 mut cx: AsyncAppContext,
3181 ) -> Result<proto::OpenBufferForSymbolResponse> {
3182 let peer_id = envelope.original_sender_id()?;
3183 let symbol = envelope
3184 .payload
3185 .symbol
3186 .ok_or_else(|| anyhow!("invalid symbol"))?;
3187 let symbol = this.read_with(&cx, |this, _| {
3188 let symbol = this.deserialize_symbol(symbol)?;
3189 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3190 if signature == symbol.signature {
3191 Ok(symbol)
3192 } else {
3193 Err(anyhow!("invalid symbol signature"))
3194 }
3195 })?;
3196 let buffer = this
3197 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3198 .await?;
3199
3200 Ok(proto::OpenBufferForSymbolResponse {
3201 buffer: Some(this.update(&mut cx, |this, cx| {
3202 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3203 })),
3204 })
3205 }
3206
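    /// Computes a signature for a symbol's path that is tied to this project's nonce. It is
    /// used to verify that symbols sent back by peers were actually produced by this project.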
3207 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3208 let mut hasher = Sha256::new();
3209 hasher.update(worktree_id.to_proto().to_be_bytes());
3210 hasher.update(path.to_string_lossy().as_bytes());
3211 hasher.update(self.nonce.to_be_bytes());
3212 hasher.finalize().as_slice().try_into().unwrap()
3213 }
3214
3215 async fn handle_open_buffer(
3216 this: ModelHandle<Self>,
3217 envelope: TypedEnvelope<proto::OpenBuffer>,
3218 _: Arc<Client>,
3219 mut cx: AsyncAppContext,
3220 ) -> Result<proto::OpenBufferResponse> {
3221 let peer_id = envelope.original_sender_id()?;
3222 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3223 let open_buffer = this.update(&mut cx, |this, cx| {
3224 this.open_buffer(
3225 ProjectPath {
3226 worktree_id,
3227 path: PathBuf::from(envelope.payload.path).into(),
3228 },
3229 cx,
3230 )
3231 });
3232
3233 let buffer = open_buffer.await?;
3234 this.update(&mut cx, |this, cx| {
3235 Ok(proto::OpenBufferResponse {
3236 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3237 })
3238 })
3239 }
3240
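    /// Serializes a project transaction for transmission to the given peer, serializing each
    /// affected buffer for that peer along the way.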
3241 fn serialize_project_transaction_for_peer(
3242 &mut self,
3243 project_transaction: ProjectTransaction,
3244 peer_id: PeerId,
3245 cx: &AppContext,
3246 ) -> proto::ProjectTransaction {
3247 let mut serialized_transaction = proto::ProjectTransaction {
3248 buffers: Default::default(),
3249 transactions: Default::default(),
3250 };
3251 for (buffer, transaction) in project_transaction.0 {
3252 serialized_transaction
3253 .buffers
3254 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3255 serialized_transaction
3256 .transactions
3257 .push(language::proto::serialize_transaction(&transaction));
3258 }
3259 serialized_transaction
3260 }
3261
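    /// Reconstructs a `ProjectTransaction` received from a peer, waiting for the referenced
    /// edits to arrive and optionally pushing each transaction onto its buffer's history.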
3262 fn deserialize_project_transaction(
3263 &mut self,
3264 message: proto::ProjectTransaction,
3265 push_to_history: bool,
3266 cx: &mut ModelContext<Self>,
3267 ) -> Task<Result<ProjectTransaction>> {
3268 cx.spawn(|this, mut cx| async move {
3269 let mut project_transaction = ProjectTransaction::default();
3270 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3271 let buffer = this
3272 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3273 .await?;
3274 let transaction = language::proto::deserialize_transaction(transaction)?;
3275 project_transaction.0.insert(buffer, transaction);
3276 }
3277
3278 for (buffer, transaction) in &project_transaction.0 {
3279 buffer
3280 .update(&mut cx, |buffer, _| {
3281 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3282 })
3283 .await;
3284
3285 if push_to_history {
3286 buffer.update(&mut cx, |buffer, _| {
3287 buffer.push_transaction(transaction.clone(), Instant::now());
3288 });
3289 }
3290 }
3291
3292 Ok(project_transaction)
3293 })
3294 }
3295
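    /// Serializes a buffer for the given peer, sending the full buffer state the first time
    /// that peer sees the buffer and only the buffer id on subsequent occasions.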
3296 fn serialize_buffer_for_peer(
3297 &mut self,
3298 buffer: &ModelHandle<Buffer>,
3299 peer_id: PeerId,
3300 cx: &AppContext,
3301 ) -> proto::Buffer {
3302 let buffer_id = buffer.read(cx).remote_id();
3303 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3304 if shared_buffers.insert(buffer_id) {
3305 proto::Buffer {
3306 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3307 }
3308 } else {
3309 proto::Buffer {
3310 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3311 }
3312 }
3313 }
3314
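    /// Resolves a `proto::Buffer` into a buffer handle, either by waiting for a buffer with
    /// the given id to finish opening locally or by constructing a new buffer from the
    /// serialized state and registering it with the project.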
3315 fn deserialize_buffer(
3316 &mut self,
3317 buffer: proto::Buffer,
3318 cx: &mut ModelContext<Self>,
3319 ) -> Task<Result<ModelHandle<Buffer>>> {
3320 let replica_id = self.replica_id();
3321
3322 let opened_buffer_tx = self.opened_buffer.0.clone();
3323 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3324 cx.spawn(|this, mut cx| async move {
3325 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3326 proto::buffer::Variant::Id(id) => {
3327 let buffer = loop {
3328 let buffer = this.read_with(&cx, |this, cx| {
3329 this.opened_buffers
3330 .get(&id)
3331 .and_then(|buffer| buffer.upgrade(cx))
3332 });
3333 if let Some(buffer) = buffer {
3334 break buffer;
3335 }
3336 opened_buffer_rx
3337 .next()
3338 .await
3339 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3340 };
3341 Ok(buffer)
3342 }
3343 proto::buffer::Variant::State(mut buffer) => {
3344 let mut buffer_worktree = None;
3345 let mut buffer_file = None;
3346 if let Some(file) = buffer.file.take() {
3347 this.read_with(&cx, |this, cx| {
3348 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3349 let worktree =
3350 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3351 anyhow!("no worktree found for id {}", file.worktree_id)
3352 })?;
3353 buffer_file =
3354 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3355 as Box<dyn language::File>);
3356 buffer_worktree = Some(worktree);
3357 Ok::<_, anyhow::Error>(())
3358 })?;
3359 }
3360
3361 let buffer = cx.add_model(|cx| {
3362 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3363 });
3364
3365 this.update(&mut cx, |this, cx| {
3366 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3367 })?;
3368
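                    // Notify any tasks waiting in `deserialize_buffer` that a new buffer has
                    // been registered.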
3369 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3370 Ok(buffer)
3371 }
3372 }
3373 })
3374 }
3375
3376 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3377 let language = self
3378 .languages
3379 .get_language(&serialized_symbol.language_name);
3380 let start = serialized_symbol
3381 .start
3382 .ok_or_else(|| anyhow!("invalid start"))?;
3383 let end = serialized_symbol
3384 .end
3385 .ok_or_else(|| anyhow!("invalid end"))?;
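        // Assumes the wire value is a valid discriminant of the symbol kind enum.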
3386 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3387 Ok(Symbol {
3388 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3389 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3390 language_name: serialized_symbol.language_name.clone(),
3391 label: language
3392 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3393 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3394 name: serialized_symbol.name,
3395 path: PathBuf::from(serialized_symbol.path),
3396 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3397 kind,
3398 signature: serialized_symbol
3399 .signature
3400 .try_into()
3401 .map_err(|_| anyhow!("invalid signature"))?,
3402 })
3403 }
3404
3405 async fn handle_close_buffer(
3406 _: ModelHandle<Self>,
3407 _: TypedEnvelope<proto::CloseBuffer>,
3408 _: Arc<Client>,
3409 _: AsyncAppContext,
3410 ) -> Result<()> {
3411 // TODO: use this for following
3412 Ok(())
3413 }
3414
3415 async fn handle_buffer_saved(
3416 this: ModelHandle<Self>,
3417 envelope: TypedEnvelope<proto::BufferSaved>,
3418 _: Arc<Client>,
3419 mut cx: AsyncAppContext,
3420 ) -> Result<()> {
3421 let version = envelope.payload.version.try_into()?;
3422 let mtime = envelope
3423 .payload
3424 .mtime
3425 .ok_or_else(|| anyhow!("missing mtime"))?
3426 .into();
3427
3428 this.update(&mut cx, |this, cx| {
3429 let buffer = this
3430 .opened_buffers
3431 .get(&envelope.payload.buffer_id)
3432 .and_then(|buffer| buffer.upgrade(cx));
3433 if let Some(buffer) = buffer {
3434 buffer.update(cx, |buffer, cx| {
3435 buffer.did_save(version, mtime, None, cx);
3436 });
3437 }
3438 Ok(())
3439 })
3440 }
3441
3442 async fn handle_buffer_reloaded(
3443 this: ModelHandle<Self>,
3444 envelope: TypedEnvelope<proto::BufferReloaded>,
3445 _: Arc<Client>,
3446 mut cx: AsyncAppContext,
3447 ) -> Result<()> {
3448 let payload = envelope.payload.clone();
3449 let version = payload.version.try_into()?;
3450 let mtime = payload
3451 .mtime
3452 .ok_or_else(|| anyhow!("missing mtime"))?
3453 .into();
3454 this.update(&mut cx, |this, cx| {
3455 let buffer = this
3456 .opened_buffers
3457 .get(&payload.buffer_id)
3458 .and_then(|buffer| buffer.upgrade(cx));
3459 if let Some(buffer) = buffer {
3460 buffer.update(cx, |buffer, cx| {
3461 buffer.did_reload(version, mtime, cx);
3462 });
3463 }
3464 Ok(())
3465 })
3466 }
3467
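    /// Fuzzy-matches `query` against the paths of all visible worktrees, returning at most
    /// `max_results` matches.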
3468 pub fn match_paths<'a>(
3469 &self,
3470 query: &'a str,
3471 include_ignored: bool,
3472 smart_case: bool,
3473 max_results: usize,
3474 cancel_flag: &'a AtomicBool,
3475 cx: &AppContext,
3476 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3477 let worktrees = self
3478 .worktrees(cx)
3479 .filter(|worktree| worktree.read(cx).is_visible())
3480 .collect::<Vec<_>>();
3481 let include_root_name = worktrees.len() > 1;
3482 let candidate_sets = worktrees
3483 .into_iter()
3484 .map(|worktree| CandidateSet {
3485 snapshot: worktree.read(cx).snapshot(),
3486 include_ignored,
3487 include_root_name,
3488 })
3489 .collect::<Vec<_>>();
3490
3491 let background = cx.background().clone();
3492 async move {
3493 fuzzy::match_paths(
3494 candidate_sets.as_slice(),
3495 query,
3496 smart_case,
3497 max_results,
3498 cancel_flag,
3499 background,
3500 )
3501 .await
3502 }
3503 }
3504}
3505
3506impl WorktreeHandle {
3507 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3508 match self {
3509 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3510 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3511 }
3512 }
3513}
3514
3515impl OpenBuffer {
3516 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3517 match self {
3518 OpenBuffer::Strong(handle) => Some(handle.clone()),
3519 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3520 OpenBuffer::Loading(_) => None,
3521 }
3522 }
3523}
3524
3525struct CandidateSet {
3526 snapshot: Snapshot,
3527 include_ignored: bool,
3528 include_root_name: bool,
3529}
3530
3531impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3532 type Candidates = CandidateSetIter<'a>;
3533
3534 fn id(&self) -> usize {
3535 self.snapshot.id().to_usize()
3536 }
3537
3538 fn len(&self) -> usize {
3539 if self.include_ignored {
3540 self.snapshot.file_count()
3541 } else {
3542 self.snapshot.visible_file_count()
3543 }
3544 }
3545
3546 fn prefix(&self) -> Arc<str> {
3547 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3548 self.snapshot.root_name().into()
3549 } else if self.include_root_name {
3550 format!("{}/", self.snapshot.root_name()).into()
3551 } else {
3552 "".into()
3553 }
3554 }
3555
3556 fn candidates(&'a self, start: usize) -> Self::Candidates {
3557 CandidateSetIter {
3558 traversal: self.snapshot.files(self.include_ignored, start),
3559 }
3560 }
3561}
3562
3563struct CandidateSetIter<'a> {
3564 traversal: Traversal<'a>,
3565}
3566
3567impl<'a> Iterator for CandidateSetIter<'a> {
3568 type Item = PathMatchCandidate<'a>;
3569
3570 fn next(&mut self) -> Option<Self::Item> {
3571 self.traversal.next().map(|entry| {
3572 if let EntryKind::File(char_bag) = entry.kind {
3573 PathMatchCandidate {
3574 path: &entry.path,
3575 char_bag,
3576 }
3577 } else {
3578 unreachable!()
3579 }
3580 })
3581 }
3582}
3583
3584impl Entity for Project {
3585 type Event = Event;
3586
3587 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3588 match &self.client_state {
3589 ProjectClientState::Local { remote_id_rx, .. } => {
3590 if let Some(project_id) = *remote_id_rx.borrow() {
3591 self.client
3592 .send(proto::UnregisterProject { project_id })
3593 .log_err();
3594 }
3595 }
3596 ProjectClientState::Remote { remote_id, .. } => {
3597 self.client
3598 .send(proto::LeaveProject {
3599 project_id: *remote_id,
3600 })
3601 .log_err();
3602 }
3603 }
3604 }
3605
3606 fn app_will_quit(
3607 &mut self,
3608 _: &mut MutableAppContext,
3609 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3610 let shutdown_futures = self
3611 .language_servers
3612 .drain()
3613 .filter_map(|(_, server)| server.shutdown())
3614 .collect::<Vec<_>>();
3615 Some(
3616 async move {
3617 futures::future::join_all(shutdown_futures).await;
3618 }
3619 .boxed(),
3620 )
3621 }
3622}
3623
3624impl Collaborator {
3625 fn from_proto(
3626 message: proto::Collaborator,
3627 user_store: &ModelHandle<UserStore>,
3628 cx: &mut AsyncAppContext,
3629 ) -> impl Future<Output = Result<Self>> {
3630 let user = user_store.update(cx, |user_store, cx| {
3631 user_store.fetch_user(message.user_id, cx)
3632 });
3633
3634 async move {
3635 Ok(Self {
3636 peer_id: PeerId(message.peer_id),
3637 user: user.await?,
3638 replica_id: message.replica_id as ReplicaId,
3639 })
3640 }
3641 }
3642}
3643
3644impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3645 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3646 Self {
3647 worktree_id,
3648 path: path.as_ref().into(),
3649 }
3650 }
3651}
3652
3653impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3654 fn from(options: lsp::CreateFileOptions) -> Self {
3655 Self {
3656 overwrite: options.overwrite.unwrap_or(false),
3657 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3658 }
3659 }
3660}
3661
3662impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3663 fn from(options: lsp::RenameFileOptions) -> Self {
3664 Self {
3665 overwrite: options.overwrite.unwrap_or(false),
3666 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3667 }
3668 }
3669}
3670
3671impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3672 fn from(options: lsp::DeleteFileOptions) -> Self {
3673 Self {
3674 recursive: options.recursive.unwrap_or(false),
3675 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3676 }
3677 }
3678}
3679
3680fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3681 proto::Symbol {
3682 source_worktree_id: symbol.source_worktree_id.to_proto(),
3683 worktree_id: symbol.worktree_id.to_proto(),
3684 language_name: symbol.language_name.clone(),
3685 name: symbol.name.clone(),
3686 kind: unsafe { mem::transmute(symbol.kind) },
3687 path: symbol.path.to_string_lossy().to_string(),
3688 start: Some(proto::Point {
3689 row: symbol.range.start.row,
3690 column: symbol.range.start.column,
3691 }),
3692 end: Some(proto::Point {
3693 row: symbol.range.end.row,
3694 column: symbol.range.end.column,
3695 }),
3696 signature: symbol.signature.to_vec(),
3697 }
3698}
3699
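/// Returns `path` expressed relative to `base`, inserting `..` components as needed.
/// For example, relativizing `/a/c/d` against `/a/b` yields `../c/d`.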
3700fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3701 let mut path_components = path.components();
3702 let mut base_components = base.components();
3703 let mut components: Vec<Component> = Vec::new();
3704 loop {
3705 match (path_components.next(), base_components.next()) {
3706 (None, None) => break,
3707 (Some(a), None) => {
3708 components.push(a);
3709 components.extend(path_components.by_ref());
3710 break;
3711 }
3712 (None, _) => components.push(Component::ParentDir),
3713 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3714 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3715 (Some(a), Some(_)) => {
3716 components.push(Component::ParentDir);
3717 for _ in base_components {
3718 components.push(Component::ParentDir);
3719 }
3720 components.push(a);
3721 components.extend(path_components.by_ref());
3722 break;
3723 }
3724 }
3725 }
3726 components.iter().map(|c| c.as_os_str()).collect()
3727}
3728
3729#[cfg(test)]
3730mod tests {
3731 use super::{Event, *};
3732 use fs::RealFs;
3733 use futures::StreamExt;
3734 use gpui::test::subscribe;
3735 use language::{
3736 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3737 };
3738 use lsp::Url;
3739 use serde_json::json;
3740 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3741 use unindent::Unindent as _;
3742 use util::test::temp_tree;
3743 use worktree::WorktreeHandle as _;
3744
3745 #[gpui::test]
3746 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3747 let dir = temp_tree(json!({
3748 "root": {
3749 "apple": "",
3750 "banana": {
3751 "carrot": {
3752 "date": "",
3753 "endive": "",
3754 }
3755 },
3756 "fennel": {
3757 "grape": "",
3758 }
3759 }
3760 }));
3761
3762 let root_link_path = dir.path().join("root_link");
3763 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3764 unix::fs::symlink(
3765 &dir.path().join("root/fennel"),
3766 &dir.path().join("root/finnochio"),
3767 )
3768 .unwrap();
3769
3770 let project = Project::test(Arc::new(RealFs), cx);
3771
3772 let (tree, _) = project
3773 .update(cx, |project, cx| {
3774 project.find_or_create_local_worktree(&root_link_path, true, cx)
3775 })
3776 .await
3777 .unwrap();
3778
3779 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3780 .await;
3781 cx.read(|cx| {
3782 let tree = tree.read(cx);
3783 assert_eq!(tree.file_count(), 5);
3784 assert_eq!(
3785 tree.inode_for_path("fennel/grape"),
3786 tree.inode_for_path("finnochio/grape")
3787 );
3788 });
3789
3790 let cancel_flag = Default::default();
3791 let results = project
3792 .read_with(cx, |project, cx| {
3793 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3794 })
3795 .await;
3796 assert_eq!(
3797 results
3798 .into_iter()
3799 .map(|result| result.path)
3800 .collect::<Vec<Arc<Path>>>(),
3801 vec![
3802 PathBuf::from("banana/carrot/date").into(),
3803 PathBuf::from("banana/carrot/endive").into(),
3804 ]
3805 );
3806 }
3807
3808 #[gpui::test]
3809 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3810 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3811 let progress_token = language_server_config
3812 .disk_based_diagnostics_progress_token
3813 .clone()
3814 .unwrap();
3815
3816 let language = Arc::new(Language::new(
3817 LanguageConfig {
3818 name: "Rust".into(),
3819 path_suffixes: vec!["rs".to_string()],
3820 language_server: Some(language_server_config),
3821 ..Default::default()
3822 },
3823 Some(tree_sitter_rust::language()),
3824 ));
3825
3826 let fs = FakeFs::new(cx.background());
3827 fs.insert_tree(
3828 "/dir",
3829 json!({
3830 "a.rs": "fn a() { A }",
3831 "b.rs": "const y: i32 = 1",
3832 }),
3833 )
3834 .await;
3835
3836 let project = Project::test(fs, cx);
3837 project.update(cx, |project, _| {
3838 Arc::get_mut(&mut project.languages).unwrap().add(language);
3839 });
3840
3841 let (tree, _) = project
3842 .update(cx, |project, cx| {
3843 project.find_or_create_local_worktree("/dir", true, cx)
3844 })
3845 .await
3846 .unwrap();
3847 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3848
3849 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3850 .await;
3851
        // Cause the worktree to start the fake language server.
3853 let _buffer = project
3854 .update(cx, |project, cx| {
3855 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3856 })
3857 .await
3858 .unwrap();
3859
3860 let mut events = subscribe(&project, cx);
3861
3862 let mut fake_server = fake_servers.next().await.unwrap();
3863 fake_server.start_progress(&progress_token).await;
3864 assert_eq!(
3865 events.next().await.unwrap(),
3866 Event::DiskBasedDiagnosticsStarted
3867 );
3868
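        // While at least one disk-based diagnostics pass is still in
        // progress, no "updated" or "finished" events should be emitted yet.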
3869 fake_server.start_progress(&progress_token).await;
3870 fake_server.end_progress(&progress_token).await;
3871 fake_server.start_progress(&progress_token).await;
3872
3873 fake_server
3874 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3875 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3876 version: None,
3877 diagnostics: vec![lsp::Diagnostic {
3878 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3879 severity: Some(lsp::DiagnosticSeverity::ERROR),
3880 message: "undefined variable 'A'".to_string(),
3881 ..Default::default()
3882 }],
3883 })
3884 .await;
3885 assert_eq!(
3886 events.next().await.unwrap(),
3887 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3888 );
3889
3890 fake_server.end_progress(&progress_token).await;
3891 fake_server.end_progress(&progress_token).await;
3892 assert_eq!(
3893 events.next().await.unwrap(),
3894 Event::DiskBasedDiagnosticsUpdated
3895 );
3896 assert_eq!(
3897 events.next().await.unwrap(),
3898 Event::DiskBasedDiagnosticsFinished
3899 );
3900
3901 let buffer = project
3902 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3903 .await
3904 .unwrap();
3905
3906 buffer.read_with(cx, |buffer, _| {
3907 let snapshot = buffer.snapshot();
3908 let diagnostics = snapshot
3909 .diagnostics_in_range::<_, Point>(0..buffer.len())
3910 .collect::<Vec<_>>();
3911 assert_eq!(
3912 diagnostics,
3913 &[DiagnosticEntry {
3914 range: Point::new(0, 9)..Point::new(0, 10),
3915 diagnostic: Diagnostic {
3916 severity: lsp::DiagnosticSeverity::ERROR,
3917 message: "undefined variable 'A'".to_string(),
3918 group_id: 0,
3919 is_primary: true,
3920 ..Default::default()
3921 }
3922 }]
3923 )
3924 });
3925 }
3926
3927 #[gpui::test]
3928 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3929 let dir = temp_tree(json!({
3930 "root": {
3931 "dir1": {},
3932 "dir2": {
3933 "dir3": {}
3934 }
3935 }
3936 }));
3937
3938 let project = Project::test(Arc::new(RealFs), cx);
3939 let (tree, _) = project
3940 .update(cx, |project, cx| {
3941 project.find_or_create_local_worktree(&dir.path(), true, cx)
3942 })
3943 .await
3944 .unwrap();
3945
3946 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3947 .await;
3948
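        // Matching against a worktree that contains only directories should
        // produce no results.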
3949 let cancel_flag = Default::default();
3950 let results = project
3951 .read_with(cx, |project, cx| {
3952 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3953 })
3954 .await;
3955
3956 assert!(results.is_empty());
3957 }
3958
3959 #[gpui::test]
3960 async fn test_definition(cx: &mut gpui::TestAppContext) {
3961 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3962 let language = Arc::new(Language::new(
3963 LanguageConfig {
3964 name: "Rust".into(),
3965 path_suffixes: vec!["rs".to_string()],
3966 language_server: Some(language_server_config),
3967 ..Default::default()
3968 },
3969 Some(tree_sitter_rust::language()),
3970 ));
3971
3972 let fs = FakeFs::new(cx.background());
3973 fs.insert_tree(
3974 "/dir",
3975 json!({
3976 "a.rs": "const fn a() { A }",
3977 "b.rs": "const y: i32 = crate::a()",
3978 }),
3979 )
3980 .await;
3981
3982 let project = Project::test(fs, cx);
3983 project.update(cx, |project, _| {
3984 Arc::get_mut(&mut project.languages).unwrap().add(language);
3985 });
3986
3987 let (tree, _) = project
3988 .update(cx, |project, cx| {
3989 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
3990 })
3991 .await
3992 .unwrap();
3993 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3994 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3995 .await;
3996
3997 let buffer = project
3998 .update(cx, |project, cx| {
3999 project.open_buffer(
4000 ProjectPath {
4001 worktree_id,
4002 path: Path::new("").into(),
4003 },
4004 cx,
4005 )
4006 })
4007 .await
4008 .unwrap();
4009
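        // Respond to the definition request with a location in a different
        // file ("/dir/a.rs"), which lies outside the single-file worktree
        // that was opened for "/dir/b.rs".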
4010 let mut fake_server = fake_servers.next().await.unwrap();
4011 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
4012 let params = params.text_document_position_params;
4013 assert_eq!(
4014 params.text_document.uri.to_file_path().unwrap(),
4015 Path::new("/dir/b.rs"),
4016 );
4017 assert_eq!(params.position, lsp::Position::new(0, 22));
4018
4019 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
4020 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4021 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4022 )))
4023 });
4024
4025 let mut definitions = project
4026 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
4027 .await
4028 .unwrap();
4029
4030 assert_eq!(definitions.len(), 1);
4031 let definition = definitions.pop().unwrap();
4032 cx.update(|cx| {
4033 let target_buffer = definition.buffer.read(cx);
4034 assert_eq!(
4035 target_buffer
4036 .file()
4037 .unwrap()
4038 .as_local()
4039 .unwrap()
4040 .abs_path(cx),
4041 Path::new("/dir/a.rs"),
4042 );
4043 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
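            // Opening the definition target added a second, non-visible
            // worktree for "/dir/a.rs". It is only kept alive by the target
            // buffer and should be released when that buffer is dropped.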
4044 assert_eq!(
4045 list_worktrees(&project, cx),
4046 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4047 );
4048
4049 drop(definition);
4050 });
4051 cx.read(|cx| {
4052 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4053 });
4054
4055 fn list_worktrees<'a>(
4056 project: &'a ModelHandle<Project>,
4057 cx: &'a AppContext,
4058 ) -> Vec<(&'a Path, bool)> {
4059 project
4060 .read(cx)
4061 .worktrees(cx)
4062 .map(|worktree| {
4063 let worktree = worktree.read(cx);
4064 (
4065 worktree.as_local().unwrap().abs_path().as_ref(),
4066 worktree.is_visible(),
4067 )
4068 })
4069 .collect::<Vec<_>>()
4070 }
4071 }
4072
4073 #[gpui::test]
4074 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4075 let fs = FakeFs::new(cx.background());
4076 fs.insert_tree(
4077 "/dir",
4078 json!({
4079 "file1": "the old contents",
4080 }),
4081 )
4082 .await;
4083
4084 let project = Project::test(fs.clone(), cx);
4085 let worktree_id = project
4086 .update(cx, |p, cx| {
4087 p.find_or_create_local_worktree("/dir", true, cx)
4088 })
4089 .await
4090 .unwrap()
4091 .0
4092 .read_with(cx, |tree, _| tree.id());
4093
4094 let buffer = project
4095 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4096 .await
4097 .unwrap();
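        // Make a large edit and save it, then verify that the file on disk
        // matches the buffer's contents.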
4098 buffer
4099 .update(cx, |buffer, cx| {
4100 assert_eq!(buffer.text(), "the old contents");
4101 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4102 buffer.save(cx)
4103 })
4104 .await
4105 .unwrap();
4106
4107 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4108 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4109 }
4110
4111 #[gpui::test]
4112 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4113 let fs = FakeFs::new(cx.background());
4114 fs.insert_tree(
4115 "/dir",
4116 json!({
4117 "file1": "the old contents",
4118 }),
4119 )
4120 .await;
4121
4122 let project = Project::test(fs.clone(), cx);
4123 let worktree_id = project
4124 .update(cx, |p, cx| {
4125 p.find_or_create_local_worktree("/dir/file1", true, cx)
4126 })
4127 .await
4128 .unwrap()
4129 .0
4130 .read_with(cx, |tree, _| tree.id());
4131
4132 let buffer = project
4133 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4134 .await
4135 .unwrap();
4136 buffer
4137 .update(cx, |buffer, cx| {
4138 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4139 buffer.save(cx)
4140 })
4141 .await
4142 .unwrap();
4143
4144 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4145 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4146 }
4147
4148 #[gpui::test(retries = 5)]
4149 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4150 let dir = temp_tree(json!({
4151 "a": {
4152 "file1": "",
4153 "file2": "",
4154 "file3": "",
4155 },
4156 "b": {
4157 "c": {
4158 "file4": "",
4159 "file5": "",
4160 }
4161 }
4162 }));
4163
4164 let project = Project::test(Arc::new(RealFs), cx);
4165 let rpc = project.read_with(cx, |p, _| p.client.clone());
4166
4167 let (tree, _) = project
4168 .update(cx, |p, cx| {
4169 p.find_or_create_local_worktree(dir.path(), true, cx)
4170 })
4171 .await
4172 .unwrap();
4173 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4174
4175 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4176 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4177 async move { buffer.await.unwrap() }
4178 };
4179 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4180 tree.read_with(cx, |tree, _| {
4181 tree.entry_for_path(path)
4182 .expect(&format!("no entry for path {}", path))
4183 .id
4184 })
4185 };
4186
4187 let buffer2 = buffer_for_path("a/file2", cx).await;
4188 let buffer3 = buffer_for_path("a/file3", cx).await;
4189 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4190 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4191
4192 let file2_id = id_for_path("a/file2", &cx);
4193 let file3_id = id_for_path("a/file3", &cx);
4194 let file4_id = id_for_path("b/c/file4", &cx);
4195
4196 // Wait for the initial scan.
4197 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4198 .await;
4199
4200 // Create a remote copy of this worktree.
4201 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4202 let (remote, load_task) = cx.update(|cx| {
4203 Worktree::remote(
4204 1,
4205 1,
4206 initial_snapshot.to_proto(&Default::default(), true),
4207 rpc.clone(),
4208 cx,
4209 )
4210 });
4211 load_task.await;
4212
4213 cx.read(|cx| {
4214 assert!(!buffer2.read(cx).is_dirty());
4215 assert!(!buffer3.read(cx).is_dirty());
4216 assert!(!buffer4.read(cx).is_dirty());
4217 assert!(!buffer5.read(cx).is_dirty());
4218 });
4219
4220 // Rename and delete files and directories.
4221 tree.flush_fs_events(&cx).await;
4222 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4223 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4224 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4225 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4226 tree.flush_fs_events(&cx).await;
4227
4228 let expected_paths = vec![
4229 "a",
4230 "a/file1",
4231 "a/file2.new",
4232 "b",
4233 "d",
4234 "d/file3",
4235 "d/file4",
4236 ];
4237
4238 cx.read(|app| {
4239 assert_eq!(
4240 tree.read(app)
4241 .paths()
4242 .map(|p| p.to_str().unwrap())
4243 .collect::<Vec<_>>(),
4244 expected_paths
4245 );
4246
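            // Entry ids are preserved across renames and moves.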
4247 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4248 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4249 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4250
4251 assert_eq!(
4252 buffer2.read(app).file().unwrap().path().as_ref(),
4253 Path::new("a/file2.new")
4254 );
4255 assert_eq!(
4256 buffer3.read(app).file().unwrap().path().as_ref(),
4257 Path::new("d/file3")
4258 );
4259 assert_eq!(
4260 buffer4.read(app).file().unwrap().path().as_ref(),
4261 Path::new("d/file4")
4262 );
4263 assert_eq!(
4264 buffer5.read(app).file().unwrap().path().as_ref(),
4265 Path::new("b/c/file5")
4266 );
4267
4268 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4269 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4270 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4271 assert!(buffer5.read(app).file().unwrap().is_deleted());
4272 });
4273
4274 // Update the remote worktree. Check that it becomes consistent with the
4275 // local worktree.
4276 remote.update(cx, |remote, cx| {
4277 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4278 &initial_snapshot,
4279 1,
4280 1,
4281 true,
4282 );
4283 remote
4284 .as_remote_mut()
4285 .unwrap()
4286 .snapshot
4287 .apply_remote_update(update_message)
4288 .unwrap();
4289
4290 assert_eq!(
4291 remote
4292 .paths()
4293 .map(|p| p.to_str().unwrap())
4294 .collect::<Vec<_>>(),
4295 expected_paths
4296 );
4297 });
4298 }
4299
4300 #[gpui::test]
4301 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4302 let fs = FakeFs::new(cx.background());
4303 fs.insert_tree(
4304 "/the-dir",
4305 json!({
4306 "a.txt": "a-contents",
4307 "b.txt": "b-contents",
4308 }),
4309 )
4310 .await;
4311
4312 let project = Project::test(fs.clone(), cx);
4313 let worktree_id = project
4314 .update(cx, |p, cx| {
4315 p.find_or_create_local_worktree("/the-dir", true, cx)
4316 })
4317 .await
4318 .unwrap()
4319 .0
4320 .read_with(cx, |tree, _| tree.id());
4321
4322 // Spawn multiple tasks to open paths, repeating some paths.
4323 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4324 (
4325 p.open_buffer((worktree_id, "a.txt"), cx),
4326 p.open_buffer((worktree_id, "b.txt"), cx),
4327 p.open_buffer((worktree_id, "a.txt"), cx),
4328 )
4329 });
4330
4331 let buffer_a_1 = buffer_a_1.await.unwrap();
4332 let buffer_a_2 = buffer_a_2.await.unwrap();
4333 let buffer_b = buffer_b.await.unwrap();
4334 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4335 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4336
4337 // There is only one buffer per path.
4338 let buffer_a_id = buffer_a_1.id();
4339 assert_eq!(buffer_a_2.id(), buffer_a_id);
4340
4341 // Open the same path again while it is still open.
4342 drop(buffer_a_1);
4343 let buffer_a_3 = project
4344 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4345 .await
4346 .unwrap();
4347
4348 // There's still only one buffer per path.
4349 assert_eq!(buffer_a_3.id(), buffer_a_id);
4350 }
4351
4352 #[gpui::test]
4353 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4354 use std::fs;
4355
4356 let dir = temp_tree(json!({
4357 "file1": "abc",
4358 "file2": "def",
4359 "file3": "ghi",
4360 }));
4361
4362 let project = Project::test(Arc::new(RealFs), cx);
4363 let (worktree, _) = project
4364 .update(cx, |p, cx| {
4365 p.find_or_create_local_worktree(dir.path(), true, cx)
4366 })
4367 .await
4368 .unwrap();
4369 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4370
4371 worktree.flush_fs_events(&cx).await;
4372 worktree
4373 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4374 .await;
4375
4376 let buffer1 = project
4377 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4378 .await
4379 .unwrap();
4380 let events = Rc::new(RefCell::new(Vec::new()));
4381
4382 // initially, the buffer isn't dirty.
4383 buffer1.update(cx, |buffer, cx| {
4384 cx.subscribe(&buffer1, {
4385 let events = events.clone();
4386 move |_, _, event, _| events.borrow_mut().push(event.clone())
4387 })
4388 .detach();
4389
4390 assert!(!buffer.is_dirty());
4391 assert!(events.borrow().is_empty());
4392
4393 buffer.edit(vec![1..2], "", cx);
4394 });
4395
4396 // after the first edit, the buffer is dirty, and emits a dirtied event.
4397 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "ac");
4399 assert!(buffer.is_dirty());
4400 assert_eq!(
4401 *events.borrow(),
4402 &[language::Event::Edited, language::Event::Dirtied]
4403 );
4404 events.borrow_mut().clear();
4405 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4406 });
4407
4408 // after saving, the buffer is not dirty, and emits a saved event.
4409 buffer1.update(cx, |buffer, cx| {
4410 assert!(!buffer.is_dirty());
4411 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4412 events.borrow_mut().clear();
4413
4414 buffer.edit(vec![1..1], "B", cx);
4415 buffer.edit(vec![2..2], "D", cx);
4416 });
4417
4418 // after editing again, the buffer is dirty, and emits another dirty event.
4419 buffer1.update(cx, |buffer, cx| {
            assert_eq!(buffer.text(), "aBDc");
4421 assert!(buffer.is_dirty());
4422 assert_eq!(
4423 *events.borrow(),
4424 &[
4425 language::Event::Edited,
4426 language::Event::Dirtied,
4427 language::Event::Edited,
4428 ],
4429 );
4430 events.borrow_mut().clear();
4431
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
            buffer.edit([1..3], "", cx);
            assert_eq!(buffer.text(), "ac");
4436 assert!(buffer.is_dirty());
4437 });
4438
4439 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4440
4441 // When a file is deleted, the buffer is considered dirty.
4442 let events = Rc::new(RefCell::new(Vec::new()));
4443 let buffer2 = project
4444 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4445 .await
4446 .unwrap();
4447 buffer2.update(cx, |_, cx| {
4448 cx.subscribe(&buffer2, {
4449 let events = events.clone();
4450 move |_, _, event, _| events.borrow_mut().push(event.clone())
4451 })
4452 .detach();
4453 });
4454
4455 fs::remove_file(dir.path().join("file2")).unwrap();
4456 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4457 assert_eq!(
4458 *events.borrow(),
4459 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4460 );
4461
        // If a file is already dirty when it's deleted, we don't emit an
        // additional Dirtied event.
4463 let events = Rc::new(RefCell::new(Vec::new()));
4464 let buffer3 = project
4465 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4466 .await
4467 .unwrap();
4468 buffer3.update(cx, |_, cx| {
4469 cx.subscribe(&buffer3, {
4470 let events = events.clone();
4471 move |_, _, event, _| events.borrow_mut().push(event.clone())
4472 })
4473 .detach();
4474 });
4475
4476 worktree.flush_fs_events(&cx).await;
4477 buffer3.update(cx, |buffer, cx| {
4478 buffer.edit(Some(0..0), "x", cx);
4479 });
4480 events.borrow_mut().clear();
4481 fs::remove_file(dir.path().join("file3")).unwrap();
4482 buffer3
4483 .condition(&cx, |_, _| !events.borrow().is_empty())
4484 .await;
4485 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4486 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4487 }
4488
4489 #[gpui::test]
4490 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4491 use std::fs;
4492
4493 let initial_contents = "aaa\nbbbbb\nc\n";
4494 let dir = temp_tree(json!({ "the-file": initial_contents }));
4495
4496 let project = Project::test(Arc::new(RealFs), cx);
4497 let (worktree, _) = project
4498 .update(cx, |p, cx| {
4499 p.find_or_create_local_worktree(dir.path(), true, cx)
4500 })
4501 .await
4502 .unwrap();
4503 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4504
4505 worktree
4506 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4507 .await;
4508
4509 let abs_path = dir.path().join("the-file");
4510 let buffer = project
4511 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4512 .await
4513 .unwrap();
4514
4515 // TODO
4516 // Add a cursor on each row.
4517 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4518 // assert!(!buffer.is_dirty());
4519 // buffer.add_selection_set(
4520 // &(0..3)
4521 // .map(|row| Selection {
4522 // id: row as usize,
4523 // start: Point::new(row, 1),
4524 // end: Point::new(row, 1),
4525 // reversed: false,
4526 // goal: SelectionGoal::None,
4527 // })
4528 // .collect::<Vec<_>>(),
4529 // cx,
4530 // )
4531 // });
4532
4533 // Change the file on disk, adding two new lines of text, and removing
4534 // one line.
4535 buffer.read_with(cx, |buffer, _| {
4536 assert!(!buffer.is_dirty());
4537 assert!(!buffer.has_conflict());
4538 });
4539 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4540 fs::write(&abs_path, new_contents).unwrap();
4541
4542 // Because the buffer was not modified, it is reloaded from disk. Its
4543 // contents are edited according to the diff between the old and new
4544 // file contents.
4545 buffer
4546 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4547 .await;
4548
4549 buffer.update(cx, |buffer, _| {
4550 assert_eq!(buffer.text(), new_contents);
4551 assert!(!buffer.is_dirty());
4552 assert!(!buffer.has_conflict());
4553
4554 // TODO
4555 // let cursor_positions = buffer
4556 // .selection_set(selection_set_id)
4557 // .unwrap()
4558 // .selections::<Point>(&*buffer)
4559 // .map(|selection| {
4560 // assert_eq!(selection.start, selection.end);
4561 // selection.start
4562 // })
4563 // .collect::<Vec<_>>();
4564 // assert_eq!(
4565 // cursor_positions,
4566 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4567 // );
4568 });
4569
4570 // Modify the buffer
4571 buffer.update(cx, |buffer, cx| {
4572 buffer.edit(vec![0..0], " ", cx);
4573 assert!(buffer.is_dirty());
4574 assert!(!buffer.has_conflict());
4575 });
4576
4577 // Change the file on disk again, adding blank lines to the beginning.
4578 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4579
4580 // Because the buffer is modified, it doesn't reload from disk, but is
4581 // marked as having a conflict.
4582 buffer
4583 .condition(&cx, |buffer, _| buffer.has_conflict())
4584 .await;
4585 }
4586
4587 #[gpui::test]
4588 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4589 let fs = FakeFs::new(cx.background());
4590 fs.insert_tree(
4591 "/the-dir",
4592 json!({
4593 "a.rs": "
4594 fn foo(mut v: Vec<usize>) {
4595 for x in &v {
4596 v.push(1);
4597 }
4598 }
4599 "
4600 .unindent(),
4601 }),
4602 )
4603 .await;
4604
4605 let project = Project::test(fs.clone(), cx);
4606 let (worktree, _) = project
4607 .update(cx, |p, cx| {
4608 p.find_or_create_local_worktree("/the-dir", true, cx)
4609 })
4610 .await
4611 .unwrap();
4612 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4613
4614 let buffer = project
4615 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4616 .await
4617 .unwrap();
4618
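        // Publish diagnostics in which each hint points back to its primary
        // diagnostic (and vice versa) via `related_information`, so that the
        // entries can be grouped.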
4619 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4620 let message = lsp::PublishDiagnosticsParams {
4621 uri: buffer_uri.clone(),
4622 diagnostics: vec![
4623 lsp::Diagnostic {
4624 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4625 severity: Some(DiagnosticSeverity::WARNING),
4626 message: "error 1".to_string(),
4627 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4628 location: lsp::Location {
4629 uri: buffer_uri.clone(),
4630 range: lsp::Range::new(
4631 lsp::Position::new(1, 8),
4632 lsp::Position::new(1, 9),
4633 ),
4634 },
4635 message: "error 1 hint 1".to_string(),
4636 }]),
4637 ..Default::default()
4638 },
4639 lsp::Diagnostic {
4640 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4641 severity: Some(DiagnosticSeverity::HINT),
4642 message: "error 1 hint 1".to_string(),
4643 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4644 location: lsp::Location {
4645 uri: buffer_uri.clone(),
4646 range: lsp::Range::new(
4647 lsp::Position::new(1, 8),
4648 lsp::Position::new(1, 9),
4649 ),
4650 },
4651 message: "original diagnostic".to_string(),
4652 }]),
4653 ..Default::default()
4654 },
4655 lsp::Diagnostic {
4656 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4657 severity: Some(DiagnosticSeverity::ERROR),
4658 message: "error 2".to_string(),
4659 related_information: Some(vec![
4660 lsp::DiagnosticRelatedInformation {
4661 location: lsp::Location {
4662 uri: buffer_uri.clone(),
4663 range: lsp::Range::new(
4664 lsp::Position::new(1, 13),
4665 lsp::Position::new(1, 15),
4666 ),
4667 },
4668 message: "error 2 hint 1".to_string(),
4669 },
4670 lsp::DiagnosticRelatedInformation {
4671 location: lsp::Location {
4672 uri: buffer_uri.clone(),
4673 range: lsp::Range::new(
4674 lsp::Position::new(1, 13),
4675 lsp::Position::new(1, 15),
4676 ),
4677 },
4678 message: "error 2 hint 2".to_string(),
4679 },
4680 ]),
4681 ..Default::default()
4682 },
4683 lsp::Diagnostic {
4684 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4685 severity: Some(DiagnosticSeverity::HINT),
4686 message: "error 2 hint 1".to_string(),
4687 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4688 location: lsp::Location {
4689 uri: buffer_uri.clone(),
4690 range: lsp::Range::new(
4691 lsp::Position::new(2, 8),
4692 lsp::Position::new(2, 17),
4693 ),
4694 },
4695 message: "original diagnostic".to_string(),
4696 }]),
4697 ..Default::default()
4698 },
4699 lsp::Diagnostic {
4700 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4701 severity: Some(DiagnosticSeverity::HINT),
4702 message: "error 2 hint 2".to_string(),
4703 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4704 location: lsp::Location {
4705 uri: buffer_uri.clone(),
4706 range: lsp::Range::new(
4707 lsp::Position::new(2, 8),
4708 lsp::Position::new(2, 17),
4709 ),
4710 },
4711 message: "original diagnostic".to_string(),
4712 }]),
4713 ..Default::default()
4714 },
4715 ],
4716 version: None,
4717 };
4718
4719 project
4720 .update(cx, |p, cx| {
4721 p.update_diagnostics(message, &Default::default(), cx)
4722 })
4723 .unwrap();
4724 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4725
4726 assert_eq!(
4727 buffer
4728 .diagnostics_in_range::<_, Point>(0..buffer.len())
4729 .collect::<Vec<_>>(),
4730 &[
4731 DiagnosticEntry {
4732 range: Point::new(1, 8)..Point::new(1, 9),
4733 diagnostic: Diagnostic {
4734 severity: DiagnosticSeverity::WARNING,
4735 message: "error 1".to_string(),
4736 group_id: 0,
4737 is_primary: true,
4738 ..Default::default()
4739 }
4740 },
4741 DiagnosticEntry {
4742 range: Point::new(1, 8)..Point::new(1, 9),
4743 diagnostic: Diagnostic {
4744 severity: DiagnosticSeverity::HINT,
4745 message: "error 1 hint 1".to_string(),
4746 group_id: 0,
4747 is_primary: false,
4748 ..Default::default()
4749 }
4750 },
4751 DiagnosticEntry {
4752 range: Point::new(1, 13)..Point::new(1, 15),
4753 diagnostic: Diagnostic {
4754 severity: DiagnosticSeverity::HINT,
4755 message: "error 2 hint 1".to_string(),
4756 group_id: 1,
4757 is_primary: false,
4758 ..Default::default()
4759 }
4760 },
4761 DiagnosticEntry {
4762 range: Point::new(1, 13)..Point::new(1, 15),
4763 diagnostic: Diagnostic {
4764 severity: DiagnosticSeverity::HINT,
4765 message: "error 2 hint 2".to_string(),
4766 group_id: 1,
4767 is_primary: false,
4768 ..Default::default()
4769 }
4770 },
4771 DiagnosticEntry {
4772 range: Point::new(2, 8)..Point::new(2, 17),
4773 diagnostic: Diagnostic {
4774 severity: DiagnosticSeverity::ERROR,
4775 message: "error 2".to_string(),
4776 group_id: 1,
4777 is_primary: true,
4778 ..Default::default()
4779 }
4780 }
4781 ]
4782 );
4783
4784 assert_eq!(
4785 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4786 &[
4787 DiagnosticEntry {
4788 range: Point::new(1, 8)..Point::new(1, 9),
4789 diagnostic: Diagnostic {
4790 severity: DiagnosticSeverity::WARNING,
4791 message: "error 1".to_string(),
4792 group_id: 0,
4793 is_primary: true,
4794 ..Default::default()
4795 }
4796 },
4797 DiagnosticEntry {
4798 range: Point::new(1, 8)..Point::new(1, 9),
4799 diagnostic: Diagnostic {
4800 severity: DiagnosticSeverity::HINT,
4801 message: "error 1 hint 1".to_string(),
4802 group_id: 0,
4803 is_primary: false,
4804 ..Default::default()
4805 }
4806 },
4807 ]
4808 );
4809 assert_eq!(
4810 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4811 &[
4812 DiagnosticEntry {
4813 range: Point::new(1, 13)..Point::new(1, 15),
4814 diagnostic: Diagnostic {
4815 severity: DiagnosticSeverity::HINT,
4816 message: "error 2 hint 1".to_string(),
4817 group_id: 1,
4818 is_primary: false,
4819 ..Default::default()
4820 }
4821 },
4822 DiagnosticEntry {
4823 range: Point::new(1, 13)..Point::new(1, 15),
4824 diagnostic: Diagnostic {
4825 severity: DiagnosticSeverity::HINT,
4826 message: "error 2 hint 2".to_string(),
4827 group_id: 1,
4828 is_primary: false,
4829 ..Default::default()
4830 }
4831 },
4832 DiagnosticEntry {
4833 range: Point::new(2, 8)..Point::new(2, 17),
4834 diagnostic: Diagnostic {
4835 severity: DiagnosticSeverity::ERROR,
4836 message: "error 2".to_string(),
4837 group_id: 1,
4838 is_primary: true,
4839 ..Default::default()
4840 }
4841 }
4842 ]
4843 );
4844 }
4845
4846 #[gpui::test]
4847 async fn test_rename(cx: &mut gpui::TestAppContext) {
4848 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4849 let language = Arc::new(Language::new(
4850 LanguageConfig {
4851 name: "Rust".into(),
4852 path_suffixes: vec!["rs".to_string()],
4853 language_server: Some(language_server_config),
4854 ..Default::default()
4855 },
4856 Some(tree_sitter_rust::language()),
4857 ));
4858
4859 let fs = FakeFs::new(cx.background());
4860 fs.insert_tree(
4861 "/dir",
4862 json!({
4863 "one.rs": "const ONE: usize = 1;",
4864 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4865 }),
4866 )
4867 .await;
4868
4869 let project = Project::test(fs.clone(), cx);
4870 project.update(cx, |project, _| {
4871 Arc::get_mut(&mut project.languages).unwrap().add(language);
4872 });
4873
4874 let (tree, _) = project
4875 .update(cx, |project, cx| {
4876 project.find_or_create_local_worktree("/dir", true, cx)
4877 })
4878 .await
4879 .unwrap();
4880 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4881 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4882 .await;
4883
4884 let buffer = project
4885 .update(cx, |project, cx| {
4886 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4887 })
4888 .await
4889 .unwrap();
4890
4891 let mut fake_server = fake_servers.next().await.unwrap();
4892
4893 let response = project.update(cx, |project, cx| {
4894 project.prepare_rename(buffer.clone(), 7, cx)
4895 });
4896 fake_server
4897 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4898 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4899 assert_eq!(params.position, lsp::Position::new(0, 7));
4900 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4901 lsp::Position::new(0, 6),
4902 lsp::Position::new(0, 9),
4903 )))
4904 })
4905 .next()
4906 .await
4907 .unwrap();
4908 let range = response.await.unwrap().unwrap();
4909 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4910 assert_eq!(range, 6..9);
4911
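        // Now perform the rename. The fake server responds with edits in two
        // files; both are applied, opening the second buffer as needed, and
        // returned in a single project-wide transaction.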
4912 let response = project.update(cx, |project, cx| {
4913 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4914 });
4915 fake_server
4916 .handle_request::<lsp::request::Rename, _>(|params, _| {
4917 assert_eq!(
4918 params.text_document_position.text_document.uri.as_str(),
4919 "file:///dir/one.rs"
4920 );
4921 assert_eq!(
4922 params.text_document_position.position,
4923 lsp::Position::new(0, 7)
4924 );
4925 assert_eq!(params.new_name, "THREE");
4926 Some(lsp::WorkspaceEdit {
4927 changes: Some(
4928 [
4929 (
4930 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4931 vec![lsp::TextEdit::new(
4932 lsp::Range::new(
4933 lsp::Position::new(0, 6),
4934 lsp::Position::new(0, 9),
4935 ),
4936 "THREE".to_string(),
4937 )],
4938 ),
4939 (
4940 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4941 vec![
4942 lsp::TextEdit::new(
4943 lsp::Range::new(
4944 lsp::Position::new(0, 24),
4945 lsp::Position::new(0, 27),
4946 ),
4947 "THREE".to_string(),
4948 ),
4949 lsp::TextEdit::new(
4950 lsp::Range::new(
4951 lsp::Position::new(0, 35),
4952 lsp::Position::new(0, 38),
4953 ),
4954 "THREE".to_string(),
4955 ),
4956 ],
4957 ),
4958 ]
4959 .into_iter()
4960 .collect(),
4961 ),
4962 ..Default::default()
4963 })
4964 })
4965 .next()
4966 .await
4967 .unwrap();
4968 let mut transaction = response.await.unwrap().0;
4969 assert_eq!(transaction.len(), 2);
4970 assert_eq!(
4971 transaction
4972 .remove_entry(&buffer)
4973 .unwrap()
4974 .0
4975 .read_with(cx, |buffer, _| buffer.text()),
4976 "const THREE: usize = 1;"
4977 );
4978 assert_eq!(
4979 transaction
4980 .into_keys()
4981 .next()
4982 .unwrap()
4983 .read_with(cx, |buffer, _| buffer.text()),
4984 "const TWO: usize = one::THREE + one::THREE;"
4985 );
4986 }
4987
4988 #[gpui::test]
4989 async fn test_search(cx: &mut gpui::TestAppContext) {
4990 let fs = FakeFs::new(cx.background());
4991 fs.insert_tree(
4992 "/dir",
4993 json!({
4994 "one.rs": "const ONE: usize = 1;",
4995 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4996 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4997 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4998 }),
4999 )
5000 .await;
5001 let project = Project::test(fs.clone(), cx);
5002 let (tree, _) = project
5003 .update(cx, |project, cx| {
5004 project.find_or_create_local_worktree("/dir", true, cx)
5005 })
5006 .await
5007 .unwrap();
5008 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5009 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5010 .await;
5011
5012 assert_eq!(
5013 search(&project, SearchQuery::text("TWO", false, true), cx)
5014 .await
5015 .unwrap(),
5016 HashMap::from_iter([
5017 ("two.rs".to_string(), vec![6..9]),
5018 ("three.rs".to_string(), vec![37..40])
5019 ])
5020 );
5021
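        // Edit an open buffer so that its in-memory contents, not the file on
        // disk, contain additional matches.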
5022 let buffer_4 = project
5023 .update(cx, |project, cx| {
5024 project.open_buffer((worktree_id, "four.rs"), cx)
5025 })
5026 .await
5027 .unwrap();
5028 buffer_4.update(cx, |buffer, cx| {
5029 buffer.edit([20..28, 31..43], "two::TWO", cx);
5030 });
5031
5032 assert_eq!(
5033 search(&project, SearchQuery::text("TWO", false, true), cx)
5034 .await
5035 .unwrap(),
5036 HashMap::from_iter([
5037 ("two.rs".to_string(), vec![6..9]),
5038 ("three.rs".to_string(), vec![37..40]),
5039 ("four.rs".to_string(), vec![25..28, 36..39])
5040 ])
5041 );
5042
5043 async fn search(
5044 project: &ModelHandle<Project>,
5045 query: SearchQuery,
5046 cx: &mut gpui::TestAppContext,
5047 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5048 let results = project
5049 .update(cx, |project, cx| project.search(query, cx))
5050 .await?;
5051
5052 Ok(results
5053 .into_iter()
5054 .map(|(buffer, ranges)| {
5055 buffer.read_with(cx, |buffer, _| {
5056 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5057 let ranges = ranges
5058 .into_iter()
5059 .map(|range| range.to_offset(buffer))
5060 .collect::<Vec<_>>();
5061 (path, ranges)
5062 })
5063 })
5064 .collect())
5065 }
5066 }
5067}