1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 opened_buffers: HashMap<u64, OpenBuffer>,
68 nonce: u128,
69}
70
71enum OpenBuffer {
72 Strong(ModelHandle<Buffer>),
73 Weak(WeakModelHandle<Buffer>),
74 Loading(Vec<Operation>),
75}
76
77enum WorktreeHandle {
78 Strong(ModelHandle<Worktree>),
79 Weak(WeakModelHandle<Worktree>),
80}
81
82enum ProjectClientState {
83 Local {
84 is_shared: bool,
85 remote_id_tx: watch::Sender<Option<u64>>,
86 remote_id_rx: watch::Receiver<Option<u64>>,
87 _maintain_remote_id_task: Task<Option<()>>,
88 },
89 Remote {
90 sharing_has_stopped: bool,
91 remote_id: u64,
92 replica_id: ReplicaId,
93 },
94}
95
96#[derive(Clone, Debug)]
97pub struct Collaborator {
98 pub user: Arc<User>,
99 pub peer_id: PeerId,
100 pub replica_id: ReplicaId,
101}
102
103#[derive(Clone, Debug, PartialEq)]
104pub enum Event {
105 ActiveEntryChanged(Option<ProjectEntry>),
106 WorktreeRemoved(WorktreeId),
107 DiskBasedDiagnosticsStarted,
108 DiskBasedDiagnosticsUpdated,
109 DiskBasedDiagnosticsFinished,
110 DiagnosticsUpdated(ProjectPath),
111}
112
113#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
114pub struct ProjectPath {
115 pub worktree_id: WorktreeId,
116 pub path: Arc<Path>,
117}
118
119#[derive(Clone, Debug, Default, PartialEq)]
120pub struct DiagnosticSummary {
121 pub error_count: usize,
122 pub warning_count: usize,
123 pub info_count: usize,
124 pub hint_count: usize,
125}
126
127#[derive(Debug)]
128pub struct Location {
129 pub buffer: ModelHandle<Buffer>,
130 pub range: Range<language::Anchor>,
131}
132
133#[derive(Debug)]
134pub struct DocumentHighlight {
135 pub range: Range<language::Anchor>,
136 pub kind: DocumentHighlightKind,
137}
138
139#[derive(Clone, Debug)]
140pub struct Symbol {
141 pub source_worktree_id: WorktreeId,
142 pub worktree_id: WorktreeId,
143 pub language_name: String,
144 pub path: PathBuf,
145 pub label: CodeLabel,
146 pub name: String,
147 pub kind: lsp::SymbolKind,
148 pub range: Range<PointUtf16>,
149 pub signature: [u8; 32],
150}
151
152#[derive(Default)]
153pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
154
155impl DiagnosticSummary {
156 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
157 let mut this = Self {
158 error_count: 0,
159 warning_count: 0,
160 info_count: 0,
161 hint_count: 0,
162 };
163
164 for entry in diagnostics {
165 if entry.diagnostic.is_primary {
166 match entry.diagnostic.severity {
167 DiagnosticSeverity::ERROR => this.error_count += 1,
168 DiagnosticSeverity::WARNING => this.warning_count += 1,
169 DiagnosticSeverity::INFORMATION => this.info_count += 1,
170 DiagnosticSeverity::HINT => this.hint_count += 1,
171 _ => {}
172 }
173 }
174 }
175
176 this
177 }
178
179 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
180 proto::DiagnosticSummary {
181 path: path.to_string_lossy().to_string(),
182 error_count: self.error_count as u32,
183 warning_count: self.warning_count as u32,
184 info_count: self.info_count as u32,
185 hint_count: self.hint_count as u32,
186 }
187 }
188}
189
190#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
191pub struct ProjectEntry {
192 pub worktree_id: WorktreeId,
193 pub entry_id: usize,
194}
195
196impl Project {
197 pub fn init(client: &Arc<Client>) {
198 client.add_entity_message_handler(Self::handle_add_collaborator);
199 client.add_entity_message_handler(Self::handle_buffer_reloaded);
200 client.add_entity_message_handler(Self::handle_buffer_saved);
201 client.add_entity_message_handler(Self::handle_close_buffer);
202 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
203 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
204 client.add_entity_message_handler(Self::handle_remove_collaborator);
205 client.add_entity_message_handler(Self::handle_register_worktree);
206 client.add_entity_message_handler(Self::handle_unregister_worktree);
207 client.add_entity_message_handler(Self::handle_unshare_project);
208 client.add_entity_message_handler(Self::handle_update_buffer_file);
209 client.add_entity_message_handler(Self::handle_update_buffer);
210 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
211 client.add_entity_message_handler(Self::handle_update_worktree);
212 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
213 client.add_entity_request_handler(Self::handle_apply_code_action);
214 client.add_entity_request_handler(Self::handle_format_buffers);
215 client.add_entity_request_handler(Self::handle_get_code_actions);
216 client.add_entity_request_handler(Self::handle_get_completions);
217 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
218 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
222 client.add_entity_request_handler(Self::handle_search_project);
223 client.add_entity_request_handler(Self::handle_get_project_symbols);
224 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
225 client.add_entity_request_handler(Self::handle_open_buffer);
226 client.add_entity_request_handler(Self::handle_save_buffer);
227 }
228
229 pub fn local(
230 client: Arc<Client>,
231 user_store: ModelHandle<UserStore>,
232 languages: Arc<LanguageRegistry>,
233 fs: Arc<dyn Fs>,
234 cx: &mut MutableAppContext,
235 ) -> ModelHandle<Self> {
236 cx.add_model(|cx: &mut ModelContext<Self>| {
237 let (remote_id_tx, remote_id_rx) = watch::channel();
238 let _maintain_remote_id_task = cx.spawn_weak({
239 let rpc = client.clone();
240 move |this, mut cx| {
241 async move {
242 let mut status = rpc.status();
243 while let Some(status) = status.next().await {
244 if let Some(this) = this.upgrade(&cx) {
245 let remote_id = if let client::Status::Connected { .. } = status {
246 let response = rpc.request(proto::RegisterProject {}).await?;
247 Some(response.project_id)
248 } else {
249 None
250 };
251
252 if let Some(project_id) = remote_id {
253 let mut registrations = Vec::new();
254 this.update(&mut cx, |this, cx| {
255 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
256 registrations.push(worktree.update(
257 cx,
258 |worktree, cx| {
259 let worktree = worktree.as_local_mut().unwrap();
260 worktree.register(project_id, cx)
261 },
262 ));
263 }
264 });
265 for registration in registrations {
266 registration.await?;
267 }
268 }
269 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
270 }
271 }
272 Ok(())
273 }
274 .log_err()
275 }
276 });
277
278 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
279 Self {
280 worktrees: Default::default(),
281 collaborators: Default::default(),
282 opened_buffers: Default::default(),
283 shared_buffers: Default::default(),
284 loading_buffers: Default::default(),
285 client_state: ProjectClientState::Local {
286 is_shared: false,
287 remote_id_tx,
288 remote_id_rx,
289 _maintain_remote_id_task,
290 },
291 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
292 subscriptions: Vec::new(),
293 active_entry: None,
294 languages,
295 client,
296 user_store,
297 fs,
298 language_servers_with_diagnostics_running: 0,
299 language_servers: Default::default(),
300 started_language_servers: Default::default(),
301 nonce: StdRng::from_entropy().gen(),
302 }
303 })
304 }
305
306 pub async fn remote(
307 remote_id: u64,
308 client: Arc<Client>,
309 user_store: ModelHandle<UserStore>,
310 languages: Arc<LanguageRegistry>,
311 fs: Arc<dyn Fs>,
312 cx: &mut AsyncAppContext,
313 ) -> Result<ModelHandle<Self>> {
314 client.authenticate_and_connect(&cx).await?;
315
316 let response = client
317 .request(proto::JoinProject {
318 project_id: remote_id,
319 })
320 .await?;
321
322 let replica_id = response.replica_id as ReplicaId;
323
324 let mut worktrees = Vec::new();
325 for worktree in response.worktrees {
326 let (worktree, load_task) = cx
327 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
328 worktrees.push(worktree);
329 load_task.detach();
330 }
331
332 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
333 let this = cx.add_model(|cx| {
334 let mut this = Self {
335 worktrees: Vec::new(),
336 loading_buffers: Default::default(),
337 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
338 shared_buffers: Default::default(),
339 active_entry: None,
340 collaborators: Default::default(),
341 languages,
342 user_store: user_store.clone(),
343 fs,
344 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
345 client,
346 client_state: ProjectClientState::Remote {
347 sharing_has_stopped: false,
348 remote_id,
349 replica_id,
350 },
351 language_servers_with_diagnostics_running: 0,
352 language_servers: Default::default(),
353 started_language_servers: Default::default(),
354 opened_buffers: Default::default(),
355 nonce: StdRng::from_entropy().gen(),
356 };
357 for worktree in worktrees {
358 this.add_worktree(&worktree, cx);
359 }
360 this
361 });
362
363 let user_ids = response
364 .collaborators
365 .iter()
366 .map(|peer| peer.user_id)
367 .collect();
368 user_store
369 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
370 .await?;
371 let mut collaborators = HashMap::default();
372 for message in response.collaborators {
373 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
374 collaborators.insert(collaborator.peer_id, collaborator);
375 }
376
377 this.update(cx, |this, _| {
378 this.collaborators = collaborators;
379 });
380
381 Ok(this)
382 }
383
384 #[cfg(any(test, feature = "test-support"))]
385 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
386 let languages = Arc::new(LanguageRegistry::new());
387 let http_client = client::test::FakeHttpClient::with_404_response();
388 let client = client::Client::new(http_client.clone());
389 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
390 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
391 }
392
393 #[cfg(any(test, feature = "test-support"))]
394 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
395 self.opened_buffers
396 .get(&remote_id)
397 .and_then(|buffer| buffer.upgrade(cx))
398 }
399
400 #[cfg(any(test, feature = "test-support"))]
401 pub fn languages(&self) -> &Arc<LanguageRegistry> {
402 &self.languages
403 }
404
405 #[cfg(any(test, feature = "test-support"))]
406 pub fn check_invariants(&self, cx: &AppContext) {
407 if self.is_local() {
408 let mut worktree_root_paths = HashMap::default();
409 for worktree in self.worktrees(cx) {
410 let worktree = worktree.read(cx);
411 let abs_path = worktree.as_local().unwrap().abs_path().clone();
412 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
413 assert_eq!(
414 prev_worktree_id,
415 None,
416 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
417 abs_path,
418 worktree.id(),
419 prev_worktree_id
420 )
421 }
422 } else {
423 let replica_id = self.replica_id();
424 for buffer in self.opened_buffers.values() {
425 if let Some(buffer) = buffer.upgrade(cx) {
426 let buffer = buffer.read(cx);
427 assert_eq!(
428 buffer.deferred_ops_len(),
429 0,
430 "replica {}, buffer {} has deferred operations",
431 replica_id,
432 buffer.remote_id()
433 );
434 }
435 }
436 }
437 }
438
439 #[cfg(any(test, feature = "test-support"))]
440 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
441 let path = path.into();
442 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
443 self.opened_buffers.iter().any(|(_, buffer)| {
444 if let Some(buffer) = buffer.upgrade(cx) {
445 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
446 if file.worktree == worktree && file.path() == &path.path {
447 return true;
448 }
449 }
450 }
451 false
452 })
453 } else {
454 false
455 }
456 }
457
458 pub fn fs(&self) -> &Arc<dyn Fs> {
459 &self.fs
460 }
461
462 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
463 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
464 *remote_id_tx.borrow_mut() = remote_id;
465 }
466
467 self.subscriptions.clear();
468 if let Some(remote_id) = remote_id {
469 self.subscriptions
470 .push(self.client.add_model_for_remote_entity(remote_id, cx));
471 }
472 }
473
474 pub fn remote_id(&self) -> Option<u64> {
475 match &self.client_state {
476 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
477 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
478 }
479 }
480
481 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
482 let mut id = None;
483 let mut watch = None;
484 match &self.client_state {
485 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
486 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
487 }
488
489 async move {
490 if let Some(id) = id {
491 return id;
492 }
493 let mut watch = watch.unwrap();
494 loop {
495 let id = *watch.borrow();
496 if let Some(id) = id {
497 return id;
498 }
499 watch.next().await;
500 }
501 }
502 }
503
504 pub fn replica_id(&self) -> ReplicaId {
505 match &self.client_state {
506 ProjectClientState::Local { .. } => 0,
507 ProjectClientState::Remote { replica_id, .. } => *replica_id,
508 }
509 }
510
511 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
512 &self.collaborators
513 }
514
515 pub fn worktrees<'a>(
516 &'a self,
517 cx: &'a AppContext,
518 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
519 self.worktrees
520 .iter()
521 .filter_map(move |worktree| worktree.upgrade(cx))
522 }
523
524 pub fn visible_worktrees<'a>(
525 &'a self,
526 cx: &'a AppContext,
527 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
528 self.worktrees.iter().filter_map(|worktree| {
529 worktree.upgrade(cx).and_then(|worktree| {
530 if worktree.read(cx).is_visible() {
531 Some(worktree)
532 } else {
533 None
534 }
535 })
536 })
537 }
538
539 pub fn worktree_for_id(
540 &self,
541 id: WorktreeId,
542 cx: &AppContext,
543 ) -> Option<ModelHandle<Worktree>> {
544 self.worktrees(cx)
545 .find(|worktree| worktree.read(cx).id() == id)
546 }
547
548 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
549 let rpc = self.client.clone();
550 cx.spawn(|this, mut cx| async move {
551 let project_id = this.update(&mut cx, |this, cx| {
552 if let ProjectClientState::Local {
553 is_shared,
554 remote_id_rx,
555 ..
556 } = &mut this.client_state
557 {
558 *is_shared = true;
559
560 for open_buffer in this.opened_buffers.values_mut() {
561 match open_buffer {
562 OpenBuffer::Strong(_) => {}
563 OpenBuffer::Weak(buffer) => {
564 if let Some(buffer) = buffer.upgrade(cx) {
565 *open_buffer = OpenBuffer::Strong(buffer);
566 }
567 }
568 OpenBuffer::Loading(_) => unreachable!(),
569 }
570 }
571
572 for worktree_handle in this.worktrees.iter_mut() {
573 match worktree_handle {
574 WorktreeHandle::Strong(_) => {}
575 WorktreeHandle::Weak(worktree) => {
576 if let Some(worktree) = worktree.upgrade(cx) {
577 *worktree_handle = WorktreeHandle::Strong(worktree);
578 }
579 }
580 }
581 }
582
583 remote_id_rx
584 .borrow()
585 .ok_or_else(|| anyhow!("no project id"))
586 } else {
587 Err(anyhow!("can't share a remote project"))
588 }
589 })?;
590
591 rpc.request(proto::ShareProject { project_id }).await?;
592
593 let mut tasks = Vec::new();
594 this.update(&mut cx, |this, cx| {
595 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
596 worktree.update(cx, |worktree, cx| {
597 let worktree = worktree.as_local_mut().unwrap();
598 tasks.push(worktree.share(project_id, cx));
599 });
600 }
601 });
602 for task in tasks {
603 task.await?;
604 }
605 this.update(&mut cx, |_, cx| cx.notify());
606 Ok(())
607 })
608 }
609
610 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
611 let rpc = self.client.clone();
612 cx.spawn(|this, mut cx| async move {
613 let project_id = this.update(&mut cx, |this, cx| {
614 if let ProjectClientState::Local {
615 is_shared,
616 remote_id_rx,
617 ..
618 } = &mut this.client_state
619 {
620 *is_shared = false;
621
622 for open_buffer in this.opened_buffers.values_mut() {
623 match open_buffer {
624 OpenBuffer::Strong(buffer) => {
625 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
626 }
627 _ => {}
628 }
629 }
630
631 for worktree_handle in this.worktrees.iter_mut() {
632 match worktree_handle {
633 WorktreeHandle::Strong(worktree) => {
634 if !worktree.read(cx).is_visible() {
635 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
636 }
637 }
638 _ => {}
639 }
640 }
641
642 remote_id_rx
643 .borrow()
644 .ok_or_else(|| anyhow!("no project id"))
645 } else {
646 Err(anyhow!("can't share a remote project"))
647 }
648 })?;
649
650 rpc.send(proto::UnshareProject { project_id })?;
651 this.update(&mut cx, |this, cx| {
652 this.collaborators.clear();
653 this.shared_buffers.clear();
654 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
655 worktree.update(cx, |worktree, _| {
656 worktree.as_local_mut().unwrap().unshare();
657 });
658 }
659 cx.notify()
660 });
661 Ok(())
662 })
663 }
664
665 pub fn is_read_only(&self) -> bool {
666 match &self.client_state {
667 ProjectClientState::Local { .. } => false,
668 ProjectClientState::Remote {
669 sharing_has_stopped,
670 ..
671 } => *sharing_has_stopped,
672 }
673 }
674
675 pub fn is_local(&self) -> bool {
676 match &self.client_state {
677 ProjectClientState::Local { .. } => true,
678 ProjectClientState::Remote { .. } => false,
679 }
680 }
681
682 pub fn is_remote(&self) -> bool {
683 !self.is_local()
684 }
685
686 pub fn open_buffer(
687 &mut self,
688 path: impl Into<ProjectPath>,
689 cx: &mut ModelContext<Self>,
690 ) -> Task<Result<ModelHandle<Buffer>>> {
691 let project_path = path.into();
692 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
693 worktree
694 } else {
695 return Task::ready(Err(anyhow!("no such worktree")));
696 };
697
698 // If there is already a buffer for the given path, then return it.
699 let existing_buffer = self.get_open_buffer(&project_path, cx);
700 if let Some(existing_buffer) = existing_buffer {
701 return Task::ready(Ok(existing_buffer));
702 }
703
704 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
705 // If the given path is already being loaded, then wait for that existing
706 // task to complete and return the same buffer.
707 hash_map::Entry::Occupied(e) => e.get().clone(),
708
709 // Otherwise, record the fact that this path is now being loaded.
710 hash_map::Entry::Vacant(entry) => {
711 let (mut tx, rx) = postage::watch::channel();
712 entry.insert(rx.clone());
713
714 let load_buffer = if worktree.read(cx).is_local() {
715 self.open_local_buffer(&project_path.path, &worktree, cx)
716 } else {
717 self.open_remote_buffer(&project_path.path, &worktree, cx)
718 };
719
720 cx.spawn(move |this, mut cx| async move {
721 let load_result = load_buffer.await;
722 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
723 // Record the fact that the buffer is no longer loading.
724 this.loading_buffers.remove(&project_path);
725 let buffer = load_result.map_err(Arc::new)?;
726 Ok(buffer)
727 }));
728 })
729 .detach();
730 rx
731 }
732 };
733
734 cx.foreground().spawn(async move {
735 loop {
736 if let Some(result) = loading_watch.borrow().as_ref() {
737 match result {
738 Ok(buffer) => return Ok(buffer.clone()),
739 Err(error) => return Err(anyhow!("{}", error)),
740 }
741 }
742 loading_watch.next().await;
743 }
744 })
745 }
746
747 fn open_local_buffer(
748 &mut self,
749 path: &Arc<Path>,
750 worktree: &ModelHandle<Worktree>,
751 cx: &mut ModelContext<Self>,
752 ) -> Task<Result<ModelHandle<Buffer>>> {
753 let load_buffer = worktree.update(cx, |worktree, cx| {
754 let worktree = worktree.as_local_mut().unwrap();
755 worktree.load_buffer(path, cx)
756 });
757 let worktree = worktree.downgrade();
758 cx.spawn(|this, mut cx| async move {
759 let buffer = load_buffer.await?;
760 let worktree = worktree
761 .upgrade(&cx)
762 .ok_or_else(|| anyhow!("worktree was removed"))?;
763 this.update(&mut cx, |this, cx| {
764 this.register_buffer(&buffer, Some(&worktree), cx)
765 })?;
766 Ok(buffer)
767 })
768 }
769
770 fn open_remote_buffer(
771 &mut self,
772 path: &Arc<Path>,
773 worktree: &ModelHandle<Worktree>,
774 cx: &mut ModelContext<Self>,
775 ) -> Task<Result<ModelHandle<Buffer>>> {
776 let rpc = self.client.clone();
777 let project_id = self.remote_id().unwrap();
778 let remote_worktree_id = worktree.read(cx).id();
779 let path = path.clone();
780 let path_string = path.to_string_lossy().to_string();
781 cx.spawn(|this, mut cx| async move {
782 let response = rpc
783 .request(proto::OpenBuffer {
784 project_id,
785 worktree_id: remote_worktree_id.to_proto(),
786 path: path_string,
787 })
788 .await?;
789 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
790 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
791 .await
792 })
793 }
794
795 fn open_local_buffer_via_lsp(
796 &mut self,
797 abs_path: lsp::Url,
798 lang_name: String,
799 lang_server: Arc<LanguageServer>,
800 cx: &mut ModelContext<Self>,
801 ) -> Task<Result<ModelHandle<Buffer>>> {
802 cx.spawn(|this, mut cx| async move {
803 let abs_path = abs_path
804 .to_file_path()
805 .map_err(|_| anyhow!("can't convert URI to path"))?;
806 let (worktree, relative_path) = if let Some(result) =
807 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
808 {
809 result
810 } else {
811 let worktree = this
812 .update(&mut cx, |this, cx| {
813 this.create_local_worktree(&abs_path, false, cx)
814 })
815 .await?;
816 this.update(&mut cx, |this, cx| {
817 this.language_servers
818 .insert((worktree.read(cx).id(), lang_name), lang_server);
819 });
820 (worktree, PathBuf::new())
821 };
822
823 let project_path = ProjectPath {
824 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
825 path: relative_path.into(),
826 };
827 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
828 .await
829 })
830 }
831
832 pub fn save_buffer_as(
833 &self,
834 buffer: ModelHandle<Buffer>,
835 abs_path: PathBuf,
836 cx: &mut ModelContext<Project>,
837 ) -> Task<Result<()>> {
838 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
839 cx.spawn(|this, mut cx| async move {
840 let (worktree, path) = worktree_task.await?;
841 worktree
842 .update(&mut cx, |worktree, cx| {
843 worktree
844 .as_local_mut()
845 .unwrap()
846 .save_buffer_as(buffer.clone(), path, cx)
847 })
848 .await?;
849 this.update(&mut cx, |this, cx| {
850 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
851 });
852 Ok(())
853 })
854 }
855
856 pub fn get_open_buffer(
857 &mut self,
858 path: &ProjectPath,
859 cx: &mut ModelContext<Self>,
860 ) -> Option<ModelHandle<Buffer>> {
861 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
862 self.opened_buffers.values().find_map(|buffer| {
863 let buffer = buffer.upgrade(cx)?;
864 let file = File::from_dyn(buffer.read(cx).file())?;
865 if file.worktree == worktree && file.path() == &path.path {
866 Some(buffer)
867 } else {
868 None
869 }
870 })
871 }
872
873 fn register_buffer(
874 &mut self,
875 buffer: &ModelHandle<Buffer>,
876 worktree: Option<&ModelHandle<Worktree>>,
877 cx: &mut ModelContext<Self>,
878 ) -> Result<()> {
879 let remote_id = buffer.read(cx).remote_id();
880 let open_buffer = if self.is_remote() || self.is_shared() {
881 OpenBuffer::Strong(buffer.clone())
882 } else {
883 OpenBuffer::Weak(buffer.downgrade())
884 };
885
886 match self.opened_buffers.insert(remote_id, open_buffer) {
887 None => {}
888 Some(OpenBuffer::Loading(operations)) => {
889 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
890 }
891 Some(OpenBuffer::Weak(existing_handle)) => {
892 if existing_handle.upgrade(cx).is_some() {
893 Err(anyhow!(
894 "already registered buffer with remote id {}",
895 remote_id
896 ))?
897 }
898 }
899 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
900 "already registered buffer with remote id {}",
901 remote_id
902 ))?,
903 }
904 self.assign_language_to_buffer(&buffer, worktree, cx);
905 Ok(())
906 }
907
908 fn assign_language_to_buffer(
909 &mut self,
910 buffer: &ModelHandle<Buffer>,
911 worktree: Option<&ModelHandle<Worktree>>,
912 cx: &mut ModelContext<Self>,
913 ) -> Option<()> {
914 let (path, full_path) = {
915 let file = buffer.read(cx).file()?;
916 (file.path().clone(), file.full_path(cx))
917 };
918
919 // If the buffer has a language, set it and start/assign the language server
920 if let Some(language) = self.languages.select_language(&full_path) {
921 buffer.update(cx, |buffer, cx| {
922 buffer.set_language(Some(language.clone()), cx);
923 });
924
925 // For local worktrees, start a language server if needed.
926 // Also assign the language server and any previously stored diagnostics to the buffer.
927 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
928 let worktree_id = local_worktree.id();
929 let worktree_abs_path = local_worktree.abs_path().clone();
930 let buffer = buffer.downgrade();
931 let language_server =
932 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
933
934 cx.spawn_weak(|_, mut cx| async move {
935 if let Some(language_server) = language_server.await {
936 if let Some(buffer) = buffer.upgrade(&cx) {
937 buffer.update(&mut cx, |buffer, cx| {
938 buffer.set_language_server(Some(language_server), cx);
939 });
940 }
941 }
942 })
943 .detach();
944 }
945 }
946
947 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
948 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
949 buffer.update(cx, |buffer, cx| {
950 buffer.update_diagnostics(diagnostics, None, cx).log_err();
951 });
952 }
953 }
954
955 None
956 }
957
958 fn start_language_server(
959 &mut self,
960 worktree_id: WorktreeId,
961 worktree_path: Arc<Path>,
962 language: Arc<Language>,
963 cx: &mut ModelContext<Self>,
964 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
965 enum LspEvent {
966 DiagnosticsStart,
967 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
968 DiagnosticsFinish,
969 }
970
971 let key = (worktree_id, language.name().to_string());
972 self.started_language_servers
973 .entry(key.clone())
974 .or_insert_with(|| {
975 let language_server = self.languages.start_language_server(
976 &language,
977 worktree_path,
978 self.client.http_client(),
979 cx,
980 );
981 let rpc = self.client.clone();
982 cx.spawn_weak(|this, mut cx| async move {
983 let language_server = language_server?.await.log_err()?;
984 if let Some(this) = this.upgrade(&cx) {
985 this.update(&mut cx, |this, _| {
986 this.language_servers.insert(key, language_server.clone());
987 });
988 }
989
990 let disk_based_sources = language
991 .disk_based_diagnostic_sources()
992 .cloned()
993 .unwrap_or_default();
994 let disk_based_diagnostics_progress_token =
995 language.disk_based_diagnostics_progress_token().cloned();
996 let has_disk_based_diagnostic_progress_token =
997 disk_based_diagnostics_progress_token.is_some();
998 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
999
1000 // Listen for `PublishDiagnostics` notifications.
1001 language_server
1002 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1003 let diagnostics_tx = diagnostics_tx.clone();
1004 move |params| {
1005 if !has_disk_based_diagnostic_progress_token {
1006 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1007 }
1008 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1009 .ok();
1010 if !has_disk_based_diagnostic_progress_token {
1011 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1012 }
1013 }
1014 })
1015 .detach();
1016
1017 // Listen for `Progress` notifications. Send an event when the language server
1018 // transitions between running jobs and not running any jobs.
1019 let mut running_jobs_for_this_server: i32 = 0;
1020 language_server
1021 .on_notification::<lsp::notification::Progress, _>(move |params| {
1022 let token = match params.token {
1023 lsp::NumberOrString::Number(_) => None,
1024 lsp::NumberOrString::String(token) => Some(token),
1025 };
1026
1027 if token == disk_based_diagnostics_progress_token {
1028 match params.value {
1029 lsp::ProgressParamsValue::WorkDone(progress) => {
1030 match progress {
1031 lsp::WorkDoneProgress::Begin(_) => {
1032 running_jobs_for_this_server += 1;
1033 if running_jobs_for_this_server == 1 {
1034 block_on(
1035 diagnostics_tx
1036 .send(LspEvent::DiagnosticsStart),
1037 )
1038 .ok();
1039 }
1040 }
1041 lsp::WorkDoneProgress::End(_) => {
1042 running_jobs_for_this_server -= 1;
1043 if running_jobs_for_this_server == 0 {
1044 block_on(
1045 diagnostics_tx
1046 .send(LspEvent::DiagnosticsFinish),
1047 )
1048 .ok();
1049 }
1050 }
1051 _ => {}
1052 }
1053 }
1054 }
1055 }
1056 })
1057 .detach();
1058
1059 // Process all the LSP events.
1060 cx.spawn(|mut cx| async move {
1061 while let Ok(message) = diagnostics_rx.recv().await {
1062 let this = this.upgrade(&cx)?;
1063 match message {
1064 LspEvent::DiagnosticsStart => {
1065 this.update(&mut cx, |this, cx| {
1066 this.disk_based_diagnostics_started(cx);
1067 if let Some(project_id) = this.remote_id() {
1068 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1069 project_id,
1070 })
1071 .log_err();
1072 }
1073 });
1074 }
1075 LspEvent::DiagnosticsUpdate(mut params) => {
1076 language.process_diagnostics(&mut params);
1077 this.update(&mut cx, |this, cx| {
1078 this.update_diagnostics(params, &disk_based_sources, cx)
1079 .log_err();
1080 });
1081 }
1082 LspEvent::DiagnosticsFinish => {
1083 this.update(&mut cx, |this, cx| {
1084 this.disk_based_diagnostics_finished(cx);
1085 if let Some(project_id) = this.remote_id() {
1086 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1087 project_id,
1088 })
1089 .log_err();
1090 }
1091 });
1092 }
1093 }
1094 }
1095 Some(())
1096 })
1097 .detach();
1098
1099 Some(language_server)
1100 })
1101 .shared()
1102 })
1103 .clone()
1104 }
1105
1106 pub fn update_diagnostics(
1107 &mut self,
1108 params: lsp::PublishDiagnosticsParams,
1109 disk_based_sources: &HashSet<String>,
1110 cx: &mut ModelContext<Self>,
1111 ) -> Result<()> {
1112 let abs_path = params
1113 .uri
1114 .to_file_path()
1115 .map_err(|_| anyhow!("URI is not a file"))?;
1116 let mut next_group_id = 0;
1117 let mut diagnostics = Vec::default();
1118 let mut primary_diagnostic_group_ids = HashMap::default();
1119 let mut sources_by_group_id = HashMap::default();
1120 let mut supporting_diagnostic_severities = HashMap::default();
1121 for diagnostic in ¶ms.diagnostics {
1122 let source = diagnostic.source.as_ref();
1123 let code = diagnostic.code.as_ref().map(|code| match code {
1124 lsp::NumberOrString::Number(code) => code.to_string(),
1125 lsp::NumberOrString::String(code) => code.clone(),
1126 });
1127 let range = range_from_lsp(diagnostic.range);
1128 let is_supporting = diagnostic
1129 .related_information
1130 .as_ref()
1131 .map_or(false, |infos| {
1132 infos.iter().any(|info| {
1133 primary_diagnostic_group_ids.contains_key(&(
1134 source,
1135 code.clone(),
1136 range_from_lsp(info.location.range),
1137 ))
1138 })
1139 });
1140
1141 if is_supporting {
1142 if let Some(severity) = diagnostic.severity {
1143 supporting_diagnostic_severities
1144 .insert((source, code.clone(), range), severity);
1145 }
1146 } else {
1147 let group_id = post_inc(&mut next_group_id);
1148 let is_disk_based =
1149 source.map_or(false, |source| disk_based_sources.contains(source));
1150
1151 sources_by_group_id.insert(group_id, source);
1152 primary_diagnostic_group_ids
1153 .insert((source, code.clone(), range.clone()), group_id);
1154
1155 diagnostics.push(DiagnosticEntry {
1156 range,
1157 diagnostic: Diagnostic {
1158 code: code.clone(),
1159 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1160 message: diagnostic.message.clone(),
1161 group_id,
1162 is_primary: true,
1163 is_valid: true,
1164 is_disk_based,
1165 },
1166 });
1167 if let Some(infos) = &diagnostic.related_information {
1168 for info in infos {
1169 if info.location.uri == params.uri && !info.message.is_empty() {
1170 let range = range_from_lsp(info.location.range);
1171 diagnostics.push(DiagnosticEntry {
1172 range,
1173 diagnostic: Diagnostic {
1174 code: code.clone(),
1175 severity: DiagnosticSeverity::INFORMATION,
1176 message: info.message.clone(),
1177 group_id,
1178 is_primary: false,
1179 is_valid: true,
1180 is_disk_based,
1181 },
1182 });
1183 }
1184 }
1185 }
1186 }
1187 }
1188
1189 for entry in &mut diagnostics {
1190 let diagnostic = &mut entry.diagnostic;
1191 if !diagnostic.is_primary {
1192 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1193 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1194 source,
1195 diagnostic.code.clone(),
1196 entry.range.clone(),
1197 )) {
1198 diagnostic.severity = severity;
1199 }
1200 }
1201 }
1202
1203 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1204 Ok(())
1205 }
1206
1207 pub fn update_diagnostic_entries(
1208 &mut self,
1209 abs_path: PathBuf,
1210 version: Option<i32>,
1211 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1212 cx: &mut ModelContext<Project>,
1213 ) -> Result<(), anyhow::Error> {
1214 let (worktree, relative_path) = self
1215 .find_local_worktree(&abs_path, cx)
1216 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1217 let project_path = ProjectPath {
1218 worktree_id: worktree.read(cx).id(),
1219 path: relative_path.into(),
1220 };
1221
1222 for buffer in self.opened_buffers.values() {
1223 if let Some(buffer) = buffer.upgrade(cx) {
1224 if buffer
1225 .read(cx)
1226 .file()
1227 .map_or(false, |file| *file.path() == project_path.path)
1228 {
1229 buffer.update(cx, |buffer, cx| {
1230 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1231 })?;
1232 break;
1233 }
1234 }
1235 }
1236 worktree.update(cx, |worktree, cx| {
1237 worktree
1238 .as_local_mut()
1239 .ok_or_else(|| anyhow!("not a local worktree"))?
1240 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1241 })?;
1242 cx.emit(Event::DiagnosticsUpdated(project_path));
1243 Ok(())
1244 }
1245
1246 pub fn format(
1247 &self,
1248 buffers: HashSet<ModelHandle<Buffer>>,
1249 push_to_history: bool,
1250 cx: &mut ModelContext<Project>,
1251 ) -> Task<Result<ProjectTransaction>> {
1252 let mut local_buffers = Vec::new();
1253 let mut remote_buffers = None;
1254 for buffer_handle in buffers {
1255 let buffer = buffer_handle.read(cx);
1256 let worktree;
1257 if let Some(file) = File::from_dyn(buffer.file()) {
1258 worktree = file.worktree.clone();
1259 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1260 let lang_server;
1261 if let Some(lang) = buffer.language() {
1262 if let Some(server) = self
1263 .language_servers
1264 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1265 {
1266 lang_server = server.clone();
1267 } else {
1268 return Task::ready(Ok(Default::default()));
1269 };
1270 } else {
1271 return Task::ready(Ok(Default::default()));
1272 }
1273
1274 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1275 } else {
1276 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1277 }
1278 } else {
1279 return Task::ready(Ok(Default::default()));
1280 }
1281 }
1282
1283 let remote_buffers = self.remote_id().zip(remote_buffers);
1284 let client = self.client.clone();
1285
1286 cx.spawn(|this, mut cx| async move {
1287 let mut project_transaction = ProjectTransaction::default();
1288
1289 if let Some((project_id, remote_buffers)) = remote_buffers {
1290 let response = client
1291 .request(proto::FormatBuffers {
1292 project_id,
1293 buffer_ids: remote_buffers
1294 .iter()
1295 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1296 .collect(),
1297 })
1298 .await?
1299 .transaction
1300 .ok_or_else(|| anyhow!("missing transaction"))?;
1301 project_transaction = this
1302 .update(&mut cx, |this, cx| {
1303 this.deserialize_project_transaction(response, push_to_history, cx)
1304 })
1305 .await?;
1306 }
1307
1308 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1309 let lsp_edits = lang_server
1310 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1311 text_document: lsp::TextDocumentIdentifier::new(
1312 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1313 ),
1314 options: Default::default(),
1315 work_done_progress_params: Default::default(),
1316 })
1317 .await?;
1318
1319 if let Some(lsp_edits) = lsp_edits {
1320 let edits = buffer
1321 .update(&mut cx, |buffer, cx| {
1322 buffer.edits_from_lsp(lsp_edits, None, cx)
1323 })
1324 .await?;
1325 buffer.update(&mut cx, |buffer, cx| {
1326 buffer.finalize_last_transaction();
1327 buffer.start_transaction();
1328 for (range, text) in edits {
1329 buffer.edit([range], text, cx);
1330 }
1331 if buffer.end_transaction(cx).is_some() {
1332 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1333 if !push_to_history {
1334 buffer.forget_transaction(transaction.id);
1335 }
1336 project_transaction.0.insert(cx.handle(), transaction);
1337 }
1338 });
1339 }
1340 }
1341
1342 Ok(project_transaction)
1343 })
1344 }
1345
1346 pub fn definition<T: ToPointUtf16>(
1347 &self,
1348 buffer: &ModelHandle<Buffer>,
1349 position: T,
1350 cx: &mut ModelContext<Self>,
1351 ) -> Task<Result<Vec<Location>>> {
1352 let position = position.to_point_utf16(buffer.read(cx));
1353 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1354 }
1355
1356 pub fn references<T: ToPointUtf16>(
1357 &self,
1358 buffer: &ModelHandle<Buffer>,
1359 position: T,
1360 cx: &mut ModelContext<Self>,
1361 ) -> Task<Result<Vec<Location>>> {
1362 let position = position.to_point_utf16(buffer.read(cx));
1363 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1364 }
1365
1366 pub fn document_highlights<T: ToPointUtf16>(
1367 &self,
1368 buffer: &ModelHandle<Buffer>,
1369 position: T,
1370 cx: &mut ModelContext<Self>,
1371 ) -> Task<Result<Vec<DocumentHighlight>>> {
1372 let position = position.to_point_utf16(buffer.read(cx));
1373 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1374 }
1375
1376 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1377 if self.is_local() {
1378 let mut language_servers = HashMap::default();
1379 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1380 if let Some((worktree, language)) = self
1381 .worktree_for_id(*worktree_id, cx)
1382 .and_then(|worktree| worktree.read(cx).as_local())
1383 .zip(self.languages.get_language(language_name))
1384 {
1385 language_servers
1386 .entry(Arc::as_ptr(language_server))
1387 .or_insert((
1388 language_server.clone(),
1389 *worktree_id,
1390 worktree.abs_path().clone(),
1391 language.clone(),
1392 ));
1393 }
1394 }
1395
1396 let mut requests = Vec::new();
1397 for (language_server, _, _, _) in language_servers.values() {
1398 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1399 lsp::WorkspaceSymbolParams {
1400 query: query.to_string(),
1401 ..Default::default()
1402 },
1403 ));
1404 }
1405
1406 cx.spawn_weak(|this, cx| async move {
1407 let responses = futures::future::try_join_all(requests).await?;
1408
1409 let mut symbols = Vec::new();
1410 if let Some(this) = this.upgrade(&cx) {
1411 this.read_with(&cx, |this, cx| {
1412 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1413 language_servers.into_values().zip(responses)
1414 {
1415 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1416 |lsp_symbol| {
1417 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1418 let mut worktree_id = source_worktree_id;
1419 let path;
1420 if let Some((worktree, rel_path)) =
1421 this.find_local_worktree(&abs_path, cx)
1422 {
1423 worktree_id = worktree.read(cx).id();
1424 path = rel_path;
1425 } else {
1426 path = relativize_path(&worktree_abs_path, &abs_path);
1427 }
1428
1429 let label = language
1430 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1431 .unwrap_or_else(|| {
1432 CodeLabel::plain(lsp_symbol.name.clone(), None)
1433 });
1434 let signature = this.symbol_signature(worktree_id, &path);
1435
1436 Some(Symbol {
1437 source_worktree_id,
1438 worktree_id,
1439 language_name: language.name().to_string(),
1440 name: lsp_symbol.name,
1441 kind: lsp_symbol.kind,
1442 label,
1443 path,
1444 range: range_from_lsp(lsp_symbol.location.range),
1445 signature,
1446 })
1447 },
1448 ));
1449 }
1450 })
1451 }
1452
1453 Ok(symbols)
1454 })
1455 } else if let Some(project_id) = self.remote_id() {
1456 let request = self.client.request(proto::GetProjectSymbols {
1457 project_id,
1458 query: query.to_string(),
1459 });
1460 cx.spawn_weak(|this, cx| async move {
1461 let response = request.await?;
1462 let mut symbols = Vec::new();
1463 if let Some(this) = this.upgrade(&cx) {
1464 this.read_with(&cx, |this, _| {
1465 symbols.extend(
1466 response
1467 .symbols
1468 .into_iter()
1469 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1470 );
1471 })
1472 }
1473 Ok(symbols)
1474 })
1475 } else {
1476 Task::ready(Ok(Default::default()))
1477 }
1478 }
1479
1480 pub fn open_buffer_for_symbol(
1481 &mut self,
1482 symbol: &Symbol,
1483 cx: &mut ModelContext<Self>,
1484 ) -> Task<Result<ModelHandle<Buffer>>> {
1485 if self.is_local() {
1486 let language_server = if let Some(server) = self
1487 .language_servers
1488 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1489 {
1490 server.clone()
1491 } else {
1492 return Task::ready(Err(anyhow!(
1493 "language server for worktree and language not found"
1494 )));
1495 };
1496
1497 let worktree_abs_path = if let Some(worktree_abs_path) = self
1498 .worktree_for_id(symbol.worktree_id, cx)
1499 .and_then(|worktree| worktree.read(cx).as_local())
1500 .map(|local_worktree| local_worktree.abs_path())
1501 {
1502 worktree_abs_path
1503 } else {
1504 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1505 };
1506 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1507 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1508 uri
1509 } else {
1510 return Task::ready(Err(anyhow!("invalid symbol path")));
1511 };
1512
1513 self.open_local_buffer_via_lsp(
1514 symbol_uri,
1515 symbol.language_name.clone(),
1516 language_server,
1517 cx,
1518 )
1519 } else if let Some(project_id) = self.remote_id() {
1520 let request = self.client.request(proto::OpenBufferForSymbol {
1521 project_id,
1522 symbol: Some(serialize_symbol(symbol)),
1523 });
1524 cx.spawn(|this, mut cx| async move {
1525 let response = request.await?;
1526 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1527 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1528 .await
1529 })
1530 } else {
1531 Task::ready(Err(anyhow!("project does not have a remote id")))
1532 }
1533 }
1534
1535 pub fn completions<T: ToPointUtf16>(
1536 &self,
1537 source_buffer_handle: &ModelHandle<Buffer>,
1538 position: T,
1539 cx: &mut ModelContext<Self>,
1540 ) -> Task<Result<Vec<Completion>>> {
1541 let source_buffer_handle = source_buffer_handle.clone();
1542 let source_buffer = source_buffer_handle.read(cx);
1543 let buffer_id = source_buffer.remote_id();
1544 let language = source_buffer.language().cloned();
1545 let worktree;
1546 let buffer_abs_path;
1547 if let Some(file) = File::from_dyn(source_buffer.file()) {
1548 worktree = file.worktree.clone();
1549 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1550 } else {
1551 return Task::ready(Ok(Default::default()));
1552 };
1553
1554 let position = position.to_point_utf16(source_buffer);
1555 let anchor = source_buffer.anchor_after(position);
1556
1557 if worktree.read(cx).as_local().is_some() {
1558 let buffer_abs_path = buffer_abs_path.unwrap();
1559 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1560 server
1561 } else {
1562 return Task::ready(Ok(Default::default()));
1563 };
1564
1565 cx.spawn(|_, cx| async move {
1566 let completions = lang_server
1567 .request::<lsp::request::Completion>(lsp::CompletionParams {
1568 text_document_position: lsp::TextDocumentPositionParams::new(
1569 lsp::TextDocumentIdentifier::new(
1570 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1571 ),
1572 position.to_lsp_position(),
1573 ),
1574 context: Default::default(),
1575 work_done_progress_params: Default::default(),
1576 partial_result_params: Default::default(),
1577 })
1578 .await
1579 .context("lsp completion request failed")?;
1580
1581 let completions = if let Some(completions) = completions {
1582 match completions {
1583 lsp::CompletionResponse::Array(completions) => completions,
1584 lsp::CompletionResponse::List(list) => list.items,
1585 }
1586 } else {
1587 Default::default()
1588 };
1589
1590 source_buffer_handle.read_with(&cx, |this, _| {
1591 Ok(completions
1592 .into_iter()
1593 .filter_map(|lsp_completion| {
1594 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1595 lsp::CompletionTextEdit::Edit(edit) => {
1596 (range_from_lsp(edit.range), edit.new_text.clone())
1597 }
1598 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1599 log::info!("unsupported insert/replace completion");
1600 return None;
1601 }
1602 };
1603
1604 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1605 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1606 if clipped_start == old_range.start && clipped_end == old_range.end {
1607 Some(Completion {
1608 old_range: this.anchor_before(old_range.start)
1609 ..this.anchor_after(old_range.end),
1610 new_text,
1611 label: language
1612 .as_ref()
1613 .and_then(|l| l.label_for_completion(&lsp_completion))
1614 .unwrap_or_else(|| {
1615 CodeLabel::plain(
1616 lsp_completion.label.clone(),
1617 lsp_completion.filter_text.as_deref(),
1618 )
1619 }),
1620 lsp_completion,
1621 })
1622 } else {
1623 None
1624 }
1625 })
1626 .collect())
1627 })
1628 })
1629 } else if let Some(project_id) = self.remote_id() {
1630 let rpc = self.client.clone();
1631 let message = proto::GetCompletions {
1632 project_id,
1633 buffer_id,
1634 position: Some(language::proto::serialize_anchor(&anchor)),
1635 version: (&source_buffer.version()).into(),
1636 };
1637 cx.spawn_weak(|_, mut cx| async move {
1638 let response = rpc.request(message).await?;
1639
1640 source_buffer_handle
1641 .update(&mut cx, |buffer, _| {
1642 buffer.wait_for_version(response.version.into())
1643 })
1644 .await;
1645
1646 response
1647 .completions
1648 .into_iter()
1649 .map(|completion| {
1650 language::proto::deserialize_completion(completion, language.as_ref())
1651 })
1652 .collect()
1653 })
1654 } else {
1655 Task::ready(Ok(Default::default()))
1656 }
1657 }
1658
1659 pub fn apply_additional_edits_for_completion(
1660 &self,
1661 buffer_handle: ModelHandle<Buffer>,
1662 completion: Completion,
1663 push_to_history: bool,
1664 cx: &mut ModelContext<Self>,
1665 ) -> Task<Result<Option<Transaction>>> {
1666 let buffer = buffer_handle.read(cx);
1667 let buffer_id = buffer.remote_id();
1668
1669 if self.is_local() {
1670 let lang_server = if let Some(language_server) = buffer.language_server() {
1671 language_server.clone()
1672 } else {
1673 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1674 };
1675
1676 cx.spawn(|_, mut cx| async move {
1677 let resolved_completion = lang_server
1678 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1679 .await?;
1680 if let Some(edits) = resolved_completion.additional_text_edits {
1681 let edits = buffer_handle
1682 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1683 .await?;
1684 buffer_handle.update(&mut cx, |buffer, cx| {
1685 buffer.finalize_last_transaction();
1686 buffer.start_transaction();
1687 for (range, text) in edits {
1688 buffer.edit([range], text, cx);
1689 }
1690 let transaction = if buffer.end_transaction(cx).is_some() {
1691 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1692 if !push_to_history {
1693 buffer.forget_transaction(transaction.id);
1694 }
1695 Some(transaction)
1696 } else {
1697 None
1698 };
1699 Ok(transaction)
1700 })
1701 } else {
1702 Ok(None)
1703 }
1704 })
1705 } else if let Some(project_id) = self.remote_id() {
1706 let client = self.client.clone();
1707 cx.spawn(|_, mut cx| async move {
1708 let response = client
1709 .request(proto::ApplyCompletionAdditionalEdits {
1710 project_id,
1711 buffer_id,
1712 completion: Some(language::proto::serialize_completion(&completion)),
1713 })
1714 .await?;
1715
1716 if let Some(transaction) = response.transaction {
1717 let transaction = language::proto::deserialize_transaction(transaction)?;
1718 buffer_handle
1719 .update(&mut cx, |buffer, _| {
1720 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1721 })
1722 .await;
1723 if push_to_history {
1724 buffer_handle.update(&mut cx, |buffer, _| {
1725 buffer.push_transaction(transaction.clone(), Instant::now());
1726 });
1727 }
1728 Ok(Some(transaction))
1729 } else {
1730 Ok(None)
1731 }
1732 })
1733 } else {
1734 Task::ready(Err(anyhow!("project does not have a remote id")))
1735 }
1736 }
1737
1738 pub fn code_actions<T: ToOffset>(
1739 &self,
1740 buffer_handle: &ModelHandle<Buffer>,
1741 range: Range<T>,
1742 cx: &mut ModelContext<Self>,
1743 ) -> Task<Result<Vec<CodeAction>>> {
1744 let buffer_handle = buffer_handle.clone();
1745 let buffer = buffer_handle.read(cx);
1746 let buffer_id = buffer.remote_id();
1747 let worktree;
1748 let buffer_abs_path;
1749 if let Some(file) = File::from_dyn(buffer.file()) {
1750 worktree = file.worktree.clone();
1751 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1752 } else {
1753 return Task::ready(Ok(Default::default()));
1754 };
1755 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1756
1757 if worktree.read(cx).as_local().is_some() {
1758 let buffer_abs_path = buffer_abs_path.unwrap();
1759 let lang_name;
1760 let lang_server;
1761 if let Some(lang) = buffer.language() {
1762 lang_name = lang.name().to_string();
1763 if let Some(server) = self
1764 .language_servers
1765 .get(&(worktree.read(cx).id(), lang_name.clone()))
1766 {
1767 lang_server = server.clone();
1768 } else {
1769 return Task::ready(Ok(Default::default()));
1770 };
1771 } else {
1772 return Task::ready(Ok(Default::default()));
1773 }
1774
1775 let lsp_range = lsp::Range::new(
1776 range.start.to_point_utf16(buffer).to_lsp_position(),
1777 range.end.to_point_utf16(buffer).to_lsp_position(),
1778 );
1779 cx.foreground().spawn(async move {
1780 Ok(lang_server
1781 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1782 text_document: lsp::TextDocumentIdentifier::new(
1783 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1784 ),
1785 range: lsp_range,
1786 work_done_progress_params: Default::default(),
1787 partial_result_params: Default::default(),
1788 context: lsp::CodeActionContext {
1789 diagnostics: Default::default(),
1790 only: Some(vec![
1791 lsp::CodeActionKind::QUICKFIX,
1792 lsp::CodeActionKind::REFACTOR,
1793 lsp::CodeActionKind::REFACTOR_EXTRACT,
1794 ]),
1795 },
1796 })
1797 .await?
1798 .unwrap_or_default()
1799 .into_iter()
1800 .filter_map(|entry| {
1801 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1802 Some(CodeAction {
1803 range: range.clone(),
1804 lsp_action,
1805 })
1806 } else {
1807 None
1808 }
1809 })
1810 .collect())
1811 })
1812 } else if let Some(project_id) = self.remote_id() {
1813 let rpc = self.client.clone();
1814 cx.spawn_weak(|_, mut cx| async move {
1815 let response = rpc
1816 .request(proto::GetCodeActions {
1817 project_id,
1818 buffer_id,
1819 start: Some(language::proto::serialize_anchor(&range.start)),
1820 end: Some(language::proto::serialize_anchor(&range.end)),
1821 })
1822 .await?;
1823
1824 buffer_handle
1825 .update(&mut cx, |buffer, _| {
1826 buffer.wait_for_version(response.version.into())
1827 })
1828 .await;
1829
1830 response
1831 .actions
1832 .into_iter()
1833 .map(language::proto::deserialize_code_action)
1834 .collect()
1835 })
1836 } else {
1837 Task::ready(Ok(Default::default()))
1838 }
1839 }
1840
1841 pub fn apply_code_action(
1842 &self,
1843 buffer_handle: ModelHandle<Buffer>,
1844 mut action: CodeAction,
1845 push_to_history: bool,
1846 cx: &mut ModelContext<Self>,
1847 ) -> Task<Result<ProjectTransaction>> {
1848 if self.is_local() {
1849 let buffer = buffer_handle.read(cx);
1850 let lang_name = if let Some(lang) = buffer.language() {
1851 lang.name().to_string()
1852 } else {
1853 return Task::ready(Ok(Default::default()));
1854 };
1855 let lang_server = if let Some(language_server) = buffer.language_server() {
1856 language_server.clone()
1857 } else {
1858 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1859 };
1860 let range = action.range.to_point_utf16(buffer);
1861
1862 cx.spawn(|this, mut cx| async move {
1863 if let Some(lsp_range) = action
1864 .lsp_action
1865 .data
1866 .as_mut()
1867 .and_then(|d| d.get_mut("codeActionParams"))
1868 .and_then(|d| d.get_mut("range"))
1869 {
1870 *lsp_range = serde_json::to_value(&lsp::Range::new(
1871 range.start.to_lsp_position(),
1872 range.end.to_lsp_position(),
1873 ))
1874 .unwrap();
1875 action.lsp_action = lang_server
1876 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1877 .await?;
1878 } else {
1879 let actions = this
1880 .update(&mut cx, |this, cx| {
1881 this.code_actions(&buffer_handle, action.range, cx)
1882 })
1883 .await?;
1884 action.lsp_action = actions
1885 .into_iter()
1886 .find(|a| a.lsp_action.title == action.lsp_action.title)
1887 .ok_or_else(|| anyhow!("code action is outdated"))?
1888 .lsp_action;
1889 }
1890
1891 if let Some(edit) = action.lsp_action.edit {
1892 Self::deserialize_workspace_edit(
1893 this,
1894 edit,
1895 push_to_history,
1896 lang_name,
1897 lang_server,
1898 &mut cx,
1899 )
1900 .await
1901 } else {
1902 Ok(ProjectTransaction::default())
1903 }
1904 })
1905 } else if let Some(project_id) = self.remote_id() {
1906 let client = self.client.clone();
1907 let request = proto::ApplyCodeAction {
1908 project_id,
1909 buffer_id: buffer_handle.read(cx).remote_id(),
1910 action: Some(language::proto::serialize_code_action(&action)),
1911 };
1912 cx.spawn(|this, mut cx| async move {
1913 let response = client
1914 .request(request)
1915 .await?
1916 .transaction
1917 .ok_or_else(|| anyhow!("missing transaction"))?;
1918 this.update(&mut cx, |this, cx| {
1919 this.deserialize_project_transaction(response, push_to_history, cx)
1920 })
1921 .await
1922 })
1923 } else {
1924 Task::ready(Err(anyhow!("project does not have a remote id")))
1925 }
1926 }
1927
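    // Translates an LSP workspace edit into filesystem operations (create, rename,
    // delete) and buffer edits, applies them, and collects the per-buffer
    // transactions into a single `ProjectTransaction`.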
1928 async fn deserialize_workspace_edit(
1929 this: ModelHandle<Self>,
1930 edit: lsp::WorkspaceEdit,
1931 push_to_history: bool,
1932 language_name: String,
1933 language_server: Arc<LanguageServer>,
1934 cx: &mut AsyncAppContext,
1935 ) -> Result<ProjectTransaction> {
1936 let fs = this.read_with(cx, |this, _| this.fs.clone());
1937 let mut operations = Vec::new();
1938 if let Some(document_changes) = edit.document_changes {
1939 match document_changes {
1940 lsp::DocumentChanges::Edits(edits) => {
1941 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1942 }
1943 lsp::DocumentChanges::Operations(ops) => operations = ops,
1944 }
1945 } else if let Some(changes) = edit.changes {
1946 operations.extend(changes.into_iter().map(|(uri, edits)| {
1947 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1948 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1949 uri,
1950 version: None,
1951 },
1952 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1953 })
1954 }));
1955 }
1956
1957 let mut project_transaction = ProjectTransaction::default();
1958 for operation in operations {
1959 match operation {
1960 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1961 let abs_path = op
1962 .uri
1963 .to_file_path()
1964 .map_err(|_| anyhow!("can't convert URI to path"))?;
1965
1966 if let Some(parent_path) = abs_path.parent() {
1967 fs.create_dir(parent_path).await?;
1968 }
1969 if abs_path.ends_with("/") {
1970 fs.create_dir(&abs_path).await?;
1971 } else {
1972 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1973 .await?;
1974 }
1975 }
1976 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1977 let source_abs_path = op
1978 .old_uri
1979 .to_file_path()
1980 .map_err(|_| anyhow!("can't convert URI to path"))?;
1981 let target_abs_path = op
1982 .new_uri
1983 .to_file_path()
1984 .map_err(|_| anyhow!("can't convert URI to path"))?;
1985 fs.rename(
1986 &source_abs_path,
1987 &target_abs_path,
1988 op.options.map(Into::into).unwrap_or_default(),
1989 )
1990 .await?;
1991 }
1992 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
1993 let abs_path = op
1994 .uri
1995 .to_file_path()
1996 .map_err(|_| anyhow!("can't convert URI to path"))?;
1997 let options = op.options.map(Into::into).unwrap_or_default();
1998 if abs_path.ends_with("/") {
1999 fs.remove_dir(&abs_path, options).await?;
2000 } else {
2001 fs.remove_file(&abs_path, options).await?;
2002 }
2003 }
2004 lsp::DocumentChangeOperation::Edit(op) => {
2005 let buffer_to_edit = this
2006 .update(cx, |this, cx| {
2007 this.open_local_buffer_via_lsp(
2008 op.text_document.uri,
2009 language_name.clone(),
2010 language_server.clone(),
2011 cx,
2012 )
2013 })
2014 .await?;
2015
2016 let edits = buffer_to_edit
2017 .update(cx, |buffer, cx| {
2018 let edits = op.edits.into_iter().map(|edit| match edit {
2019 lsp::OneOf::Left(edit) => edit,
2020 lsp::OneOf::Right(edit) => edit.text_edit,
2021 });
2022 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2023 })
2024 .await?;
2025
2026 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2027 buffer.finalize_last_transaction();
2028 buffer.start_transaction();
2029 for (range, text) in edits {
2030 buffer.edit([range], text, cx);
2031 }
2032 let transaction = if buffer.end_transaction(cx).is_some() {
2033 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2034 if !push_to_history {
2035 buffer.forget_transaction(transaction.id);
2036 }
2037 Some(transaction)
2038 } else {
2039 None
2040 };
2041
2042 transaction
2043 });
2044 if let Some(transaction) = transaction {
2045 project_transaction.0.insert(buffer_to_edit, transaction);
2046 }
2047 }
2048 }
2049 }
2050
2051 Ok(project_transaction)
2052 }
2053
2054 pub fn prepare_rename<T: ToPointUtf16>(
2055 &self,
2056 buffer: ModelHandle<Buffer>,
2057 position: T,
2058 cx: &mut ModelContext<Self>,
2059 ) -> Task<Result<Option<Range<Anchor>>>> {
2060 let position = position.to_point_utf16(buffer.read(cx));
2061 self.request_lsp(buffer, PrepareRename { position }, cx)
2062 }
2063
2064 pub fn perform_rename<T: ToPointUtf16>(
2065 &self,
2066 buffer: ModelHandle<Buffer>,
2067 position: T,
2068 new_name: String,
2069 push_to_history: bool,
2070 cx: &mut ModelContext<Self>,
2071 ) -> Task<Result<ProjectTransaction>> {
2072 let position = position.to_point_utf16(buffer.read(cx));
2073 self.request_lsp(
2074 buffer,
2075 PerformRename {
2076 position,
2077 new_name,
2078 push_to_history,
2079 },
2080 cx,
2081 )
2082 }
2083
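    // Searches the project for `query`. Locally, visible worktree files are scanned
    // in parallel on background threads to detect candidate matches, the candidates
    // (plus any already-open buffers) are opened as buffers, and each buffer is
    // searched to produce anchor ranges. Remotely, the query is forwarded to the
    // host and the returned locations are deserialized.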
2084 pub fn search(
2085 &self,
2086 query: SearchQuery,
2087 cx: &mut ModelContext<Self>,
2088 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2089 if self.is_local() {
2090 let snapshots = self
2091 .visible_worktrees(cx)
2092 .filter_map(|tree| {
2093 let tree = tree.read(cx).as_local()?;
2094 Some(tree.snapshot())
2095 })
2096 .collect::<Vec<_>>();
2097
2098 let background = cx.background().clone();
2099 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2100 if path_count == 0 {
2101 return Task::ready(Ok(Default::default()));
2102 }
2103 let workers = background.num_cpus().min(path_count);
2104 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2105 cx.background()
2106 .spawn({
2107 let fs = self.fs.clone();
2108 let background = cx.background().clone();
2109 let query = query.clone();
2110 async move {
2111 let fs = &fs;
2112 let query = &query;
2113 let matching_paths_tx = &matching_paths_tx;
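                        // Divide the visible files evenly across the workers (ceiling
                        // division); each worker then walks only its slice of every
                        // worktree snapshot.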
2114 let paths_per_worker = (path_count + workers - 1) / workers;
2115 let snapshots = &snapshots;
2116 background
2117 .scoped(|scope| {
2118 for worker_ix in 0..workers {
2119 let worker_start_ix = worker_ix * paths_per_worker;
2120 let worker_end_ix = worker_start_ix + paths_per_worker;
2121 scope.spawn(async move {
2122 let mut snapshot_start_ix = 0;
2123 let mut abs_path = PathBuf::new();
2124 for snapshot in snapshots {
2125 let snapshot_end_ix =
2126 snapshot_start_ix + snapshot.visible_file_count();
2127 if worker_end_ix <= snapshot_start_ix {
2128 break;
2129 } else if worker_start_ix > snapshot_end_ix {
2130 snapshot_start_ix = snapshot_end_ix;
2131 continue;
2132 } else {
2133 let start_in_snapshot = worker_start_ix
2134 .saturating_sub(snapshot_start_ix);
2135 let end_in_snapshot =
2136 cmp::min(worker_end_ix, snapshot_end_ix)
2137 - snapshot_start_ix;
2138
2139 for entry in snapshot
2140 .files(false, start_in_snapshot)
2141 .take(end_in_snapshot - start_in_snapshot)
2142 {
2143 if matching_paths_tx.is_closed() {
2144 break;
2145 }
2146
2147 abs_path.clear();
2148 abs_path.push(&snapshot.abs_path());
2149 abs_path.push(&entry.path);
2150 let matches = if let Some(file) =
2151 fs.open_sync(&abs_path).await.log_err()
2152 {
2153 query.detect(file).unwrap_or(false)
2154 } else {
2155 false
2156 };
2157
2158 if matches {
2159 let project_path =
2160 (snapshot.id(), entry.path.clone());
2161 if matching_paths_tx
2162 .send(project_path)
2163 .await
2164 .is_err()
2165 {
2166 break;
2167 }
2168 }
2169 }
2170
2171 snapshot_start_ix = snapshot_end_ix;
2172 }
2173 }
2174 });
2175 }
2176 })
2177 .await;
2178 }
2179 })
2180 .detach();
2181
2182 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2183 let open_buffers = self
2184 .opened_buffers
2185 .values()
2186 .filter_map(|b| b.upgrade(cx))
2187 .collect::<HashSet<_>>();
2188 cx.spawn(|this, cx| async move {
2189 for buffer in &open_buffers {
2190 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2191 buffers_tx.send((buffer.clone(), snapshot)).await?;
2192 }
2193
2194 let open_buffers = Rc::new(RefCell::new(open_buffers));
2195 while let Some(project_path) = matching_paths_rx.next().await {
2196 if buffers_tx.is_closed() {
2197 break;
2198 }
2199
2200 let this = this.clone();
2201 let open_buffers = open_buffers.clone();
2202 let buffers_tx = buffers_tx.clone();
2203 cx.spawn(|mut cx| async move {
2204 if let Some(buffer) = this
2205 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2206 .await
2207 .log_err()
2208 {
2209 if open_buffers.borrow_mut().insert(buffer.clone()) {
2210 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2211 buffers_tx.send((buffer, snapshot)).await?;
2212 }
2213 }
2214
2215 Ok::<_, anyhow::Error>(())
2216 })
2217 .detach();
2218 }
2219
2220 Ok::<_, anyhow::Error>(())
2221 })
2222 .detach_and_log_err(cx);
2223
2224 let background = cx.background().clone();
2225 cx.background().spawn(async move {
2226 let query = &query;
2227 let mut matched_buffers = Vec::new();
2228 for _ in 0..workers {
2229 matched_buffers.push(HashMap::default());
2230 }
2231 background
2232 .scoped(|scope| {
2233 for worker_matched_buffers in matched_buffers.iter_mut() {
2234 let mut buffers_rx = buffers_rx.clone();
2235 scope.spawn(async move {
2236 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2237 let buffer_matches = query
2238 .search(snapshot.as_rope())
2239 .await
2240 .iter()
2241 .map(|range| {
2242 snapshot.anchor_before(range.start)
2243 ..snapshot.anchor_after(range.end)
2244 })
2245 .collect::<Vec<_>>();
2246 if !buffer_matches.is_empty() {
2247 worker_matched_buffers
2248 .insert(buffer.clone(), buffer_matches);
2249 }
2250 }
2251 });
2252 }
2253 })
2254 .await;
2255 Ok(matched_buffers.into_iter().flatten().collect())
2256 })
2257 } else if let Some(project_id) = self.remote_id() {
2258 let request = self.client.request(query.to_proto(project_id));
2259 cx.spawn(|this, mut cx| async move {
2260 let response = request.await?;
2261 let mut result = HashMap::default();
2262 for location in response.locations {
2263 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2264 let target_buffer = this
2265 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2266 .await?;
2267 let start = location
2268 .start
2269 .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing start"))?;
2271 let end = location
2272 .end
2273 .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing end"))?;
2275 result
2276 .entry(target_buffer)
2277 .or_insert(Vec::new())
2278 .push(start..end)
2279 }
2280 Ok(result)
2281 })
2282 } else {
2283 Task::ready(Ok(Default::default()))
2284 }
2285 }
2286
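    // Runs an LSP-backed request for the given buffer. Locally the request is sent
    // to the buffer's language server and the response converted from LSP types;
    // on remote projects it is proxied to the host over RPC. A default response is
    // returned when neither a language server nor a remote connection is available.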
2287 fn request_lsp<R: LspCommand>(
2288 &self,
2289 buffer_handle: ModelHandle<Buffer>,
2290 request: R,
2291 cx: &mut ModelContext<Self>,
2292 ) -> Task<Result<R::Response>>
2293 where
2294 <R::LspRequest as lsp::request::Request>::Result: Send,
2295 {
2296 let buffer = buffer_handle.read(cx);
2297 if self.is_local() {
2298 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2299 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2300 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2301 return cx.spawn(|this, cx| async move {
2302 let response = language_server
2303 .request::<R::LspRequest>(lsp_params)
2304 .await
2305 .context("lsp request failed")?;
2306 request
2307 .response_from_lsp(response, this, buffer_handle, cx)
2308 .await
2309 });
2310 }
2311 } else if let Some(project_id) = self.remote_id() {
2312 let rpc = self.client.clone();
2313 let message = request.to_proto(project_id, buffer);
2314 return cx.spawn(|this, cx| async move {
2315 let response = rpc.request(message).await?;
2316 request
2317 .response_from_proto(response, this, buffer_handle, cx)
2318 .await
2319 });
2320 }
2321 Task::ready(Ok(Default::default()))
2322 }
2323
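    // Returns the worktree containing `abs_path` along with the path relative to
    // that worktree's root, creating a new local worktree rooted at `abs_path` if
    // no existing worktree contains it.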
2324 pub fn find_or_create_local_worktree(
2325 &self,
2326 abs_path: impl AsRef<Path>,
2327 visible: bool,
2328 cx: &mut ModelContext<Self>,
2329 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2330 let abs_path = abs_path.as_ref();
2331 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2332 Task::ready(Ok((tree.clone(), relative_path.into())))
2333 } else {
2334 let worktree = self.create_local_worktree(abs_path, visible, cx);
2335 cx.foreground()
2336 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2337 }
2338 }
2339
2340 pub fn find_local_worktree(
2341 &self,
2342 abs_path: &Path,
2343 cx: &AppContext,
2344 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2345 for tree in self.worktrees(cx) {
2346 if let Some(relative_path) = tree
2347 .read(cx)
2348 .as_local()
2349 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2350 {
2351 return Some((tree.clone(), relative_path.into()));
2352 }
2353 }
2354 None
2355 }
2356
2357 pub fn is_shared(&self) -> bool {
2358 match &self.client_state {
2359 ProjectClientState::Local { is_shared, .. } => *is_shared,
2360 ProjectClientState::Remote { .. } => false,
2361 }
2362 }
2363
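    // Creates a local worktree rooted at `abs_path`. If the project already has a
    // remote id, the new worktree is shared or registered with the server depending
    // on whether the project is currently shared.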
2364 fn create_local_worktree(
2365 &self,
2366 abs_path: impl AsRef<Path>,
2367 visible: bool,
2368 cx: &mut ModelContext<Self>,
2369 ) -> Task<Result<ModelHandle<Worktree>>> {
2370 let fs = self.fs.clone();
2371 let client = self.client.clone();
2372 let path = Arc::from(abs_path.as_ref());
2373 cx.spawn(|project, mut cx| async move {
2374 let worktree = Worktree::local(client.clone(), path, visible, fs, &mut cx).await?;
2375
2376 let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
2377 project.add_worktree(&worktree, cx);
2378 (project.remote_id(), project.is_shared())
2379 });
2380
2381 if let Some(project_id) = remote_project_id {
2382 if is_shared {
2383 worktree
2384 .update(&mut cx, |worktree, cx| {
2385 worktree.as_local_mut().unwrap().share(project_id, cx)
2386 })
2387 .await?;
2388 } else {
2389 worktree
2390 .update(&mut cx, |worktree, cx| {
2391 worktree.as_local_mut().unwrap().register(project_id, cx)
2392 })
2393 .await?;
2394 }
2395 }
2396
2397 Ok(worktree)
2398 })
2399 }
2400
2401 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2402 self.worktrees.retain(|worktree| {
2403 worktree
2404 .upgrade(cx)
2405 .map_or(false, |w| w.read(cx).id() != id)
2406 });
2407 cx.notify();
2408 }
2409
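    // Registers a worktree with the project. The handle is held strongly when the
    // project is shared or the worktree is visible or remote; otherwise it is held
    // weakly so hidden local worktrees can be released once unused.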
2410 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2411 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2412 if worktree.read(cx).is_local() {
2413 cx.subscribe(&worktree, |this, worktree, _, cx| {
2414 this.update_local_worktree_buffers(worktree, cx);
2415 })
2416 .detach();
2417 }
2418
2419 let push_strong_handle = {
2420 let worktree = worktree.read(cx);
2421 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2422 };
2423 if push_strong_handle {
2424 self.worktrees
2425 .push(WorktreeHandle::Strong(worktree.clone()));
2426 } else {
2427 cx.observe_release(&worktree, |this, cx| {
2428 this.worktrees
2429 .retain(|worktree| worktree.upgrade(cx).is_some());
2430 cx.notify();
2431 })
2432 .detach();
2433 self.worktrees
2434 .push(WorktreeHandle::Weak(worktree.downgrade()));
2435 }
2436 cx.notify();
2437 }
2438
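    // After a local worktree changes, re-resolves the file metadata (entry id, path,
    // mtime) for every open buffer in that worktree, notifies remote collaborators
    // of the updated files, and prunes buffers that have been dropped.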
2439 fn update_local_worktree_buffers(
2440 &mut self,
2441 worktree_handle: ModelHandle<Worktree>,
2442 cx: &mut ModelContext<Self>,
2443 ) {
2444 let snapshot = worktree_handle.read(cx).snapshot();
2445 let mut buffers_to_delete = Vec::new();
2446 for (buffer_id, buffer) in &self.opened_buffers {
2447 if let Some(buffer) = buffer.upgrade(cx) {
2448 buffer.update(cx, |buffer, cx| {
2449 if let Some(old_file) = File::from_dyn(buffer.file()) {
2450 if old_file.worktree != worktree_handle {
2451 return;
2452 }
2453
2454 let new_file = if let Some(entry) = old_file
2455 .entry_id
2456 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2457 {
2458 File {
2459 is_local: true,
2460 entry_id: Some(entry.id),
2461 mtime: entry.mtime,
2462 path: entry.path.clone(),
2463 worktree: worktree_handle.clone(),
2464 }
2465 } else if let Some(entry) =
2466 snapshot.entry_for_path(old_file.path().as_ref())
2467 {
2468 File {
2469 is_local: true,
2470 entry_id: Some(entry.id),
2471 mtime: entry.mtime,
2472 path: entry.path.clone(),
2473 worktree: worktree_handle.clone(),
2474 }
2475 } else {
2476 File {
2477 is_local: true,
2478 entry_id: None,
2479 path: old_file.path().clone(),
2480 mtime: old_file.mtime(),
2481 worktree: worktree_handle.clone(),
2482 }
2483 };
2484
2485 if let Some(project_id) = self.remote_id() {
2486 self.client
2487 .send(proto::UpdateBufferFile {
2488 project_id,
2489 buffer_id: *buffer_id as u64,
2490 file: Some(new_file.to_proto()),
2491 })
2492 .log_err();
2493 }
2494 buffer.file_updated(Box::new(new_file), cx).detach();
2495 }
2496 });
2497 } else {
2498 buffers_to_delete.push(*buffer_id);
2499 }
2500 }
2501
2502 for buffer_id in buffers_to_delete {
2503 self.opened_buffers.remove(&buffer_id);
2504 }
2505 }
2506
2507 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2508 let new_active_entry = entry.and_then(|project_path| {
2509 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2510 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2511 Some(ProjectEntry {
2512 worktree_id: project_path.worktree_id,
2513 entry_id: entry.id,
2514 })
2515 });
2516 if new_active_entry != self.active_entry {
2517 self.active_entry = new_active_entry;
2518 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2519 }
2520 }
2521
2522 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2523 self.language_servers_with_diagnostics_running > 0
2524 }
2525
2526 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2527 let mut summary = DiagnosticSummary::default();
2528 for (_, path_summary) in self.diagnostic_summaries(cx) {
2529 summary.error_count += path_summary.error_count;
2530 summary.warning_count += path_summary.warning_count;
2531 summary.info_count += path_summary.info_count;
2532 summary.hint_count += path_summary.hint_count;
2533 }
2534 summary
2535 }
2536
2537 pub fn diagnostic_summaries<'a>(
2538 &'a self,
2539 cx: &'a AppContext,
2540 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2541 self.worktrees(cx).flat_map(move |worktree| {
2542 let worktree = worktree.read(cx);
2543 let worktree_id = worktree.id();
2544 worktree
2545 .diagnostic_summaries()
2546 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2547 })
2548 }
2549
2550 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2551 self.language_servers_with_diagnostics_running += 1;
2552 if self.language_servers_with_diagnostics_running == 1 {
2553 cx.emit(Event::DiskBasedDiagnosticsStarted);
2554 }
2555 }
2556
2557 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2558 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2559 self.language_servers_with_diagnostics_running -= 1;
2560 if self.language_servers_with_diagnostics_running == 0 {
2561 cx.emit(Event::DiskBasedDiagnosticsFinished);
2562 }
2563 }
2564
2565 pub fn active_entry(&self) -> Option<ProjectEntry> {
2566 self.active_entry
2567 }
2568
2569 // RPC message handlers
2570
2571 async fn handle_unshare_project(
2572 this: ModelHandle<Self>,
2573 _: TypedEnvelope<proto::UnshareProject>,
2574 _: Arc<Client>,
2575 mut cx: AsyncAppContext,
2576 ) -> Result<()> {
2577 this.update(&mut cx, |this, cx| {
2578 if let ProjectClientState::Remote {
2579 sharing_has_stopped,
2580 ..
2581 } = &mut this.client_state
2582 {
2583 *sharing_has_stopped = true;
2584 this.collaborators.clear();
2585 cx.notify();
2586 } else {
2587 unreachable!()
2588 }
2589 });
2590
2591 Ok(())
2592 }
2593
2594 async fn handle_add_collaborator(
2595 this: ModelHandle<Self>,
2596 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2597 _: Arc<Client>,
2598 mut cx: AsyncAppContext,
2599 ) -> Result<()> {
2600 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2601 let collaborator = envelope
2602 .payload
2603 .collaborator
2604 .take()
2605 .ok_or_else(|| anyhow!("empty collaborator"))?;
2606
2607 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2608 this.update(&mut cx, |this, cx| {
2609 this.collaborators
2610 .insert(collaborator.peer_id, collaborator);
2611 cx.notify();
2612 });
2613
2614 Ok(())
2615 }
2616
2617 async fn handle_remove_collaborator(
2618 this: ModelHandle<Self>,
2619 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2620 _: Arc<Client>,
2621 mut cx: AsyncAppContext,
2622 ) -> Result<()> {
2623 this.update(&mut cx, |this, cx| {
2624 let peer_id = PeerId(envelope.payload.peer_id);
2625 let replica_id = this
2626 .collaborators
2627 .remove(&peer_id)
2628 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2629 .replica_id;
2630 for (_, buffer) in &this.opened_buffers {
2631 if let Some(buffer) = buffer.upgrade(cx) {
2632 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2633 }
2634 }
2635 cx.notify();
2636 Ok(())
2637 })
2638 }
2639
2640 async fn handle_register_worktree(
2641 this: ModelHandle<Self>,
2642 envelope: TypedEnvelope<proto::RegisterWorktree>,
2643 client: Arc<Client>,
2644 mut cx: AsyncAppContext,
2645 ) -> Result<()> {
2646 this.update(&mut cx, |this, cx| {
2647 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2648 let replica_id = this.replica_id();
2649 let worktree = proto::Worktree {
2650 id: envelope.payload.worktree_id,
2651 root_name: envelope.payload.root_name,
2652 entries: Default::default(),
2653 diagnostic_summaries: Default::default(),
2654 visible: envelope.payload.visible,
2655 };
2656 let (worktree, load_task) =
2657 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2658 this.add_worktree(&worktree, cx);
2659 load_task.detach();
2660 Ok(())
2661 })
2662 }
2663
2664 async fn handle_unregister_worktree(
2665 this: ModelHandle<Self>,
2666 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2667 _: Arc<Client>,
2668 mut cx: AsyncAppContext,
2669 ) -> Result<()> {
2670 this.update(&mut cx, |this, cx| {
2671 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2672 this.remove_worktree(worktree_id, cx);
2673 Ok(())
2674 })
2675 }
2676
2677 async fn handle_update_worktree(
2678 this: ModelHandle<Self>,
2679 envelope: TypedEnvelope<proto::UpdateWorktree>,
2680 _: Arc<Client>,
2681 mut cx: AsyncAppContext,
2682 ) -> Result<()> {
2683 this.update(&mut cx, |this, cx| {
2684 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2685 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2686 worktree.update(cx, |worktree, _| {
2687 let worktree = worktree.as_remote_mut().unwrap();
2688 worktree.update_from_remote(envelope)
2689 })?;
2690 }
2691 Ok(())
2692 })
2693 }
2694
2695 async fn handle_update_diagnostic_summary(
2696 this: ModelHandle<Self>,
2697 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2698 _: Arc<Client>,
2699 mut cx: AsyncAppContext,
2700 ) -> Result<()> {
2701 this.update(&mut cx, |this, cx| {
2702 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2703 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2704 if let Some(summary) = envelope.payload.summary {
2705 let project_path = ProjectPath {
2706 worktree_id,
2707 path: Path::new(&summary.path).into(),
2708 };
2709 worktree.update(cx, |worktree, _| {
2710 worktree
2711 .as_remote_mut()
2712 .unwrap()
2713 .update_diagnostic_summary(project_path.path.clone(), &summary);
2714 });
2715 cx.emit(Event::DiagnosticsUpdated(project_path));
2716 }
2717 }
2718 Ok(())
2719 })
2720 }
2721
2722 async fn handle_disk_based_diagnostics_updating(
2723 this: ModelHandle<Self>,
2724 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2725 _: Arc<Client>,
2726 mut cx: AsyncAppContext,
2727 ) -> Result<()> {
2728 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2729 Ok(())
2730 }
2731
2732 async fn handle_disk_based_diagnostics_updated(
2733 this: ModelHandle<Self>,
2734 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2735 _: Arc<Client>,
2736 mut cx: AsyncAppContext,
2737 ) -> Result<()> {
2738 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2739 Ok(())
2740 }
2741
2742 async fn handle_update_buffer(
2743 this: ModelHandle<Self>,
2744 envelope: TypedEnvelope<proto::UpdateBuffer>,
2745 _: Arc<Client>,
2746 mut cx: AsyncAppContext,
2747 ) -> Result<()> {
2748 this.update(&mut cx, |this, cx| {
2749 let payload = envelope.payload.clone();
2750 let buffer_id = payload.buffer_id;
2751 let ops = payload
2752 .operations
2753 .into_iter()
                .map(language::proto::deserialize_operation)
2755 .collect::<Result<Vec<_>, _>>()?;
2756 match this.opened_buffers.entry(buffer_id) {
2757 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2758 OpenBuffer::Strong(buffer) => {
2759 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2760 }
2761 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2762 OpenBuffer::Weak(_) => {}
2763 },
2764 hash_map::Entry::Vacant(e) => {
2765 e.insert(OpenBuffer::Loading(ops));
2766 }
2767 }
2768 Ok(())
2769 })
2770 }
2771
2772 async fn handle_update_buffer_file(
2773 this: ModelHandle<Self>,
2774 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2775 _: Arc<Client>,
2776 mut cx: AsyncAppContext,
2777 ) -> Result<()> {
2778 this.update(&mut cx, |this, cx| {
2779 let payload = envelope.payload.clone();
2780 let buffer_id = payload.buffer_id;
2781 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2782 let worktree = this
2783 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2784 .ok_or_else(|| anyhow!("no such worktree"))?;
2785 let file = File::from_proto(file, worktree.clone(), cx)?;
2786 let buffer = this
2787 .opened_buffers
2788 .get_mut(&buffer_id)
2789 .and_then(|b| b.upgrade(cx))
2790 .ok_or_else(|| anyhow!("no such buffer"))?;
2791 buffer.update(cx, |buffer, cx| {
2792 buffer.file_updated(Box::new(file), cx).detach();
2793 });
2794 Ok(())
2795 })
2796 }
2797
2798 async fn handle_save_buffer(
2799 this: ModelHandle<Self>,
2800 envelope: TypedEnvelope<proto::SaveBuffer>,
2801 _: Arc<Client>,
2802 mut cx: AsyncAppContext,
2803 ) -> Result<proto::BufferSaved> {
2804 let buffer_id = envelope.payload.buffer_id;
2805 let requested_version = envelope.payload.version.try_into()?;
2806
2807 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2808 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2809 let buffer = this
2810 .opened_buffers
2811 .get(&buffer_id)
2812 .map(|buffer| buffer.upgrade(cx).unwrap())
2813 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2814 Ok::<_, anyhow::Error>((project_id, buffer))
2815 })?;
2816
2817 if !buffer
2818 .read_with(&cx, |buffer, _| buffer.version())
2819 .observed_all(&requested_version)
2820 {
2821 Err(anyhow!("save request depends on unreceived edits"))?;
2822 }
2823
2824 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2825 Ok(proto::BufferSaved {
2826 project_id,
2827 buffer_id,
2828 version: (&saved_version).into(),
2829 mtime: Some(mtime.into()),
2830 })
2831 }
2832
2833 async fn handle_format_buffers(
2834 this: ModelHandle<Self>,
2835 envelope: TypedEnvelope<proto::FormatBuffers>,
2836 _: Arc<Client>,
2837 mut cx: AsyncAppContext,
2838 ) -> Result<proto::FormatBuffersResponse> {
2839 let sender_id = envelope.original_sender_id()?;
2840 let format = this.update(&mut cx, |this, cx| {
2841 let mut buffers = HashSet::default();
2842 for buffer_id in &envelope.payload.buffer_ids {
2843 buffers.insert(
2844 this.opened_buffers
2845 .get(buffer_id)
2846 .map(|buffer| buffer.upgrade(cx).unwrap())
2847 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2848 );
2849 }
2850 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2851 })?;
2852
2853 let project_transaction = format.await?;
2854 let project_transaction = this.update(&mut cx, |this, cx| {
2855 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2856 });
2857 Ok(proto::FormatBuffersResponse {
2858 transaction: Some(project_transaction),
2859 })
2860 }
2861
2862 async fn handle_get_completions(
2863 this: ModelHandle<Self>,
2864 envelope: TypedEnvelope<proto::GetCompletions>,
2865 _: Arc<Client>,
2866 mut cx: AsyncAppContext,
2867 ) -> Result<proto::GetCompletionsResponse> {
2868 let position = envelope
2869 .payload
2870 .position
2871 .and_then(language::proto::deserialize_anchor)
2872 .ok_or_else(|| anyhow!("invalid position"))?;
2873 let version = clock::Global::from(envelope.payload.version);
2874 let buffer = this.read_with(&cx, |this, cx| {
2875 this.opened_buffers
2876 .get(&envelope.payload.buffer_id)
2877 .map(|buffer| buffer.upgrade(cx).unwrap())
2878 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2879 })?;
2880 if !buffer
2881 .read_with(&cx, |buffer, _| buffer.version())
2882 .observed_all(&version)
2883 {
2884 Err(anyhow!("completion request depends on unreceived edits"))?;
2885 }
2886 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2887 let completions = this
2888 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2889 .await?;
2890
2891 Ok(proto::GetCompletionsResponse {
2892 completions: completions
2893 .iter()
2894 .map(language::proto::serialize_completion)
2895 .collect(),
2896 version: (&version).into(),
2897 })
2898 }
2899
2900 async fn handle_apply_additional_edits_for_completion(
2901 this: ModelHandle<Self>,
2902 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2903 _: Arc<Client>,
2904 mut cx: AsyncAppContext,
2905 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2906 let apply_additional_edits = this.update(&mut cx, |this, cx| {
2907 let buffer = this
2908 .opened_buffers
2909 .get(&envelope.payload.buffer_id)
2910 .map(|buffer| buffer.upgrade(cx).unwrap())
2911 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2912 let language = buffer.read(cx).language();
2913 let completion = language::proto::deserialize_completion(
2914 envelope
2915 .payload
2916 .completion
2917 .ok_or_else(|| anyhow!("invalid completion"))?,
2918 language,
2919 )?;
2920 Ok::<_, anyhow::Error>(
2921 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
2922 )
2923 })?;
2924
2925 Ok(proto::ApplyCompletionAdditionalEditsResponse {
2926 transaction: apply_additional_edits
2927 .await?
2928 .as_ref()
2929 .map(language::proto::serialize_transaction),
2930 })
2931 }
2932
2933 async fn handle_get_code_actions(
2934 this: ModelHandle<Self>,
2935 envelope: TypedEnvelope<proto::GetCodeActions>,
2936 _: Arc<Client>,
2937 mut cx: AsyncAppContext,
2938 ) -> Result<proto::GetCodeActionsResponse> {
2939 let start = envelope
2940 .payload
2941 .start
2942 .and_then(language::proto::deserialize_anchor)
2943 .ok_or_else(|| anyhow!("invalid start"))?;
2944 let end = envelope
2945 .payload
2946 .end
2947 .and_then(language::proto::deserialize_anchor)
2948 .ok_or_else(|| anyhow!("invalid end"))?;
2949 let buffer = this.update(&mut cx, |this, cx| {
2950 this.opened_buffers
2951 .get(&envelope.payload.buffer_id)
2952 .map(|buffer| buffer.upgrade(cx).unwrap())
2953 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2954 })?;
2955 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2956 if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
2957 Err(anyhow!("code action request references unreceived edits"))?;
2958 }
2959 let code_actions = this.update(&mut cx, |this, cx| {
2960 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
2961 })?;
2962
2963 Ok(proto::GetCodeActionsResponse {
2964 actions: code_actions
2965 .await?
2966 .iter()
2967 .map(language::proto::serialize_code_action)
2968 .collect(),
2969 version: (&version).into(),
2970 })
2971 }
2972
2973 async fn handle_apply_code_action(
2974 this: ModelHandle<Self>,
2975 envelope: TypedEnvelope<proto::ApplyCodeAction>,
2976 _: Arc<Client>,
2977 mut cx: AsyncAppContext,
2978 ) -> Result<proto::ApplyCodeActionResponse> {
2979 let sender_id = envelope.original_sender_id()?;
2980 let action = language::proto::deserialize_code_action(
2981 envelope
2982 .payload
2983 .action
2984 .ok_or_else(|| anyhow!("invalid action"))?,
2985 )?;
2986 let apply_code_action = this.update(&mut cx, |this, cx| {
2987 let buffer = this
2988 .opened_buffers
2989 .get(&envelope.payload.buffer_id)
2990 .map(|buffer| buffer.upgrade(cx).unwrap())
2991 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
2992 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
2993 })?;
2994
2995 let project_transaction = apply_code_action.await?;
2996 let project_transaction = this.update(&mut cx, |this, cx| {
2997 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2998 });
2999 Ok(proto::ApplyCodeActionResponse {
3000 transaction: Some(project_transaction),
3001 })
3002 }
3003
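    // Generic handler for LSP-backed requests from guests: deserializes the request,
    // runs it through `request_lsp` on the host, and serializes the response using
    // the buffer version observed when the request arrived.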
3004 async fn handle_lsp_command<T: LspCommand>(
3005 this: ModelHandle<Self>,
3006 envelope: TypedEnvelope<T::ProtoRequest>,
3007 _: Arc<Client>,
3008 mut cx: AsyncAppContext,
3009 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3010 where
3011 <T::LspRequest as lsp::request::Request>::Result: Send,
3012 {
3013 let sender_id = envelope.original_sender_id()?;
3014 let (request, buffer_version) = this.update(&mut cx, |this, cx| {
3015 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3016 let buffer_handle = this
3017 .opened_buffers
3018 .get(&buffer_id)
3019 .map(|buffer| buffer.upgrade(cx).unwrap())
3020 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
3021 let buffer = buffer_handle.read(cx);
3022 let buffer_version = buffer.version();
3023 let request = T::from_proto(envelope.payload, this, buffer)?;
3024 Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
3025 })?;
3026 let response = request.await?;
3027 this.update(&mut cx, |this, cx| {
3028 Ok(T::response_to_proto(
3029 response,
3030 this,
3031 sender_id,
3032 &buffer_version,
3033 cx,
3034 ))
3035 })
3036 }
3037
3038 async fn handle_get_project_symbols(
3039 this: ModelHandle<Self>,
3040 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3041 _: Arc<Client>,
3042 mut cx: AsyncAppContext,
3043 ) -> Result<proto::GetProjectSymbolsResponse> {
3044 let symbols = this
3045 .update(&mut cx, |this, cx| {
3046 this.symbols(&envelope.payload.query, cx)
3047 })
3048 .await?;
3049
3050 Ok(proto::GetProjectSymbolsResponse {
3051 symbols: symbols.iter().map(serialize_symbol).collect(),
3052 })
3053 }
3054
3055 async fn handle_search_project(
3056 this: ModelHandle<Self>,
3057 envelope: TypedEnvelope<proto::SearchProject>,
3058 _: Arc<Client>,
3059 mut cx: AsyncAppContext,
3060 ) -> Result<proto::SearchProjectResponse> {
3061 let peer_id = envelope.original_sender_id()?;
3062 let query = SearchQuery::from_proto(envelope.payload)?;
3063 let result = this
3064 .update(&mut cx, |this, cx| this.search(query, cx))
3065 .await?;
3066
3067 this.update(&mut cx, |this, cx| {
3068 let mut locations = Vec::new();
3069 for (buffer, ranges) in result {
3070 for range in ranges {
3071 let start = serialize_anchor(&range.start);
3072 let end = serialize_anchor(&range.end);
3073 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3074 locations.push(proto::Location {
3075 buffer: Some(buffer),
3076 start: Some(start),
3077 end: Some(end),
3078 });
3079 }
3080 }
3081 Ok(proto::SearchProjectResponse { locations })
3082 })
3083 }
3084
3085 async fn handle_open_buffer_for_symbol(
3086 this: ModelHandle<Self>,
3087 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3088 _: Arc<Client>,
3089 mut cx: AsyncAppContext,
3090 ) -> Result<proto::OpenBufferForSymbolResponse> {
3091 let peer_id = envelope.original_sender_id()?;
3092 let symbol = envelope
3093 .payload
3094 .symbol
3095 .ok_or_else(|| anyhow!("invalid symbol"))?;
3096 let symbol = this.read_with(&cx, |this, _| {
3097 let symbol = this.deserialize_symbol(symbol)?;
3098 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3099 if signature == symbol.signature {
3100 Ok(symbol)
3101 } else {
3102 Err(anyhow!("invalid symbol signature"))
3103 }
3104 })?;
3105 let buffer = this
3106 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3107 .await?;
3108
3109 Ok(proto::OpenBufferForSymbolResponse {
3110 buffer: Some(this.update(&mut cx, |this, cx| {
3111 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3112 })),
3113 })
3114 }
3115
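    // Computes a SHA-256 signature over the worktree id, the symbol's path, and a
    // per-project nonce. Used to verify that incoming symbol requests refer to
    // symbols this project actually produced.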
3116 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3117 let mut hasher = Sha256::new();
3118 hasher.update(worktree_id.to_proto().to_be_bytes());
3119 hasher.update(path.to_string_lossy().as_bytes());
3120 hasher.update(self.nonce.to_be_bytes());
3121 hasher.finalize().as_slice().try_into().unwrap()
3122 }
3123
3124 async fn handle_open_buffer(
3125 this: ModelHandle<Self>,
3126 envelope: TypedEnvelope<proto::OpenBuffer>,
3127 _: Arc<Client>,
3128 mut cx: AsyncAppContext,
3129 ) -> Result<proto::OpenBufferResponse> {
3130 let peer_id = envelope.original_sender_id()?;
3131 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3132 let open_buffer = this.update(&mut cx, |this, cx| {
3133 this.open_buffer(
3134 ProjectPath {
3135 worktree_id,
3136 path: PathBuf::from(envelope.payload.path).into(),
3137 },
3138 cx,
3139 )
3140 });
3141
3142 let buffer = open_buffer.await?;
3143 this.update(&mut cx, |this, cx| {
3144 Ok(proto::OpenBufferResponse {
3145 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3146 })
3147 })
3148 }
3149
3150 fn serialize_project_transaction_for_peer(
3151 &mut self,
3152 project_transaction: ProjectTransaction,
3153 peer_id: PeerId,
3154 cx: &AppContext,
3155 ) -> proto::ProjectTransaction {
3156 let mut serialized_transaction = proto::ProjectTransaction {
3157 buffers: Default::default(),
3158 transactions: Default::default(),
3159 };
3160 for (buffer, transaction) in project_transaction.0 {
3161 serialized_transaction
3162 .buffers
3163 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3164 serialized_transaction
3165 .transactions
3166 .push(language::proto::serialize_transaction(&transaction));
3167 }
3168 serialized_transaction
3169 }
3170
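    // Reconstructs a `ProjectTransaction` received over RPC: deserializes each
    // buffer and transaction, waits for the referenced edits to arrive, and
    // optionally pushes the transactions onto the buffers' undo histories.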
3171 fn deserialize_project_transaction(
3172 &mut self,
3173 message: proto::ProjectTransaction,
3174 push_to_history: bool,
3175 cx: &mut ModelContext<Self>,
3176 ) -> Task<Result<ProjectTransaction>> {
3177 cx.spawn(|this, mut cx| async move {
3178 let mut project_transaction = ProjectTransaction::default();
3179 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3180 let buffer = this
3181 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3182 .await?;
3183 let transaction = language::proto::deserialize_transaction(transaction)?;
3184 project_transaction.0.insert(buffer, transaction);
3185 }
3186
3187 for (buffer, transaction) in &project_transaction.0 {
3188 buffer
3189 .update(&mut cx, |buffer, _| {
3190 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3191 })
3192 .await;
3193
3194 if push_to_history {
3195 buffer.update(&mut cx, |buffer, _| {
3196 buffer.push_transaction(transaction.clone(), Instant::now());
3197 });
3198 }
3199 }
3200
3201 Ok(project_transaction)
3202 })
3203 }
3204
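    // Serializes a buffer for a peer, sending the full buffer state the first time
    // it is shared with that peer and only the buffer id on subsequent sends.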
3205 fn serialize_buffer_for_peer(
3206 &mut self,
3207 buffer: &ModelHandle<Buffer>,
3208 peer_id: PeerId,
3209 cx: &AppContext,
3210 ) -> proto::Buffer {
3211 let buffer_id = buffer.read(cx).remote_id();
3212 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3213 if shared_buffers.insert(buffer_id) {
3214 proto::Buffer {
3215 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3216 }
3217 } else {
3218 proto::Buffer {
3219 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3220 }
3221 }
3222 }
3223
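    // Resolves a buffer received over RPC. An id-only message waits until the
    // corresponding buffer has been opened locally; a full state message constructs
    // the buffer (and its file, if any) and registers it with the project.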
3224 fn deserialize_buffer(
3225 &mut self,
3226 buffer: proto::Buffer,
3227 cx: &mut ModelContext<Self>,
3228 ) -> Task<Result<ModelHandle<Buffer>>> {
3229 let replica_id = self.replica_id();
3230
3231 let opened_buffer_tx = self.opened_buffer.0.clone();
3232 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3233 cx.spawn(|this, mut cx| async move {
3234 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3235 proto::buffer::Variant::Id(id) => {
3236 let buffer = loop {
3237 let buffer = this.read_with(&cx, |this, cx| {
3238 this.opened_buffers
3239 .get(&id)
3240 .and_then(|buffer| buffer.upgrade(cx))
3241 });
3242 if let Some(buffer) = buffer {
3243 break buffer;
3244 }
3245 opened_buffer_rx
3246 .next()
3247 .await
3248 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3249 };
3250 Ok(buffer)
3251 }
3252 proto::buffer::Variant::State(mut buffer) => {
3253 let mut buffer_worktree = None;
3254 let mut buffer_file = None;
3255 if let Some(file) = buffer.file.take() {
3256 this.read_with(&cx, |this, cx| {
3257 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3258 let worktree =
3259 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3260 anyhow!("no worktree found for id {}", file.worktree_id)
3261 })?;
3262 buffer_file =
3263 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3264 as Box<dyn language::File>);
3265 buffer_worktree = Some(worktree);
3266 Ok::<_, anyhow::Error>(())
3267 })?;
3268 }
3269
3270 let buffer = cx.add_model(|cx| {
3271 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3272 });
3273
3274 this.update(&mut cx, |this, cx| {
3275 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3276 })?;
3277
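                    // Signal the `opened_buffer` watch so tasks waiting on id-only
                    // buffer messages re-check the set of open buffers.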
3278 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3279 Ok(buffer)
3280 }
3281 }
3282 })
3283 }
3284
3285 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3286 let language = self
3287 .languages
3288 .get_language(&serialized_symbol.language_name);
3289 let start = serialized_symbol
3290 .start
3291 .ok_or_else(|| anyhow!("invalid start"))?;
3292 let end = serialized_symbol
3293 .end
3294 .ok_or_else(|| anyhow!("invalid end"))?;
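        // This transmute relies on the peer having sent a valid `SymbolKind`
        // discriminant; an out-of-range value would be undefined behavior.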
3295 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3296 Ok(Symbol {
3297 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3298 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3299 language_name: serialized_symbol.language_name.clone(),
3300 label: language
3301 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3302 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3303 name: serialized_symbol.name,
3304 path: PathBuf::from(serialized_symbol.path),
3305 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3306 kind,
3307 signature: serialized_symbol
3308 .signature
3309 .try_into()
3310 .map_err(|_| anyhow!("invalid signature"))?,
3311 })
3312 }
3313
3314 async fn handle_close_buffer(
3315 _: ModelHandle<Self>,
3316 _: TypedEnvelope<proto::CloseBuffer>,
3317 _: Arc<Client>,
3318 _: AsyncAppContext,
3319 ) -> Result<()> {
3320 // TODO: use this for following
3321 Ok(())
3322 }
3323
3324 async fn handle_buffer_saved(
3325 this: ModelHandle<Self>,
3326 envelope: TypedEnvelope<proto::BufferSaved>,
3327 _: Arc<Client>,
3328 mut cx: AsyncAppContext,
3329 ) -> Result<()> {
3330 let version = envelope.payload.version.try_into()?;
3331 let mtime = envelope
3332 .payload
3333 .mtime
3334 .ok_or_else(|| anyhow!("missing mtime"))?
3335 .into();
3336
3337 this.update(&mut cx, |this, cx| {
3338 let buffer = this
3339 .opened_buffers
3340 .get(&envelope.payload.buffer_id)
3341 .and_then(|buffer| buffer.upgrade(cx));
3342 if let Some(buffer) = buffer {
3343 buffer.update(cx, |buffer, cx| {
3344 buffer.did_save(version, mtime, None, cx);
3345 });
3346 }
3347 Ok(())
3348 })
3349 }
3350
3351 async fn handle_buffer_reloaded(
3352 this: ModelHandle<Self>,
3353 envelope: TypedEnvelope<proto::BufferReloaded>,
3354 _: Arc<Client>,
3355 mut cx: AsyncAppContext,
3356 ) -> Result<()> {
3357 let payload = envelope.payload.clone();
3358 let version = payload.version.try_into()?;
3359 let mtime = payload
3360 .mtime
3361 .ok_or_else(|| anyhow!("missing mtime"))?
3362 .into();
3363 this.update(&mut cx, |this, cx| {
3364 let buffer = this
3365 .opened_buffers
3366 .get(&payload.buffer_id)
3367 .and_then(|buffer| buffer.upgrade(cx));
3368 if let Some(buffer) = buffer {
3369 buffer.update(cx, |buffer, cx| {
3370 buffer.did_reload(version, mtime, cx);
3371 });
3372 }
3373 Ok(())
3374 })
3375 }
3376
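    // Fuzzy-matches `query` against the paths of all visible worktrees, returning
    // up to `max_results` matches. Root names are included in the matched text only
    // when more than one worktree is visible.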
3377 pub fn match_paths<'a>(
3378 &self,
3379 query: &'a str,
3380 include_ignored: bool,
3381 smart_case: bool,
3382 max_results: usize,
3383 cancel_flag: &'a AtomicBool,
3384 cx: &AppContext,
3385 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3386 let worktrees = self
3387 .worktrees(cx)
3388 .filter(|worktree| worktree.read(cx).is_visible())
3389 .collect::<Vec<_>>();
3390 let include_root_name = worktrees.len() > 1;
3391 let candidate_sets = worktrees
3392 .into_iter()
3393 .map(|worktree| CandidateSet {
3394 snapshot: worktree.read(cx).snapshot(),
3395 include_ignored,
3396 include_root_name,
3397 })
3398 .collect::<Vec<_>>();
3399
3400 let background = cx.background().clone();
3401 async move {
3402 fuzzy::match_paths(
3403 candidate_sets.as_slice(),
3404 query,
3405 smart_case,
3406 max_results,
3407 cancel_flag,
3408 background,
3409 )
3410 .await
3411 }
3412 }
3413}
3414
3415impl WorktreeHandle {
3416 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3417 match self {
3418 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3419 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3420 }
3421 }
3422}
3423
3424impl OpenBuffer {
3425 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3426 match self {
3427 OpenBuffer::Strong(handle) => Some(handle.clone()),
3428 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3429 OpenBuffer::Loading(_) => None,
3430 }
3431 }
3432}
3433
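// Adapts a worktree snapshot into a candidate set for `fuzzy::match_paths`.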
3434struct CandidateSet {
3435 snapshot: Snapshot,
3436 include_ignored: bool,
3437 include_root_name: bool,
3438}
3439
3440impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3441 type Candidates = CandidateSetIter<'a>;
3442
3443 fn id(&self) -> usize {
3444 self.snapshot.id().to_usize()
3445 }
3446
3447 fn len(&self) -> usize {
3448 if self.include_ignored {
3449 self.snapshot.file_count()
3450 } else {
3451 self.snapshot.visible_file_count()
3452 }
3453 }
3454
3455 fn prefix(&self) -> Arc<str> {
3456 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3457 self.snapshot.root_name().into()
3458 } else if self.include_root_name {
3459 format!("{}/", self.snapshot.root_name()).into()
3460 } else {
3461 "".into()
3462 }
3463 }
3464
3465 fn candidates(&'a self, start: usize) -> Self::Candidates {
3466 CandidateSetIter {
3467 traversal: self.snapshot.files(self.include_ignored, start),
3468 }
3469 }
3470}
3471
3472struct CandidateSetIter<'a> {
3473 traversal: Traversal<'a>,
3474}
3475
3476impl<'a> Iterator for CandidateSetIter<'a> {
3477 type Item = PathMatchCandidate<'a>;
3478
3479 fn next(&mut self) -> Option<Self::Item> {
3480 self.traversal.next().map(|entry| {
3481 if let EntryKind::File(char_bag) = entry.kind {
3482 PathMatchCandidate {
3483 path: &entry.path,
3484 char_bag,
3485 }
3486 } else {
3487 unreachable!()
3488 }
3489 })
3490 }
3491}
3492
3493impl Entity for Project {
3494 type Event = Event;
3495
3496 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3497 match &self.client_state {
3498 ProjectClientState::Local { remote_id_rx, .. } => {
3499 if let Some(project_id) = *remote_id_rx.borrow() {
3500 self.client
3501 .send(proto::UnregisterProject { project_id })
3502 .log_err();
3503 }
3504 }
3505 ProjectClientState::Remote { remote_id, .. } => {
3506 self.client
3507 .send(proto::LeaveProject {
3508 project_id: *remote_id,
3509 })
3510 .log_err();
3511 }
3512 }
3513 }
3514
3515 fn app_will_quit(
3516 &mut self,
3517 _: &mut MutableAppContext,
3518 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3519 let shutdown_futures = self
3520 .language_servers
3521 .drain()
3522 .filter_map(|(_, server)| server.shutdown())
3523 .collect::<Vec<_>>();
3524 Some(
3525 async move {
3526 futures::future::join_all(shutdown_futures).await;
3527 }
3528 .boxed(),
3529 )
3530 }
3531}
3532
3533impl Collaborator {
3534 fn from_proto(
3535 message: proto::Collaborator,
3536 user_store: &ModelHandle<UserStore>,
3537 cx: &mut AsyncAppContext,
3538 ) -> impl Future<Output = Result<Self>> {
3539 let user = user_store.update(cx, |user_store, cx| {
3540 user_store.fetch_user(message.user_id, cx)
3541 });
3542
3543 async move {
3544 Ok(Self {
3545 peer_id: PeerId(message.peer_id),
3546 user: user.await?,
3547 replica_id: message.replica_id as ReplicaId,
3548 })
3549 }
3550 }
3551}
3552
3553impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3554 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3555 Self {
3556 worktree_id,
3557 path: path.as_ref().into(),
3558 }
3559 }
3560}
3561
3562impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3563 fn from(options: lsp::CreateFileOptions) -> Self {
3564 Self {
3565 overwrite: options.overwrite.unwrap_or(false),
3566 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3567 }
3568 }
3569}
3570
3571impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3572 fn from(options: lsp::RenameFileOptions) -> Self {
3573 Self {
3574 overwrite: options.overwrite.unwrap_or(false),
3575 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3576 }
3577 }
3578}
3579
3580impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3581 fn from(options: lsp::DeleteFileOptions) -> Self {
3582 Self {
3583 recursive: options.recursive.unwrap_or(false),
3584 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3585 }
3586 }
3587}
3588
3589fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3590 proto::Symbol {
3591 source_worktree_id: symbol.source_worktree_id.to_proto(),
3592 worktree_id: symbol.worktree_id.to_proto(),
3593 language_name: symbol.language_name.clone(),
3594 name: symbol.name.clone(),
3595 kind: unsafe { mem::transmute(symbol.kind) },
3596 path: symbol.path.to_string_lossy().to_string(),
3597 start: Some(proto::Point {
3598 row: symbol.range.start.row,
3599 column: symbol.range.start.column,
3600 }),
3601 end: Some(proto::Point {
3602 row: symbol.range.end.row,
3603 column: symbol.range.end.column,
3604 }),
3605 signature: symbol.signature.to_vec(),
3606 }
3607}
3608
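// Computes `path` relative to `base`, inserting `..` components where the two
// paths diverge.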
3609fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3610 let mut path_components = path.components();
3611 let mut base_components = base.components();
3612 let mut components: Vec<Component> = Vec::new();
3613 loop {
3614 match (path_components.next(), base_components.next()) {
3615 (None, None) => break,
3616 (Some(a), None) => {
3617 components.push(a);
3618 components.extend(path_components.by_ref());
3619 break;
3620 }
3621 (None, _) => components.push(Component::ParentDir),
3622 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3623 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3624 (Some(a), Some(_)) => {
3625 components.push(Component::ParentDir);
3626 for _ in base_components {
3627 components.push(Component::ParentDir);
3628 }
3629 components.push(a);
3630 components.extend(path_components.by_ref());
3631 break;
3632 }
3633 }
3634 }
3635 components.iter().map(|c| c.as_os_str()).collect()
3636}
3637
3638#[cfg(test)]
3639mod tests {
3640 use super::{Event, *};
3641 use fs::RealFs;
3642 use futures::StreamExt;
3643 use gpui::test::subscribe;
3644 use language::{
3645 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3646 };
3647 use lsp::Url;
3648 use serde_json::json;
3649 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3650 use unindent::Unindent as _;
3651 use util::test::temp_tree;
3652 use worktree::WorktreeHandle as _;
3653
3654 #[gpui::test]
3655 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3656 let dir = temp_tree(json!({
3657 "root": {
3658 "apple": "",
3659 "banana": {
3660 "carrot": {
3661 "date": "",
3662 "endive": "",
3663 }
3664 },
3665 "fennel": {
3666 "grape": "",
3667 }
3668 }
3669 }));
3670
3671 let root_link_path = dir.path().join("root_link");
3672 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3673 unix::fs::symlink(
3674 &dir.path().join("root/fennel"),
3675 &dir.path().join("root/finnochio"),
3676 )
3677 .unwrap();
3678
3679 let project = Project::test(Arc::new(RealFs), cx);
3680
3681 let (tree, _) = project
3682 .update(cx, |project, cx| {
3683 project.find_or_create_local_worktree(&root_link_path, true, cx)
3684 })
3685 .await
3686 .unwrap();
3687
3688 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3689 .await;
3690 cx.read(|cx| {
3691 let tree = tree.read(cx);
3692 assert_eq!(tree.file_count(), 5);
3693 assert_eq!(
3694 tree.inode_for_path("fennel/grape"),
3695 tree.inode_for_path("finnochio/grape")
3696 );
3697 });
3698
3699 let cancel_flag = Default::default();
3700 let results = project
3701 .read_with(cx, |project, cx| {
3702 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3703 })
3704 .await;
3705 assert_eq!(
3706 results
3707 .into_iter()
3708 .map(|result| result.path)
3709 .collect::<Vec<Arc<Path>>>(),
3710 vec![
3711 PathBuf::from("banana/carrot/date").into(),
3712 PathBuf::from("banana/carrot/endive").into(),
3713 ]
3714 );
3715 }
3716
3717 #[gpui::test]
3718 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3719 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3720 let progress_token = language_server_config
3721 .disk_based_diagnostics_progress_token
3722 .clone()
3723 .unwrap();
3724
3725 let language = Arc::new(Language::new(
3726 LanguageConfig {
3727 name: "Rust".into(),
3728 path_suffixes: vec!["rs".to_string()],
3729 language_server: Some(language_server_config),
3730 ..Default::default()
3731 },
3732 Some(tree_sitter_rust::language()),
3733 ));
3734
3735 let fs = FakeFs::new(cx.background());
3736 fs.insert_tree(
3737 "/dir",
3738 json!({
3739 "a.rs": "fn a() { A }",
3740 "b.rs": "const y: i32 = 1",
3741 }),
3742 )
3743 .await;
3744
3745 let project = Project::test(fs, cx);
3746 project.update(cx, |project, _| {
3747 Arc::get_mut(&mut project.languages).unwrap().add(language);
3748 });
3749
3750 let (tree, _) = project
3751 .update(cx, |project, cx| {
3752 project.find_or_create_local_worktree("/dir", true, cx)
3753 })
3754 .await
3755 .unwrap();
3756 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3757
3758 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3759 .await;
3760
3761 // Cause worktree to start the fake language server
3762 let _buffer = project
3763 .update(cx, |project, cx| {
3764 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3765 })
3766 .await
3767 .unwrap();
3768
3769 let mut events = subscribe(&project, cx);
3770
3771 let mut fake_server = fake_servers.next().await.unwrap();
3772 fake_server.start_progress(&progress_token).await;
3773 assert_eq!(
3774 events.next().await.unwrap(),
3775 Event::DiskBasedDiagnosticsStarted
3776 );
3777
3778 fake_server.start_progress(&progress_token).await;
3779 fake_server.end_progress(&progress_token).await;
3780 fake_server.start_progress(&progress_token).await;
3781
3782 fake_server
3783 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3784 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3785 version: None,
3786 diagnostics: vec![lsp::Diagnostic {
3787 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3788 severity: Some(lsp::DiagnosticSeverity::ERROR),
3789 message: "undefined variable 'A'".to_string(),
3790 ..Default::default()
3791 }],
3792 })
3793 .await;
3794 assert_eq!(
3795 events.next().await.unwrap(),
3796 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3797 );
3798
3799 fake_server.end_progress(&progress_token).await;
3800 fake_server.end_progress(&progress_token).await;
3801 assert_eq!(
3802 events.next().await.unwrap(),
3803 Event::DiskBasedDiagnosticsUpdated
3804 );
3805 assert_eq!(
3806 events.next().await.unwrap(),
3807 Event::DiskBasedDiagnosticsFinished
3808 );
3809
3810 let buffer = project
3811 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3812 .await
3813 .unwrap();
3814
3815 buffer.read_with(cx, |buffer, _| {
3816 let snapshot = buffer.snapshot();
3817 let diagnostics = snapshot
3818 .diagnostics_in_range::<_, Point>(0..buffer.len())
3819 .collect::<Vec<_>>();
3820 assert_eq!(
3821 diagnostics,
3822 &[DiagnosticEntry {
3823 range: Point::new(0, 9)..Point::new(0, 10),
3824 diagnostic: Diagnostic {
3825 severity: lsp::DiagnosticSeverity::ERROR,
3826 message: "undefined variable 'A'".to_string(),
3827 group_id: 0,
3828 is_primary: true,
3829 ..Default::default()
3830 }
3831 }]
3832 )
3833 });
3834 }
3835
3836 #[gpui::test]
3837 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3838 let dir = temp_tree(json!({
3839 "root": {
3840 "dir1": {},
3841 "dir2": {
3842 "dir3": {}
3843 }
3844 }
3845 }));
3846
3847 let project = Project::test(Arc::new(RealFs), cx);
3848 let (tree, _) = project
3849 .update(cx, |project, cx| {
3850 project.find_or_create_local_worktree(&dir.path(), true, cx)
3851 })
3852 .await
3853 .unwrap();
3854
3855 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3856 .await;
3857
3858 let cancel_flag = Default::default();
3859 let results = project
3860 .read_with(cx, |project, cx| {
3861 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3862 })
3863 .await;
3864
3865 assert!(results.is_empty());
3866 }
3867
3868 #[gpui::test]
3869 async fn test_definition(cx: &mut gpui::TestAppContext) {
3870 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3871 let language = Arc::new(Language::new(
3872 LanguageConfig {
3873 name: "Rust".into(),
3874 path_suffixes: vec!["rs".to_string()],
3875 language_server: Some(language_server_config),
3876 ..Default::default()
3877 },
3878 Some(tree_sitter_rust::language()),
3879 ));
3880
3881 let fs = FakeFs::new(cx.background());
3882 fs.insert_tree(
3883 "/dir",
3884 json!({
3885 "a.rs": "const fn a() { A }",
3886 "b.rs": "const y: i32 = crate::a()",
3887 }),
3888 )
3889 .await;
3890
3891 let project = Project::test(fs, cx);
3892 project.update(cx, |project, _| {
3893 Arc::get_mut(&mut project.languages).unwrap().add(language);
3894 });
3895
3896 let (tree, _) = project
3897 .update(cx, |project, cx| {
3898 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
3899 })
3900 .await
3901 .unwrap();
3902 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3903 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3904 .await;
3905
3906 let buffer = project
3907 .update(cx, |project, cx| {
3908 project.open_buffer(
3909 ProjectPath {
3910 worktree_id,
3911 path: Path::new("").into(),
3912 },
3913 cx,
3914 )
3915 })
3916 .await
3917 .unwrap();
3918
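        // Handle the go-to-definition request by returning a location in a file
        // that is not yet part of any worktree in the project.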
3919 let mut fake_server = fake_servers.next().await.unwrap();
3920 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
3921 let params = params.text_document_position_params;
3922 assert_eq!(
3923 params.text_document.uri.to_file_path().unwrap(),
3924 Path::new("/dir/b.rs"),
3925 );
3926 assert_eq!(params.position, lsp::Position::new(0, 22));
3927
3928 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
3929 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
3930 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3931 )))
3932 });
3933
3934 let mut definitions = project
3935 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
3936 .await
3937 .unwrap();
3938
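        // The definition's target is in a different file, which is loaded into a
        // separate, non-visible worktree.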
3939 assert_eq!(definitions.len(), 1);
3940 let definition = definitions.pop().unwrap();
3941 cx.update(|cx| {
3942 let target_buffer = definition.buffer.read(cx);
3943 assert_eq!(
3944 target_buffer
3945 .file()
3946 .unwrap()
3947 .as_local()
3948 .unwrap()
3949 .abs_path(cx),
3950 Path::new("/dir/a.rs"),
3951 );
3952 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
3953 assert_eq!(
3954 list_worktrees(&project, cx),
3955 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
3956 );
3957
3958 drop(definition);
3959 });
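        // Once the definition is dropped, the extra worktree is released.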
3960 cx.read(|cx| {
3961 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
3962 });
3963
3964 fn list_worktrees<'a>(
3965 project: &'a ModelHandle<Project>,
3966 cx: &'a AppContext,
3967 ) -> Vec<(&'a Path, bool)> {
3968 project
3969 .read(cx)
3970 .worktrees(cx)
3971 .map(|worktree| {
3972 let worktree = worktree.read(cx);
3973 (
3974 worktree.as_local().unwrap().abs_path().as_ref(),
3975 worktree.is_visible(),
3976 )
3977 })
3978 .collect::<Vec<_>>()
3979 }
3980 }
3981
3982 #[gpui::test]
3983 async fn test_save_file(cx: &mut gpui::TestAppContext) {
3984 let fs = FakeFs::new(cx.background());
3985 fs.insert_tree(
3986 "/dir",
3987 json!({
3988 "file1": "the old contents",
3989 }),
3990 )
3991 .await;
3992
3993 let project = Project::test(fs.clone(), cx);
3994 let worktree_id = project
3995 .update(cx, |p, cx| {
3996 p.find_or_create_local_worktree("/dir", true, cx)
3997 })
3998 .await
3999 .unwrap()
4000 .0
4001 .read_with(cx, |tree, _| tree.id());
4002
4003 let buffer = project
4004 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4005 .await
4006 .unwrap();
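        // Edit the buffer and save it; the contents on disk should match the buffer.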
4007 buffer
4008 .update(cx, |buffer, cx| {
4009 assert_eq!(buffer.text(), "the old contents");
4010 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4011 buffer.save(cx)
4012 })
4013 .await
4014 .unwrap();
4015
4016 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4017 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4018 }
4019
4020 #[gpui::test]
4021 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4022 let fs = FakeFs::new(cx.background());
4023 fs.insert_tree(
4024 "/dir",
4025 json!({
4026 "file1": "the old contents",
4027 }),
4028 )
4029 .await;
4030
4031 let project = Project::test(fs.clone(), cx);
4032 let worktree_id = project
4033 .update(cx, |p, cx| {
4034 p.find_or_create_local_worktree("/dir/file1", true, cx)
4035 })
4036 .await
4037 .unwrap()
4038 .0
4039 .read_with(cx, |tree, _| tree.id());
4040
4041 let buffer = project
4042 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4043 .await
4044 .unwrap();
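        // Save a buffer that lives in a single-file worktree, opened via an empty
        // relative path.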
4045 buffer
4046 .update(cx, |buffer, cx| {
4047 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4048 buffer.save(cx)
4049 })
4050 .await
4051 .unwrap();
4052
4053 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4054 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4055 }
4056
4057 #[gpui::test(retries = 5)]
4058 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4059 let dir = temp_tree(json!({
4060 "a": {
4061 "file1": "",
4062 "file2": "",
4063 "file3": "",
4064 },
4065 "b": {
4066 "c": {
4067 "file4": "",
4068 "file5": "",
4069 }
4070 }
4071 }));
4072
4073 let project = Project::test(Arc::new(RealFs), cx);
4074 let rpc = project.read_with(cx, |p, _| p.client.clone());
4075
4076 let (tree, _) = project
4077 .update(cx, |p, cx| {
4078 p.find_or_create_local_worktree(dir.path(), true, cx)
4079 })
4080 .await
4081 .unwrap();
4082 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4083
4084 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4085 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4086 async move { buffer.await.unwrap() }
4087 };
4088 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4089 tree.read_with(cx, |tree, _| {
4090 tree.entry_for_path(path)
4091                     .unwrap_or_else(|| panic!("no entry for path {}", path))
4092 .id
4093 })
4094 };
4095
4096 let buffer2 = buffer_for_path("a/file2", cx).await;
4097 let buffer3 = buffer_for_path("a/file3", cx).await;
4098 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4099 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4100
4101 let file2_id = id_for_path("a/file2", &cx);
4102 let file3_id = id_for_path("a/file3", &cx);
4103 let file4_id = id_for_path("b/c/file4", &cx);
4104
4105 // Wait for the initial scan.
4106 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4107 .await;
4108
4109 // Create a remote copy of this worktree.
4110 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4111 let (remote, load_task) = cx.update(|cx| {
4112 Worktree::remote(
4113 1,
4114 1,
4115 initial_snapshot.to_proto(&Default::default(), true),
4116 rpc.clone(),
4117 cx,
4118 )
4119 });
4120 load_task.await;
4121
4122 cx.read(|cx| {
4123 assert!(!buffer2.read(cx).is_dirty());
4124 assert!(!buffer3.read(cx).is_dirty());
4125 assert!(!buffer4.read(cx).is_dirty());
4126 assert!(!buffer5.read(cx).is_dirty());
4127 });
4128
4129 // Rename and delete files and directories.
4130 tree.flush_fs_events(&cx).await;
4131 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4132 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4133 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4134 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4135 tree.flush_fs_events(&cx).await;
4136
4137 let expected_paths = vec![
4138 "a",
4139 "a/file1",
4140 "a/file2.new",
4141 "b",
4142 "d",
4143 "d/file3",
4144 "d/file4",
4145 ];
4146
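        // After the rescan, entries keep their ids across renames, and open buffers
        // reflect their files' new paths (or deletion).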
4147 cx.read(|app| {
4148 assert_eq!(
4149 tree.read(app)
4150 .paths()
4151 .map(|p| p.to_str().unwrap())
4152 .collect::<Vec<_>>(),
4153 expected_paths
4154 );
4155
4156 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4157 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4158 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4159
4160 assert_eq!(
4161 buffer2.read(app).file().unwrap().path().as_ref(),
4162 Path::new("a/file2.new")
4163 );
4164 assert_eq!(
4165 buffer3.read(app).file().unwrap().path().as_ref(),
4166 Path::new("d/file3")
4167 );
4168 assert_eq!(
4169 buffer4.read(app).file().unwrap().path().as_ref(),
4170 Path::new("d/file4")
4171 );
4172 assert_eq!(
4173 buffer5.read(app).file().unwrap().path().as_ref(),
4174 Path::new("b/c/file5")
4175 );
4176
4177 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4178 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4179 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4180 assert!(buffer5.read(app).file().unwrap().is_deleted());
4181 });
4182
4183 // Update the remote worktree. Check that it becomes consistent with the
4184 // local worktree.
4185 remote.update(cx, |remote, cx| {
4186 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4187 &initial_snapshot,
4188 1,
4189 1,
4190 true,
4191 );
4192 remote
4193 .as_remote_mut()
4194 .unwrap()
4195 .snapshot
4196 .apply_remote_update(update_message)
4197 .unwrap();
4198
4199 assert_eq!(
4200 remote
4201 .paths()
4202 .map(|p| p.to_str().unwrap())
4203 .collect::<Vec<_>>(),
4204 expected_paths
4205 );
4206 });
4207 }
4208
4209 #[gpui::test]
4210 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4211 let fs = FakeFs::new(cx.background());
4212 fs.insert_tree(
4213 "/the-dir",
4214 json!({
4215 "a.txt": "a-contents",
4216 "b.txt": "b-contents",
4217 }),
4218 )
4219 .await;
4220
4221 let project = Project::test(fs.clone(), cx);
4222 let worktree_id = project
4223 .update(cx, |p, cx| {
4224 p.find_or_create_local_worktree("/the-dir", true, cx)
4225 })
4226 .await
4227 .unwrap()
4228 .0
4229 .read_with(cx, |tree, _| tree.id());
4230
4231 // Spawn multiple tasks to open paths, repeating some paths.
4232 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4233 (
4234 p.open_buffer((worktree_id, "a.txt"), cx),
4235 p.open_buffer((worktree_id, "b.txt"), cx),
4236 p.open_buffer((worktree_id, "a.txt"), cx),
4237 )
4238 });
4239
4240 let buffer_a_1 = buffer_a_1.await.unwrap();
4241 let buffer_a_2 = buffer_a_2.await.unwrap();
4242 let buffer_b = buffer_b.await.unwrap();
4243 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4244 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4245
4246 // There is only one buffer per path.
4247 let buffer_a_id = buffer_a_1.id();
4248 assert_eq!(buffer_a_2.id(), buffer_a_id);
4249
4250 // Open the same path again while it is still open.
4251 drop(buffer_a_1);
4252 let buffer_a_3 = project
4253 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4254 .await
4255 .unwrap();
4256
4257 // There's still only one buffer per path.
4258 assert_eq!(buffer_a_3.id(), buffer_a_id);
4259 }
4260
4261 #[gpui::test]
4262 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4263 use std::fs;
4264
4265 let dir = temp_tree(json!({
4266 "file1": "abc",
4267 "file2": "def",
4268 "file3": "ghi",
4269 }));
4270
4271 let project = Project::test(Arc::new(RealFs), cx);
4272 let (worktree, _) = project
4273 .update(cx, |p, cx| {
4274 p.find_or_create_local_worktree(dir.path(), true, cx)
4275 })
4276 .await
4277 .unwrap();
4278 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4279
4280 worktree.flush_fs_events(&cx).await;
4281 worktree
4282 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4283 .await;
4284
4285 let buffer1 = project
4286 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4287 .await
4288 .unwrap();
4289 let events = Rc::new(RefCell::new(Vec::new()));
4290
4291 // initially, the buffer isn't dirty.
4292 buffer1.update(cx, |buffer, cx| {
4293 cx.subscribe(&buffer1, {
4294 let events = events.clone();
4295 move |_, _, event, _| events.borrow_mut().push(event.clone())
4296 })
4297 .detach();
4298
4299 assert!(!buffer.is_dirty());
4300 assert!(events.borrow().is_empty());
4301
4302 buffer.edit(vec![1..2], "", cx);
4303 });
4304
4305 // after the first edit, the buffer is dirty, and emits a dirtied event.
4306 buffer1.update(cx, |buffer, cx| {
4307             assert_eq!(buffer.text(), "ac");
4308 assert!(buffer.is_dirty());
4309 assert_eq!(
4310 *events.borrow(),
4311 &[language::Event::Edited, language::Event::Dirtied]
4312 );
4313 events.borrow_mut().clear();
4314 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4315 });
4316
4317 // after saving, the buffer is not dirty, and emits a saved event.
4318 buffer1.update(cx, |buffer, cx| {
4319 assert!(!buffer.is_dirty());
4320 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4321 events.borrow_mut().clear();
4322
4323 buffer.edit(vec![1..1], "B", cx);
4324 buffer.edit(vec![2..2], "D", cx);
4325 });
4326
4327         // after editing again, the buffer is dirty, and emits another dirtied event.
4328 buffer1.update(cx, |buffer, cx| {
4329             assert_eq!(buffer.text(), "aBDc");
4330 assert!(buffer.is_dirty());
4331 assert_eq!(
4332 *events.borrow(),
4333 &[
4334 language::Event::Edited,
4335 language::Event::Dirtied,
4336 language::Event::Edited,
4337 ],
4338 );
4339 events.borrow_mut().clear();
4340
4341             // TODO - currently, after restoring the buffer to its
4342             // previously-saved state, the buffer is still considered dirty.
4343 buffer.edit([1..3], "", cx);
4344             assert_eq!(buffer.text(), "ac");
4345 assert!(buffer.is_dirty());
4346 });
4347
4348 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4349
4350 // When a file is deleted, the buffer is considered dirty.
4351 let events = Rc::new(RefCell::new(Vec::new()));
4352 let buffer2 = project
4353 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4354 .await
4355 .unwrap();
4356 buffer2.update(cx, |_, cx| {
4357 cx.subscribe(&buffer2, {
4358 let events = events.clone();
4359 move |_, _, event, _| events.borrow_mut().push(event.clone())
4360 })
4361 .detach();
4362 });
4363
4364 fs::remove_file(dir.path().join("file2")).unwrap();
4365 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4366 assert_eq!(
4367 *events.borrow(),
4368 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4369 );
4370
4371 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4372 let events = Rc::new(RefCell::new(Vec::new()));
4373 let buffer3 = project
4374 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4375 .await
4376 .unwrap();
4377 buffer3.update(cx, |_, cx| {
4378 cx.subscribe(&buffer3, {
4379 let events = events.clone();
4380 move |_, _, event, _| events.borrow_mut().push(event.clone())
4381 })
4382 .detach();
4383 });
4384
4385 worktree.flush_fs_events(&cx).await;
4386 buffer3.update(cx, |buffer, cx| {
4387 buffer.edit(Some(0..0), "x", cx);
4388 });
4389 events.borrow_mut().clear();
4390 fs::remove_file(dir.path().join("file3")).unwrap();
4391 buffer3
4392 .condition(&cx, |_, _| !events.borrow().is_empty())
4393 .await;
4394 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4395 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4396 }
4397
4398 #[gpui::test]
4399 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4400 use std::fs;
4401
4402 let initial_contents = "aaa\nbbbbb\nc\n";
4403 let dir = temp_tree(json!({ "the-file": initial_contents }));
4404
4405 let project = Project::test(Arc::new(RealFs), cx);
4406 let (worktree, _) = project
4407 .update(cx, |p, cx| {
4408 p.find_or_create_local_worktree(dir.path(), true, cx)
4409 })
4410 .await
4411 .unwrap();
4412 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4413
4414 worktree
4415 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4416 .await;
4417
4418 let abs_path = dir.path().join("the-file");
4419 let buffer = project
4420 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4421 .await
4422 .unwrap();
4423
4424 // TODO
4425 // Add a cursor on each row.
4426 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4427 // assert!(!buffer.is_dirty());
4428 // buffer.add_selection_set(
4429 // &(0..3)
4430 // .map(|row| Selection {
4431 // id: row as usize,
4432 // start: Point::new(row, 1),
4433 // end: Point::new(row, 1),
4434 // reversed: false,
4435 // goal: SelectionGoal::None,
4436 // })
4437 // .collect::<Vec<_>>(),
4438 // cx,
4439 // )
4440 // });
4441
4442 // Change the file on disk, adding two new lines of text, and removing
4443 // one line.
4444 buffer.read_with(cx, |buffer, _| {
4445 assert!(!buffer.is_dirty());
4446 assert!(!buffer.has_conflict());
4447 });
4448 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4449 fs::write(&abs_path, new_contents).unwrap();
4450
4451 // Because the buffer was not modified, it is reloaded from disk. Its
4452 // contents are edited according to the diff between the old and new
4453 // file contents.
4454 buffer
4455 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4456 .await;
4457
4458 buffer.update(cx, |buffer, _| {
4459 assert_eq!(buffer.text(), new_contents);
4460 assert!(!buffer.is_dirty());
4461 assert!(!buffer.has_conflict());
4462
4463 // TODO
4464 // let cursor_positions = buffer
4465 // .selection_set(selection_set_id)
4466 // .unwrap()
4467 // .selections::<Point>(&*buffer)
4468 // .map(|selection| {
4469 // assert_eq!(selection.start, selection.end);
4470 // selection.start
4471 // })
4472 // .collect::<Vec<_>>();
4473 // assert_eq!(
4474 // cursor_positions,
4475 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4476 // );
4477 });
4478
4479 // Modify the buffer
4480 buffer.update(cx, |buffer, cx| {
4481 buffer.edit(vec![0..0], " ", cx);
4482 assert!(buffer.is_dirty());
4483 assert!(!buffer.has_conflict());
4484 });
4485
4486 // Change the file on disk again, adding blank lines to the beginning.
4487 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4488
4489 // Because the buffer is modified, it doesn't reload from disk, but is
4490 // marked as having a conflict.
4491 buffer
4492 .condition(&cx, |buffer, _| buffer.has_conflict())
4493 .await;
4494 }
4495
4496 #[gpui::test]
4497 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4498 let fs = FakeFs::new(cx.background());
4499 fs.insert_tree(
4500 "/the-dir",
4501 json!({
4502 "a.rs": "
4503 fn foo(mut v: Vec<usize>) {
4504 for x in &v {
4505 v.push(1);
4506 }
4507 }
4508 "
4509 .unindent(),
4510 }),
4511 )
4512 .await;
4513
4514 let project = Project::test(fs.clone(), cx);
4515 let (worktree, _) = project
4516 .update(cx, |p, cx| {
4517 p.find_or_create_local_worktree("/the-dir", true, cx)
4518 })
4519 .await
4520 .unwrap();
4521 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4522
4523 let buffer = project
4524 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4525 .await
4526 .unwrap();
4527
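        // Build a diagnostics message in which hint diagnostics reference their
        // primary diagnostics via relatedInformation, and vice versa.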
4528 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4529 let message = lsp::PublishDiagnosticsParams {
4530 uri: buffer_uri.clone(),
4531 diagnostics: vec![
4532 lsp::Diagnostic {
4533 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4534 severity: Some(DiagnosticSeverity::WARNING),
4535 message: "error 1".to_string(),
4536 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4537 location: lsp::Location {
4538 uri: buffer_uri.clone(),
4539 range: lsp::Range::new(
4540 lsp::Position::new(1, 8),
4541 lsp::Position::new(1, 9),
4542 ),
4543 },
4544 message: "error 1 hint 1".to_string(),
4545 }]),
4546 ..Default::default()
4547 },
4548 lsp::Diagnostic {
4549 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4550 severity: Some(DiagnosticSeverity::HINT),
4551 message: "error 1 hint 1".to_string(),
4552 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4553 location: lsp::Location {
4554 uri: buffer_uri.clone(),
4555 range: lsp::Range::new(
4556 lsp::Position::new(1, 8),
4557 lsp::Position::new(1, 9),
4558 ),
4559 },
4560 message: "original diagnostic".to_string(),
4561 }]),
4562 ..Default::default()
4563 },
4564 lsp::Diagnostic {
4565 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4566 severity: Some(DiagnosticSeverity::ERROR),
4567 message: "error 2".to_string(),
4568 related_information: Some(vec![
4569 lsp::DiagnosticRelatedInformation {
4570 location: lsp::Location {
4571 uri: buffer_uri.clone(),
4572 range: lsp::Range::new(
4573 lsp::Position::new(1, 13),
4574 lsp::Position::new(1, 15),
4575 ),
4576 },
4577 message: "error 2 hint 1".to_string(),
4578 },
4579 lsp::DiagnosticRelatedInformation {
4580 location: lsp::Location {
4581 uri: buffer_uri.clone(),
4582 range: lsp::Range::new(
4583 lsp::Position::new(1, 13),
4584 lsp::Position::new(1, 15),
4585 ),
4586 },
4587 message: "error 2 hint 2".to_string(),
4588 },
4589 ]),
4590 ..Default::default()
4591 },
4592 lsp::Diagnostic {
4593 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4594 severity: Some(DiagnosticSeverity::HINT),
4595 message: "error 2 hint 1".to_string(),
4596 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4597 location: lsp::Location {
4598 uri: buffer_uri.clone(),
4599 range: lsp::Range::new(
4600 lsp::Position::new(2, 8),
4601 lsp::Position::new(2, 17),
4602 ),
4603 },
4604 message: "original diagnostic".to_string(),
4605 }]),
4606 ..Default::default()
4607 },
4608 lsp::Diagnostic {
4609 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4610 severity: Some(DiagnosticSeverity::HINT),
4611 message: "error 2 hint 2".to_string(),
4612 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4613 location: lsp::Location {
4614 uri: buffer_uri.clone(),
4615 range: lsp::Range::new(
4616 lsp::Position::new(2, 8),
4617 lsp::Position::new(2, 17),
4618 ),
4619 },
4620 message: "original diagnostic".to_string(),
4621 }]),
4622 ..Default::default()
4623 },
4624 ],
4625 version: None,
4626 };
4627
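        // Ingest the diagnostics; related diagnostics should be grouped together,
        // sharing a group id with a single primary entry per group.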
4628 project
4629 .update(cx, |p, cx| {
4630 p.update_diagnostics(message, &Default::default(), cx)
4631 })
4632 .unwrap();
4633 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4634
4635 assert_eq!(
4636 buffer
4637 .diagnostics_in_range::<_, Point>(0..buffer.len())
4638 .collect::<Vec<_>>(),
4639 &[
4640 DiagnosticEntry {
4641 range: Point::new(1, 8)..Point::new(1, 9),
4642 diagnostic: Diagnostic {
4643 severity: DiagnosticSeverity::WARNING,
4644 message: "error 1".to_string(),
4645 group_id: 0,
4646 is_primary: true,
4647 ..Default::default()
4648 }
4649 },
4650 DiagnosticEntry {
4651 range: Point::new(1, 8)..Point::new(1, 9),
4652 diagnostic: Diagnostic {
4653 severity: DiagnosticSeverity::HINT,
4654 message: "error 1 hint 1".to_string(),
4655 group_id: 0,
4656 is_primary: false,
4657 ..Default::default()
4658 }
4659 },
4660 DiagnosticEntry {
4661 range: Point::new(1, 13)..Point::new(1, 15),
4662 diagnostic: Diagnostic {
4663 severity: DiagnosticSeverity::HINT,
4664 message: "error 2 hint 1".to_string(),
4665 group_id: 1,
4666 is_primary: false,
4667 ..Default::default()
4668 }
4669 },
4670 DiagnosticEntry {
4671 range: Point::new(1, 13)..Point::new(1, 15),
4672 diagnostic: Diagnostic {
4673 severity: DiagnosticSeverity::HINT,
4674 message: "error 2 hint 2".to_string(),
4675 group_id: 1,
4676 is_primary: false,
4677 ..Default::default()
4678 }
4679 },
4680 DiagnosticEntry {
4681 range: Point::new(2, 8)..Point::new(2, 17),
4682 diagnostic: Diagnostic {
4683 severity: DiagnosticSeverity::ERROR,
4684 message: "error 2".to_string(),
4685 group_id: 1,
4686 is_primary: true,
4687 ..Default::default()
4688 }
4689 }
4690 ]
4691 );
4692
4693 assert_eq!(
4694 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4695 &[
4696 DiagnosticEntry {
4697 range: Point::new(1, 8)..Point::new(1, 9),
4698 diagnostic: Diagnostic {
4699 severity: DiagnosticSeverity::WARNING,
4700 message: "error 1".to_string(),
4701 group_id: 0,
4702 is_primary: true,
4703 ..Default::default()
4704 }
4705 },
4706 DiagnosticEntry {
4707 range: Point::new(1, 8)..Point::new(1, 9),
4708 diagnostic: Diagnostic {
4709 severity: DiagnosticSeverity::HINT,
4710 message: "error 1 hint 1".to_string(),
4711 group_id: 0,
4712 is_primary: false,
4713 ..Default::default()
4714 }
4715 },
4716 ]
4717 );
4718 assert_eq!(
4719 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4720 &[
4721 DiagnosticEntry {
4722 range: Point::new(1, 13)..Point::new(1, 15),
4723 diagnostic: Diagnostic {
4724 severity: DiagnosticSeverity::HINT,
4725 message: "error 2 hint 1".to_string(),
4726 group_id: 1,
4727 is_primary: false,
4728 ..Default::default()
4729 }
4730 },
4731 DiagnosticEntry {
4732 range: Point::new(1, 13)..Point::new(1, 15),
4733 diagnostic: Diagnostic {
4734 severity: DiagnosticSeverity::HINT,
4735 message: "error 2 hint 2".to_string(),
4736 group_id: 1,
4737 is_primary: false,
4738 ..Default::default()
4739 }
4740 },
4741 DiagnosticEntry {
4742 range: Point::new(2, 8)..Point::new(2, 17),
4743 diagnostic: Diagnostic {
4744 severity: DiagnosticSeverity::ERROR,
4745 message: "error 2".to_string(),
4746 group_id: 1,
4747 is_primary: true,
4748 ..Default::default()
4749 }
4750 }
4751 ]
4752 );
4753 }
4754
4755 #[gpui::test]
4756 async fn test_rename(cx: &mut gpui::TestAppContext) {
4757 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4758 let language = Arc::new(Language::new(
4759 LanguageConfig {
4760 name: "Rust".into(),
4761 path_suffixes: vec!["rs".to_string()],
4762 language_server: Some(language_server_config),
4763 ..Default::default()
4764 },
4765 Some(tree_sitter_rust::language()),
4766 ));
4767
4768 let fs = FakeFs::new(cx.background());
4769 fs.insert_tree(
4770 "/dir",
4771 json!({
4772 "one.rs": "const ONE: usize = 1;",
4773 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4774 }),
4775 )
4776 .await;
4777
4778 let project = Project::test(fs.clone(), cx);
4779 project.update(cx, |project, _| {
4780 Arc::get_mut(&mut project.languages).unwrap().add(language);
4781 });
4782
4783 let (tree, _) = project
4784 .update(cx, |project, cx| {
4785 project.find_or_create_local_worktree("/dir", true, cx)
4786 })
4787 .await
4788 .unwrap();
4789 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4790 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4791 .await;
4792
4793 let buffer = project
4794 .update(cx, |project, cx| {
4795 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4796 })
4797 .await
4798 .unwrap();
4799
4800 let mut fake_server = fake_servers.next().await.unwrap();
4801
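        // Respond to the prepare-rename request with the range of the symbol under
        // the cursor, and verify that it maps back to the expected buffer offsets.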
4802 let response = project.update(cx, |project, cx| {
4803 project.prepare_rename(buffer.clone(), 7, cx)
4804 });
4805 fake_server
4806 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4807 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4808 assert_eq!(params.position, lsp::Position::new(0, 7));
4809 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4810 lsp::Position::new(0, 6),
4811 lsp::Position::new(0, 9),
4812 )))
4813 })
4814 .next()
4815 .await
4816 .unwrap();
4817 let range = response.await.unwrap().unwrap();
4818 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4819 assert_eq!(range, 6..9);
4820
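        // Perform the rename. The server's workspace edit touches both files, so the
        // resulting transaction should contain an entry for each affected buffer.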
4821 let response = project.update(cx, |project, cx| {
4822 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4823 });
4824 fake_server
4825 .handle_request::<lsp::request::Rename, _>(|params, _| {
4826 assert_eq!(
4827 params.text_document_position.text_document.uri.as_str(),
4828 "file:///dir/one.rs"
4829 );
4830 assert_eq!(
4831 params.text_document_position.position,
4832 lsp::Position::new(0, 7)
4833 );
4834 assert_eq!(params.new_name, "THREE");
4835 Some(lsp::WorkspaceEdit {
4836 changes: Some(
4837 [
4838 (
4839 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4840 vec![lsp::TextEdit::new(
4841 lsp::Range::new(
4842 lsp::Position::new(0, 6),
4843 lsp::Position::new(0, 9),
4844 ),
4845 "THREE".to_string(),
4846 )],
4847 ),
4848 (
4849 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4850 vec![
4851 lsp::TextEdit::new(
4852 lsp::Range::new(
4853 lsp::Position::new(0, 24),
4854 lsp::Position::new(0, 27),
4855 ),
4856 "THREE".to_string(),
4857 ),
4858 lsp::TextEdit::new(
4859 lsp::Range::new(
4860 lsp::Position::new(0, 35),
4861 lsp::Position::new(0, 38),
4862 ),
4863 "THREE".to_string(),
4864 ),
4865 ],
4866 ),
4867 ]
4868 .into_iter()
4869 .collect(),
4870 ),
4871 ..Default::default()
4872 })
4873 })
4874 .next()
4875 .await
4876 .unwrap();
4877 let mut transaction = response.await.unwrap().0;
4878 assert_eq!(transaction.len(), 2);
4879 assert_eq!(
4880 transaction
4881 .remove_entry(&buffer)
4882 .unwrap()
4883 .0
4884 .read_with(cx, |buffer, _| buffer.text()),
4885 "const THREE: usize = 1;"
4886 );
4887 assert_eq!(
4888 transaction
4889 .into_keys()
4890 .next()
4891 .unwrap()
4892 .read_with(cx, |buffer, _| buffer.text()),
4893 "const TWO: usize = one::THREE + one::THREE;"
4894 );
4895 }
4896
4897 #[gpui::test]
4898 async fn test_search(cx: &mut gpui::TestAppContext) {
4899 let fs = FakeFs::new(cx.background());
4900 fs.insert_tree(
4901 "/dir",
4902 json!({
4903 "one.rs": "const ONE: usize = 1;",
4904 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4905 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4906 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4907 }),
4908 )
4909 .await;
4910 let project = Project::test(fs.clone(), cx);
4911 let (tree, _) = project
4912 .update(cx, |project, cx| {
4913 project.find_or_create_local_worktree("/dir", true, cx)
4914 })
4915 .await
4916 .unwrap();
4917 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4918 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4919 .await;
4920
4921 assert_eq!(
4922 search(&project, SearchQuery::text("TWO", false, true), cx)
4923 .await
4924 .unwrap(),
4925 HashMap::from_iter([
4926 ("two.rs".to_string(), vec![6..9]),
4927 ("three.rs".to_string(), vec![37..40])
4928 ])
4929 );
4930
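        // Open one of the files and replace its references in memory, without saving.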
4931 let buffer_4 = project
4932 .update(cx, |project, cx| {
4933 project.open_buffer((worktree_id, "four.rs"), cx)
4934 })
4935 .await
4936 .unwrap();
4937 buffer_4.update(cx, |buffer, cx| {
4938 buffer.edit([20..28, 31..43], "two::TWO", cx);
4939 });
4940
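        // The new search reflects the unsaved edits: matches are found in the open
        // buffer's current contents rather than in the file on disk.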
4941 assert_eq!(
4942 search(&project, SearchQuery::text("TWO", false, true), cx)
4943 .await
4944 .unwrap(),
4945 HashMap::from_iter([
4946 ("two.rs".to_string(), vec![6..9]),
4947 ("three.rs".to_string(), vec![37..40]),
4948 ("four.rs".to_string(), vec![25..28, 36..39])
4949 ])
4950 );
4951
4952 async fn search(
4953 project: &ModelHandle<Project>,
4954 query: SearchQuery,
4955 cx: &mut gpui::TestAppContext,
4956 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
4957 let results = project
4958 .update(cx, |project, cx| project.search(query, cx))
4959 .await?;
4960
4961 Ok(results
4962 .into_iter()
4963 .map(|(buffer, ranges)| {
4964 buffer.read_with(cx, |buffer, _| {
4965 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4966 let ranges = ranges
4967 .into_iter()
4968 .map(|range| range.to_offset(buffer))
4969 .collect::<Vec<_>>();
4970 (path, ranges)
4971 })
4972 })
4973 .collect())
4974 }
4975 }
4976}