1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
/// Central model for a single open project: the worktrees being tracked,
/// the buffers opened from them, running language servers, and the
/// collaboration state shared over RPC.
pub struct Project {
    // Held strongly or weakly depending on sharing/visibility; see `WorktreeHandle`.
    worktrees: Vec<WorktreeHandle>,
    active_entry: Option<ProjectEntry>,
    languages: Arc<LanguageRegistry>,
    // Running servers, keyed by worktree and language name.
    language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
    // In-flight (or completed) server startups; the shared task resolves to
    // `None` when startup failed.
    started_language_servers:
        HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
    client: Arc<client::Client>,
    user_store: ModelHandle<UserStore>,
    fs: Arc<dyn Fs>,
    client_state: ProjectClientState,
    collaborators: HashMap<PeerId, Collaborator>,
    subscriptions: Vec<client::Subscription>,
    // Counter of servers currently producing disk-based diagnostics.
    language_servers_with_diagnostics_running: isize,
    // Sender/receiver pair notified whenever a buffer is opened.
    opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
    // Buffer ids that have been sent to each remote peer.
    shared_buffers: HashMap<PeerId, HashSet<u64>>,
    // Paths with an open in progress; watchers receive the final result so
    // concurrent opens of the same path share one load.
    loading_buffers: HashMap<
        ProjectPath,
        postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
    >,
    // All known buffers, keyed by remote id.
    opened_buffers: HashMap<u64, OpenBuffer>,
    // Random per-replica value; presumably used to detect state divergence —
    // TODO confirm against callers outside this chunk.
    nonce: u128,
}
70
/// How the project holds on to an opened buffer.
enum OpenBuffer {
    /// Kept alive by the project (used while shared/remote).
    Strong(ModelHandle<Buffer>),
    /// Dropped when no one else holds it.
    Weak(WeakModelHandle<Buffer>),
    /// Buffer not yet materialized; operations received meanwhile are
    /// queued here and applied on registration.
    Loading(Vec<Operation>),
}
76
/// Strong handles keep a worktree alive; weak handles let it drop when
/// unused (toggled when sharing starts/stops).
enum WorktreeHandle {
    Strong(ModelHandle<Worktree>),
    Weak(WeakModelHandle<Worktree>),
}
81
/// Whether this replica is the host of the project or a guest joined
/// over RPC.
enum ProjectClientState {
    Local {
        is_shared: bool,
        // Server-assigned project id; `None` while disconnected.
        remote_id_tx: watch::Sender<Option<u64>>,
        remote_id_rx: watch::Receiver<Option<u64>>,
        // Background task that (re)registers the project on reconnect.
        _maintain_remote_id_task: Task<Option<()>>,
    },
    Remote {
        // Set once the host stops sharing; the project becomes read-only.
        sharing_has_stopped: bool,
        remote_id: u64,
        replica_id: ReplicaId,
    },
}
95
/// A peer participating in this project, with the replica id their edits
/// are attributed to.
#[derive(Clone, Debug)]
pub struct Collaborator {
    pub user: Arc<User>,
    pub peer_id: PeerId,
    pub replica_id: ReplicaId,
}
102
/// Events emitted by a `Project` for observers (e.g. UI).
#[derive(Clone, Debug, PartialEq)]
pub enum Event {
    ActiveEntryChanged(Option<ProjectEntry>),
    WorktreeRemoved(WorktreeId),
    DiskBasedDiagnosticsStarted,
    DiskBasedDiagnosticsUpdated,
    DiskBasedDiagnosticsFinished,
    DiagnosticsUpdated(ProjectPath),
}
112
/// A path to an entry, qualified by the worktree it belongs to; `path` is
/// relative to the worktree root.
#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
pub struct ProjectPath {
    pub worktree_id: WorktreeId,
    pub path: Arc<Path>,
}
118
/// Per-severity counts of primary diagnostics for a path.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DiagnosticSummary {
    pub error_count: usize,
    pub warning_count: usize,
    pub info_count: usize,
    pub hint_count: usize,
}
126
/// An anchored range within a specific buffer (e.g. a definition or
/// reference target).
#[derive(Debug)]
pub struct Location {
    pub buffer: ModelHandle<Buffer>,
    pub range: Range<language::Anchor>,
}
132
/// An LSP document highlight: an anchored range plus its highlight kind
/// (read/write/text).
#[derive(Debug)]
pub struct DocumentHighlight {
    pub range: Range<language::Anchor>,
    pub kind: DocumentHighlightKind,
}
138
/// A project-wide symbol returned by a language server.
#[derive(Clone, Debug)]
pub struct Symbol {
    // Worktree whose language server produced the symbol.
    pub source_worktree_id: WorktreeId,
    // Worktree that actually contains the symbol's file.
    pub worktree_id: WorktreeId,
    pub language_name: String,
    pub path: PathBuf,
    pub label: CodeLabel,
    pub name: String,
    pub kind: lsp::SymbolKind,
    pub range: Range<PointUtf16>,
    // Presumably an integrity signature over the symbol's identity
    // (32 bytes matches the Sha256 import) — TODO confirm where it is computed.
    pub signature: [u8; 32],
}
151
/// A set of buffer transactions produced by one logical project-wide
/// operation (e.g. a rename), keyed by the buffer they apply to.
#[derive(Default)]
pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
154
155impl DiagnosticSummary {
156 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
157 let mut this = Self {
158 error_count: 0,
159 warning_count: 0,
160 info_count: 0,
161 hint_count: 0,
162 };
163
164 for entry in diagnostics {
165 if entry.diagnostic.is_primary {
166 match entry.diagnostic.severity {
167 DiagnosticSeverity::ERROR => this.error_count += 1,
168 DiagnosticSeverity::WARNING => this.warning_count += 1,
169 DiagnosticSeverity::INFORMATION => this.info_count += 1,
170 DiagnosticSeverity::HINT => this.hint_count += 1,
171 _ => {}
172 }
173 }
174 }
175
176 this
177 }
178
179 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
180 proto::DiagnosticSummary {
181 path: path.to_string_lossy().to_string(),
182 error_count: self.error_count as u32,
183 warning_count: self.warning_count as u32,
184 info_count: self.info_count as u32,
185 hint_count: self.hint_count as u32,
186 }
187 }
188}
189
/// Identifies a single worktree entry (file or directory) within the project.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ProjectEntry {
    pub worktree_id: WorktreeId,
    pub entry_id: usize,
}
195
196impl Project {
    /// Registers all of the RPC message and request handlers that a
    /// `Project` model responds to on the given client connection.
    pub fn init(client: &Arc<Client>) {
        client.add_entity_message_handler(Self::handle_add_collaborator);
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_close_buffer);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
        client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
        client.add_entity_message_handler(Self::handle_remove_collaborator);
        client.add_entity_message_handler(Self::handle_register_worktree);
        client.add_entity_message_handler(Self::handle_unregister_worktree);
        client.add_entity_message_handler(Self::handle_unshare_project);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_message_handler(Self::handle_update_buffer);
        client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
        client.add_entity_message_handler(Self::handle_update_worktree);
        client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
        client.add_entity_request_handler(Self::handle_apply_code_action);
        client.add_entity_request_handler(Self::handle_format_buffers);
        client.add_entity_request_handler(Self::handle_get_code_actions);
        client.add_entity_request_handler(Self::handle_get_completions);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
        client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
        client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
        client.add_entity_request_handler(Self::handle_search_project);
        client.add_entity_request_handler(Self::handle_get_project_symbols);
        client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
        client.add_entity_request_handler(Self::handle_open_buffer);
        client.add_entity_request_handler(Self::handle_save_buffer);
    }
228
    /// Creates a project backed by worktrees on the local filesystem.
    ///
    /// Also spawns a background task that watches the client's connection
    /// status: whenever the client is connected, the project is registered
    /// with the server and all current worktrees are registered under the
    /// returned project id; otherwise the remote id is cleared.
    pub fn local(
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut MutableAppContext,
    ) -> ModelHandle<Self> {
        cx.add_model(|cx: &mut ModelContext<Self>| {
            let (remote_id_tx, remote_id_rx) = watch::channel();
            // Weak spawn: the task stops once the project model is dropped.
            let _maintain_remote_id_task = cx.spawn_weak({
                let rpc = client.clone();
                move |this, mut cx| {
                    async move {
                        let mut status = rpc.status();
                        while let Some(status) = status.next().await {
                            if let Some(this) = this.upgrade(&cx) {
                                let remote_id = if let client::Status::Connected { .. } = status {
                                    let response = rpc.request(proto::RegisterProject {}).await?;
                                    Some(response.project_id)
                                } else {
                                    None
                                };

                                if let Some(project_id) = remote_id {
                                    // Register every current worktree under the new
                                    // project id before publishing that id below.
                                    let mut registrations = Vec::new();
                                    this.update(&mut cx, |this, cx| {
                                        for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                                            registrations.push(worktree.update(
                                                cx,
                                                |worktree, cx| {
                                                    let worktree = worktree.as_local_mut().unwrap();
                                                    worktree.register(project_id, cx)
                                                },
                                            ));
                                        }
                                    });
                                    for registration in registrations {
                                        registration.await?;
                                    }
                                }
                                this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
                            }
                        }
                        Ok(())
                    }
                    .log_err()
                }
            });

            let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
            Self {
                worktrees: Default::default(),
                collaborators: Default::default(),
                opened_buffers: Default::default(),
                shared_buffers: Default::default(),
                loading_buffers: Default::default(),
                client_state: ProjectClientState::Local {
                    is_shared: false,
                    remote_id_tx,
                    remote_id_rx,
                    _maintain_remote_id_task,
                },
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                subscriptions: Vec::new(),
                active_entry: None,
                languages,
                client,
                user_store,
                fs,
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                // Random per-replica value; see the `nonce` field on `Project`.
                nonce: StdRng::from_entropy().gen(),
            }
        })
    }
305
    /// Joins a project hosted by another collaborator.
    ///
    /// Authenticates and connects the client, sends `JoinProject`, builds
    /// remote worktrees from the response, then loads the user records for
    /// all current collaborators before returning the model.
    pub async fn remote(
        remote_id: u64,
        client: Arc<Client>,
        user_store: ModelHandle<UserStore>,
        languages: Arc<LanguageRegistry>,
        fs: Arc<dyn Fs>,
        cx: &mut AsyncAppContext,
    ) -> Result<ModelHandle<Self>> {
        client.authenticate_and_connect(&cx).await?;

        let response = client
            .request(proto::JoinProject {
                project_id: remote_id,
            })
            .await?;

        // The server assigns this replica's id for attributing edits.
        let replica_id = response.replica_id as ReplicaId;

        let mut worktrees = Vec::new();
        for worktree in response.worktrees {
            let (worktree, load_task) = cx
                .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
            worktrees.push(worktree);
            // Let each worktree finish loading in the background.
            load_task.detach();
        }

        let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
        let this = cx.add_model(|cx| {
            let mut this = Self {
                worktrees: Vec::new(),
                loading_buffers: Default::default(),
                opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
                shared_buffers: Default::default(),
                active_entry: None,
                collaborators: Default::default(),
                languages,
                user_store: user_store.clone(),
                fs,
                // Route incoming messages for this remote entity to this model.
                subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
                client,
                client_state: ProjectClientState::Remote {
                    sharing_has_stopped: false,
                    remote_id,
                    replica_id,
                },
                language_servers_with_diagnostics_running: 0,
                language_servers: Default::default(),
                started_language_servers: Default::default(),
                opened_buffers: Default::default(),
                nonce: StdRng::from_entropy().gen(),
            };
            for worktree in worktrees {
                this.add_worktree(&worktree, cx);
            }
            this
        });

        // Fetch user records for all collaborators before recording them.
        let user_ids = response
            .collaborators
            .iter()
            .map(|peer| peer.user_id)
            .collect();
        user_store
            .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
            .await?;
        let mut collaborators = HashMap::default();
        for message in response.collaborators {
            let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
            collaborators.insert(collaborator.peer_id, collaborator);
        }

        this.update(cx, |this, _| {
            this.collaborators = collaborators;
        });

        Ok(this)
    }
383
384 #[cfg(any(test, feature = "test-support"))]
385 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
386 let languages = Arc::new(LanguageRegistry::new());
387 let http_client = client::test::FakeHttpClient::with_404_response();
388 let client = client::Client::new(http_client.clone());
389 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
390 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
391 }
392
393 #[cfg(any(test, feature = "test-support"))]
394 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
395 self.opened_buffers
396 .get(&remote_id)
397 .and_then(|buffer| buffer.upgrade(cx))
398 }
399
400 #[cfg(any(test, feature = "test-support"))]
401 pub fn has_deferred_operations(&self, cx: &AppContext) -> bool {
402 self.opened_buffers.values().any(|buffer| match buffer {
403 OpenBuffer::Strong(buffer) => buffer.read(cx).deferred_ops_len() > 0,
404 OpenBuffer::Weak(buffer) => buffer
405 .upgrade(cx)
406 .map_or(false, |buffer| buffer.read(cx).deferred_ops_len() > 0),
407 OpenBuffer::Loading(_) => false,
408 })
409 }
410
    /// Returns the language registry backing this project.
    #[cfg(any(test, feature = "test-support"))]
    pub fn languages(&self) -> &Arc<LanguageRegistry> {
        &self.languages
    }
415
416 #[cfg(any(test, feature = "test-support"))]
417 pub fn check_invariants(&self, cx: &AppContext) {
418 if self.is_local() {
419 let buffers = self.buffers(cx);
420 for (i, buffer) in buffers.iter().enumerate() {
421 let buffer = buffer.read(cx);
422 let path = buffer.file().unwrap().as_local().unwrap().abs_path(cx);
423 for other_buffer in &buffers[0..i] {
424 let other_buffer = other_buffer.read(cx);
425 let other_path = other_buffer
426 .file()
427 .unwrap()
428 .as_local()
429 .unwrap()
430 .abs_path(cx);
431 if other_path == path {
432 panic!(
433 "buffers {} and {} have the same absolute path: {:?}",
434 buffer.remote_id(),
435 other_buffer.remote_id(),
436 path,
437 );
438 }
439 }
440 }
441 }
442 }
443
444 #[cfg(any(test, feature = "test-support"))]
445 pub fn buffers(&self, cx: &AppContext) -> Vec<ModelHandle<Buffer>> {
446 self.opened_buffers
447 .values()
448 .filter_map(|buffer| match buffer {
449 OpenBuffer::Strong(buffer) => Some(buffer.clone()),
450 OpenBuffer::Weak(buffer) => buffer.upgrade(cx),
451 OpenBuffer::Loading(_) => None,
452 })
453 .collect()
454 }
455
456 #[cfg(any(test, feature = "test-support"))]
457 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
458 let path = path.into();
459 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
460 self.opened_buffers.iter().any(|(_, buffer)| {
461 if let Some(buffer) = buffer.upgrade(cx) {
462 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
463 if file.worktree == worktree && file.path() == &path.path {
464 return true;
465 }
466 }
467 }
468 false
469 })
470 } else {
471 false
472 }
473 }
474
    /// Returns the filesystem abstraction this project reads and writes through.
    pub fn fs(&self) -> &Arc<dyn Fs> {
        &self.fs
    }
478
479 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
480 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
481 *remote_id_tx.borrow_mut() = remote_id;
482 }
483
484 self.subscriptions.clear();
485 if let Some(remote_id) = remote_id {
486 self.subscriptions
487 .push(self.client.add_model_for_remote_entity(remote_id, cx));
488 }
489 }
490
491 pub fn remote_id(&self) -> Option<u64> {
492 match &self.client_state {
493 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
494 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
495 }
496 }
497
498 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
499 let mut id = None;
500 let mut watch = None;
501 match &self.client_state {
502 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
503 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
504 }
505
506 async move {
507 if let Some(id) = id {
508 return id;
509 }
510 let mut watch = watch.unwrap();
511 loop {
512 let id = *watch.borrow();
513 if let Some(id) = id {
514 return id;
515 }
516 watch.next().await;
517 }
518 }
519 }
520
521 pub fn replica_id(&self) -> ReplicaId {
522 match &self.client_state {
523 ProjectClientState::Local { .. } => 0,
524 ProjectClientState::Remote { replica_id, .. } => *replica_id,
525 }
526 }
527
    /// Returns the current collaborators, keyed by peer id.
    pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
        &self.collaborators
    }
531
    /// Iterates over all worktrees that are still alive (weakly-held
    /// worktrees that have been dropped are skipped).
    pub fn worktrees<'a>(
        &'a self,
        cx: &'a AppContext,
    ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
        self.worktrees
            .iter()
            .filter_map(move |worktree| worktree.upgrade(cx))
    }
540
541 pub fn visible_worktrees<'a>(
542 &'a self,
543 cx: &'a AppContext,
544 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
545 self.worktrees.iter().filter_map(|worktree| {
546 worktree.upgrade(cx).and_then(|worktree| {
547 if worktree.read(cx).is_visible() {
548 Some(worktree)
549 } else {
550 None
551 }
552 })
553 })
554 }
555
556 pub fn worktree_for_id(
557 &self,
558 id: WorktreeId,
559 cx: &AppContext,
560 ) -> Option<ModelHandle<Worktree>> {
561 self.worktrees(cx)
562 .find(|worktree| worktree.read(cx).id() == id)
563 }
564
    /// Starts sharing this local project with collaborators.
    ///
    /// Upgrades all weak buffer and worktree handles to strong ones (so
    /// guests' state stays alive), sends `ShareProject`, then shares each
    /// worktree under the project id. Fails if the project has no remote
    /// id yet or if called on a remote project.
    pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        let rpc = self.client.clone();
        cx.spawn(|this, mut cx| async move {
            let project_id = this.update(&mut cx, |this, cx| {
                if let ProjectClientState::Local {
                    is_shared,
                    remote_id_rx,
                    ..
                } = &mut this.client_state
                {
                    *is_shared = true;

                    // Keep every live buffer alive for the duration of sharing.
                    for open_buffer in this.opened_buffers.values_mut() {
                        match open_buffer {
                            OpenBuffer::Strong(_) => {}
                            OpenBuffer::Weak(buffer) => {
                                if let Some(buffer) = buffer.upgrade(cx) {
                                    *open_buffer = OpenBuffer::Strong(buffer);
                                }
                            }
                            // Loading buffers only exist on remote projects,
                            // and this branch requires a local project.
                            OpenBuffer::Loading(_) => unreachable!(),
                        }
                    }

                    // Likewise keep every live worktree alive.
                    for worktree_handle in this.worktrees.iter_mut() {
                        match worktree_handle {
                            WorktreeHandle::Strong(_) => {}
                            WorktreeHandle::Weak(worktree) => {
                                if let Some(worktree) = worktree.upgrade(cx) {
                                    *worktree_handle = WorktreeHandle::Strong(worktree);
                                }
                            }
                        }
                    }

                    remote_id_rx
                        .borrow()
                        .ok_or_else(|| anyhow!("no project id"))
                } else {
                    Err(anyhow!("can't share a remote project"))
                }
            })?;

            rpc.request(proto::ShareProject { project_id }).await?;

            // Share each worktree concurrently, then await all of them.
            let mut tasks = Vec::new();
            this.update(&mut cx, |this, cx| {
                for worktree in this.worktrees(cx).collect::<Vec<_>>() {
                    worktree.update(cx, |worktree, cx| {
                        let worktree = worktree.as_local_mut().unwrap();
                        tasks.push(worktree.share(project_id, cx));
                    });
                }
            });
            for task in tasks {
                task.await?;
            }
            this.update(&mut cx, |_, cx| cx.notify());
            Ok(())
        })
    }
626
627 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
628 let rpc = self.client.clone();
629 cx.spawn(|this, mut cx| async move {
630 let project_id = this.update(&mut cx, |this, cx| {
631 if let ProjectClientState::Local {
632 is_shared,
633 remote_id_rx,
634 ..
635 } = &mut this.client_state
636 {
637 *is_shared = false;
638
639 for open_buffer in this.opened_buffers.values_mut() {
640 match open_buffer {
641 OpenBuffer::Strong(buffer) => {
642 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
643 }
644 _ => {}
645 }
646 }
647
648 for worktree_handle in this.worktrees.iter_mut() {
649 match worktree_handle {
650 WorktreeHandle::Strong(worktree) => {
651 if !worktree.read(cx).is_visible() {
652 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
653 }
654 }
655 _ => {}
656 }
657 }
658
659 remote_id_rx
660 .borrow()
661 .ok_or_else(|| anyhow!("no project id"))
662 } else {
663 Err(anyhow!("can't share a remote project"))
664 }
665 })?;
666
667 rpc.send(proto::UnshareProject { project_id })?;
668 this.update(&mut cx, |this, cx| {
669 this.collaborators.clear();
670 this.shared_buffers.clear();
671 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
672 worktree.update(cx, |worktree, _| {
673 worktree.as_local_mut().unwrap().unshare();
674 });
675 }
676 cx.notify()
677 });
678 Ok(())
679 })
680 }
681
682 pub fn is_read_only(&self) -> bool {
683 match &self.client_state {
684 ProjectClientState::Local { .. } => false,
685 ProjectClientState::Remote {
686 sharing_has_stopped,
687 ..
688 } => *sharing_has_stopped,
689 }
690 }
691
692 pub fn is_local(&self) -> bool {
693 match &self.client_state {
694 ProjectClientState::Local { .. } => true,
695 ProjectClientState::Remote { .. } => false,
696 }
697 }
698
    /// Whether this replica joined the project as a guest.
    pub fn is_remote(&self) -> bool {
        !self.is_local()
    }
702
    /// Opens the buffer at the given project path, deduplicating
    /// concurrent opens: the first caller starts the load, later callers
    /// wait on the same watch and receive the same buffer (or error).
    pub fn open_buffer(
        &mut self,
        path: impl Into<ProjectPath>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let project_path = path.into();
        let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
            worktree
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };

        // If there is already a buffer for the given path, then return it.
        let existing_buffer = self.get_open_buffer(&project_path, cx);
        if let Some(existing_buffer) = existing_buffer {
            return Task::ready(Ok(existing_buffer));
        }

        let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
            // If the given path is already being loaded, then wait for that existing
            // task to complete and return the same buffer.
            hash_map::Entry::Occupied(e) => e.get().clone(),

            // Otherwise, record the fact that this path is now being loaded.
            hash_map::Entry::Vacant(entry) => {
                let (mut tx, rx) = postage::watch::channel();
                entry.insert(rx.clone());

                // Local worktrees load from disk; remote ones request the
                // buffer from the host over RPC.
                let load_buffer = if worktree.read(cx).is_local() {
                    self.open_local_buffer(&project_path.path, &worktree, cx)
                } else {
                    self.open_remote_buffer(&project_path.path, &worktree, cx)
                };

                cx.spawn(move |this, mut cx| async move {
                    let load_result = load_buffer.await;
                    *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
                        // Record the fact that the buffer is no longer loading.
                        this.loading_buffers.remove(&project_path);
                        let buffer = load_result.map_err(Arc::new)?;
                        Ok(buffer)
                    }));
                })
                .detach();
                rx
            }
        };

        // Wait until the loader publishes a result on the watch.
        cx.foreground().spawn(async move {
            loop {
                if let Some(result) = loading_watch.borrow().as_ref() {
                    match result {
                        Ok(buffer) => return Ok(buffer.clone()),
                        Err(error) => return Err(anyhow!("{}", error)),
                    }
                }
                loading_watch.next().await;
            }
        })
    }
763
764 fn open_local_buffer(
765 &mut self,
766 path: &Arc<Path>,
767 worktree: &ModelHandle<Worktree>,
768 cx: &mut ModelContext<Self>,
769 ) -> Task<Result<ModelHandle<Buffer>>> {
770 let load_buffer = worktree.update(cx, |worktree, cx| {
771 let worktree = worktree.as_local_mut().unwrap();
772 worktree.load_buffer(path, cx)
773 });
774 let worktree = worktree.downgrade();
775 cx.spawn(|this, mut cx| async move {
776 let buffer = load_buffer.await?;
777 let worktree = worktree
778 .upgrade(&cx)
779 .ok_or_else(|| anyhow!("worktree was removed"))?;
780 this.update(&mut cx, |this, cx| {
781 this.register_buffer(&buffer, Some(&worktree), cx)
782 })?;
783 Ok(buffer)
784 })
785 }
786
787 fn open_remote_buffer(
788 &mut self,
789 path: &Arc<Path>,
790 worktree: &ModelHandle<Worktree>,
791 cx: &mut ModelContext<Self>,
792 ) -> Task<Result<ModelHandle<Buffer>>> {
793 let rpc = self.client.clone();
794 let project_id = self.remote_id().unwrap();
795 let remote_worktree_id = worktree.read(cx).id();
796 let path = path.clone();
797 let path_string = path.to_string_lossy().to_string();
798 cx.spawn(|this, mut cx| async move {
799 let response = rpc
800 .request(proto::OpenBuffer {
801 project_id,
802 worktree_id: remote_worktree_id.to_proto(),
803 path: path_string,
804 })
805 .await?;
806 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
807 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
808 .await
809 })
810 }
811
    /// Opens the buffer for a file URI reported by a language server.
    ///
    /// If the path falls inside an existing local worktree, that worktree
    /// is used; otherwise an invisible worktree is created for it and the
    /// given language server is associated with the new worktree.
    fn open_local_buffer_via_lsp(
        &mut self,
        abs_path: lsp::Url,
        lang_name: String,
        lang_server: Arc<LanguageServer>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        cx.spawn(|this, mut cx| async move {
            let abs_path = abs_path
                .to_file_path()
                .map_err(|_| anyhow!("can't convert URI to path"))?;
            let (worktree, relative_path) = if let Some(result) =
                this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
            {
                result
            } else {
                // No existing worktree contains the path: create an
                // invisible one rooted at the file itself.
                let worktree = this
                    .update(&mut cx, |this, cx| {
                        this.create_local_worktree(&abs_path, false, cx)
                    })
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.language_servers
                        .insert((worktree.read(cx).id(), lang_name), lang_server);
                });
                // The worktree root is the file, so the relative path is empty.
                (worktree, PathBuf::new())
            };

            let project_path = ProjectPath {
                worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
                path: relative_path.into(),
            };
            this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                .await
        })
    }
848
    /// Saves `buffer` to a new absolute path, creating (or finding) a
    /// local worktree containing that path, then re-detects the buffer's
    /// language for its new location.
    pub fn save_buffer_as(
        &self,
        buffer: ModelHandle<Buffer>,
        abs_path: PathBuf,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<()>> {
        let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
        cx.spawn(|this, mut cx| async move {
            let (worktree, path) = worktree_task.await?;
            worktree
                .update(&mut cx, |worktree, cx| {
                    worktree
                        .as_local_mut()
                        .unwrap()
                        .save_buffer_as(buffer.clone(), path, cx)
                })
                .await?;
            // The file's extension/location may have changed, so re-run
            // language detection and server assignment.
            this.update(&mut cx, |this, cx| {
                this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
            });
            Ok(())
        })
    }
872
873 pub fn get_open_buffer(
874 &mut self,
875 path: &ProjectPath,
876 cx: &mut ModelContext<Self>,
877 ) -> Option<ModelHandle<Buffer>> {
878 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
879 self.opened_buffers.values().find_map(|buffer| {
880 let buffer = buffer.upgrade(cx)?;
881 let file = File::from_dyn(buffer.read(cx).file())?;
882 if file.worktree == worktree && file.path() == &path.path {
883 Some(buffer)
884 } else {
885 None
886 }
887 })
888 }
889
    /// Records a newly-opened buffer in `opened_buffers` and assigns its
    /// language.
    ///
    /// Held strongly when the project is remote or shared, weakly
    /// otherwise. If the slot previously held queued operations
    /// (`Loading`), they are applied to the buffer now. Registering over a
    /// still-live buffer with the same remote id is an error.
    fn register_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        let remote_id = buffer.read(cx).remote_id();
        let open_buffer = if self.is_remote() || self.is_shared() {
            OpenBuffer::Strong(buffer.clone())
        } else {
            OpenBuffer::Weak(buffer.downgrade())
        };

        // Note: the insert happens first, so the new handle replaces
        // whatever was in the slot even in the error cases below.
        match self.opened_buffers.insert(remote_id, open_buffer) {
            None => {}
            Some(OpenBuffer::Loading(operations)) => {
                // Apply operations that arrived while the buffer was loading.
                buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
            }
            Some(OpenBuffer::Weak(existing_handle)) => {
                // A dead weak handle is fine to replace; a live one is a
                // duplicate registration.
                if existing_handle.upgrade(cx).is_some() {
                    Err(anyhow!(
                        "already registered buffer with remote id {}",
                        remote_id
                    ))?
                }
            }
            Some(OpenBuffer::Strong(_)) => Err(anyhow!(
                "already registered buffer with remote id {}",
                remote_id
            ))?,
        }
        self.assign_language_to_buffer(&buffer, worktree, cx);
        Ok(())
    }
924
    /// Detects the buffer's language from its path and, for local
    /// worktrees, starts/attaches the appropriate language server and
    /// applies any diagnostics already known for the path.
    ///
    /// The `Option<()>` return only supports the `?` on file lookup; the
    /// value itself is unused by callers.
    fn assign_language_to_buffer(
        &mut self,
        buffer: &ModelHandle<Buffer>,
        worktree: Option<&ModelHandle<Worktree>>,
        cx: &mut ModelContext<Self>,
    ) -> Option<()> {
        let (path, full_path) = {
            let file = buffer.read(cx).file()?;
            (file.path().clone(), file.full_path(cx))
        };

        // If the buffer has a language, set it and start/assign the language server
        if let Some(language) = self.languages.select_language(&full_path) {
            buffer.update(cx, |buffer, cx| {
                buffer.set_language(Some(language.clone()), cx);
            });

            // For local worktrees, start a language server if needed.
            // Also assign the language server and any previously stored diagnostics to the buffer.
            if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
                let worktree_id = local_worktree.id();
                let worktree_abs_path = local_worktree.abs_path().clone();
                let buffer = buffer.downgrade();
                let language_server =
                    self.start_language_server(worktree_id, worktree_abs_path, language, cx);

                // Attach the server once it finishes starting, if the
                // buffer is still alive by then.
                cx.spawn_weak(|_, mut cx| async move {
                    if let Some(language_server) = language_server.await {
                        if let Some(buffer) = buffer.upgrade(&cx) {
                            buffer.update(&mut cx, |buffer, cx| {
                                buffer.set_language_server(Some(language_server), cx);
                            });
                        }
                    }
                })
                .detach();
            }
        }

        // Seed the buffer with diagnostics the worktree already has for
        // this path (applies even when no language was detected).
        if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
            if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
                buffer.update(cx, |buffer, cx| {
                    buffer.update_diagnostics(diagnostics, None, cx).log_err();
                });
            }
        }

        None
    }
974
    /// Starts (or returns the already-started) language server for the
    /// given worktree and language.
    ///
    /// The returned shared task resolves to `None` if startup fails. On
    /// success, the server is recorded in `language_servers`, and two
    /// notification listeners feed an internal channel that drives
    /// diagnostics updates and started/finished events (forwarded over RPC
    /// when the project has a remote id).
    fn start_language_server(
        &mut self,
        worktree_id: WorktreeId,
        worktree_path: Arc<Path>,
        language: Arc<Language>,
        cx: &mut ModelContext<Self>,
    ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
        // Internal events produced by the two LSP notification listeners
        // below and consumed by the processing task at the end.
        enum LspEvent {
            DiagnosticsStart,
            DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
            DiagnosticsFinish,
        }

        let key = (worktree_id, language.name().to_string());
        self.started_language_servers
            .entry(key.clone())
            .or_insert_with(|| {
                let language_server = self.languages.start_language_server(
                    &language,
                    worktree_path,
                    self.client.http_client(),
                    cx,
                );
                let rpc = self.client.clone();
                cx.spawn_weak(|this, mut cx| async move {
                    let language_server = language_server?.await.log_err()?;
                    if let Some(this) = this.upgrade(&cx) {
                        this.update(&mut cx, |this, _| {
                            this.language_servers.insert(key, language_server.clone());
                        });
                    }

                    let disk_based_sources = language
                        .disk_based_diagnostic_sources()
                        .cloned()
                        .unwrap_or_default();
                    let disk_based_diagnostics_progress_token =
                        language.disk_based_diagnostics_progress_token().cloned();
                    let has_disk_based_diagnostic_progress_token =
                        disk_based_diagnostics_progress_token.is_some();
                    let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();

                    // Listen for `PublishDiagnostics` notifications.
                    // Without a progress token, each publish is bracketed
                    // with its own start/finish events.
                    language_server
                        .on_notification::<lsp::notification::PublishDiagnostics, _>({
                            let diagnostics_tx = diagnostics_tx.clone();
                            move |params| {
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
                                }
                                block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
                                    .ok();
                                if !has_disk_based_diagnostic_progress_token {
                                    block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
                                }
                            }
                        })
                        .detach();

                    // Listen for `Progress` notifications. Send an event when the language server
                    // transitions between running jobs and not running any jobs.
                    let mut running_jobs_for_this_server: i32 = 0;
                    language_server
                        .on_notification::<lsp::notification::Progress, _>(move |params| {
                            let token = match params.token {
                                lsp::NumberOrString::Number(_) => None,
                                lsp::NumberOrString::String(token) => Some(token),
                            };

                            if token == disk_based_diagnostics_progress_token {
                                match params.value {
                                    lsp::ProgressParamsValue::WorkDone(progress) => {
                                        match progress {
                                            lsp::WorkDoneProgress::Begin(_) => {
                                                running_jobs_for_this_server += 1;
                                                if running_jobs_for_this_server == 1 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsStart),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            lsp::WorkDoneProgress::End(_) => {
                                                running_jobs_for_this_server -= 1;
                                                if running_jobs_for_this_server == 0 {
                                                    block_on(
                                                        diagnostics_tx
                                                            .send(LspEvent::DiagnosticsFinish),
                                                    )
                                                    .ok();
                                                }
                                            }
                                            _ => {}
                                        }
                                    }
                                }
                            }
                        })
                        .detach();

                    // Process all the LSP events.
                    cx.spawn(|mut cx| async move {
                        while let Ok(message) = diagnostics_rx.recv().await {
                            // Stop processing once the project is dropped.
                            let this = this.upgrade(&cx)?;
                            match message {
                                LspEvent::DiagnosticsStart => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_started(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdating {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                                LspEvent::DiagnosticsUpdate(mut params) => {
                                    // Give the language a chance to rewrite
                                    // diagnostics before applying them.
                                    language.process_diagnostics(&mut params);
                                    this.update(&mut cx, |this, cx| {
                                        this.update_diagnostics(params, &disk_based_sources, cx)
                                            .log_err();
                                    });
                                }
                                LspEvent::DiagnosticsFinish => {
                                    this.update(&mut cx, |this, cx| {
                                        this.disk_based_diagnostics_finished(cx);
                                        if let Some(project_id) = this.remote_id() {
                                            rpc.send(proto::DiskBasedDiagnosticsUpdated {
                                                project_id,
                                            })
                                            .log_err();
                                        }
                                    });
                                }
                            }
                        }
                        Some(())
                    })
                    .detach();

                    Some(language_server)
                })
                .shared()
            })
            .clone()
    }
1122
    /// Ingests a `textDocument/publishDiagnostics` notification from a
    /// language server, converting the LSP diagnostics into grouped
    /// `DiagnosticEntry`s and forwarding them to the owning worktree and any
    /// open buffer via `update_diagnostic_entries`.
    ///
    /// Grouping: each diagnostic that is not "supporting" an earlier one
    /// starts a new group and becomes that group's primary entry; its
    /// `related_information` locations in the same file are added as
    /// non-primary members. A later diagnostic whose (source, code, range)
    /// matches a primary's related-information location is treated as
    /// supporting, and only contributes a severity override (second pass).
    pub fn update_diagnostics(
        &mut self,
        params: lsp::PublishDiagnosticsParams,
        disk_based_sources: &HashSet<String>,
        cx: &mut ModelContext<Self>,
    ) -> Result<()> {
        // Diagnostics are published per document, identified by a file URI.
        let abs_path = params
            .uri
            .to_file_path()
            .map_err(|_| anyhow!("URI is not a file"))?;
        let mut next_group_id = 0;
        let mut diagnostics = Vec::default();
        // (source, code, range) of each primary diagnostic -> its group id.
        // Used to recognize later diagnostics that "support" a primary.
        let mut primary_diagnostic_group_ids = HashMap::default();
        // group id -> the source of that group's primary diagnostic.
        let mut sources_by_group_id = HashMap::default();
        // Severity reported by a supporting diagnostic, keyed the same way;
        // applied to matching non-primary entries in the second pass below.
        let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
            let source = diagnostic.source.as_ref();
            // Normalize numeric and string codes to a single String form.
            let code = diagnostic.code.as_ref().map(|code| match code {
                lsp::NumberOrString::Number(code) => code.to_string(),
                lsp::NumberOrString::String(code) => code.clone(),
            });
            let range = range_from_lsp(diagnostic.range);
            // A diagnostic is "supporting" if some earlier primary's
            // related_information pointed at this exact (source, code, range).
            let is_supporting = diagnostic
                .related_information
                .as_ref()
                .map_or(false, |infos| {
                    infos.iter().any(|info| {
                        primary_diagnostic_group_ids.contains_key(&(
                            source,
                            code.clone(),
                            range_from_lsp(info.location.range),
                        ))
                    })
                });

            if is_supporting {
                // Supporting diagnostics only contribute a severity override.
                if let Some(severity) = diagnostic.severity {
                    supporting_diagnostic_severities
                        .insert((source, code.clone(), range), severity);
                }
            } else {
                // Start a new group with this diagnostic as the primary.
                let group_id = post_inc(&mut next_group_id);
                let is_disk_based =
                    source.map_or(false, |source| disk_based_sources.contains(source));

                sources_by_group_id.insert(group_id, source);
                primary_diagnostic_group_ids
                    .insert((source, code.clone(), range.clone()), group_id);

                diagnostics.push(DiagnosticEntry {
                    range,
                    diagnostic: Diagnostic {
                        code: code.clone(),
                        severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
                        message: diagnostic.message.clone(),
                        group_id,
                        is_primary: true,
                        is_valid: true,
                        is_disk_based,
                    },
                });
                // Related-information entries in the *same* file join the
                // group as non-primary, informational entries.
                if let Some(infos) = &diagnostic.related_information {
                    for info in infos {
                        if info.location.uri == params.uri && !info.message.is_empty() {
                            let range = range_from_lsp(info.location.range);
                            diagnostics.push(DiagnosticEntry {
                                range,
                                diagnostic: Diagnostic {
                                    code: code.clone(),
                                    severity: DiagnosticSeverity::INFORMATION,
                                    message: info.message.clone(),
                                    group_id,
                                    is_primary: false,
                                    is_valid: true,
                                    is_disk_based,
                                },
                            });
                        }
                    }
                }
            }
        }

        // Second pass: apply any severity overrides collected from
        // supporting diagnostics to the matching non-primary entries.
        for entry in &mut diagnostics {
            let diagnostic = &mut entry.diagnostic;
            if !diagnostic.is_primary {
                let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
                if let Some(&severity) = supporting_diagnostic_severities.get(&(
                    source,
                    diagnostic.code.clone(),
                    entry.range.clone(),
                )) {
                    diagnostic.severity = severity;
                }
            }
        }

        self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
        Ok(())
    }
1223
1224 pub fn update_diagnostic_entries(
1225 &mut self,
1226 abs_path: PathBuf,
1227 version: Option<i32>,
1228 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1229 cx: &mut ModelContext<Project>,
1230 ) -> Result<(), anyhow::Error> {
1231 let (worktree, relative_path) = self
1232 .find_local_worktree(&abs_path, cx)
1233 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1234 let project_path = ProjectPath {
1235 worktree_id: worktree.read(cx).id(),
1236 path: relative_path.into(),
1237 };
1238
1239 for buffer in self.opened_buffers.values() {
1240 if let Some(buffer) = buffer.upgrade(cx) {
1241 if buffer
1242 .read(cx)
1243 .file()
1244 .map_or(false, |file| *file.path() == project_path.path)
1245 {
1246 buffer.update(cx, |buffer, cx| {
1247 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1248 })?;
1249 break;
1250 }
1251 }
1252 }
1253 worktree.update(cx, |worktree, cx| {
1254 worktree
1255 .as_local_mut()
1256 .ok_or_else(|| anyhow!("not a local worktree"))?
1257 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1258 })?;
1259 cx.emit(Event::DiagnosticsUpdated(project_path));
1260 Ok(())
1261 }
1262
    /// Formats the given buffers, returning a single `ProjectTransaction`
    /// that maps each edited buffer to the transaction applied to it.
    ///
    /// Buffers backed by local files are formatted through their worktree's
    /// language server; buffers from a remote project are batched into one
    /// `FormatBuffers` RPC request. When `push_to_history` is false, the
    /// resulting transactions are removed from each buffer's undo history.
    pub fn format(
        &self,
        buffers: HashSet<ModelHandle<Buffer>>,
        push_to_history: bool,
        cx: &mut ModelContext<Project>,
    ) -> Task<Result<ProjectTransaction>> {
        // Partition the buffers into locally-formattable ones (with their
        // absolute path and language server) and remote ones.
        let mut local_buffers = Vec::new();
        let mut remote_buffers = None;
        for buffer_handle in buffers {
            let buffer = buffer_handle.read(cx);
            let worktree;
            if let Some(file) = File::from_dyn(buffer.file()) {
                worktree = file.worktree.clone();
                if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
                    let lang_server;
                    if let Some(lang) = buffer.language() {
                        if let Some(server) = self
                            .language_servers
                            .get(&(worktree.read(cx).id(), lang.name().to_string()))
                        {
                            lang_server = server.clone();
                        } else {
                            // No language server for this buffer's language;
                            // nothing to format.
                            return Task::ready(Ok(Default::default()));
                        };
                    } else {
                        // Buffer has no language assigned; nothing to format.
                        return Task::ready(Ok(Default::default()));
                    }

                    local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
                } else {
                    // Non-local file: defer to the remote host.
                    remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
                }
            } else {
                // Buffer isn't backed by a project file; nothing to format.
                return Task::ready(Ok(Default::default()));
            }
        }

        // Remote formatting is only possible when the project has a remote id.
        let remote_buffers = self.remote_id().zip(remote_buffers);
        let client = self.client.clone();

        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();

            // First, format all remote buffers with one batched RPC request
            // and fold the host's transaction into ours.
            if let Some((project_id, remote_buffers)) = remote_buffers {
                let response = client
                    .request(proto::FormatBuffers {
                        project_id,
                        buffer_ids: remote_buffers
                            .iter()
                            .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
                            .collect(),
                    })
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                project_transaction = this
                    .update(&mut cx, |this, cx| {
                        this.deserialize_project_transaction(response, push_to_history, cx)
                    })
                    .await?;
            }

            // Then format each local buffer via its language server,
            // applying the returned edits in a single buffer transaction.
            for (buffer, buffer_abs_path, lang_server) in local_buffers {
                let lsp_edits = lang_server
                    .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
                        ),
                        options: Default::default(),
                        work_done_progress_params: Default::default(),
                    })
                    .await?;

                if let Some(lsp_edits) = lsp_edits {
                    let edits = buffer
                        .update(&mut cx, |buffer, cx| {
                            buffer.edits_from_lsp(lsp_edits, None, cx)
                        })
                        .await?;
                    buffer.update(&mut cx, |buffer, cx| {
                        // Group all formatting edits into one transaction so
                        // they undo/redo as a unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            project_transaction.0.insert(cx.handle(), transaction);
                        }
                    });
                }
            }

            Ok(project_transaction)
        })
    }
1362
1363 pub fn definition<T: ToPointUtf16>(
1364 &self,
1365 buffer: &ModelHandle<Buffer>,
1366 position: T,
1367 cx: &mut ModelContext<Self>,
1368 ) -> Task<Result<Vec<Location>>> {
1369 let position = position.to_point_utf16(buffer.read(cx));
1370 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1371 }
1372
1373 pub fn references<T: ToPointUtf16>(
1374 &self,
1375 buffer: &ModelHandle<Buffer>,
1376 position: T,
1377 cx: &mut ModelContext<Self>,
1378 ) -> Task<Result<Vec<Location>>> {
1379 let position = position.to_point_utf16(buffer.read(cx));
1380 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1381 }
1382
1383 pub fn document_highlights<T: ToPointUtf16>(
1384 &self,
1385 buffer: &ModelHandle<Buffer>,
1386 position: T,
1387 cx: &mut ModelContext<Self>,
1388 ) -> Task<Result<Vec<DocumentHighlight>>> {
1389 let position = position.to_point_utf16(buffer.read(cx));
1390 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1391 }
1392
    /// Searches all running language servers for workspace symbols matching
    /// `query`. On a remote project, the query is forwarded to the host via
    /// a `GetProjectSymbols` request instead.
    pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
        if self.is_local() {
            // Deduplicate language servers by pointer identity, since the
            // same server instance may be registered under multiple
            // (worktree, language) keys; query each server only once.
            let mut language_servers = HashMap::default();
            for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
                if let Some((worktree, language)) = self
                    .worktree_for_id(*worktree_id, cx)
                    .and_then(|worktree| worktree.read(cx).as_local())
                    .zip(self.languages.get_language(language_name))
                {
                    language_servers
                        .entry(Arc::as_ptr(language_server))
                        .or_insert((
                            language_server.clone(),
                            *worktree_id,
                            worktree.abs_path().clone(),
                            language.clone(),
                        ));
                }
            }

            // Fire one workspace/symbol request per distinct server.
            let mut requests = Vec::new();
            for (language_server, _, _, _) in language_servers.values() {
                requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
                    lsp::WorkspaceSymbolParams {
                        query: query.to_string(),
                        ..Default::default()
                    },
                ));
            }

            cx.spawn_weak(|this, cx| async move {
                let responses = futures::future::try_join_all(requests).await?;

                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, cx| {
                        // NOTE: relies on `values()` and `into_values()`
                        // yielding entries in the same order for the same
                        // (unmodified) map, so responses pair with the
                        // servers that produced them.
                        for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
                            language_servers.into_values().zip(responses)
                        {
                            symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
                                |lsp_symbol| {
                                    let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
                                    // Prefer resolving the symbol's path
                                    // against one of our worktrees; fall back
                                    // to a path relative to the server's
                                    // worktree root.
                                    let mut worktree_id = source_worktree_id;
                                    let path;
                                    if let Some((worktree, rel_path)) =
                                        this.find_local_worktree(&abs_path, cx)
                                    {
                                        worktree_id = worktree.read(cx).id();
                                        path = rel_path;
                                    } else {
                                        path = relativize_path(&worktree_abs_path, &abs_path);
                                    }

                                    let label = language
                                        .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(lsp_symbol.name.clone(), None)
                                        });
                                    // Sign the symbol's path so remote peers
                                    // can later open it via this project.
                                    let signature = this.symbol_signature(worktree_id, &path);

                                    Some(Symbol {
                                        source_worktree_id,
                                        worktree_id,
                                        language_name: language.name().to_string(),
                                        name: lsp_symbol.name,
                                        kind: lsp_symbol.kind,
                                        label,
                                        path,
                                        range: range_from_lsp(lsp_symbol.location.range),
                                        signature,
                                    })
                                },
                            ));
                        }
                    })
                }

                Ok(symbols)
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: ask the host for its symbols.
            let request = self.client.request(proto::GetProjectSymbols {
                project_id,
                query: query.to_string(),
            });
            cx.spawn_weak(|this, cx| async move {
                let response = request.await?;
                let mut symbols = Vec::new();
                if let Some(this) = this.upgrade(&cx) {
                    this.read_with(&cx, |this, _| {
                        symbols.extend(
                            response
                                .symbols
                                .into_iter()
                                .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
                        );
                    })
                }
                Ok(symbols)
            })
        } else {
            // Not local and not connected to a host: no symbols available.
            Task::ready(Ok(Default::default()))
        }
    }
1496
1497 pub fn open_buffer_for_symbol(
1498 &mut self,
1499 symbol: &Symbol,
1500 cx: &mut ModelContext<Self>,
1501 ) -> Task<Result<ModelHandle<Buffer>>> {
1502 if self.is_local() {
1503 let language_server = if let Some(server) = self
1504 .language_servers
1505 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1506 {
1507 server.clone()
1508 } else {
1509 return Task::ready(Err(anyhow!(
1510 "language server for worktree and language not found"
1511 )));
1512 };
1513
1514 let worktree_abs_path = if let Some(worktree_abs_path) = self
1515 .worktree_for_id(symbol.worktree_id, cx)
1516 .and_then(|worktree| worktree.read(cx).as_local())
1517 .map(|local_worktree| local_worktree.abs_path())
1518 {
1519 worktree_abs_path
1520 } else {
1521 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1522 };
1523 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1524 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1525 uri
1526 } else {
1527 return Task::ready(Err(anyhow!("invalid symbol path")));
1528 };
1529
1530 self.open_local_buffer_via_lsp(
1531 symbol_uri,
1532 symbol.language_name.clone(),
1533 language_server,
1534 cx,
1535 )
1536 } else if let Some(project_id) = self.remote_id() {
1537 let request = self.client.request(proto::OpenBufferForSymbol {
1538 project_id,
1539 symbol: Some(serialize_symbol(symbol)),
1540 });
1541 cx.spawn(|this, mut cx| async move {
1542 let response = request.await?;
1543 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1544 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1545 .await
1546 })
1547 } else {
1548 Task::ready(Err(anyhow!("project does not have a remote id")))
1549 }
1550 }
1551
    /// Requests code completions at `position` in the given buffer, either
    /// from the buffer's language server (local) or from the remote host.
    pub fn completions<T: ToPointUtf16>(
        &self,
        source_buffer_handle: &ModelHandle<Buffer>,
        position: T,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<Completion>>> {
        let source_buffer_handle = source_buffer_handle.clone();
        let source_buffer = source_buffer_handle.read(cx);
        let buffer_id = source_buffer.remote_id();
        let language = source_buffer.language().cloned();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(source_buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffer isn't backed by a project file; no completions.
            return Task::ready(Ok(Default::default()));
        };

        let position = position.to_point_utf16(source_buffer);
        // Anchor *after* the position so the anchor stays put as the user
        // keeps typing at the cursor; also used in the remote request.
        let anchor = source_buffer.anchor_after(position);

        if worktree.read(cx).as_local().is_some() {
            // Local path: ask the buffer's language server directly.
            let buffer_abs_path = buffer_abs_path.unwrap();
            let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
                server
            } else {
                return Task::ready(Ok(Default::default()));
            };

            cx.spawn(|_, cx| async move {
                let completions = lang_server
                    .request::<lsp::request::Completion>(lsp::CompletionParams {
                        text_document_position: lsp::TextDocumentPositionParams::new(
                            lsp::TextDocumentIdentifier::new(
                                lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                            ),
                            position.to_lsp_position(),
                        ),
                        context: Default::default(),
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                    })
                    .await
                    .context("lsp completion request failed")?;

                // Servers may respond with a bare array or a CompletionList.
                let completions = if let Some(completions) = completions {
                    match completions {
                        lsp::CompletionResponse::Array(completions) => completions,
                        lsp::CompletionResponse::List(list) => list.items,
                    }
                } else {
                    Default::default()
                };

                // `this` here is the buffer snapshot, used to clip and
                // anchor the completion's replacement range.
                source_buffer_handle.read_with(&cx, |this, _| {
                    Ok(completions
                        .into_iter()
                        .filter_map(|lsp_completion| {
                            let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
                                lsp::CompletionTextEdit::Edit(edit) => {
                                    (range_from_lsp(edit.range), edit.new_text.clone())
                                }
                                lsp::CompletionTextEdit::InsertAndReplace(_) => {
                                    log::info!("unsupported insert/replace completion");
                                    return None;
                                }
                            };

                            // Discard completions whose edit range no longer
                            // lies within the (possibly changed) buffer.
                            let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
                            let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
                            if clipped_start == old_range.start && clipped_end == old_range.end {
                                Some(Completion {
                                    old_range: this.anchor_before(old_range.start)
                                        ..this.anchor_after(old_range.end),
                                    new_text,
                                    label: language
                                        .as_ref()
                                        .and_then(|l| l.label_for_completion(&lsp_completion))
                                        .unwrap_or_else(|| {
                                            CodeLabel::plain(
                                                lsp_completion.label.clone(),
                                                lsp_completion.filter_text.as_deref(),
                                            )
                                        }),
                                    lsp_completion,
                                })
                            } else {
                                None
                            }
                        })
                        .collect())
                })
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote path: forward the request (with the buffer version so
            // the host can wait for our edits) to the host.
            let rpc = self.client.clone();
            let message = proto::GetCompletions {
                project_id,
                buffer_id,
                position: Some(language::proto::serialize_anchor(&anchor)),
                version: (&source_buffer.version()).into(),
            };
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc.request(message).await?;

                // Wait until our buffer has caught up with the version the
                // host computed completions against.
                source_buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .completions
                    .into_iter()
                    .map(|completion| {
                        language::proto::deserialize_completion(completion, language.as_ref())
                    })
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1675
    /// Resolves a completion item and applies any `additional_text_edits` it
    /// carries (e.g. auto-inserted imports) to the buffer.
    ///
    /// Returns the transaction containing those edits, or `None` when the
    /// resolved completion has no additional edits. When `push_to_history`
    /// is false, the transaction is removed from the buffer's undo history.
    pub fn apply_additional_edits_for_completion(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        completion: Completion,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Option<Transaction>>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();

        if self.is_local() {
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };

            cx.spawn(|_, mut cx| async move {
                // `completionItem/resolve` fills in fields (like
                // additional_text_edits) that may be omitted initially.
                let resolved_completion = lang_server
                    .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
                    .await?;
                if let Some(edits) = resolved_completion.additional_text_edits {
                    let edits = buffer_handle
                        .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
                        .await?;
                    buffer_handle.update(&mut cx, |buffer, cx| {
                        // Group all additional edits in one transaction so
                        // they undo/redo as a unit.
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        let transaction = if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            // All edits were no-ops; no transaction created.
                            None
                        };
                        Ok(transaction)
                    })
                } else {
                    Ok(None)
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host resolves and applies the edits, then
            // sends back the transaction for us to replay.
            let client = self.client.clone();
            cx.spawn(|_, mut cx| async move {
                let response = client
                    .request(proto::ApplyCompletionAdditionalEdits {
                        project_id,
                        buffer_id,
                        completion: Some(language::proto::serialize_completion(&completion)),
                    })
                    .await?;

                if let Some(transaction) = response.transaction {
                    let transaction = language::proto::deserialize_transaction(transaction)?;
                    // Wait for the host's edits to arrive via the normal
                    // replication channel before exposing the transaction.
                    buffer_handle
                        .update(&mut cx, |buffer, _| {
                            buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                        })
                        .await;
                    if push_to_history {
                        buffer_handle.update(&mut cx, |buffer, _| {
                            buffer.push_transaction(transaction.clone(), Instant::now());
                        });
                    }
                    Ok(Some(transaction))
                } else {
                    Ok(None)
                }
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1754
    /// Requests the code actions available for `range` in the given buffer,
    /// either from the buffer's language server (local) or the remote host.
    ///
    /// Only quickfix and refactor action kinds are requested; commands
    /// (`lsp::CodeActionOrCommand::Command`) are filtered out.
    pub fn code_actions<T: ToOffset>(
        &self,
        buffer_handle: &ModelHandle<Buffer>,
        range: Range<T>,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<Vec<CodeAction>>> {
        let buffer_handle = buffer_handle.clone();
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id();
        let worktree;
        let buffer_abs_path;
        if let Some(file) = File::from_dyn(buffer.file()) {
            worktree = file.worktree.clone();
            buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
        } else {
            // Buffer isn't backed by a project file; no actions available.
            return Task::ready(Ok(Default::default()));
        };
        // Anchor the range so it survives concurrent edits.
        let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);

        if worktree.read(cx).as_local().is_some() {
            let buffer_abs_path = buffer_abs_path.unwrap();
            // Look up the language server registered for this worktree and
            // the buffer's language.
            let lang_name;
            let lang_server;
            if let Some(lang) = buffer.language() {
                lang_name = lang.name().to_string();
                if let Some(server) = self
                    .language_servers
                    .get(&(worktree.read(cx).id(), lang_name.clone()))
                {
                    lang_server = server.clone();
                } else {
                    return Task::ready(Ok(Default::default()));
                };
            } else {
                return Task::ready(Ok(Default::default()));
            }

            let lsp_range = lsp::Range::new(
                range.start.to_point_utf16(buffer).to_lsp_position(),
                range.end.to_point_utf16(buffer).to_lsp_position(),
            );
            cx.foreground().spawn(async move {
                Ok(lang_server
                    .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
                        text_document: lsp::TextDocumentIdentifier::new(
                            lsp::Url::from_file_path(buffer_abs_path).unwrap(),
                        ),
                        range: lsp_range,
                        work_done_progress_params: Default::default(),
                        partial_result_params: Default::default(),
                        context: lsp::CodeActionContext {
                            diagnostics: Default::default(),
                            only: Some(vec![
                                lsp::CodeActionKind::QUICKFIX,
                                lsp::CodeActionKind::REFACTOR,
                                lsp::CodeActionKind::REFACTOR_EXTRACT,
                            ]),
                        },
                    })
                    .await?
                    .unwrap_or_default()
                    .into_iter()
                    .filter_map(|entry| {
                        // Bare commands are not supported; keep only actions.
                        if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
                            Some(CodeAction {
                                range: range.clone(),
                                lsp_action,
                            })
                        } else {
                            None
                        }
                    })
                    .collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: forward to the host as anchors.
            let rpc = self.client.clone();
            cx.spawn_weak(|_, mut cx| async move {
                let response = rpc
                    .request(proto::GetCodeActions {
                        project_id,
                        buffer_id,
                        start: Some(language::proto::serialize_anchor(&range.start)),
                        end: Some(language::proto::serialize_anchor(&range.end)),
                    })
                    .await?;

                // Catch up to the version the host computed actions against.
                buffer_handle
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_version(response.version.into())
                    })
                    .await;

                response
                    .actions
                    .into_iter()
                    .map(language::proto::deserialize_code_action)
                    .collect()
            })
        } else {
            Task::ready(Ok(Default::default()))
        }
    }
1857
    /// Applies a previously-fetched code action to the project, returning
    /// the resulting `ProjectTransaction` (which may touch multiple buffers
    /// via a workspace edit).
    ///
    /// Locally, the action is first resolved with the language server — by
    /// rewriting its serialized range in the `data` payload when present,
    /// or by re-querying the actions for the range otherwise.
    pub fn apply_code_action(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        mut action: CodeAction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        if self.is_local() {
            let buffer = buffer_handle.read(cx);
            let lang_name = if let Some(lang) = buffer.language() {
                lang.name().to_string()
            } else {
                return Task::ready(Ok(Default::default()));
            };
            let lang_server = if let Some(language_server) = buffer.language_server() {
                language_server.clone()
            } else {
                return Task::ready(Err(anyhow!("buffer does not have a language server")));
            };
            // The action's anchored range, resolved against the buffer's
            // current contents.
            let range = action.range.to_point_utf16(buffer);

            cx.spawn(|this, mut cx| async move {
                if let Some(lsp_range) = action
                    .lsp_action
                    .data
                    .as_mut()
                    .and_then(|d| d.get_mut("codeActionParams"))
                    .and_then(|d| d.get_mut("range"))
                {
                    // The server attached opaque resolve data; patch the
                    // range inside it to reflect any edits made since the
                    // action was fetched, then resolve the action.
                    *lsp_range = serde_json::to_value(&lsp::Range::new(
                        range.start.to_lsp_position(),
                        range.end.to_lsp_position(),
                    ))
                    .unwrap();
                    action.lsp_action = lang_server
                        .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
                        .await?;
                } else {
                    // No resolve data: re-fetch the actions for this range
                    // and pick the one with a matching title.
                    let actions = this
                        .update(&mut cx, |this, cx| {
                            this.code_actions(&buffer_handle, action.range, cx)
                        })
                        .await?;
                    action.lsp_action = actions
                        .into_iter()
                        .find(|a| a.lsp_action.title == action.lsp_action.title)
                        .ok_or_else(|| anyhow!("code action is outdated"))?
                        .lsp_action;
                }

                if let Some(edit) = action.lsp_action.edit {
                    Self::deserialize_workspace_edit(
                        this,
                        edit,
                        push_to_history,
                        lang_name,
                        lang_server,
                        &mut cx,
                    )
                    .await
                } else {
                    // Action resolved to no edit; nothing to apply.
                    Ok(ProjectTransaction::default())
                }
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: the host applies the action and sends back
            // the resulting transaction for us to replay.
            let client = self.client.clone();
            let request = proto::ApplyCodeAction {
                project_id,
                buffer_id: buffer_handle.read(cx).remote_id(),
                action: Some(language::proto::serialize_code_action(&action)),
            };
            cx.spawn(|this, mut cx| async move {
                let response = client
                    .request(request)
                    .await?
                    .transaction
                    .ok_or_else(|| anyhow!("missing transaction"))?;
                this.update(&mut cx, |this, cx| {
                    this.deserialize_project_transaction(response, push_to_history, cx)
                })
                .await
            })
        } else {
            Task::ready(Err(anyhow!("project does not have a remote id")))
        }
    }
1944
1945 async fn deserialize_workspace_edit(
1946 this: ModelHandle<Self>,
1947 edit: lsp::WorkspaceEdit,
1948 push_to_history: bool,
1949 language_name: String,
1950 language_server: Arc<LanguageServer>,
1951 cx: &mut AsyncAppContext,
1952 ) -> Result<ProjectTransaction> {
1953 let fs = this.read_with(cx, |this, _| this.fs.clone());
1954 let mut operations = Vec::new();
1955 if let Some(document_changes) = edit.document_changes {
1956 match document_changes {
1957 lsp::DocumentChanges::Edits(edits) => {
1958 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
1959 }
1960 lsp::DocumentChanges::Operations(ops) => operations = ops,
1961 }
1962 } else if let Some(changes) = edit.changes {
1963 operations.extend(changes.into_iter().map(|(uri, edits)| {
1964 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
1965 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
1966 uri,
1967 version: None,
1968 },
1969 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
1970 })
1971 }));
1972 }
1973
1974 let mut project_transaction = ProjectTransaction::default();
1975 for operation in operations {
1976 match operation {
1977 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
1978 let abs_path = op
1979 .uri
1980 .to_file_path()
1981 .map_err(|_| anyhow!("can't convert URI to path"))?;
1982
1983 if let Some(parent_path) = abs_path.parent() {
1984 fs.create_dir(parent_path).await?;
1985 }
1986 if abs_path.ends_with("/") {
1987 fs.create_dir(&abs_path).await?;
1988 } else {
1989 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
1990 .await?;
1991 }
1992 }
1993 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
1994 let source_abs_path = op
1995 .old_uri
1996 .to_file_path()
1997 .map_err(|_| anyhow!("can't convert URI to path"))?;
1998 let target_abs_path = op
1999 .new_uri
2000 .to_file_path()
2001 .map_err(|_| anyhow!("can't convert URI to path"))?;
2002 fs.rename(
2003 &source_abs_path,
2004 &target_abs_path,
2005 op.options.map(Into::into).unwrap_or_default(),
2006 )
2007 .await?;
2008 }
2009 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2010 let abs_path = op
2011 .uri
2012 .to_file_path()
2013 .map_err(|_| anyhow!("can't convert URI to path"))?;
2014 let options = op.options.map(Into::into).unwrap_or_default();
2015 if abs_path.ends_with("/") {
2016 fs.remove_dir(&abs_path, options).await?;
2017 } else {
2018 fs.remove_file(&abs_path, options).await?;
2019 }
2020 }
2021 lsp::DocumentChangeOperation::Edit(op) => {
2022 let buffer_to_edit = this
2023 .update(cx, |this, cx| {
2024 this.open_local_buffer_via_lsp(
2025 op.text_document.uri,
2026 language_name.clone(),
2027 language_server.clone(),
2028 cx,
2029 )
2030 })
2031 .await?;
2032
2033 let edits = buffer_to_edit
2034 .update(cx, |buffer, cx| {
2035 let edits = op.edits.into_iter().map(|edit| match edit {
2036 lsp::OneOf::Left(edit) => edit,
2037 lsp::OneOf::Right(edit) => edit.text_edit,
2038 });
2039 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2040 })
2041 .await?;
2042
2043 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2044 buffer.finalize_last_transaction();
2045 buffer.start_transaction();
2046 for (range, text) in edits {
2047 buffer.edit([range], text, cx);
2048 }
2049 let transaction = if buffer.end_transaction(cx).is_some() {
2050 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2051 if !push_to_history {
2052 buffer.forget_transaction(transaction.id);
2053 }
2054 Some(transaction)
2055 } else {
2056 None
2057 };
2058
2059 transaction
2060 });
2061 if let Some(transaction) = transaction {
2062 project_transaction.0.insert(buffer_to_edit, transaction);
2063 }
2064 }
2065 }
2066 }
2067
2068 Ok(project_transaction)
2069 }
2070
2071 pub fn prepare_rename<T: ToPointUtf16>(
2072 &self,
2073 buffer: ModelHandle<Buffer>,
2074 position: T,
2075 cx: &mut ModelContext<Self>,
2076 ) -> Task<Result<Option<Range<Anchor>>>> {
2077 let position = position.to_point_utf16(buffer.read(cx));
2078 self.request_lsp(buffer, PrepareRename { position }, cx)
2079 }
2080
2081 pub fn perform_rename<T: ToPointUtf16>(
2082 &self,
2083 buffer: ModelHandle<Buffer>,
2084 position: T,
2085 new_name: String,
2086 push_to_history: bool,
2087 cx: &mut ModelContext<Self>,
2088 ) -> Task<Result<ProjectTransaction>> {
2089 let position = position.to_point_utf16(buffer.read(cx));
2090 self.request_lsp(
2091 buffer,
2092 PerformRename {
2093 position,
2094 new_name,
2095 push_to_history,
2096 },
2097 cx,
2098 )
2099 }
2100
    /// Searches the whole project for `query`, returning matching anchor
    /// ranges grouped by buffer. Locally this runs a three-stage pipeline
    /// (scan files -> open matching buffers -> search buffer contents);
    /// remotely it delegates to the host over RPC.
    pub fn search(
        &self,
        query: SearchQuery,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
        if self.is_local() {
            // Snapshot all visible local worktrees so background workers can
            // scan them without further access to app state.
            let snapshots = self
                .visible_worktrees(cx)
                .filter_map(|tree| {
                    let tree = tree.read(cx).as_local()?;
                    Some(tree.snapshot())
                })
                .collect::<Vec<_>>();

            let background = cx.background().clone();
            let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
            if path_count == 0 {
                return Task::ready(Ok(Default::default()));
            }
            let workers = background.num_cpus().min(path_count);
            let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
            // Stage 1: partition the project's visible files evenly across
            // `workers` background tasks; each task streams paths whose file
            // contents may match down `matching_paths_tx`.
            cx.background()
                .spawn({
                    let fs = self.fs.clone();
                    let background = cx.background().clone();
                    let query = query.clone();
                    async move {
                        let fs = &fs;
                        let query = &query;
                        let matching_paths_tx = &matching_paths_tx;
                        let paths_per_worker = (path_count + workers - 1) / workers;
                        let snapshots = &snapshots;
                        background
                            .scoped(|scope| {
                                for worker_ix in 0..workers {
                                    // Each worker owns the half-open global file
                                    // index range [worker_start_ix, worker_end_ix).
                                    let worker_start_ix = worker_ix * paths_per_worker;
                                    let worker_end_ix = worker_start_ix + paths_per_worker;
                                    scope.spawn(async move {
                                        let mut snapshot_start_ix = 0;
                                        let mut abs_path = PathBuf::new();
                                        for snapshot in snapshots {
                                            let snapshot_end_ix =
                                                snapshot_start_ix + snapshot.visible_file_count();
                                            if worker_end_ix <= snapshot_start_ix {
                                                // This snapshot starts past the
                                                // worker's range; done.
                                                break;
                                            } else if worker_start_ix > snapshot_end_ix {
                                                // Worker's range begins after this
                                                // snapshot; skip ahead.
                                                snapshot_start_ix = snapshot_end_ix;
                                                continue;
                                            } else {
                                                // Overlap: translate the global
                                                // range into snapshot-local indices.
                                                let start_in_snapshot = worker_start_ix
                                                    .saturating_sub(snapshot_start_ix);
                                                let end_in_snapshot =
                                                    cmp::min(worker_end_ix, snapshot_end_ix)
                                                        - snapshot_start_ix;

                                                for entry in snapshot
                                                    .files(false, start_in_snapshot)
                                                    .take(end_in_snapshot - start_in_snapshot)
                                                {
                                                    // Receiver dropped; stop scanning.
                                                    if matching_paths_tx.is_closed() {
                                                        break;
                                                    }

                                                    // Reuse one PathBuf across entries
                                                    // to avoid per-file allocations.
                                                    abs_path.clear();
                                                    abs_path.push(&snapshot.abs_path());
                                                    abs_path.push(&entry.path);
                                                    let matches = if let Some(file) =
                                                        fs.open_sync(&abs_path).await.log_err()
                                                    {
                                                        query.detect(file).unwrap_or(false)
                                                    } else {
                                                        false
                                                    };

                                                    if matches {
                                                        let project_path =
                                                            (snapshot.id(), entry.path.clone());
                                                        if matching_paths_tx
                                                            .send(project_path)
                                                            .await
                                                            .is_err()
                                                        {
                                                            break;
                                                        }
                                                    }
                                                }

                                                snapshot_start_ix = snapshot_end_ix;
                                            }
                                        }
                                    });
                                }
                            })
                            .await;
                    }
                })
                .detach();

            // Stage 2: feed (buffer, snapshot) pairs to the searchers. Buffers
            // that are already open are seeded first (their in-memory contents
            // may differ from disk), then buffers are opened for each matching
            // path that wasn't already open.
            let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
            let open_buffers = self
                .opened_buffers
                .values()
                .filter_map(|b| b.upgrade(cx))
                .collect::<HashSet<_>>();
            cx.spawn(|this, cx| async move {
                for buffer in &open_buffers {
                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                    buffers_tx.send((buffer.clone(), snapshot)).await?;
                }

                // Shared set of already-sent buffers so a disk match for an
                // open buffer isn't searched twice.
                let open_buffers = Rc::new(RefCell::new(open_buffers));
                while let Some(project_path) = matching_paths_rx.next().await {
                    if buffers_tx.is_closed() {
                        break;
                    }

                    let this = this.clone();
                    let open_buffers = open_buffers.clone();
                    let buffers_tx = buffers_tx.clone();
                    cx.spawn(|mut cx| async move {
                        if let Some(buffer) = this
                            .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
                            .await
                            .log_err()
                        {
                            if open_buffers.borrow_mut().insert(buffer.clone()) {
                                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
                                buffers_tx.send((buffer, snapshot)).await?;
                            }
                        }

                        Ok::<_, anyhow::Error>(())
                    })
                    .detach();
                }

                Ok::<_, anyhow::Error>(())
            })
            .detach_and_log_err(cx);

            // Stage 3: search each snapshot on the background pool. Every
            // worker accumulates into its own map (no locking); the per-worker
            // maps are merged into the final result at the end.
            let background = cx.background().clone();
            cx.background().spawn(async move {
                let query = &query;
                let mut matched_buffers = Vec::new();
                for _ in 0..workers {
                    matched_buffers.push(HashMap::default());
                }
                background
                    .scoped(|scope| {
                        for worker_matched_buffers in matched_buffers.iter_mut() {
                            let mut buffers_rx = buffers_rx.clone();
                            scope.spawn(async move {
                                while let Some((buffer, snapshot)) = buffers_rx.next().await {
                                    let buffer_matches = query
                                        .search(snapshot.as_rope())
                                        .await
                                        .iter()
                                        .map(|range| {
                                            snapshot.anchor_before(range.start)
                                                ..snapshot.anchor_after(range.end)
                                        })
                                        .collect::<Vec<_>>();
                                    if !buffer_matches.is_empty() {
                                        worker_matched_buffers
                                            .insert(buffer.clone(), buffer_matches);
                                    }
                                }
                            });
                        }
                    })
                    .await;
                Ok(matched_buffers.into_iter().flatten().collect())
            })
        } else if let Some(project_id) = self.remote_id() {
            // Remote project: send the query to the host and deserialize the
            // returned locations into local buffer replicas.
            let request = self.client.request(query.to_proto(project_id));
            cx.spawn(|this, mut cx| async move {
                let response = request.await?;
                let mut result = HashMap::default();
                for location in response.locations {
                    let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
                    let target_buffer = this
                        .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                        .await?;
                    let start = location
                        .start
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target start"))?;
                    let end = location
                        .end
                        .and_then(deserialize_anchor)
                        .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_insert(Vec::new())
                        .push(start..end)
                }
                Ok(result)
            })
        } else {
            // Disconnected remote project: nothing to search.
            Task::ready(Ok(Default::default()))
        }
    }
2303
    /// Issues `request` for `buffer_handle`: locally against the buffer's
    /// language server, remotely via RPC to the host. Falls through to a
    /// default response when the buffer has no server or the project is
    /// a disconnected remote.
    fn request_lsp<R: LspCommand>(
        &self,
        buffer_handle: ModelHandle<Buffer>,
        request: R,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<R::Response>>
    where
        <R::LspRequest as lsp::request::Request>::Result: Send,
    {
        let buffer = buffer_handle.read(cx);
        if self.is_local() {
            // Only local files with an attached language server can be queried.
            let file = File::from_dyn(buffer.file()).and_then(File::as_local);
            if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
                let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
                return cx.spawn(|this, cx| async move {
                    let response = language_server
                        .request::<R::LspRequest>(lsp_params)
                        .await
                        .context("lsp request failed")?;
                    // Translate the raw LSP result into this command's response type.
                    request
                        .response_from_lsp(response, this, buffer_handle, cx)
                        .await
                });
            }
        } else if let Some(project_id) = self.remote_id() {
            let rpc = self.client.clone();
            let message = request.to_proto(project_id, buffer);
            return cx.spawn(|this, cx| async move {
                let response = rpc.request(message).await?;
                request
                    .response_from_proto(response, this, buffer_handle, cx)
                    .await
            });
        }
        // No server / not connected: succeed with the response type's default.
        Task::ready(Ok(Default::default()))
    }
2340
2341 pub fn find_or_create_local_worktree(
2342 &self,
2343 abs_path: impl AsRef<Path>,
2344 visible: bool,
2345 cx: &mut ModelContext<Self>,
2346 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2347 let abs_path = abs_path.as_ref();
2348 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2349 Task::ready(Ok((tree.clone(), relative_path.into())))
2350 } else {
2351 let worktree = self.create_local_worktree(abs_path, visible, cx);
2352 cx.foreground()
2353 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2354 }
2355 }
2356
2357 pub fn find_local_worktree(
2358 &self,
2359 abs_path: &Path,
2360 cx: &AppContext,
2361 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2362 for tree in self.worktrees(cx) {
2363 if let Some(relative_path) = tree
2364 .read(cx)
2365 .as_local()
2366 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2367 {
2368 return Some((tree.clone(), relative_path.into()));
2369 }
2370 }
2371 None
2372 }
2373
2374 pub fn is_shared(&self) -> bool {
2375 match &self.client_state {
2376 ProjectClientState::Local { is_shared, .. } => *is_shared,
2377 ProjectClientState::Remote { .. } => false,
2378 }
2379 }
2380
    /// Builds a new local worktree rooted at `abs_path`, adds it to the
    /// project, and — when the project is connected — registers it with the
    /// server, additionally sharing its contents if the project is shared.
    fn create_local_worktree(
        &self,
        abs_path: impl AsRef<Path>,
        visible: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Worktree>>> {
        let fs = self.fs.clone();
        let client = self.client.clone();
        let path = Arc::from(abs_path.as_ref());
        cx.spawn(|project, mut cx| async move {
            let worktree = Worktree::local(client.clone(), path, visible, fs, &mut cx).await?;

            // Capture the connection state in the same update that adds the
            // worktree, so registration matches what was added.
            let (remote_project_id, is_shared) = project.update(&mut cx, |project, cx| {
                project.add_worktree(&worktree, cx);
                (project.remote_id(), project.is_shared())
            });

            if let Some(project_id) = remote_project_id {
                // Announce the worktree to the server first; only then
                // replicate its contents when the project is already shared.
                worktree
                    .update(&mut cx, |worktree, cx| {
                        worktree.as_local_mut().unwrap().register(project_id, cx)
                    })
                    .await?;
                if is_shared {
                    worktree
                        .update(&mut cx, |worktree, cx| {
                            worktree.as_local_mut().unwrap().share(project_id, cx)
                        })
                        .await?;
                }
            }

            Ok(worktree)
        })
    }
2416
2417 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2418 self.worktrees.retain(|worktree| {
2419 worktree
2420 .upgrade(cx)
2421 .map_or(false, |w| w.read(cx).id() != id)
2422 });
2423 cx.notify();
2424 }
2425
    /// Wires a worktree into the project: observes it for change
    /// notifications, forwards local worktree events to open buffers, and
    /// decides whether to retain it strongly or weakly.
    fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
        cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
        if worktree.read(cx).is_local() {
            // Local worktree events (e.g. disk scans) may rebind buffer files.
            cx.subscribe(&worktree, |this, worktree, _, cx| {
                this.update_local_worktree_buffers(worktree, cx);
            })
            .detach();
        }

        // Hold strongly when the worktree must outlive outside interest:
        // shared projects, visible worktrees, and all remote worktrees.
        let push_strong_handle = {
            let worktree = worktree.read(cx);
            self.is_shared() || worktree.is_visible() || worktree.is_remote()
        };
        if push_strong_handle {
            self.worktrees
                .push(WorktreeHandle::Strong(worktree.clone()));
        } else {
            // Weakly held: prune the entry once the worktree is dropped.
            cx.observe_release(&worktree, |this, cx| {
                this.worktrees
                    .retain(|worktree| worktree.upgrade(cx).is_some());
                cx.notify();
            })
            .detach();
            self.worktrees
                .push(WorktreeHandle::Weak(worktree.downgrade()));
        }
        cx.notify();
    }
2454
    /// After a local worktree changes on disk, re-resolves the `File` backing
    /// every open buffer that belongs to that worktree (entries may have been
    /// renamed, recreated, or deleted) and notifies remote peers.
    fn update_local_worktree_buffers(
        &mut self,
        worktree_handle: ModelHandle<Worktree>,
        cx: &mut ModelContext<Self>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        let mut buffers_to_delete = Vec::new();
        for (buffer_id, buffer) in &self.opened_buffers {
            if let Some(buffer) = buffer.upgrade(cx) {
                buffer.update(cx, |buffer, cx| {
                    if let Some(old_file) = File::from_dyn(buffer.file()) {
                        // Only buffers backed by this worktree are affected.
                        if old_file.worktree != worktree_handle {
                            return;
                        }

                        // Resolution order: (1) same entry id (survives renames),
                        // (2) same path (entry was recreated), (3) neither —
                        // the file is gone; keep the old path/mtime with no entry.
                        let new_file = if let Some(entry) = old_file
                            .entry_id
                            .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else if let Some(entry) =
                            snapshot.entry_for_path(old_file.path().as_ref())
                        {
                            File {
                                is_local: true,
                                entry_id: Some(entry.id),
                                mtime: entry.mtime,
                                path: entry.path.clone(),
                                worktree: worktree_handle.clone(),
                            }
                        } else {
                            File {
                                is_local: true,
                                entry_id: None,
                                path: old_file.path().clone(),
                                mtime: old_file.mtime(),
                                worktree: worktree_handle.clone(),
                            }
                        };

                        // Propagate the new file metadata to collaborators.
                        if let Some(project_id) = self.remote_id() {
                            self.client
                                .send(proto::UpdateBufferFile {
                                    project_id,
                                    buffer_id: *buffer_id as u64,
                                    file: Some(new_file.to_proto()),
                                })
                                .log_err();
                        }
                        buffer.file_updated(Box::new(new_file), cx).detach();
                    }
                });
            } else {
                // Buffer was dropped; remove it after the iteration below.
                buffers_to_delete.push(*buffer_id);
            }
        }

        for buffer_id in buffers_to_delete {
            self.opened_buffers.remove(&buffer_id);
        }
    }
2522
2523 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2524 let new_active_entry = entry.and_then(|project_path| {
2525 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2526 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2527 Some(ProjectEntry {
2528 worktree_id: project_path.worktree_id,
2529 entry_id: entry.id,
2530 })
2531 });
2532 if new_active_entry != self.active_entry {
2533 self.active_entry = new_active_entry;
2534 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2535 }
2536 }
2537
    /// Whether any language server is currently producing disk-based
    /// diagnostics (e.g. cargo check) for this project.
    pub fn is_running_disk_based_diagnostics(&self) -> bool {
        self.language_servers_with_diagnostics_running > 0
    }
2541
2542 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2543 let mut summary = DiagnosticSummary::default();
2544 for (_, path_summary) in self.diagnostic_summaries(cx) {
2545 summary.error_count += path_summary.error_count;
2546 summary.warning_count += path_summary.warning_count;
2547 summary.info_count += path_summary.info_count;
2548 summary.hint_count += path_summary.hint_count;
2549 }
2550 summary
2551 }
2552
2553 pub fn diagnostic_summaries<'a>(
2554 &'a self,
2555 cx: &'a AppContext,
2556 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2557 self.worktrees(cx).flat_map(move |worktree| {
2558 let worktree = worktree.read(cx);
2559 let worktree_id = worktree.id();
2560 worktree
2561 .diagnostic_summaries()
2562 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2563 })
2564 }
2565
    /// Increments the count of language servers running disk-based
    /// diagnostics, emitting `DiskBasedDiagnosticsStarted` only on the
    /// transition from idle to running.
    pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
        self.language_servers_with_diagnostics_running += 1;
        if self.language_servers_with_diagnostics_running == 1 {
            cx.emit(Event::DiskBasedDiagnosticsStarted);
        }
    }
2572
    /// Decrements the disk-based diagnostics counter. `Updated` fires on
    /// every completion; `Finished` fires only when the last running server
    /// becomes idle.
    pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
        cx.emit(Event::DiskBasedDiagnosticsUpdated);
        self.language_servers_with_diagnostics_running -= 1;
        if self.language_servers_with_diagnostics_running == 0 {
            cx.emit(Event::DiskBasedDiagnosticsFinished);
        }
    }
2580
    /// The project entry most recently activated via `set_active_path`, if any.
    pub fn active_entry(&self) -> Option<ProjectEntry> {
        self.active_entry
    }
2584
2585 // RPC message handlers
2586
    /// Handles the host stopping sharing: marks this remote replica as
    /// disconnected and drops all collaborators.
    async fn handle_unshare_project(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::UnshareProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            if let ProjectClientState::Remote {
                sharing_has_stopped,
                ..
            } = &mut this.client_state
            {
                *sharing_has_stopped = true;
                this.collaborators.clear();
                cx.notify();
            } else {
                // This message is only routed to remote projects; receiving it
                // on a local project is a protocol violation.
                unreachable!()
            }
        });

        Ok(())
    }
2609
2610 async fn handle_add_collaborator(
2611 this: ModelHandle<Self>,
2612 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2613 _: Arc<Client>,
2614 mut cx: AsyncAppContext,
2615 ) -> Result<()> {
2616 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2617 let collaborator = envelope
2618 .payload
2619 .collaborator
2620 .take()
2621 .ok_or_else(|| anyhow!("empty collaborator"))?;
2622
2623 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2624 this.update(&mut cx, |this, cx| {
2625 this.collaborators
2626 .insert(collaborator.peer_id, collaborator);
2627 cx.notify();
2628 });
2629
2630 Ok(())
2631 }
2632
2633 async fn handle_remove_collaborator(
2634 this: ModelHandle<Self>,
2635 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2636 _: Arc<Client>,
2637 mut cx: AsyncAppContext,
2638 ) -> Result<()> {
2639 this.update(&mut cx, |this, cx| {
2640 let peer_id = PeerId(envelope.payload.peer_id);
2641 let replica_id = this
2642 .collaborators
2643 .remove(&peer_id)
2644 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2645 .replica_id;
2646 for (_, buffer) in &this.opened_buffers {
2647 if let Some(buffer) = buffer.upgrade(cx) {
2648 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2649 }
2650 }
2651 cx.notify();
2652 Ok(())
2653 })
2654 }
2655
    /// Handles the host registering a new worktree: creates an empty remote
    /// worktree replica and kicks off loading its contents in the background.
    async fn handle_register_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::RegisterWorktree>,
        client: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
            let replica_id = this.replica_id();
            // Entries and diagnostics start empty; they stream in via the
            // load task and subsequent UpdateWorktree messages.
            let worktree = proto::Worktree {
                id: envelope.payload.worktree_id,
                root_name: envelope.payload.root_name,
                entries: Default::default(),
                diagnostic_summaries: Default::default(),
                visible: envelope.payload.visible,
            };
            let (worktree, load_task) =
                Worktree::remote(remote_id, replica_id, worktree, client, cx);
            this.add_worktree(&worktree, cx);
            load_task.detach();
            Ok(())
        })
    }
2679
    /// Handles the host removing a worktree: drops the local replica.
    async fn handle_unregister_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UnregisterWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            this.remove_worktree(worktree_id, cx);
            Ok(())
        })
    }
2692
    /// Handles incremental worktree updates from the host, applying them to
    /// the matching remote worktree. Updates for unknown worktrees are
    /// silently dropped (the worktree may have been unregistered already).
    async fn handle_update_worktree(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateWorktree>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                worktree.update(cx, |worktree, _| {
                    let worktree = worktree.as_remote_mut().unwrap();
                    worktree.update_from_remote(envelope)
                })?;
            }
            Ok(())
        })
    }
2710
    /// Handles a per-path diagnostic summary pushed by the host, storing it
    /// on the matching remote worktree and notifying listeners.
    async fn handle_update_diagnostic_summary(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
            if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
                if let Some(summary) = envelope.payload.summary {
                    let project_path = ProjectPath {
                        worktree_id,
                        path: Path::new(&summary.path).into(),
                    };
                    worktree.update(cx, |worktree, _| {
                        worktree
                            .as_remote_mut()
                            .unwrap()
                            .update_diagnostic_summary(project_path.path.clone(), &summary);
                    });
                    cx.emit(Event::DiagnosticsUpdated(project_path));
                }
            }
            Ok(())
        })
    }
2737
    /// Handles the host reporting that a disk-based diagnostics pass started.
    async fn handle_disk_based_diagnostics_updating(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
        Ok(())
    }
2747
    /// Handles the host reporting that a disk-based diagnostics pass finished.
    async fn handle_disk_based_diagnostics_updated(
        this: ModelHandle<Self>,
        _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
        Ok(())
    }
2757
2758 async fn handle_update_buffer(
2759 this: ModelHandle<Self>,
2760 envelope: TypedEnvelope<proto::UpdateBuffer>,
2761 _: Arc<Client>,
2762 mut cx: AsyncAppContext,
2763 ) -> Result<()> {
2764 this.update(&mut cx, |this, cx| {
2765 let payload = envelope.payload.clone();
2766 let buffer_id = payload.buffer_id;
2767 let ops = payload
2768 .operations
2769 .into_iter()
2770 .map(|op| language::proto::deserialize_operation(op))
2771 .collect::<Result<Vec<_>, _>>()?;
2772 match this.opened_buffers.entry(buffer_id) {
2773 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2774 OpenBuffer::Strong(buffer) => {
2775 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2776 }
2777 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2778 OpenBuffer::Weak(_) => {}
2779 },
2780 hash_map::Entry::Vacant(e) => {
2781 e.insert(OpenBuffer::Loading(ops));
2782 }
2783 }
2784 Ok(())
2785 })
2786 }
2787
2788 async fn handle_update_buffer_file(
2789 this: ModelHandle<Self>,
2790 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2791 _: Arc<Client>,
2792 mut cx: AsyncAppContext,
2793 ) -> Result<()> {
2794 this.update(&mut cx, |this, cx| {
2795 let payload = envelope.payload.clone();
2796 let buffer_id = payload.buffer_id;
2797 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2798 let worktree = this
2799 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2800 .ok_or_else(|| anyhow!("no such worktree"))?;
2801 let file = File::from_proto(file, worktree.clone(), cx)?;
2802 let buffer = this
2803 .opened_buffers
2804 .get_mut(&buffer_id)
2805 .and_then(|b| b.upgrade(cx))
2806 .ok_or_else(|| anyhow!("no such buffer"))?;
2807 buffer.update(cx, |buffer, cx| {
2808 buffer.file_updated(Box::new(file), cx).detach();
2809 });
2810 Ok(())
2811 })
2812 }
2813
    /// Handles a guest asking the host to save a buffer. Fails if the host
    /// hasn't yet received all edits the guest's save depends on, then saves
    /// and reports the saved version and mtime back.
    async fn handle_save_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = envelope.payload.buffer_id;
        let requested_version = envelope.payload.version.try_into()?;

        let (project_id, buffer) = this.update(&mut cx, |this, cx| {
            let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
            let buffer = this
                .opened_buffers
                .get(&buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            Ok::<_, anyhow::Error>((project_id, buffer))
        })?;

        // Refuse to save until every edit the requester has seen has also
        // arrived here; otherwise we'd persist a stale state.
        if !buffer
            .read_with(&cx, |buffer, _| buffer.version())
            .observed_all(&requested_version)
        {
            Err(anyhow!("save request depends on unreceived edits"))?;
        }

        let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
        Ok(proto::BufferSaved {
            project_id,
            buffer_id,
            version: (&saved_version).into(),
            mtime: Some(mtime.into()),
        })
    }
2848
    /// Handles a guest requesting formatting of a set of buffers, returning
    /// the resulting edits as a serialized project transaction.
    async fn handle_format_buffers(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::FormatBuffers>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::FormatBuffersResponse> {
        let sender_id = envelope.original_sender_id()?;
        let format = this.update(&mut cx, |this, cx| {
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                buffers.insert(
                    this.opened_buffers
                        .get(buffer_id)
                        .map(|buffer| buffer.upgrade(cx).unwrap())
                        .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
                );
            }
            // `push_to_history: false` — the guest records undo state locally.
            Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
        })?;

        let project_transaction = format.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::FormatBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
2877
    /// Handles a guest requesting completions at an anchor position,
    /// rejecting requests that reference edits not yet received, and
    /// returning the completions along with the buffer version they apply to.
    async fn handle_get_completions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCompletions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCompletionsResponse> {
        let position = envelope
            .payload
            .position
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid position"))?;
        let version = clock::Global::from(envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // The anchor is only meaningful once we've seen the edits it refers to.
        if !buffer
            .read_with(&cx, |buffer, _| buffer.version())
            .observed_all(&version)
        {
            Err(anyhow!("completion request depends on unreceived edits"))?;
        }
        // Re-read the version at request time so the response states exactly
        // which buffer state the completions were computed against.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        let completions = this
            .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
            .await?;

        Ok(proto::GetCompletionsResponse {
            completions: completions
                .iter()
                .map(language::proto::serialize_completion)
                .collect(),
            version: (&version).into(),
        })
    }
2915
    /// Handles a guest confirming a completion: applies the completion's
    /// additional edits (e.g. auto-imports) and returns the transaction.
    async fn handle_apply_additional_edits_for_completion(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
        let apply_additional_edits = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            let language = buffer.read(cx).language();
            let completion = language::proto::deserialize_completion(
                envelope
                    .payload
                    .completion
                    .ok_or_else(|| anyhow!("invalid completion"))?,
                language,
            )?;
            // `push_to_history: false` — the guest owns the undo history.
            Ok::<_, anyhow::Error>(
                this.apply_additional_edits_for_completion(buffer, completion, false, cx),
            )
        })?;

        Ok(proto::ApplyCompletionAdditionalEditsResponse {
            transaction: apply_additional_edits
                .await?
                .as_ref()
                .map(language::proto::serialize_transaction),
        })
    }
2948
    /// Handles a guest requesting code actions for an anchor range, rejecting
    /// ranges that reference edits we haven't received yet.
    async fn handle_get_code_actions(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::GetCodeActions>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::GetCodeActionsResponse> {
        let start = envelope
            .payload
            .start
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = envelope
            .payload
            .end
            .and_then(language::proto::deserialize_anchor)
            .ok_or_else(|| anyhow!("invalid end"))?;
        let buffer = this.update(&mut cx, |this, cx| {
            this.opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
        })?;
        // Both anchors must refer to edits this replica has already observed.
        let version = buffer.read_with(&cx, |buffer, _| buffer.version());
        if !version.observed(start.timestamp) || !version.observed(end.timestamp) {
            Err(anyhow!("code action request references unreceived edits"))?;
        }
        let code_actions = this.update(&mut cx, |this, cx| {
            Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
        })?;

        Ok(proto::GetCodeActionsResponse {
            actions: code_actions
                .await?
                .iter()
                .map(language::proto::serialize_code_action)
                .collect(),
            version: (&version).into(),
        })
    }
2988
    /// Handles a guest applying a code action: runs it on the host and
    /// returns the resulting edits as a serialized project transaction.
    async fn handle_apply_code_action(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::ApplyCodeAction>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::ApplyCodeActionResponse> {
        let sender_id = envelope.original_sender_id()?;
        let action = language::proto::deserialize_code_action(
            envelope
                .payload
                .action
                .ok_or_else(|| anyhow!("invalid action"))?,
        )?;
        let apply_code_action = this.update(&mut cx, |this, cx| {
            let buffer = this
                .opened_buffers
                .get(&envelope.payload.buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
            // `push_to_history: false` — the guest owns the undo history.
            Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
        })?;

        let project_transaction = apply_code_action.await?;
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        });
        Ok(proto::ApplyCodeActionResponse {
            transaction: Some(project_transaction),
        })
    }
3019
    /// Generic handler bridging proto requests to `request_lsp`: deserializes
    /// the command, runs it against the local language server, and serializes
    /// the response back for the requesting peer.
    async fn handle_lsp_command<T: LspCommand>(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<T::ProtoRequest>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
    where
        <T::LspRequest as lsp::request::Request>::Result: Send,
    {
        let sender_id = envelope.original_sender_id()?;
        let (request, buffer_version) = this.update(&mut cx, |this, cx| {
            let buffer_id = T::buffer_id_from_proto(&envelope.payload);
            let buffer_handle = this
                .opened_buffers
                .get(&buffer_id)
                .map(|buffer| buffer.upgrade(cx).unwrap())
                .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
            let buffer = buffer_handle.read(cx);
            // Capture the version before awaiting so the response can be
            // interpreted against the state the request was issued on.
            let buffer_version = buffer.version();
            let request = T::from_proto(envelope.payload, this, buffer)?;
            Ok::<_, anyhow::Error>((this.request_lsp(buffer_handle, request, cx), buffer_version))
        })?;
        let response = request.await?;
        this.update(&mut cx, |this, cx| {
            Ok(T::response_to_proto(
                response,
                this,
                sender_id,
                &buffer_version,
                cx,
            ))
        })
    }
3053
3054 async fn handle_get_project_symbols(
3055 this: ModelHandle<Self>,
3056 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3057 _: Arc<Client>,
3058 mut cx: AsyncAppContext,
3059 ) -> Result<proto::GetProjectSymbolsResponse> {
3060 let symbols = this
3061 .update(&mut cx, |this, cx| {
3062 this.symbols(&envelope.payload.query, cx)
3063 })
3064 .await?;
3065
3066 Ok(proto::GetProjectSymbolsResponse {
3067 symbols: symbols.iter().map(serialize_symbol).collect(),
3068 })
3069 }
3070
    /// Handles a guest's project-wide search: runs the query locally and
    /// flattens each buffer's matches into serialized `Location`s.
    async fn handle_search_project(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::SearchProject>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::SearchProjectResponse> {
        let peer_id = envelope.original_sender_id()?;
        let query = SearchQuery::from_proto(envelope.payload)?;
        let result = this
            .update(&mut cx, |this, cx| this.search(query, cx))
            .await?;

        this.update(&mut cx, |this, cx| {
            let mut locations = Vec::new();
            for (buffer, ranges) in result {
                for range in ranges {
                    let start = serialize_anchor(&range.start);
                    let end = serialize_anchor(&range.end);
                    // Registers the buffer with the peer so it can open it.
                    let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
                    locations.push(proto::Location {
                        buffer: Some(buffer),
                        start: Some(start),
                        end: Some(end),
                    });
                }
            }
            Ok(proto::SearchProjectResponse { locations })
        })
    }
3100
    /// Handles a guest opening the buffer containing a previously returned
    /// symbol. The symbol's signature is re-verified so a peer cannot forge a
    /// symbol to open an arbitrary path.
    async fn handle_open_buffer_for_symbol(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferForSymbolResponse> {
        let peer_id = envelope.original_sender_id()?;
        let symbol = envelope
            .payload
            .symbol
            .ok_or_else(|| anyhow!("invalid symbol"))?;
        let symbol = this.read_with(&cx, |this, _| {
            let symbol = this.deserialize_symbol(symbol)?;
            // Recompute the HMAC-like signature and compare; a mismatch means
            // this symbol wasn't produced by us for this project.
            let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
            if signature == symbol.signature {
                Ok(symbol)
            } else {
                Err(anyhow!("invalid symbol signature"))
            }
        })?;
        let buffer = this
            .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
            .await?;

        Ok(proto::OpenBufferForSymbolResponse {
            buffer: Some(this.update(&mut cx, |this, cx| {
                this.serialize_buffer_for_peer(&buffer, peer_id, cx)
            })),
        })
    }
3131
    /// Computes a SHA-256 signature over (worktree id, path, secret nonce).
    /// The nonce keeps peers from forging symbols for paths they were never
    /// given; the hash input order must match between signing and verifying.
    fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
        let mut hasher = Sha256::new();
        hasher.update(worktree_id.to_proto().to_be_bytes());
        hasher.update(path.to_string_lossy().as_bytes());
        hasher.update(self.nonce.to_be_bytes());
        hasher.finalize().as_slice().try_into().unwrap()
    }
3139
    /// Handles a guest opening a buffer by project path, returning its
    /// serialized state (full state on first open for that peer).
    async fn handle_open_buffer(
        this: ModelHandle<Self>,
        envelope: TypedEnvelope<proto::OpenBuffer>,
        _: Arc<Client>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::OpenBufferResponse> {
        let peer_id = envelope.original_sender_id()?;
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let open_buffer = this.update(&mut cx, |this, cx| {
            this.open_buffer(
                ProjectPath {
                    worktree_id,
                    path: PathBuf::from(envelope.payload.path).into(),
                },
                cx,
            )
        });

        let buffer = open_buffer.await?;
        this.update(&mut cx, |this, cx| {
            Ok(proto::OpenBufferResponse {
                buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
            })
        })
    }
3165
3166 fn serialize_project_transaction_for_peer(
3167 &mut self,
3168 project_transaction: ProjectTransaction,
3169 peer_id: PeerId,
3170 cx: &AppContext,
3171 ) -> proto::ProjectTransaction {
3172 let mut serialized_transaction = proto::ProjectTransaction {
3173 buffers: Default::default(),
3174 transactions: Default::default(),
3175 };
3176 for (buffer, transaction) in project_transaction.0 {
3177 serialized_transaction
3178 .buffers
3179 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3180 serialized_transaction
3181 .transactions
3182 .push(language::proto::serialize_transaction(&transaction));
3183 }
3184 serialized_transaction
3185 }
3186
    /// Applies a `ProjectTransaction` received from a peer: resolves each
    /// serialized buffer, waits for the referenced edits to arrive, and
    /// optionally records each transaction in its buffer's undo history.
    fn deserialize_project_transaction(
        &mut self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // `buffers` and `transactions` are parallel lists; re-pair them.
            for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
                let buffer = this
                    .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Wait until every edit referenced by the transaction has been
                // applied to the buffer before proceeding.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })
                    .await;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    });
                }
            }

            Ok(project_transaction)
        })
    }
3220
3221 fn serialize_buffer_for_peer(
3222 &mut self,
3223 buffer: &ModelHandle<Buffer>,
3224 peer_id: PeerId,
3225 cx: &AppContext,
3226 ) -> proto::Buffer {
3227 let buffer_id = buffer.read(cx).remote_id();
3228 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3229 if shared_buffers.insert(buffer_id) {
3230 proto::Buffer {
3231 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3232 }
3233 } else {
3234 proto::Buffer {
3235 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3236 }
3237 }
3238 }
3239
    /// Reconstitutes a buffer received from a peer. A `Variant::Id` refers to
    /// a buffer that should already be (or soon become) registered locally,
    /// while `Variant::State` carries a full buffer to instantiate.
    fn deserialize_buffer(
        &mut self,
        buffer: proto::Buffer,
        cx: &mut ModelContext<Self>,
    ) -> Task<Result<ModelHandle<Buffer>>> {
        let replica_id = self.replica_id();

        // Watch channel used to wake waiting tasks whenever a buffer is opened.
        let opened_buffer_tx = self.opened_buffer.0.clone();
        let mut opened_buffer_rx = self.opened_buffer.1.clone();
        cx.spawn(|this, mut cx| async move {
            match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
                proto::buffer::Variant::Id(id) => {
                    // Poll for the buffer, sleeping on the watch channel
                    // between attempts until it shows up in `opened_buffers`.
                    let buffer = loop {
                        let buffer = this.read_with(&cx, |this, cx| {
                            this.opened_buffers
                                .get(&id)
                                .and_then(|buffer| buffer.upgrade(cx))
                        });
                        if let Some(buffer) = buffer {
                            break buffer;
                        }
                        opened_buffer_rx
                            .next()
                            .await
                            .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
                    };
                    Ok(buffer)
                }
                proto::buffer::Variant::State(mut buffer) => {
                    // Resolve the buffer's file and owning worktree, if any.
                    let mut buffer_worktree = None;
                    let mut buffer_file = None;
                    if let Some(file) = buffer.file.take() {
                        this.read_with(&cx, |this, cx| {
                            let worktree_id = WorktreeId::from_proto(file.worktree_id);
                            let worktree =
                                this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
                                    anyhow!("no worktree found for id {}", file.worktree_id)
                                })?;
                            buffer_file =
                                Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
                                    as Box<dyn language::File>);
                            buffer_worktree = Some(worktree);
                            Ok::<_, anyhow::Error>(())
                        })?;
                    }

                    let buffer = cx.add_model(|cx| {
                        Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
                    });

                    this.update(&mut cx, |this, cx| {
                        this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
                    })?;

                    // Signal any tasks waiting in the `Variant::Id` branch above.
                    *opened_buffer_tx.borrow_mut().borrow_mut() = ();
                    Ok(buffer)
                }
            }
        })
    }
3300
    /// Converts a `proto::Symbol` received from a peer back into a `Symbol`,
    /// using the named language (when available) to produce a styled label.
    fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
        let language = self
            .languages
            .get_language(&serialized_symbol.language_name);
        let start = serialized_symbol
            .start
            .ok_or_else(|| anyhow!("invalid start"))?;
        let end = serialized_symbol
            .end
            .ok_or_else(|| anyhow!("invalid end"))?;
        // SAFETY concern (review): `kind` arrives from the wire as a raw
        // integer and is transmuted into an enum without validating the
        // discriminant; an out-of-range value would be undefined behavior.
        // TODO: validate the value or convert fallibly instead.
        let kind = unsafe { mem::transmute(serialized_symbol.kind) };
        Ok(Symbol {
            source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
            worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
            language_name: serialized_symbol.language_name.clone(),
            // Fall back to a plain label when no language claims this symbol.
            label: language
                .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
                .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
            name: serialized_symbol.name,
            path: PathBuf::from(serialized_symbol.path),
            range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
            kind,
            // The signature is a fixed 32-byte hash; reject any other length.
            signature: serialized_symbol
                .signature
                .try_into()
                .map_err(|_| anyhow!("invalid signature"))?,
        })
    }
3329
    /// RPC handler for `CloseBuffer` messages. Currently a no-op: the message
    /// is accepted and acknowledged but not acted upon.
    async fn handle_close_buffer(
        _: ModelHandle<Self>,
        _: TypedEnvelope<proto::CloseBuffer>,
        _: Arc<Client>,
        _: AsyncAppContext,
    ) -> Result<()> {
        // TODO: use this for following
        Ok(())
    }
3339
3340 async fn handle_buffer_saved(
3341 this: ModelHandle<Self>,
3342 envelope: TypedEnvelope<proto::BufferSaved>,
3343 _: Arc<Client>,
3344 mut cx: AsyncAppContext,
3345 ) -> Result<()> {
3346 let version = envelope.payload.version.try_into()?;
3347 let mtime = envelope
3348 .payload
3349 .mtime
3350 .ok_or_else(|| anyhow!("missing mtime"))?
3351 .into();
3352
3353 this.update(&mut cx, |this, cx| {
3354 let buffer = this
3355 .opened_buffers
3356 .get(&envelope.payload.buffer_id)
3357 .and_then(|buffer| buffer.upgrade(cx));
3358 if let Some(buffer) = buffer {
3359 buffer.update(cx, |buffer, cx| {
3360 buffer.did_save(version, mtime, None, cx);
3361 });
3362 }
3363 Ok(())
3364 })
3365 }
3366
3367 async fn handle_buffer_reloaded(
3368 this: ModelHandle<Self>,
3369 envelope: TypedEnvelope<proto::BufferReloaded>,
3370 _: Arc<Client>,
3371 mut cx: AsyncAppContext,
3372 ) -> Result<()> {
3373 let payload = envelope.payload.clone();
3374 let version = payload.version.try_into()?;
3375 let mtime = payload
3376 .mtime
3377 .ok_or_else(|| anyhow!("missing mtime"))?
3378 .into();
3379 this.update(&mut cx, |this, cx| {
3380 let buffer = this
3381 .opened_buffers
3382 .get(&payload.buffer_id)
3383 .and_then(|buffer| buffer.upgrade(cx));
3384 if let Some(buffer) = buffer {
3385 buffer.update(cx, |buffer, cx| {
3386 buffer.did_reload(version, mtime, cx);
3387 });
3388 }
3389 Ok(())
3390 })
3391 }
3392
3393 pub fn match_paths<'a>(
3394 &self,
3395 query: &'a str,
3396 include_ignored: bool,
3397 smart_case: bool,
3398 max_results: usize,
3399 cancel_flag: &'a AtomicBool,
3400 cx: &AppContext,
3401 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3402 let worktrees = self
3403 .worktrees(cx)
3404 .filter(|worktree| worktree.read(cx).is_visible())
3405 .collect::<Vec<_>>();
3406 let include_root_name = worktrees.len() > 1;
3407 let candidate_sets = worktrees
3408 .into_iter()
3409 .map(|worktree| CandidateSet {
3410 snapshot: worktree.read(cx).snapshot(),
3411 include_ignored,
3412 include_root_name,
3413 })
3414 .collect::<Vec<_>>();
3415
3416 let background = cx.background().clone();
3417 async move {
3418 fuzzy::match_paths(
3419 candidate_sets.as_slice(),
3420 query,
3421 smart_case,
3422 max_results,
3423 cancel_flag,
3424 background,
3425 )
3426 .await
3427 }
3428 }
3429}
3430
3431impl WorktreeHandle {
3432 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3433 match self {
3434 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3435 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3436 }
3437 }
3438}
3439
3440impl OpenBuffer {
3441 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3442 match self {
3443 OpenBuffer::Strong(handle) => Some(handle.clone()),
3444 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3445 OpenBuffer::Loading(_) => None,
3446 }
3447 }
3448}
3449
/// Adapts one worktree snapshot to the `fuzzy` crate's
/// `PathMatchCandidateSet` interface, carrying the options chosen in
/// `Project::match_paths`.
struct CandidateSet {
    snapshot: Snapshot,
    // Whether ignored files participate in matching.
    include_ignored: bool,
    // Whether candidate paths are prefixed with the worktree's root name.
    include_root_name: bool,
}
3455
3456impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3457 type Candidates = CandidateSetIter<'a>;
3458
3459 fn id(&self) -> usize {
3460 self.snapshot.id().to_usize()
3461 }
3462
3463 fn len(&self) -> usize {
3464 if self.include_ignored {
3465 self.snapshot.file_count()
3466 } else {
3467 self.snapshot.visible_file_count()
3468 }
3469 }
3470
3471 fn prefix(&self) -> Arc<str> {
3472 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3473 self.snapshot.root_name().into()
3474 } else if self.include_root_name {
3475 format!("{}/", self.snapshot.root_name()).into()
3476 } else {
3477 "".into()
3478 }
3479 }
3480
3481 fn candidates(&'a self, start: usize) -> Self::Candidates {
3482 CandidateSetIter {
3483 traversal: self.snapshot.files(self.include_ignored, start),
3484 }
3485 }
3486}
3487
/// Iterator over a worktree snapshot's file entries, yielding fuzzy-match
/// candidates for `CandidateSet`.
struct CandidateSetIter<'a> {
    traversal: Traversal<'a>,
}
3491
3492impl<'a> Iterator for CandidateSetIter<'a> {
3493 type Item = PathMatchCandidate<'a>;
3494
3495 fn next(&mut self) -> Option<Self::Item> {
3496 self.traversal.next().map(|entry| {
3497 if let EntryKind::File(char_bag) = entry.kind {
3498 PathMatchCandidate {
3499 path: &entry.path,
3500 char_bag,
3501 }
3502 } else {
3503 unreachable!()
3504 }
3505 })
3506 }
3507}
3508
3509impl Entity for Project {
3510 type Event = Event;
3511
3512 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3513 match &self.client_state {
3514 ProjectClientState::Local { remote_id_rx, .. } => {
3515 if let Some(project_id) = *remote_id_rx.borrow() {
3516 self.client
3517 .send(proto::UnregisterProject { project_id })
3518 .log_err();
3519 }
3520 }
3521 ProjectClientState::Remote { remote_id, .. } => {
3522 self.client
3523 .send(proto::LeaveProject {
3524 project_id: *remote_id,
3525 })
3526 .log_err();
3527 }
3528 }
3529 }
3530
3531 fn app_will_quit(
3532 &mut self,
3533 _: &mut MutableAppContext,
3534 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3535 let shutdown_futures = self
3536 .language_servers
3537 .drain()
3538 .filter_map(|(_, server)| server.shutdown())
3539 .collect::<Vec<_>>();
3540 Some(
3541 async move {
3542 futures::future::join_all(shutdown_futures).await;
3543 }
3544 .boxed(),
3545 )
3546 }
3547}
3548
3549impl Collaborator {
3550 fn from_proto(
3551 message: proto::Collaborator,
3552 user_store: &ModelHandle<UserStore>,
3553 cx: &mut AsyncAppContext,
3554 ) -> impl Future<Output = Result<Self>> {
3555 let user = user_store.update(cx, |user_store, cx| {
3556 user_store.fetch_user(message.user_id, cx)
3557 });
3558
3559 async move {
3560 Ok(Self {
3561 peer_id: PeerId(message.peer_id),
3562 user: user.await?,
3563 replica_id: message.replica_id as ReplicaId,
3564 })
3565 }
3566 }
3567}
3568
3569impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3570 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3571 Self {
3572 worktree_id,
3573 path: path.as_ref().into(),
3574 }
3575 }
3576}
3577
3578impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3579 fn from(options: lsp::CreateFileOptions) -> Self {
3580 Self {
3581 overwrite: options.overwrite.unwrap_or(false),
3582 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3583 }
3584 }
3585}
3586
3587impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3588 fn from(options: lsp::RenameFileOptions) -> Self {
3589 Self {
3590 overwrite: options.overwrite.unwrap_or(false),
3591 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3592 }
3593 }
3594}
3595
3596impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3597 fn from(options: lsp::DeleteFileOptions) -> Self {
3598 Self {
3599 recursive: options.recursive.unwrap_or(false),
3600 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3601 }
3602 }
3603}
3604
/// Converts a `Symbol` into its protobuf representation for transmission to
/// peers; the inverse of `Project::deserialize_symbol`.
fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
    proto::Symbol {
        source_worktree_id: symbol.source_worktree_id.to_proto(),
        worktree_id: symbol.worktree_id.to_proto(),
        language_name: symbol.language_name.clone(),
        name: symbol.name.clone(),
        // NOTE(review): transmutes the symbol-kind enum into its wire integer
        // representation; a plain numeric cast or `From` impl would avoid the
        // `unsafe` here — TODO confirm and replace.
        kind: unsafe { mem::transmute(symbol.kind) },
        path: symbol.path.to_string_lossy().to_string(),
        start: Some(proto::Point {
            row: symbol.range.start.row,
            column: symbol.range.start.column,
        }),
        end: Some(proto::Point {
            row: symbol.range.end.row,
            column: symbol.range.end.column,
        }),
        signature: symbol.signature.to_vec(),
    }
}
3624
/// Computes the path of `path` relative to `base`, inserting `..` components
/// wherever `path` is not a descendant of `base`.
fn relativize_path(base: &Path, path: &Path) -> PathBuf {
    let mut target = path.components();
    let mut anchor = base.components();
    let mut relative: Vec<Component> = Vec::new();
    loop {
        match (target.next(), anchor.next()) {
            // Both paths exhausted: done.
            (None, None) => break,
            // `base` exhausted: the rest of `path` is appended verbatim.
            (Some(component), None) => {
                relative.push(component);
                relative.extend(target.by_ref());
                break;
            }
            // `path` exhausted: climb out of each remaining `base` component.
            (None, _) => relative.push(Component::ParentDir),
            // Still walking the shared prefix: skip equal components.
            (Some(a), Some(b)) if relative.is_empty() && a == b => (),
            // A `.` in `base` consumes nothing, so keep the path component.
            (Some(component), Some(b)) if b == Component::CurDir => relative.push(component),
            // First divergence: back out of the rest of `base`, then append
            // the rest of `path`.
            (Some(component), Some(_)) => {
                relative.push(Component::ParentDir);
                for _ in anchor {
                    relative.push(Component::ParentDir);
                }
                relative.push(component);
                relative.extend(target.by_ref());
                break;
            }
        }
    }
    relative.iter().map(|c| c.as_os_str()).collect()
}
3653
3654#[cfg(test)]
3655mod tests {
3656 use super::{Event, *};
3657 use fs::RealFs;
3658 use futures::StreamExt;
3659 use gpui::test::subscribe;
3660 use language::{
3661 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3662 };
3663 use lsp::Url;
3664 use serde_json::json;
3665 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3666 use unindent::Unindent as _;
3667 use util::test::temp_tree;
3668 use worktree::WorktreeHandle as _;
3669
3670 #[gpui::test]
3671 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3672 let dir = temp_tree(json!({
3673 "root": {
3674 "apple": "",
3675 "banana": {
3676 "carrot": {
3677 "date": "",
3678 "endive": "",
3679 }
3680 },
3681 "fennel": {
3682 "grape": "",
3683 }
3684 }
3685 }));
3686
3687 let root_link_path = dir.path().join("root_link");
3688 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3689 unix::fs::symlink(
3690 &dir.path().join("root/fennel"),
3691 &dir.path().join("root/finnochio"),
3692 )
3693 .unwrap();
3694
3695 let project = Project::test(Arc::new(RealFs), cx);
3696
3697 let (tree, _) = project
3698 .update(cx, |project, cx| {
3699 project.find_or_create_local_worktree(&root_link_path, true, cx)
3700 })
3701 .await
3702 .unwrap();
3703
3704 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3705 .await;
3706 cx.read(|cx| {
3707 let tree = tree.read(cx);
3708 assert_eq!(tree.file_count(), 5);
3709 assert_eq!(
3710 tree.inode_for_path("fennel/grape"),
3711 tree.inode_for_path("finnochio/grape")
3712 );
3713 });
3714
3715 let cancel_flag = Default::default();
3716 let results = project
3717 .read_with(cx, |project, cx| {
3718 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3719 })
3720 .await;
3721 assert_eq!(
3722 results
3723 .into_iter()
3724 .map(|result| result.path)
3725 .collect::<Vec<Arc<Path>>>(),
3726 vec![
3727 PathBuf::from("banana/carrot/date").into(),
3728 PathBuf::from("banana/carrot/endive").into(),
3729 ]
3730 );
3731 }
3732
    #[gpui::test]
    async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
        // End-to-end check that a fake language server's disk-based
        // diagnostics progress and published diagnostics are surfaced as
        // project events and buffer diagnostics.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let progress_token = language_server_config
            .disk_based_diagnostics_progress_token
            .clone()
            .unwrap();

        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "fn a() { A }",
                "b.rs": "const y: i32 = 1",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Cause worktree to start the fake language server
        let _buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("b.rs")), cx)
            })
            .await
            .unwrap();

        let mut events = subscribe(&project, cx);

        // The first progress start for the token emits a "started" event.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.start_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsStarted
        );

        // Overlapping progress: two more starts and one end leave two
        // outstanding progress tokens.
        fake_server.start_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        fake_server.start_progress(&progress_token).await;

        // Publishing diagnostics for a.rs emits a per-path update event.
        fake_server
            .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
                uri: Url::from_file_path("/dir/a.rs").unwrap(),
                version: None,
                diagnostics: vec![lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                    severity: Some(lsp::DiagnosticSeverity::ERROR),
                    message: "undefined variable 'A'".to_string(),
                    ..Default::default()
                }],
            })
            .await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
        );

        // Ending both outstanding tokens flushes the disk-based diagnostics
        // "updated" and "finished" events, in that order.
        fake_server.end_progress(&progress_token).await;
        fake_server.end_progress(&progress_token).await;
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsUpdated
        );
        assert_eq!(
            events.next().await.unwrap(),
            Event::DiskBasedDiagnosticsFinished
        );

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // The published diagnostic is attached to the newly opened buffer.
        buffer.read_with(cx, |buffer, _| {
            let snapshot = buffer.snapshot();
            let diagnostics = snapshot
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>();
            assert_eq!(
                diagnostics,
                &[DiagnosticEntry {
                    range: Point::new(0, 9)..Point::new(0, 10),
                    diagnostic: Diagnostic {
                        severity: lsp::DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                }]
            )
        });
    }
3851
3852 #[gpui::test]
3853 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3854 let dir = temp_tree(json!({
3855 "root": {
3856 "dir1": {},
3857 "dir2": {
3858 "dir3": {}
3859 }
3860 }
3861 }));
3862
3863 let project = Project::test(Arc::new(RealFs), cx);
3864 let (tree, _) = project
3865 .update(cx, |project, cx| {
3866 project.find_or_create_local_worktree(&dir.path(), true, cx)
3867 })
3868 .await
3869 .unwrap();
3870
3871 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3872 .await;
3873
3874 let cancel_flag = Default::default();
3875 let results = project
3876 .read_with(cx, |project, cx| {
3877 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3878 })
3879 .await;
3880
3881 assert!(results.is_empty());
3882 }
3883
    #[gpui::test]
    async fn test_definition(cx: &mut gpui::TestAppContext) {
        // Checks go-to-definition across files: the target file lives outside
        // the original worktree, so the project should add an invisible
        // worktree for it and drop that worktree once the definition's buffer
        // is released.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "a.rs": "const fn a() { A }",
                "b.rs": "const y: i32 = crate::a()",
            }),
        )
        .await;

        let project = Project::test(fs, cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        // Only `b.rs` is part of the (single-file) worktree.
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir/b.rs", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Path::new("").into(),
                    },
                    cx,
                )
            })
            .await
            .unwrap();

        // The fake server answers the definition request with a location in
        // `a.rs`, after asserting the request targeted `b.rs` at offset 22.
        let mut fake_server = fake_servers.next().await.unwrap();
        fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
            let params = params.text_document_position_params;
            assert_eq!(
                params.text_document.uri.to_file_path().unwrap(),
                Path::new("/dir/b.rs"),
            );
            assert_eq!(params.position, lsp::Position::new(0, 22));

            Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            )))
        });

        let mut definitions = project
            .update(cx, |project, cx| project.definition(&buffer, 22, cx))
            .await
            .unwrap();

        assert_eq!(definitions.len(), 1);
        let definition = definitions.pop().unwrap();
        cx.update(|cx| {
            let target_buffer = definition.buffer.read(cx);
            assert_eq!(
                target_buffer
                    .file()
                    .unwrap()
                    .as_local()
                    .unwrap()
                    .abs_path(cx),
                Path::new("/dir/a.rs"),
            );
            assert_eq!(definition.range.to_offset(target_buffer), 9..10);
            // The definition target was added as an invisible worktree.
            assert_eq!(
                list_worktrees(&project, cx),
                [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
            );

            drop(definition);
        });
        // Dropping the definition releases the invisible worktree.
        cx.read(|cx| {
            assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
        });

        // Lists each worktree's absolute path and visibility flag.
        fn list_worktrees<'a>(
            project: &'a ModelHandle<Project>,
            cx: &'a AppContext,
        ) -> Vec<(&'a Path, bool)> {
            project
                .read(cx)
                .worktrees(cx)
                .map(|worktree| {
                    let worktree = worktree.read(cx);
                    (
                        worktree.as_local().unwrap().abs_path().as_ref(),
                        worktree.is_visible(),
                    )
                })
                .collect::<Vec<_>>()
        }
    }
3997
3998 #[gpui::test]
3999 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4000 let fs = FakeFs::new(cx.background());
4001 fs.insert_tree(
4002 "/dir",
4003 json!({
4004 "file1": "the old contents",
4005 }),
4006 )
4007 .await;
4008
4009 let project = Project::test(fs.clone(), cx);
4010 let worktree_id = project
4011 .update(cx, |p, cx| {
4012 p.find_or_create_local_worktree("/dir", true, cx)
4013 })
4014 .await
4015 .unwrap()
4016 .0
4017 .read_with(cx, |tree, _| tree.id());
4018
4019 let buffer = project
4020 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4021 .await
4022 .unwrap();
4023 buffer
4024 .update(cx, |buffer, cx| {
4025 assert_eq!(buffer.text(), "the old contents");
4026 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4027 buffer.save(cx)
4028 })
4029 .await
4030 .unwrap();
4031
4032 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4033 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4034 }
4035
4036 #[gpui::test]
4037 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4038 let fs = FakeFs::new(cx.background());
4039 fs.insert_tree(
4040 "/dir",
4041 json!({
4042 "file1": "the old contents",
4043 }),
4044 )
4045 .await;
4046
4047 let project = Project::test(fs.clone(), cx);
4048 let worktree_id = project
4049 .update(cx, |p, cx| {
4050 p.find_or_create_local_worktree("/dir/file1", true, cx)
4051 })
4052 .await
4053 .unwrap()
4054 .0
4055 .read_with(cx, |tree, _| tree.id());
4056
4057 let buffer = project
4058 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4059 .await
4060 .unwrap();
4061 buffer
4062 .update(cx, |buffer, cx| {
4063 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4064 buffer.save(cx)
4065 })
4066 .await
4067 .unwrap();
4068
4069 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4070 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4071 }
4072
    #[gpui::test(retries = 5)]
    async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
        // After renaming/deleting files on disk, open buffers should track
        // their entries, and a remote replica of the worktree should converge
        // on the same paths after applying the snapshot update message.
        let dir = temp_tree(json!({
            "a": {
                "file1": "",
                "file2": "",
                "file3": "",
            },
            "b": {
                "c": {
                    "file4": "",
                    "file5": "",
                }
            }
        }));

        let project = Project::test(Arc::new(RealFs), cx);
        let rpc = project.read_with(cx, |p, _| p.client.clone());

        let (tree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());

        // Helpers: open a buffer for a path, and look up a path's entry id.
        let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
            let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
            async move { buffer.await.unwrap() }
        };
        let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
            tree.read_with(cx, |tree, _| {
                tree.entry_for_path(path)
                    .expect(&format!("no entry for path {}", path))
                    .id
            })
        };

        let buffer2 = buffer_for_path("a/file2", cx).await;
        let buffer3 = buffer_for_path("a/file3", cx).await;
        let buffer4 = buffer_for_path("b/c/file4", cx).await;
        let buffer5 = buffer_for_path("b/c/file5", cx).await;

        let file2_id = id_for_path("a/file2", &cx);
        let file3_id = id_for_path("a/file3", &cx);
        let file4_id = id_for_path("b/c/file4", &cx);

        // Wait for the initial scan.
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Create a remote copy of this worktree.
        let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
        let (remote, load_task) = cx.update(|cx| {
            Worktree::remote(
                1,
                1,
                initial_snapshot.to_proto(&Default::default(), true),
                rpc.clone(),
                cx,
            )
        });
        load_task.await;

        // No buffer has unsaved changes before the on-disk mutations.
        cx.read(|cx| {
            assert!(!buffer2.read(cx).is_dirty());
            assert!(!buffer3.read(cx).is_dirty());
            assert!(!buffer4.read(cx).is_dirty());
            assert!(!buffer5.read(cx).is_dirty());
        });

        // Rename and delete files and directories.
        tree.flush_fs_events(&cx).await;
        std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
        std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
        std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
        std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
        tree.flush_fs_events(&cx).await;

        let expected_paths = vec![
            "a",
            "a/file1",
            "a/file2.new",
            "b",
            "d",
            "d/file3",
            "d/file4",
        ];

        cx.read(|app| {
            assert_eq!(
                tree.read(app)
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );

            // Entry ids survive renames of both files and ancestors.
            assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
            assert_eq!(id_for_path("d/file3", &cx), file3_id);
            assert_eq!(id_for_path("d/file4", &cx), file4_id);

            // Open buffers follow their entries to the new paths.
            assert_eq!(
                buffer2.read(app).file().unwrap().path().as_ref(),
                Path::new("a/file2.new")
            );
            assert_eq!(
                buffer3.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file3")
            );
            assert_eq!(
                buffer4.read(app).file().unwrap().path().as_ref(),
                Path::new("d/file4")
            );
            assert_eq!(
                buffer5.read(app).file().unwrap().path().as_ref(),
                Path::new("b/c/file5")
            );

            // Only the removed file's buffer is marked deleted.
            assert!(!buffer2.read(app).file().unwrap().is_deleted());
            assert!(!buffer3.read(app).file().unwrap().is_deleted());
            assert!(!buffer4.read(app).file().unwrap().is_deleted());
            assert!(buffer5.read(app).file().unwrap().is_deleted());
        });

        // Update the remote worktree. Check that it becomes consistent with the
        // local worktree.
        remote.update(cx, |remote, cx| {
            let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
                &initial_snapshot,
                1,
                1,
                true,
            );
            remote
                .as_remote_mut()
                .unwrap()
                .snapshot
                .apply_remote_update(update_message)
                .unwrap();

            assert_eq!(
                remote
                    .paths()
                    .map(|p| p.to_str().unwrap())
                    .collect::<Vec<_>>(),
                expected_paths
            );
        });
    }
4224
4225 #[gpui::test]
4226 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4227 let fs = FakeFs::new(cx.background());
4228 fs.insert_tree(
4229 "/the-dir",
4230 json!({
4231 "a.txt": "a-contents",
4232 "b.txt": "b-contents",
4233 }),
4234 )
4235 .await;
4236
4237 let project = Project::test(fs.clone(), cx);
4238 let worktree_id = project
4239 .update(cx, |p, cx| {
4240 p.find_or_create_local_worktree("/the-dir", true, cx)
4241 })
4242 .await
4243 .unwrap()
4244 .0
4245 .read_with(cx, |tree, _| tree.id());
4246
4247 // Spawn multiple tasks to open paths, repeating some paths.
4248 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4249 (
4250 p.open_buffer((worktree_id, "a.txt"), cx),
4251 p.open_buffer((worktree_id, "b.txt"), cx),
4252 p.open_buffer((worktree_id, "a.txt"), cx),
4253 )
4254 });
4255
4256 let buffer_a_1 = buffer_a_1.await.unwrap();
4257 let buffer_a_2 = buffer_a_2.await.unwrap();
4258 let buffer_b = buffer_b.await.unwrap();
4259 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4260 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4261
4262 // There is only one buffer per path.
4263 let buffer_a_id = buffer_a_1.id();
4264 assert_eq!(buffer_a_2.id(), buffer_a_id);
4265
4266 // Open the same path again while it is still open.
4267 drop(buffer_a_1);
4268 let buffer_a_3 = project
4269 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4270 .await
4271 .unwrap();
4272
4273 // There's still only one buffer per path.
4274 assert_eq!(buffer_a_3.id(), buffer_a_id);
4275 }
4276
4277 #[gpui::test]
4278 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4279 use std::fs;
4280
4281 let dir = temp_tree(json!({
4282 "file1": "abc",
4283 "file2": "def",
4284 "file3": "ghi",
4285 }));
4286
4287 let project = Project::test(Arc::new(RealFs), cx);
4288 let (worktree, _) = project
4289 .update(cx, |p, cx| {
4290 p.find_or_create_local_worktree(dir.path(), true, cx)
4291 })
4292 .await
4293 .unwrap();
4294 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4295
4296 worktree.flush_fs_events(&cx).await;
4297 worktree
4298 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4299 .await;
4300
4301 let buffer1 = project
4302 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4303 .await
4304 .unwrap();
4305 let events = Rc::new(RefCell::new(Vec::new()));
4306
4307 // initially, the buffer isn't dirty.
4308 buffer1.update(cx, |buffer, cx| {
4309 cx.subscribe(&buffer1, {
4310 let events = events.clone();
4311 move |_, _, event, _| events.borrow_mut().push(event.clone())
4312 })
4313 .detach();
4314
4315 assert!(!buffer.is_dirty());
4316 assert!(events.borrow().is_empty());
4317
4318 buffer.edit(vec![1..2], "", cx);
4319 });
4320
4321 // after the first edit, the buffer is dirty, and emits a dirtied event.
4322 buffer1.update(cx, |buffer, cx| {
4323 assert!(buffer.text() == "ac");
4324 assert!(buffer.is_dirty());
4325 assert_eq!(
4326 *events.borrow(),
4327 &[language::Event::Edited, language::Event::Dirtied]
4328 );
4329 events.borrow_mut().clear();
4330 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4331 });
4332
4333 // after saving, the buffer is not dirty, and emits a saved event.
4334 buffer1.update(cx, |buffer, cx| {
4335 assert!(!buffer.is_dirty());
4336 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4337 events.borrow_mut().clear();
4338
4339 buffer.edit(vec![1..1], "B", cx);
4340 buffer.edit(vec![2..2], "D", cx);
4341 });
4342
4343 // after editing again, the buffer is dirty, and emits another dirty event.
4344 buffer1.update(cx, |buffer, cx| {
4345 assert!(buffer.text() == "aBDc");
4346 assert!(buffer.is_dirty());
4347 assert_eq!(
4348 *events.borrow(),
4349 &[
4350 language::Event::Edited,
4351 language::Event::Dirtied,
4352 language::Event::Edited,
4353 ],
4354 );
4355 events.borrow_mut().clear();
4356
4357 // TODO - currently, after restoring the buffer to its
4358 // previously-saved state, the is still considered dirty.
4359 buffer.edit([1..3], "", cx);
4360 assert!(buffer.text() == "ac");
4361 assert!(buffer.is_dirty());
4362 });
4363
4364 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4365
4366 // When a file is deleted, the buffer is considered dirty.
4367 let events = Rc::new(RefCell::new(Vec::new()));
4368 let buffer2 = project
4369 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4370 .await
4371 .unwrap();
4372 buffer2.update(cx, |_, cx| {
4373 cx.subscribe(&buffer2, {
4374 let events = events.clone();
4375 move |_, _, event, _| events.borrow_mut().push(event.clone())
4376 })
4377 .detach();
4378 });
4379
4380 fs::remove_file(dir.path().join("file2")).unwrap();
4381 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4382 assert_eq!(
4383 *events.borrow(),
4384 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4385 );
4386
4387 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4388 let events = Rc::new(RefCell::new(Vec::new()));
4389 let buffer3 = project
4390 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4391 .await
4392 .unwrap();
4393 buffer3.update(cx, |_, cx| {
4394 cx.subscribe(&buffer3, {
4395 let events = events.clone();
4396 move |_, _, event, _| events.borrow_mut().push(event.clone())
4397 })
4398 .detach();
4399 });
4400
4401 worktree.flush_fs_events(&cx).await;
4402 buffer3.update(cx, |buffer, cx| {
4403 buffer.edit(Some(0..0), "x", cx);
4404 });
4405 events.borrow_mut().clear();
4406 fs::remove_file(dir.path().join("file3")).unwrap();
4407 buffer3
4408 .condition(&cx, |_, _| !events.borrow().is_empty())
4409 .await;
4410 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4411 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4412 }
4413
    #[gpui::test]
    async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
        // Verifies how an open buffer reacts to its file changing on disk:
        // - a clean buffer silently reloads to match the new disk contents,
        // - a dirty buffer keeps its edits and is flagged as conflicted.
        use std::fs;

        let initial_contents = "aaa\nbbbbb\nc\n";
        let dir = temp_tree(json!({ "the-file": initial_contents }));

        let project = Project::test(Arc::new(RealFs), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree(dir.path(), true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        // Wait for the initial worktree scan so the file entry exists before
        // opening a buffer for it.
        worktree
            .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
            .await;

        let abs_path = dir.path().join("the-file");
        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
            .await
            .unwrap();

        // TODO
        // Add a cursor on each row.
        // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
        //     assert!(!buffer.is_dirty());
        //     buffer.add_selection_set(
        //         &(0..3)
        //             .map(|row| Selection {
        //                 id: row as usize,
        //                 start: Point::new(row, 1),
        //                 end: Point::new(row, 1),
        //                 reversed: false,
        //                 goal: SelectionGoal::None,
        //             })
        //             .collect::<Vec<_>>(),
        //         cx,
        //     )
        // });

        // Change the file on disk, adding two new lines of text, and removing
        // one line.
        buffer.read_with(cx, |buffer, _| {
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });
        let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
        fs::write(&abs_path, new_contents).unwrap();

        // Because the buffer was not modified, it is reloaded from disk. Its
        // contents are edited according to the diff between the old and new
        // file contents.
        buffer
            .condition(&cx, |buffer, _| buffer.text() == new_contents)
            .await;

        buffer.update(cx, |buffer, _| {
            assert_eq!(buffer.text(), new_contents);
            assert!(!buffer.is_dirty());
            assert!(!buffer.has_conflict());

            // TODO
            // let cursor_positions = buffer
            //     .selection_set(selection_set_id)
            //     .unwrap()
            //     .selections::<Point>(&*buffer)
            //     .map(|selection| {
            //         assert_eq!(selection.start, selection.end);
            //         selection.start
            //     })
            //     .collect::<Vec<_>>();
            // assert_eq!(
            //     cursor_positions,
            //     [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
            // );
        });

        // Modify the buffer
        buffer.update(cx, |buffer, cx| {
            buffer.edit(vec![0..0], " ", cx);
            assert!(buffer.is_dirty());
            assert!(!buffer.has_conflict());
        });

        // Change the file on disk again, adding blank lines to the beginning.
        fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();

        // Because the buffer is modified, it doesn't reload from disk, but is
        // marked as having a conflict.
        buffer
            .condition(&cx, |buffer, _| buffer.has_conflict())
            .await;
    }
4511
    #[gpui::test]
    async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
        // Verifies that diagnostics sharing LSP related-information links are
        // collapsed into groups: each "primary" diagnostic plus the hint
        // diagnostics that reference it get the same `group_id`, and
        // `diagnostic_group` returns all entries of a group in range order.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/the-dir",
            json!({
                "a.rs": "
                    fn foo(mut v: Vec<usize>) {
                        for x in &v {
                            v.push(1);
                        }
                    }
                "
                .unindent(),
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        let (worktree, _) = project
            .update(cx, |p, cx| {
                p.find_or_create_local_worktree("/the-dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

        let buffer = project
            .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
            .await
            .unwrap();

        // Simulate a `textDocument/publishDiagnostics` payload in the shape
        // rust-analyzer produces: a primary diagnostic whose
        // `related_information` points at its hints, and standalone HINT
        // diagnostics whose related information points back at the primary
        // ("original diagnostic"). The grouping logic must stitch these
        // together.
        let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
        let message = lsp::PublishDiagnosticsParams {
            uri: buffer_uri.clone(),
            diagnostics: vec![
                // Group 0 primary: a WARNING with one associated hint.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::WARNING),
                    message: "error 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "error 1 hint 1".to_string(),
                    }]),
                    ..Default::default()
                },
                // Group 0 hint: links back to the primary via
                // "original diagnostic".
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 1 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 8),
                                lsp::Position::new(1, 9),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Group 1 primary: an ERROR with two associated hints.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    severity: Some(DiagnosticSeverity::ERROR),
                    message: "error 2".to_string(),
                    related_information: Some(vec![
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 1".to_string(),
                        },
                        lsp::DiagnosticRelatedInformation {
                            location: lsp::Location {
                                uri: buffer_uri.clone(),
                                range: lsp::Range::new(
                                    lsp::Position::new(1, 13),
                                    lsp::Position::new(1, 15),
                                ),
                            },
                            message: "error 2 hint 2".to_string(),
                        },
                    ]),
                    ..Default::default()
                },
                // Group 1 hint #1: back-reference to the ERROR above.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 1".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
                // Group 1 hint #2: same back-reference, different message.
                lsp::Diagnostic {
                    range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                    severity: Some(DiagnosticSeverity::HINT),
                    message: "error 2 hint 2".to_string(),
                    related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(2, 8),
                                lsp::Position::new(2, 17),
                            ),
                        },
                        message: "original diagnostic".to_string(),
                    }]),
                    ..Default::default()
                },
            ],
            version: None,
        };

        project
            .update(cx, |p, cx| {
                p.update_diagnostics(message, &Default::default(), cx)
            })
            .unwrap();
        let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());

        // All diagnostics over the whole buffer, ordered by position; group 0
        // entries interleave with group 1 entries because ordering is by
        // range, not by group.
        assert_eq!(
            buffer
                .diagnostics_in_range::<_, Point>(0..buffer.len())
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );

        // Group 0 contains exactly the warning and its single hint.
        assert_eq!(
            buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::WARNING,
                        message: "error 1".to_string(),
                        group_id: 0,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 8)..Point::new(1, 9),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 1 hint 1".to_string(),
                        group_id: 0,
                        is_primary: false,
                        ..Default::default()
                    }
                },
            ]
        );
        // Group 1 contains the error plus its two hints, in range order (the
        // hints precede the primary because they occur earlier in the file).
        assert_eq!(
            buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 1".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(1, 13)..Point::new(1, 15),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::HINT,
                        message: "error 2 hint 2".to_string(),
                        group_id: 1,
                        is_primary: false,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 8)..Point::new(2, 17),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        message: "error 2".to_string(),
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
    }
4770
    #[gpui::test]
    async fn test_rename(cx: &mut gpui::TestAppContext) {
        // Exercises the two-step LSP rename flow against a fake language
        // server: `prepare_rename` resolves the renameable range, then
        // `perform_rename` applies a multi-file `WorkspaceEdit` and returns a
        // transaction covering every touched buffer.
        let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
        let language = Arc::new(Language::new(
            LanguageConfig {
                name: "Rust".into(),
                path_suffixes: vec!["rs".to_string()],
                language_server: Some(language_server_config),
                ..Default::default()
            },
            Some(tree_sitter_rust::language()),
        ));

        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }),
        )
        .await;

        let project = Project::test(fs.clone(), cx);
        project.update(cx, |project, _| {
            Arc::get_mut(&mut project.languages).unwrap().add(language);
        });

        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Opening a buffer with a matching suffix starts the fake server.
        let buffer = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, Path::new("one.rs")), cx)
            })
            .await
            .unwrap();

        let mut fake_server = fake_servers.next().await.unwrap();

        // Step 1: prepare_rename at offset 7 (inside `ONE`). The fake server
        // replies with the range of the identifier; the project converts it
        // back to buffer offsets.
        let response = project.update(cx, |project, cx| {
            project.prepare_rename(buffer.clone(), 7, cx)
        });
        fake_server
            .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
                assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
                assert_eq!(params.position, lsp::Position::new(0, 7));
                Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                    lsp::Position::new(0, 6),
                    lsp::Position::new(0, 9),
                )))
            })
            .next()
            .await
            .unwrap();
        let range = response.await.unwrap().unwrap();
        let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
        assert_eq!(range, 6..9);

        // Step 2: perform_rename. The fake server returns a WorkspaceEdit
        // touching both files — one edit in one.rs, two in two.rs.
        let response = project.update(cx, |project, cx| {
            project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
        });
        fake_server
            .handle_request::<lsp::request::Rename, _>(|params, _| {
                assert_eq!(
                    params.text_document_position.text_document.uri.as_str(),
                    "file:///dir/one.rs"
                );
                assert_eq!(
                    params.text_document_position.position,
                    lsp::Position::new(0, 7)
                );
                assert_eq!(params.new_name, "THREE");
                Some(lsp::WorkspaceEdit {
                    changes: Some(
                        [
                            (
                                lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                                vec![lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 6),
                                        lsp::Position::new(0, 9),
                                    ),
                                    "THREE".to_string(),
                                )],
                            ),
                            (
                                lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                                vec![
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 24),
                                            lsp::Position::new(0, 27),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                    lsp::TextEdit::new(
                                        lsp::Range::new(
                                            lsp::Position::new(0, 35),
                                            lsp::Position::new(0, 38),
                                        ),
                                        "THREE".to_string(),
                                    ),
                                ],
                            ),
                        ]
                        .into_iter()
                        .collect(),
                    ),
                    ..Default::default()
                })
            })
            .next()
            .await
            .unwrap();
        // The transaction maps each affected buffer to its undo entry;
        // two.rs was opened implicitly to apply its edits.
        let mut transaction = response.await.unwrap().0;
        assert_eq!(transaction.len(), 2);
        assert_eq!(
            transaction
                .remove_entry(&buffer)
                .unwrap()
                .0
                .read_with(cx, |buffer, _| buffer.text()),
            "const THREE: usize = 1;"
        );
        assert_eq!(
            transaction
                .into_keys()
                .next()
                .unwrap()
                .read_with(cx, |buffer, _| buffer.text()),
            "const TWO: usize = one::THREE + one::THREE;"
        );
    }
4912
    #[gpui::test]
    async fn test_search(cx: &mut gpui::TestAppContext) {
        // Verifies project-wide text search: results come from files on disk,
        // and unsaved in-memory buffer edits are searched in place of the
        // on-disk contents.
        let fs = FakeFs::new(cx.background());
        fs.insert_tree(
            "/dir",
            json!({
                "one.rs": "const ONE: usize = 1;",
                "two.rs": "const TWO: usize = one::ONE + one::ONE;",
                "three.rs": "const THREE: usize = one::ONE + two::TWO;",
                "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
            }),
        )
        .await;
        let project = Project::test(fs.clone(), cx);
        let (tree, _) = project
            .update(cx, |project, cx| {
                project.find_or_create_local_worktree("/dir", true, cx)
            })
            .await
            .unwrap();
        let worktree_id = tree.read_with(cx, |tree, _| tree.id());
        cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
            .await;

        // Case-insensitive=false, whole-word=true search over on-disk files.
        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40])
            ])
        );

        // Edit four.rs in memory (without saving) so it now contains "TWO";
        // search must pick up the dirty buffer contents.
        let buffer_4 = project
            .update(cx, |project, cx| {
                project.open_buffer((worktree_id, "four.rs"), cx)
            })
            .await
            .unwrap();
        buffer_4.update(cx, |buffer, cx| {
            buffer.edit([20..28, 31..43], "two::TWO", cx);
        });

        assert_eq!(
            search(&project, SearchQuery::text("TWO", false, true), cx)
                .await
                .unwrap(),
            HashMap::from_iter([
                ("two.rs".to_string(), vec![6..9]),
                ("three.rs".to_string(), vec![37..40]),
                ("four.rs".to_string(), vec![25..28, 36..39])
            ])
        );

        // Helper: run a search and flatten the results into a map of
        // worktree-relative path -> match offset ranges, for easy comparison.
        async fn search(
            project: &ModelHandle<Project>,
            query: SearchQuery,
            cx: &mut gpui::TestAppContext,
        ) -> Result<HashMap<String, Vec<Range<usize>>>> {
            let results = project
                .update(cx, |project, cx| project.search(query, cx))
                .await?;

            Ok(results
                .into_iter()
                .map(|(buffer, ranges)| {
                    buffer.read_with(cx, |buffer, _| {
                        let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                        let ranges = ranges
                            .into_iter()
                            .map(|range| range.to_offset(buffer))
                            .collect::<Vec<_>>();
                        (path, ranges)
                    })
                })
                .collect())
        }
    }
4992}