1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 Operation, PointUtf16, ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
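/// Top-level state for one project: the worktrees being edited, the buffers
/// open within them, the language servers started for those buffers, and the
/// collaboration state used when the project is shared or joined remotely.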
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 loading_local_worktrees:
68 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
69 opened_buffers: HashMap<u64, OpenBuffer>,
70 nonce: u128,
71}
72
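/// How the project holds an open buffer: strongly (kept alive while the
/// project is shared or remote), weakly (allowed to drop once nothing else
/// references it), or as the list of operations that arrived while the buffer
/// was still being loaded.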
73enum OpenBuffer {
74 Strong(ModelHandle<Buffer>),
75 Weak(WeakModelHandle<Buffer>),
76 Loading(Vec<Operation>),
77}
78
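/// Worktrees are held strongly while they must stay alive (for example while
/// the project is shared) and weakly otherwise, mirroring `OpenBuffer`.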
79enum WorktreeHandle {
80 Strong(ModelHandle<Worktree>),
81 Weak(WeakModelHandle<Worktree>),
82}
83
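/// Distinguishes the local, authoritative copy of a project from a remote
/// replica joined from another peer, along with the connection state each
/// case needs.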
84enum ProjectClientState {
85 Local {
86 is_shared: bool,
87 remote_id_tx: watch::Sender<Option<u64>>,
88 remote_id_rx: watch::Receiver<Option<u64>>,
89 _maintain_remote_id_task: Task<Option<()>>,
90 },
91 Remote {
92 sharing_has_stopped: bool,
93 remote_id: u64,
94 replica_id: ReplicaId,
95 _detect_unshare_task: Task<Option<()>>,
96 },
97}
98
99#[derive(Clone, Debug)]
100pub struct Collaborator {
101 pub user: Arc<User>,
102 pub peer_id: PeerId,
103 pub replica_id: ReplicaId,
104}
105
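/// Events emitted by the project so that observers can react to changes in
/// the active entry, worktrees, and diagnostics.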
106#[derive(Clone, Debug, PartialEq)]
107pub enum Event {
108 ActiveEntryChanged(Option<ProjectEntry>),
109 WorktreeRemoved(WorktreeId),
110 DiskBasedDiagnosticsStarted,
111 DiskBasedDiagnosticsUpdated,
112 DiskBasedDiagnosticsFinished,
113 DiagnosticsUpdated(ProjectPath),
114}
115
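/// Identifies a file within the project by its worktree and a
/// worktree-relative path.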
116#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
117pub struct ProjectPath {
118 pub worktree_id: WorktreeId,
119 pub path: Arc<Path>,
120}
121
122#[derive(Clone, Debug, Default, PartialEq)]
123pub struct DiagnosticSummary {
124 pub error_count: usize,
125 pub warning_count: usize,
126 pub info_count: usize,
127 pub hint_count: usize,
128}
129
130#[derive(Debug)]
131pub struct Location {
132 pub buffer: ModelHandle<Buffer>,
133 pub range: Range<language::Anchor>,
134}
135
136#[derive(Debug)]
137pub struct DocumentHighlight {
138 pub range: Range<language::Anchor>,
139 pub kind: DocumentHighlightKind,
140}
141
142#[derive(Clone, Debug)]
143pub struct Symbol {
144 pub source_worktree_id: WorktreeId,
145 pub worktree_id: WorktreeId,
146 pub language_name: String,
147 pub path: PathBuf,
148 pub label: CodeLabel,
149 pub name: String,
150 pub kind: lsp::SymbolKind,
151 pub range: Range<PointUtf16>,
152 pub signature: [u8; 32],
153}
154
155#[derive(Default)]
156pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
157
158impl DiagnosticSummary {
159 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
160 let mut this = Self {
161 error_count: 0,
162 warning_count: 0,
163 info_count: 0,
164 hint_count: 0,
165 };
166
167 for entry in diagnostics {
168 if entry.diagnostic.is_primary {
169 match entry.diagnostic.severity {
170 DiagnosticSeverity::ERROR => this.error_count += 1,
171 DiagnosticSeverity::WARNING => this.warning_count += 1,
172 DiagnosticSeverity::INFORMATION => this.info_count += 1,
173 DiagnosticSeverity::HINT => this.hint_count += 1,
174 _ => {}
175 }
176 }
177 }
178
179 this
180 }
181
182 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
183 proto::DiagnosticSummary {
184 path: path.to_string_lossy().to_string(),
185 error_count: self.error_count as u32,
186 warning_count: self.warning_count as u32,
187 info_count: self.info_count as u32,
188 hint_count: self.hint_count as u32,
189 }
190 }
191}
192
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
194pub struct ProjectEntry {
195 pub worktree_id: WorktreeId,
196 pub entry_id: usize,
197}
198
199impl Project {
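    /// Registers the RPC message and request handlers that let remote
    /// collaborators drive this project over the wire.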
200 pub fn init(client: &Arc<Client>) {
201 client.add_entity_message_handler(Self::handle_add_collaborator);
202 client.add_entity_message_handler(Self::handle_buffer_reloaded);
203 client.add_entity_message_handler(Self::handle_buffer_saved);
204 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
205 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
206 client.add_entity_message_handler(Self::handle_remove_collaborator);
207 client.add_entity_message_handler(Self::handle_register_worktree);
208 client.add_entity_message_handler(Self::handle_unregister_worktree);
209 client.add_entity_message_handler(Self::handle_unshare_project);
210 client.add_entity_message_handler(Self::handle_update_buffer_file);
211 client.add_entity_message_handler(Self::handle_update_buffer);
212 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
213 client.add_entity_message_handler(Self::handle_update_worktree);
214 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
215 client.add_entity_request_handler(Self::handle_apply_code_action);
216 client.add_entity_request_handler(Self::handle_format_buffers);
217 client.add_entity_request_handler(Self::handle_get_code_actions);
218 client.add_entity_request_handler(Self::handle_get_completions);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
224 client.add_entity_request_handler(Self::handle_search_project);
225 client.add_entity_request_handler(Self::handle_get_project_symbols);
226 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
227 client.add_entity_request_handler(Self::handle_open_buffer);
228 client.add_entity_request_handler(Self::handle_save_buffer);
229 }
230
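    /// Creates a project backed by local worktrees. A background task watches
    /// the client's connection status, registering the project and its
    /// worktrees with the server whenever a connection is established and
    /// clearing the remote id when the connection is lost.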
231 pub fn local(
232 client: Arc<Client>,
233 user_store: ModelHandle<UserStore>,
234 languages: Arc<LanguageRegistry>,
235 fs: Arc<dyn Fs>,
236 cx: &mut MutableAppContext,
237 ) -> ModelHandle<Self> {
238 cx.add_model(|cx: &mut ModelContext<Self>| {
239 let (remote_id_tx, remote_id_rx) = watch::channel();
240 let _maintain_remote_id_task = cx.spawn_weak({
241 let rpc = client.clone();
242 move |this, mut cx| {
243 async move {
244 let mut status = rpc.status();
245 while let Some(status) = status.next().await {
246 if let Some(this) = this.upgrade(&cx) {
247 let remote_id = if status.is_connected() {
248 let response = rpc.request(proto::RegisterProject {}).await?;
249 Some(response.project_id)
250 } else {
251 None
252 };
253
254 if let Some(project_id) = remote_id {
255 let mut registrations = Vec::new();
256 this.update(&mut cx, |this, cx| {
257 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
258 registrations.push(worktree.update(
259 cx,
260 |worktree, cx| {
261 let worktree = worktree.as_local_mut().unwrap();
262 worktree.register(project_id, cx)
263 },
264 ));
265 }
266 });
267 for registration in registrations {
268 registration.await?;
269 }
270 }
271 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
272 }
273 }
274 Ok(())
275 }
276 .log_err()
277 }
278 });
279
280 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
281 Self {
282 worktrees: Default::default(),
283 collaborators: Default::default(),
284 opened_buffers: Default::default(),
285 shared_buffers: Default::default(),
286 loading_buffers: Default::default(),
287 loading_local_worktrees: Default::default(),
288 client_state: ProjectClientState::Local {
289 is_shared: false,
290 remote_id_tx,
291 remote_id_rx,
292 _maintain_remote_id_task,
293 },
294 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
295 subscriptions: Vec::new(),
296 active_entry: None,
297 languages,
298 client,
299 user_store,
300 fs,
301 language_servers_with_diagnostics_running: 0,
302 language_servers: Default::default(),
303 started_language_servers: Default::default(),
304 nonce: StdRng::from_entropy().gen(),
305 }
306 })
307 }
308
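    /// Joins a project that another peer is sharing: authenticates, sends a
    /// `JoinProject` request, reconstructs the shared worktrees as remote
    /// worktrees, and loads the collaborators listed in the response.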
309 pub async fn remote(
310 remote_id: u64,
311 client: Arc<Client>,
312 user_store: ModelHandle<UserStore>,
313 languages: Arc<LanguageRegistry>,
314 fs: Arc<dyn Fs>,
315 cx: &mut AsyncAppContext,
316 ) -> Result<ModelHandle<Self>> {
317 client.authenticate_and_connect(&cx).await?;
318
319 let response = client
320 .request(proto::JoinProject {
321 project_id: remote_id,
322 })
323 .await?;
324
325 let replica_id = response.replica_id as ReplicaId;
326
327 let mut worktrees = Vec::new();
328 for worktree in response.worktrees {
329 let (worktree, load_task) = cx
330 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
331 worktrees.push(worktree);
332 load_task.detach();
333 }
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
337 let mut this = Self {
338 worktrees: Vec::new(),
339 loading_buffers: Default::default(),
340 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
341 shared_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 active_entry: None,
344 collaborators: Default::default(),
345 languages,
346 user_store: user_store.clone(),
347 fs,
348 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
349 client: client.clone(),
350 client_state: ProjectClientState::Remote {
351 sharing_has_stopped: false,
352 remote_id,
353 replica_id,
354 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
355 async move {
356 let mut status = client.status();
357 let is_connected =
358 status.next().await.map_or(false, |s| s.is_connected());
359 // Even if we're initially connected, any future change of the status means we momentarily disconnected.
360 if !is_connected || status.next().await.is_some() {
361 if let Some(this) = this.upgrade(&cx) {
362 this.update(&mut cx, |this, cx| this.project_unshared(cx))
363 }
364 }
365 Ok(())
366 }
367 .log_err()
368 }),
369 },
370 language_servers_with_diagnostics_running: 0,
371 language_servers: Default::default(),
372 started_language_servers: Default::default(),
373 opened_buffers: Default::default(),
374 nonce: StdRng::from_entropy().gen(),
375 };
376 for worktree in worktrees {
377 this.add_worktree(&worktree, cx);
378 }
379 this
380 });
381
382 let user_ids = response
383 .collaborators
384 .iter()
385 .map(|peer| peer.user_id)
386 .collect();
387 user_store
388 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
389 .await?;
390 let mut collaborators = HashMap::default();
391 for message in response.collaborators {
392 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
393 collaborators.insert(collaborator.peer_id, collaborator);
394 }
395
396 this.update(cx, |this, _| {
397 this.collaborators = collaborators;
398 });
399
400 Ok(this)
401 }
402
403 #[cfg(any(test, feature = "test-support"))]
404 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
405 let languages = Arc::new(LanguageRegistry::test());
406 let http_client = client::test::FakeHttpClient::with_404_response();
407 let client = client::Client::new(http_client.clone());
408 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
409 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
410 }
411
412 #[cfg(any(test, feature = "test-support"))]
413 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
414 self.opened_buffers
415 .get(&remote_id)
416 .and_then(|buffer| buffer.upgrade(cx))
417 }
418
419 #[cfg(any(test, feature = "test-support"))]
420 pub fn languages(&self) -> &Arc<LanguageRegistry> {
421 &self.languages
422 }
423
424 #[cfg(any(test, feature = "test-support"))]
425 pub fn check_invariants(&self, cx: &AppContext) {
426 if self.is_local() {
427 let mut worktree_root_paths = HashMap::default();
428 for worktree in self.worktrees(cx) {
429 let worktree = worktree.read(cx);
430 let abs_path = worktree.as_local().unwrap().abs_path().clone();
431 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
432 assert_eq!(
433 prev_worktree_id,
434 None,
435 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
436 abs_path,
437 worktree.id(),
438 prev_worktree_id
439 )
440 }
441 } else {
442 let replica_id = self.replica_id();
443 for buffer in self.opened_buffers.values() {
444 if let Some(buffer) = buffer.upgrade(cx) {
445 let buffer = buffer.read(cx);
446 assert_eq!(
447 buffer.deferred_ops_len(),
448 0,
449 "replica {}, buffer {} has deferred operations",
450 replica_id,
451 buffer.remote_id()
452 );
453 }
454 }
455 }
456 }
457
458 #[cfg(any(test, feature = "test-support"))]
459 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
460 let path = path.into();
461 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
462 self.opened_buffers.iter().any(|(_, buffer)| {
463 if let Some(buffer) = buffer.upgrade(cx) {
464 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
465 if file.worktree == worktree && file.path() == &path.path {
466 return true;
467 }
468 }
469 }
470 false
471 })
472 } else {
473 false
474 }
475 }
476
477 pub fn fs(&self) -> &Arc<dyn Fs> {
478 &self.fs
479 }
480
481 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
482 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
483 *remote_id_tx.borrow_mut() = remote_id;
484 }
485
486 self.subscriptions.clear();
487 if let Some(remote_id) = remote_id {
488 self.subscriptions
489 .push(self.client.add_model_for_remote_entity(remote_id, cx));
490 }
491 }
492
493 pub fn remote_id(&self) -> Option<u64> {
494 match &self.client_state {
495 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
496 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
497 }
498 }
499
500 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
501 let mut id = None;
502 let mut watch = None;
503 match &self.client_state {
504 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
505 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
506 }
507
508 async move {
509 if let Some(id) = id {
510 return id;
511 }
512 let mut watch = watch.unwrap();
513 loop {
514 let id = *watch.borrow();
515 if let Some(id) = id {
516 return id;
517 }
518 watch.next().await;
519 }
520 }
521 }
522
523 pub fn replica_id(&self) -> ReplicaId {
524 match &self.client_state {
525 ProjectClientState::Local { .. } => 0,
526 ProjectClientState::Remote { replica_id, .. } => *replica_id,
527 }
528 }
529
530 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
531 &self.collaborators
532 }
533
534 pub fn worktrees<'a>(
535 &'a self,
536 cx: &'a AppContext,
537 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
538 self.worktrees
539 .iter()
540 .filter_map(move |worktree| worktree.upgrade(cx))
541 }
542
543 pub fn visible_worktrees<'a>(
544 &'a self,
545 cx: &'a AppContext,
546 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
547 self.worktrees.iter().filter_map(|worktree| {
548 worktree.upgrade(cx).and_then(|worktree| {
549 if worktree.read(cx).is_visible() {
550 Some(worktree)
551 } else {
552 None
553 }
554 })
555 })
556 }
557
558 pub fn worktree_for_id(
559 &self,
560 id: WorktreeId,
561 cx: &AppContext,
562 ) -> Option<ModelHandle<Worktree>> {
563 self.worktrees(cx)
564 .find(|worktree| worktree.read(cx).id() == id)
565 }
566
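    /// Shares this local project with collaborators: upgrades weak buffer and
    /// worktree handles so they stay alive, announces the project with
    /// `ShareProject`, and then shares each local worktree.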
567 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
568 let rpc = self.client.clone();
569 cx.spawn(|this, mut cx| async move {
570 let project_id = this.update(&mut cx, |this, cx| {
571 if let ProjectClientState::Local {
572 is_shared,
573 remote_id_rx,
574 ..
575 } = &mut this.client_state
576 {
577 *is_shared = true;
578
579 for open_buffer in this.opened_buffers.values_mut() {
580 match open_buffer {
581 OpenBuffer::Strong(_) => {}
582 OpenBuffer::Weak(buffer) => {
583 if let Some(buffer) = buffer.upgrade(cx) {
584 *open_buffer = OpenBuffer::Strong(buffer);
585 }
586 }
587 OpenBuffer::Loading(_) => unreachable!(),
588 }
589 }
590
591 for worktree_handle in this.worktrees.iter_mut() {
592 match worktree_handle {
593 WorktreeHandle::Strong(_) => {}
594 WorktreeHandle::Weak(worktree) => {
595 if let Some(worktree) = worktree.upgrade(cx) {
596 *worktree_handle = WorktreeHandle::Strong(worktree);
597 }
598 }
599 }
600 }
601
602 remote_id_rx
603 .borrow()
604 .ok_or_else(|| anyhow!("no project id"))
605 } else {
606 Err(anyhow!("can't share a remote project"))
607 }
608 })?;
609
610 rpc.request(proto::ShareProject { project_id }).await?;
611
612 let mut tasks = Vec::new();
613 this.update(&mut cx, |this, cx| {
614 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
615 worktree.update(cx, |worktree, cx| {
616 let worktree = worktree.as_local_mut().unwrap();
617 tasks.push(worktree.share(project_id, cx));
618 });
619 }
620 });
621 for task in tasks {
622 task.await?;
623 }
624 this.update(&mut cx, |_, cx| cx.notify());
625 Ok(())
626 })
627 }
628
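    /// Stops sharing this local project: downgrades handles that no longer
    /// need to be kept alive, clears collaborator and shared-buffer state, and
    /// notifies the server with `UnshareProject`.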
629 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
630 let rpc = self.client.clone();
631 cx.spawn(|this, mut cx| async move {
632 let project_id = this.update(&mut cx, |this, cx| {
633 if let ProjectClientState::Local {
634 is_shared,
635 remote_id_rx,
636 ..
637 } = &mut this.client_state
638 {
639 *is_shared = false;
640
641 for open_buffer in this.opened_buffers.values_mut() {
642 match open_buffer {
643 OpenBuffer::Strong(buffer) => {
644 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
645 }
646 _ => {}
647 }
648 }
649
650 for worktree_handle in this.worktrees.iter_mut() {
651 match worktree_handle {
652 WorktreeHandle::Strong(worktree) => {
653 if !worktree.read(cx).is_visible() {
654 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
655 }
656 }
657 _ => {}
658 }
659 }
660
661 remote_id_rx
662 .borrow()
663 .ok_or_else(|| anyhow!("no project id"))
664 } else {
665 Err(anyhow!("can't share a remote project"))
666 }
667 })?;
668
669 rpc.send(proto::UnshareProject { project_id })?;
670 this.update(&mut cx, |this, cx| {
671 this.collaborators.clear();
672 this.shared_buffers.clear();
673 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
674 worktree.update(cx, |worktree, _| {
675 worktree.as_local_mut().unwrap().unshare();
676 });
677 }
678 cx.notify()
679 });
680 Ok(())
681 })
682 }
683
684 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
685 if let ProjectClientState::Remote {
686 sharing_has_stopped,
687 ..
688 } = &mut self.client_state
689 {
690 *sharing_has_stopped = true;
691 self.collaborators.clear();
692 cx.notify();
693 }
694 }
695
696 pub fn is_read_only(&self) -> bool {
697 match &self.client_state {
698 ProjectClientState::Local { .. } => false,
699 ProjectClientState::Remote {
700 sharing_has_stopped,
701 ..
702 } => *sharing_has_stopped,
703 }
704 }
705
706 pub fn is_local(&self) -> bool {
707 match &self.client_state {
708 ProjectClientState::Local { .. } => true,
709 ProjectClientState::Remote { .. } => false,
710 }
711 }
712
713 pub fn is_remote(&self) -> bool {
714 !self.is_local()
715 }
716
717 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
718 if self.is_remote() {
719 return Err(anyhow!("creating buffers as a guest is not supported yet"));
720 }
721
722 let buffer = cx.add_model(|cx| {
723 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
724 });
725 self.register_buffer(&buffer, None, cx)?;
726 Ok(buffer)
727 }
728
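    /// Opens the buffer at the given project path, reusing an already-open
    /// buffer when possible and deduplicating concurrent loads of the same
    /// path via the `loading_buffers` watch channels.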
729 pub fn open_buffer(
730 &mut self,
731 path: impl Into<ProjectPath>,
732 cx: &mut ModelContext<Self>,
733 ) -> Task<Result<ModelHandle<Buffer>>> {
734 let project_path = path.into();
735 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
736 worktree
737 } else {
738 return Task::ready(Err(anyhow!("no such worktree")));
739 };
740
741 // If there is already a buffer for the given path, then return it.
742 let existing_buffer = self.get_open_buffer(&project_path, cx);
743 if let Some(existing_buffer) = existing_buffer {
744 return Task::ready(Ok(existing_buffer));
745 }
746
747 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
748 // If the given path is already being loaded, then wait for that existing
749 // task to complete and return the same buffer.
750 hash_map::Entry::Occupied(e) => e.get().clone(),
751
752 // Otherwise, record the fact that this path is now being loaded.
753 hash_map::Entry::Vacant(entry) => {
754 let (mut tx, rx) = postage::watch::channel();
755 entry.insert(rx.clone());
756
757 let load_buffer = if worktree.read(cx).is_local() {
758 self.open_local_buffer(&project_path.path, &worktree, cx)
759 } else {
760 self.open_remote_buffer(&project_path.path, &worktree, cx)
761 };
762
763 cx.spawn(move |this, mut cx| async move {
764 let load_result = load_buffer.await;
765 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
766 // Record the fact that the buffer is no longer loading.
767 this.loading_buffers.remove(&project_path);
768 let buffer = load_result.map_err(Arc::new)?;
769 Ok(buffer)
770 }));
771 })
772 .detach();
773 rx
774 }
775 };
776
777 cx.foreground().spawn(async move {
778 loop {
779 if let Some(result) = loading_watch.borrow().as_ref() {
780 match result {
781 Ok(buffer) => return Ok(buffer.clone()),
782 Err(error) => return Err(anyhow!("{}", error)),
783 }
784 }
785 loading_watch.next().await;
786 }
787 })
788 }
789
790 fn open_local_buffer(
791 &mut self,
792 path: &Arc<Path>,
793 worktree: &ModelHandle<Worktree>,
794 cx: &mut ModelContext<Self>,
795 ) -> Task<Result<ModelHandle<Buffer>>> {
796 let load_buffer = worktree.update(cx, |worktree, cx| {
797 let worktree = worktree.as_local_mut().unwrap();
798 worktree.load_buffer(path, cx)
799 });
800 let worktree = worktree.downgrade();
801 cx.spawn(|this, mut cx| async move {
802 let buffer = load_buffer.await?;
803 let worktree = worktree
804 .upgrade(&cx)
805 .ok_or_else(|| anyhow!("worktree was removed"))?;
806 this.update(&mut cx, |this, cx| {
807 this.register_buffer(&buffer, Some(&worktree), cx)
808 })?;
809 Ok(buffer)
810 })
811 }
812
813 fn open_remote_buffer(
814 &mut self,
815 path: &Arc<Path>,
816 worktree: &ModelHandle<Worktree>,
817 cx: &mut ModelContext<Self>,
818 ) -> Task<Result<ModelHandle<Buffer>>> {
819 let rpc = self.client.clone();
820 let project_id = self.remote_id().unwrap();
821 let remote_worktree_id = worktree.read(cx).id();
822 let path = path.clone();
823 let path_string = path.to_string_lossy().to_string();
824 cx.spawn(|this, mut cx| async move {
825 let response = rpc
826 .request(proto::OpenBuffer {
827 project_id,
828 worktree_id: remote_worktree_id.to_proto(),
829 path: path_string,
830 })
831 .await?;
832 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
833 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
834 .await
835 })
836 }
837
838 fn open_local_buffer_via_lsp(
839 &mut self,
840 abs_path: lsp::Url,
841 lang_name: String,
842 lang_server: Arc<LanguageServer>,
843 cx: &mut ModelContext<Self>,
844 ) -> Task<Result<ModelHandle<Buffer>>> {
845 cx.spawn(|this, mut cx| async move {
846 let abs_path = abs_path
847 .to_file_path()
848 .map_err(|_| anyhow!("can't convert URI to path"))?;
849 let (worktree, relative_path) = if let Some(result) =
850 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
851 {
852 result
853 } else {
854 let worktree = this
855 .update(&mut cx, |this, cx| {
856 this.create_local_worktree(&abs_path, false, cx)
857 })
858 .await?;
859 this.update(&mut cx, |this, cx| {
860 this.language_servers
861 .insert((worktree.read(cx).id(), lang_name), lang_server);
862 });
863 (worktree, PathBuf::new())
864 };
865
866 let project_path = ProjectPath {
867 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
868 path: relative_path.into(),
869 };
870 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
871 .await
872 })
873 }
874
875 pub fn save_buffer_as(
876 &mut self,
877 buffer: ModelHandle<Buffer>,
878 abs_path: PathBuf,
879 cx: &mut ModelContext<Project>,
880 ) -> Task<Result<()>> {
881 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
882 cx.spawn(|this, mut cx| async move {
883 let (worktree, path) = worktree_task.await?;
884 worktree
885 .update(&mut cx, |worktree, cx| {
886 worktree
887 .as_local_mut()
888 .unwrap()
889 .save_buffer_as(buffer.clone(), path, cx)
890 })
891 .await?;
892 this.update(&mut cx, |this, cx| {
893 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
894 });
895 Ok(())
896 })
897 }
898
899 pub fn get_open_buffer(
900 &mut self,
901 path: &ProjectPath,
902 cx: &mut ModelContext<Self>,
903 ) -> Option<ModelHandle<Buffer>> {
904 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
905 self.opened_buffers.values().find_map(|buffer| {
906 let buffer = buffer.upgrade(cx)?;
907 let file = File::from_dyn(buffer.read(cx).file())?;
908 if file.worktree == worktree && file.path() == &path.path {
909 Some(buffer)
910 } else {
911 None
912 }
913 })
914 }
915
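    /// Records a newly opened buffer under its remote id, applies any
    /// operations that arrived while it was still loading, and assigns it a
    /// language (and language server) based on its file path.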
916 fn register_buffer(
917 &mut self,
918 buffer: &ModelHandle<Buffer>,
919 worktree: Option<&ModelHandle<Worktree>>,
920 cx: &mut ModelContext<Self>,
921 ) -> Result<()> {
922 let remote_id = buffer.read(cx).remote_id();
923 let open_buffer = if self.is_remote() || self.is_shared() {
924 OpenBuffer::Strong(buffer.clone())
925 } else {
926 OpenBuffer::Weak(buffer.downgrade())
927 };
928
929 match self.opened_buffers.insert(remote_id, open_buffer) {
930 None => {}
931 Some(OpenBuffer::Loading(operations)) => {
932 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
933 }
934 Some(OpenBuffer::Weak(existing_handle)) => {
935 if existing_handle.upgrade(cx).is_some() {
936 Err(anyhow!(
937 "already registered buffer with remote id {}",
938 remote_id
939 ))?
940 }
941 }
942 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
943 "already registered buffer with remote id {}",
944 remote_id
945 ))?,
946 }
        cx.subscribe(buffer, Self::on_buffer_event).detach();
948 self.assign_language_to_buffer(buffer, worktree, cx);
949
950 Ok(())
951 }
952
953 fn on_buffer_event(
954 &mut self,
955 buffer: ModelHandle<Buffer>,
956 event: BufferEvent,
957 cx: &mut ModelContext<Self>,
958 ) {
959 match event {
960 BufferEvent::Operation(operation) => {
961 if let Some(project_id) = self.remote_id() {
962 let request = self.client.request(proto::UpdateBuffer {
963 project_id,
964 buffer_id: buffer.read(cx).remote_id(),
965 operations: vec![language::proto::serialize_operation(&operation)],
966 });
967 cx.background().spawn(request).detach_and_log_err(cx);
968 }
969 }
970 BufferEvent::Saved => {
971 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
972 let worktree_id = file.worktree_id(cx);
973 if let Some(abs_path) = file.as_local().map(|file| file.abs_path(cx)) {
974 let text_document = lsp::TextDocumentIdentifier {
975 uri: lsp::Url::from_file_path(abs_path).unwrap(),
976 };
977
978 let mut notifications = Vec::new();
979 for ((lang_server_worktree_id, _), lang_server) in &self.language_servers {
980 if *lang_server_worktree_id != worktree_id {
981 continue;
982 }
983
984 notifications.push(
985 lang_server.notify::<lsp::notification::DidSaveTextDocument>(
986 lsp::DidSaveTextDocumentParams {
987 text_document: text_document.clone(),
988 text: None,
989 },
990 ),
991 );
992 }
993
994 cx.background()
995 .spawn(futures::future::try_join_all(notifications))
996 .detach_and_log_err(cx);
997 }
998 }
999 }
1000 _ => {}
1001 }
1002 }
1003
1004 fn assign_language_to_buffer(
1005 &mut self,
1006 buffer: &ModelHandle<Buffer>,
1007 worktree: Option<&ModelHandle<Worktree>>,
1008 cx: &mut ModelContext<Self>,
1009 ) -> Option<()> {
1010 let (path, full_path) = {
1011 let file = buffer.read(cx).file()?;
1012 (file.path().clone(), file.full_path(cx))
1013 };
1014
1015 // If the buffer has a language, set it and start/assign the language server
1016 if let Some(language) = self.languages.select_language(&full_path) {
1017 buffer.update(cx, |buffer, cx| {
1018 buffer.set_language(Some(language.clone()), cx);
1019 });
1020
1021 // For local worktrees, start a language server if needed.
1022 // Also assign the language server and any previously stored diagnostics to the buffer.
1023 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
1024 let worktree_id = local_worktree.id();
1025 let worktree_abs_path = local_worktree.abs_path().clone();
1026 let buffer = buffer.downgrade();
1027 let language_server =
1028 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1029
1030 cx.spawn_weak(|_, mut cx| async move {
1031 if let Some(language_server) = language_server.await {
1032 if let Some(buffer) = buffer.upgrade(&cx) {
1033 buffer.update(&mut cx, |buffer, cx| {
1034 buffer.set_language_server(Some(language_server), cx);
1035 });
1036 }
1037 }
1038 })
1039 .detach();
1040 }
1041 }
1042
1043 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
1044 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
1045 buffer.update(cx, |buffer, cx| {
1046 buffer.update_diagnostics(diagnostics, None, cx).log_err();
1047 });
1048 }
1049 }
1050
1051 None
1052 }
1053
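    /// Starts (or reuses) the language server for a worktree/language pair and
    /// spawns tasks that translate its `PublishDiagnostics` and `Progress`
    /// notifications into the project's diagnostics events.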
1054 fn start_language_server(
1055 &mut self,
1056 worktree_id: WorktreeId,
1057 worktree_path: Arc<Path>,
1058 language: Arc<Language>,
1059 cx: &mut ModelContext<Self>,
1060 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
1061 enum LspEvent {
1062 DiagnosticsStart,
1063 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
1064 DiagnosticsFinish,
1065 }
1066
1067 let key = (worktree_id, language.name().to_string());
1068 self.started_language_servers
1069 .entry(key.clone())
1070 .or_insert_with(|| {
1071 let language_server = self.languages.start_language_server(
1072 language.clone(),
1073 worktree_path,
1074 self.client.http_client(),
1075 cx,
1076 );
1077 let rpc = self.client.clone();
1078 cx.spawn_weak(|this, mut cx| async move {
1079 let language_server = language_server?.await.log_err()?;
1080 if let Some(this) = this.upgrade(&cx) {
1081 this.update(&mut cx, |this, _| {
1082 this.language_servers.insert(key, language_server.clone());
1083 });
1084 }
1085
1086 let disk_based_sources = language
1087 .disk_based_diagnostic_sources()
1088 .cloned()
1089 .unwrap_or_default();
1090 let disk_based_diagnostics_progress_token =
1091 language.disk_based_diagnostics_progress_token().cloned();
1092 let has_disk_based_diagnostic_progress_token =
1093 disk_based_diagnostics_progress_token.is_some();
1094 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1095
1096 // Listen for `PublishDiagnostics` notifications.
1097 language_server
1098 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1099 let diagnostics_tx = diagnostics_tx.clone();
1100 move |params| {
1101 if !has_disk_based_diagnostic_progress_token {
1102 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1103 }
1104 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1105 .ok();
1106 if !has_disk_based_diagnostic_progress_token {
1107 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1108 }
1109 }
1110 })
1111 .detach();
1112
1113 // Listen for `Progress` notifications. Send an event when the language server
1114 // transitions between running jobs and not running any jobs.
1115 let mut running_jobs_for_this_server: i32 = 0;
1116 language_server
1117 .on_notification::<lsp::notification::Progress, _>(move |params| {
1118 let token = match params.token {
1119 lsp::NumberOrString::Number(_) => None,
1120 lsp::NumberOrString::String(token) => Some(token),
1121 };
1122
1123 if token == disk_based_diagnostics_progress_token {
1124 match params.value {
1125 lsp::ProgressParamsValue::WorkDone(progress) => {
1126 match progress {
1127 lsp::WorkDoneProgress::Begin(_) => {
1128 running_jobs_for_this_server += 1;
1129 if running_jobs_for_this_server == 1 {
1130 block_on(
1131 diagnostics_tx
1132 .send(LspEvent::DiagnosticsStart),
1133 )
1134 .ok();
1135 }
1136 }
1137 lsp::WorkDoneProgress::End(_) => {
1138 running_jobs_for_this_server -= 1;
1139 if running_jobs_for_this_server == 0 {
1140 block_on(
1141 diagnostics_tx
1142 .send(LspEvent::DiagnosticsFinish),
1143 )
1144 .ok();
1145 }
1146 }
1147 _ => {}
1148 }
1149 }
1150 }
1151 }
1152 })
1153 .detach();
1154
1155 // Process all the LSP events.
1156 cx.spawn(|mut cx| async move {
1157 while let Ok(message) = diagnostics_rx.recv().await {
1158 let this = this.upgrade(&cx)?;
1159 match message {
1160 LspEvent::DiagnosticsStart => {
1161 this.update(&mut cx, |this, cx| {
1162 this.disk_based_diagnostics_started(cx);
1163 if let Some(project_id) = this.remote_id() {
1164 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1165 project_id,
1166 })
1167 .log_err();
1168 }
1169 });
1170 }
1171 LspEvent::DiagnosticsUpdate(mut params) => {
1172 language.process_diagnostics(&mut params);
1173 this.update(&mut cx, |this, cx| {
1174 this.update_diagnostics(params, &disk_based_sources, cx)
1175 .log_err();
1176 });
1177 }
1178 LspEvent::DiagnosticsFinish => {
1179 this.update(&mut cx, |this, cx| {
1180 this.disk_based_diagnostics_finished(cx);
1181 if let Some(project_id) = this.remote_id() {
1182 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1183 project_id,
1184 })
1185 .log_err();
1186 }
1187 });
1188 }
1189 }
1190 }
1191 Some(())
1192 })
1193 .detach();
1194
1195 Some(language_server)
1196 })
1197 .shared()
1198 })
1199 .clone()
1200 }
1201
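    /// Converts an LSP `PublishDiagnostics` notification into grouped
    /// `DiagnosticEntry` values, attaching related-information entries to the
    /// primary diagnostic they support, and forwards them to the owning
    /// worktree.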
1202 pub fn update_diagnostics(
1203 &mut self,
1204 params: lsp::PublishDiagnosticsParams,
1205 disk_based_sources: &HashSet<String>,
1206 cx: &mut ModelContext<Self>,
1207 ) -> Result<()> {
1208 let abs_path = params
1209 .uri
1210 .to_file_path()
1211 .map_err(|_| anyhow!("URI is not a file"))?;
1212 let mut next_group_id = 0;
1213 let mut diagnostics = Vec::default();
1214 let mut primary_diagnostic_group_ids = HashMap::default();
1215 let mut sources_by_group_id = HashMap::default();
1216 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1218 let source = diagnostic.source.as_ref();
1219 let code = diagnostic.code.as_ref().map(|code| match code {
1220 lsp::NumberOrString::Number(code) => code.to_string(),
1221 lsp::NumberOrString::String(code) => code.clone(),
1222 });
1223 let range = range_from_lsp(diagnostic.range);
1224 let is_supporting = diagnostic
1225 .related_information
1226 .as_ref()
1227 .map_or(false, |infos| {
1228 infos.iter().any(|info| {
1229 primary_diagnostic_group_ids.contains_key(&(
1230 source,
1231 code.clone(),
1232 range_from_lsp(info.location.range),
1233 ))
1234 })
1235 });
1236
1237 if is_supporting {
1238 if let Some(severity) = diagnostic.severity {
1239 supporting_diagnostic_severities
1240 .insert((source, code.clone(), range), severity);
1241 }
1242 } else {
1243 let group_id = post_inc(&mut next_group_id);
1244 let is_disk_based =
1245 source.map_or(false, |source| disk_based_sources.contains(source));
1246
1247 sources_by_group_id.insert(group_id, source);
1248 primary_diagnostic_group_ids
1249 .insert((source, code.clone(), range.clone()), group_id);
1250
1251 diagnostics.push(DiagnosticEntry {
1252 range,
1253 diagnostic: Diagnostic {
1254 code: code.clone(),
1255 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1256 message: diagnostic.message.clone(),
1257 group_id,
1258 is_primary: true,
1259 is_valid: true,
1260 is_disk_based,
1261 },
1262 });
1263 if let Some(infos) = &diagnostic.related_information {
1264 for info in infos {
1265 if info.location.uri == params.uri && !info.message.is_empty() {
1266 let range = range_from_lsp(info.location.range);
1267 diagnostics.push(DiagnosticEntry {
1268 range,
1269 diagnostic: Diagnostic {
1270 code: code.clone(),
1271 severity: DiagnosticSeverity::INFORMATION,
1272 message: info.message.clone(),
1273 group_id,
1274 is_primary: false,
1275 is_valid: true,
1276 is_disk_based,
1277 },
1278 });
1279 }
1280 }
1281 }
1282 }
1283 }
1284
1285 for entry in &mut diagnostics {
1286 let diagnostic = &mut entry.diagnostic;
1287 if !diagnostic.is_primary {
1288 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1289 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1290 source,
1291 diagnostic.code.clone(),
1292 entry.range.clone(),
1293 )) {
1294 diagnostic.severity = severity;
1295 }
1296 }
1297 }
1298
1299 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1300 Ok(())
1301 }
1302
1303 pub fn update_diagnostic_entries(
1304 &mut self,
1305 abs_path: PathBuf,
1306 version: Option<i32>,
1307 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1308 cx: &mut ModelContext<Project>,
1309 ) -> Result<(), anyhow::Error> {
1310 let (worktree, relative_path) = self
1311 .find_local_worktree(&abs_path, cx)
1312 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1313 if !worktree.read(cx).is_visible() {
1314 return Ok(());
1315 }
1316
1317 let project_path = ProjectPath {
1318 worktree_id: worktree.read(cx).id(),
1319 path: relative_path.into(),
1320 };
1321
1322 for buffer in self.opened_buffers.values() {
1323 if let Some(buffer) = buffer.upgrade(cx) {
1324 if buffer
1325 .read(cx)
1326 .file()
1327 .map_or(false, |file| *file.path() == project_path.path)
1328 {
1329 buffer.update(cx, |buffer, cx| {
1330 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1331 })?;
1332 break;
1333 }
1334 }
1335 }
1336 worktree.update(cx, |worktree, cx| {
1337 worktree
1338 .as_local_mut()
1339 .ok_or_else(|| anyhow!("not a local worktree"))?
1340 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1341 })?;
1342 cx.emit(Event::DiagnosticsUpdated(project_path));
1343 Ok(())
1344 }
1345
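    /// Formats the given buffers. Buffers owned by other peers are formatted
    /// via a `FormatBuffers` request to the host; local buffers use the
    /// language server's document or range formatting, and all resulting edits
    /// are collected into a `ProjectTransaction`.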
1346 pub fn format(
1347 &self,
1348 buffers: HashSet<ModelHandle<Buffer>>,
1349 push_to_history: bool,
1350 cx: &mut ModelContext<Project>,
1351 ) -> Task<Result<ProjectTransaction>> {
1352 let mut local_buffers = Vec::new();
1353 let mut remote_buffers = None;
1354 for buffer_handle in buffers {
1355 let buffer = buffer_handle.read(cx);
1356 let worktree;
1357 if let Some(file) = File::from_dyn(buffer.file()) {
1358 worktree = file.worktree.clone();
1359 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1360 let lang_server;
1361 if let Some(lang) = buffer.language() {
1362 if let Some(server) = self
1363 .language_servers
1364 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1365 {
1366 lang_server = server.clone();
1367 } else {
1368 return Task::ready(Ok(Default::default()));
1369 };
1370 } else {
1371 return Task::ready(Ok(Default::default()));
1372 }
1373
1374 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1375 } else {
1376 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1377 }
1378 } else {
1379 return Task::ready(Ok(Default::default()));
1380 }
1381 }
1382
1383 let remote_buffers = self.remote_id().zip(remote_buffers);
1384 let client = self.client.clone();
1385
1386 cx.spawn(|this, mut cx| async move {
1387 let mut project_transaction = ProjectTransaction::default();
1388
1389 if let Some((project_id, remote_buffers)) = remote_buffers {
1390 let response = client
1391 .request(proto::FormatBuffers {
1392 project_id,
1393 buffer_ids: remote_buffers
1394 .iter()
1395 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1396 .collect(),
1397 })
1398 .await?
1399 .transaction
1400 .ok_or_else(|| anyhow!("missing transaction"))?;
1401 project_transaction = this
1402 .update(&mut cx, |this, cx| {
1403 this.deserialize_project_transaction(response, push_to_history, cx)
1404 })
1405 .await?;
1406 }
1407
1408 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1409 let capabilities = if let Some(capabilities) = lang_server.capabilities().await {
1410 capabilities
1411 } else {
1412 continue;
1413 };
1414
1415 let text_document = lsp::TextDocumentIdentifier::new(
1416 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1417 );
1418 let lsp_edits = if capabilities
1419 .document_formatting_provider
1420 .map_or(false, |provider| provider != lsp::OneOf::Left(false))
1421 {
1422 lang_server
1423 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1424 text_document,
1425 options: Default::default(),
1426 work_done_progress_params: Default::default(),
1427 })
1428 .await?
1429 } else if capabilities
1430 .document_range_formatting_provider
1431 .map_or(false, |provider| provider != lsp::OneOf::Left(false))
1432 {
1433 let buffer_start = lsp::Position::new(0, 0);
1434 let buffer_end = buffer
1435 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1436 .to_lsp_position();
1437 lang_server
1438 .request::<lsp::request::RangeFormatting>(
1439 lsp::DocumentRangeFormattingParams {
1440 text_document,
1441 range: lsp::Range::new(buffer_start, buffer_end),
1442 options: Default::default(),
1443 work_done_progress_params: Default::default(),
1444 },
1445 )
1446 .await?
1447 } else {
1448 continue;
1449 };
1450
1451 if let Some(lsp_edits) = lsp_edits {
1452 let edits = buffer
1453 .update(&mut cx, |buffer, cx| {
1454 buffer.edits_from_lsp(lsp_edits, None, cx)
1455 })
1456 .await?;
1457 buffer.update(&mut cx, |buffer, cx| {
1458 buffer.finalize_last_transaction();
1459 buffer.start_transaction();
1460 for (range, text) in edits {
1461 buffer.edit([range], text, cx);
1462 }
1463 if buffer.end_transaction(cx).is_some() {
1464 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1465 if !push_to_history {
1466 buffer.forget_transaction(transaction.id);
1467 }
1468 project_transaction.0.insert(cx.handle(), transaction);
1469 }
1470 });
1471 }
1472 }
1473
1474 Ok(project_transaction)
1475 })
1476 }
1477
1478 pub fn definition<T: ToPointUtf16>(
1479 &self,
1480 buffer: &ModelHandle<Buffer>,
1481 position: T,
1482 cx: &mut ModelContext<Self>,
1483 ) -> Task<Result<Vec<Location>>> {
1484 let position = position.to_point_utf16(buffer.read(cx));
1485 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1486 }
1487
1488 pub fn references<T: ToPointUtf16>(
1489 &self,
1490 buffer: &ModelHandle<Buffer>,
1491 position: T,
1492 cx: &mut ModelContext<Self>,
1493 ) -> Task<Result<Vec<Location>>> {
1494 let position = position.to_point_utf16(buffer.read(cx));
1495 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1496 }
1497
1498 pub fn document_highlights<T: ToPointUtf16>(
1499 &self,
1500 buffer: &ModelHandle<Buffer>,
1501 position: T,
1502 cx: &mut ModelContext<Self>,
1503 ) -> Task<Result<Vec<DocumentHighlight>>> {
1504 let position = position.to_point_utf16(buffer.read(cx));
1505
1506 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1507 }
1508
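    /// Searches for workspace symbols, fanning the query out to every started
    /// language server locally, or sending a `GetProjectSymbols` request when
    /// the project is remote.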
1509 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1510 if self.is_local() {
1511 let mut language_servers = HashMap::default();
1512 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1513 if let Some((worktree, language)) = self
1514 .worktree_for_id(*worktree_id, cx)
1515 .and_then(|worktree| worktree.read(cx).as_local())
1516 .zip(self.languages.get_language(language_name))
1517 {
1518 language_servers
1519 .entry(Arc::as_ptr(language_server))
1520 .or_insert((
1521 language_server.clone(),
1522 *worktree_id,
1523 worktree.abs_path().clone(),
1524 language.clone(),
1525 ));
1526 }
1527 }
1528
1529 let mut requests = Vec::new();
1530 for (language_server, _, _, _) in language_servers.values() {
1531 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1532 lsp::WorkspaceSymbolParams {
1533 query: query.to_string(),
1534 ..Default::default()
1535 },
1536 ));
1537 }
1538
1539 cx.spawn_weak(|this, cx| async move {
1540 let responses = futures::future::try_join_all(requests).await?;
1541
1542 let mut symbols = Vec::new();
1543 if let Some(this) = this.upgrade(&cx) {
1544 this.read_with(&cx, |this, cx| {
1545 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1546 language_servers.into_values().zip(responses)
1547 {
1548 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1549 |lsp_symbol| {
1550 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1551 let mut worktree_id = source_worktree_id;
1552 let path;
1553 if let Some((worktree, rel_path)) =
1554 this.find_local_worktree(&abs_path, cx)
1555 {
1556 worktree_id = worktree.read(cx).id();
1557 path = rel_path;
1558 } else {
1559 path = relativize_path(&worktree_abs_path, &abs_path);
1560 }
1561
1562 let label = language
1563 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1564 .unwrap_or_else(|| {
1565 CodeLabel::plain(lsp_symbol.name.clone(), None)
1566 });
1567 let signature = this.symbol_signature(worktree_id, &path);
1568
1569 Some(Symbol {
1570 source_worktree_id,
1571 worktree_id,
1572 language_name: language.name().to_string(),
1573 name: lsp_symbol.name,
1574 kind: lsp_symbol.kind,
1575 label,
1576 path,
1577 range: range_from_lsp(lsp_symbol.location.range),
1578 signature,
1579 })
1580 },
1581 ));
1582 }
1583 })
1584 }
1585
1586 Ok(symbols)
1587 })
1588 } else if let Some(project_id) = self.remote_id() {
1589 let request = self.client.request(proto::GetProjectSymbols {
1590 project_id,
1591 query: query.to_string(),
1592 });
1593 cx.spawn_weak(|this, cx| async move {
1594 let response = request.await?;
1595 let mut symbols = Vec::new();
1596 if let Some(this) = this.upgrade(&cx) {
1597 this.read_with(&cx, |this, _| {
1598 symbols.extend(
1599 response
1600 .symbols
1601 .into_iter()
1602 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1603 );
1604 })
1605 }
1606 Ok(symbols)
1607 })
1608 } else {
1609 Task::ready(Ok(Default::default()))
1610 }
1611 }
1612
1613 pub fn open_buffer_for_symbol(
1614 &mut self,
1615 symbol: &Symbol,
1616 cx: &mut ModelContext<Self>,
1617 ) -> Task<Result<ModelHandle<Buffer>>> {
1618 if self.is_local() {
1619 let language_server = if let Some(server) = self
1620 .language_servers
1621 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1622 {
1623 server.clone()
1624 } else {
1625 return Task::ready(Err(anyhow!(
1626 "language server for worktree and language not found"
1627 )));
1628 };
1629
1630 let worktree_abs_path = if let Some(worktree_abs_path) = self
1631 .worktree_for_id(symbol.worktree_id, cx)
1632 .and_then(|worktree| worktree.read(cx).as_local())
1633 .map(|local_worktree| local_worktree.abs_path())
1634 {
1635 worktree_abs_path
1636 } else {
1637 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1638 };
1639 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1640 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1641 uri
1642 } else {
1643 return Task::ready(Err(anyhow!("invalid symbol path")));
1644 };
1645
1646 self.open_local_buffer_via_lsp(
1647 symbol_uri,
1648 symbol.language_name.clone(),
1649 language_server,
1650 cx,
1651 )
1652 } else if let Some(project_id) = self.remote_id() {
1653 let request = self.client.request(proto::OpenBufferForSymbol {
1654 project_id,
1655 symbol: Some(serialize_symbol(symbol)),
1656 });
1657 cx.spawn(|this, mut cx| async move {
1658 let response = request.await?;
1659 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1660 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1661 .await
1662 })
1663 } else {
1664 Task::ready(Err(anyhow!("project does not have a remote id")))
1665 }
1666 }
1667
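    /// Requests completions at the given position, either directly from the
    /// buffer's language server or from the host via `GetCompletions`,
    /// converting the LSP text edits into anchored completions.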
1668 pub fn completions<T: ToPointUtf16>(
1669 &self,
1670 source_buffer_handle: &ModelHandle<Buffer>,
1671 position: T,
1672 cx: &mut ModelContext<Self>,
1673 ) -> Task<Result<Vec<Completion>>> {
1674 let source_buffer_handle = source_buffer_handle.clone();
1675 let source_buffer = source_buffer_handle.read(cx);
1676 let buffer_id = source_buffer.remote_id();
1677 let language = source_buffer.language().cloned();
1678 let worktree;
1679 let buffer_abs_path;
1680 if let Some(file) = File::from_dyn(source_buffer.file()) {
1681 worktree = file.worktree.clone();
1682 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1683 } else {
1684 return Task::ready(Ok(Default::default()));
1685 };
1686
1687 let position = position.to_point_utf16(source_buffer);
1688 let anchor = source_buffer.anchor_after(position);
1689
1690 if worktree.read(cx).as_local().is_some() {
1691 let buffer_abs_path = buffer_abs_path.unwrap();
1692 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1693 server
1694 } else {
1695 return Task::ready(Ok(Default::default()));
1696 };
1697
1698 cx.spawn(|_, cx| async move {
1699 let completions = lang_server
1700 .request::<lsp::request::Completion>(lsp::CompletionParams {
1701 text_document_position: lsp::TextDocumentPositionParams::new(
1702 lsp::TextDocumentIdentifier::new(
1703 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1704 ),
1705 position.to_lsp_position(),
1706 ),
1707 context: Default::default(),
1708 work_done_progress_params: Default::default(),
1709 partial_result_params: Default::default(),
1710 })
1711 .await
1712 .context("lsp completion request failed")?;
1713
1714 let completions = if let Some(completions) = completions {
1715 match completions {
1716 lsp::CompletionResponse::Array(completions) => completions,
1717 lsp::CompletionResponse::List(list) => list.items,
1718 }
1719 } else {
1720 Default::default()
1721 };
1722
1723 source_buffer_handle.read_with(&cx, |this, _| {
1724 Ok(completions
1725 .into_iter()
1726 .filter_map(|lsp_completion| {
1727 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1728 lsp::CompletionTextEdit::Edit(edit) => {
1729 (range_from_lsp(edit.range), edit.new_text.clone())
1730 }
1731 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1732 log::info!("unsupported insert/replace completion");
1733 return None;
1734 }
1735 };
1736
1737 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1738 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1739 if clipped_start == old_range.start && clipped_end == old_range.end {
1740 Some(Completion {
1741 old_range: this.anchor_before(old_range.start)
1742 ..this.anchor_after(old_range.end),
1743 new_text,
1744 label: language
1745 .as_ref()
1746 .and_then(|l| l.label_for_completion(&lsp_completion))
1747 .unwrap_or_else(|| {
1748 CodeLabel::plain(
1749 lsp_completion.label.clone(),
1750 lsp_completion.filter_text.as_deref(),
1751 )
1752 }),
1753 lsp_completion,
1754 })
1755 } else {
1756 None
1757 }
1758 })
1759 .collect())
1760 })
1761 })
1762 } else if let Some(project_id) = self.remote_id() {
1763 let rpc = self.client.clone();
1764 let message = proto::GetCompletions {
1765 project_id,
1766 buffer_id,
1767 position: Some(language::proto::serialize_anchor(&anchor)),
1768 version: serialize_version(&source_buffer.version()),
1769 };
1770 cx.spawn_weak(|_, mut cx| async move {
1771 let response = rpc.request(message).await?;
1772
1773 source_buffer_handle
1774 .update(&mut cx, |buffer, _| {
1775 buffer.wait_for_version(deserialize_version(response.version))
1776 })
1777 .await;
1778
1779 response
1780 .completions
1781 .into_iter()
1782 .map(|completion| {
1783 language::proto::deserialize_completion(completion, language.as_ref())
1784 })
1785 .collect()
1786 })
1787 } else {
1788 Task::ready(Ok(Default::default()))
1789 }
1790 }
1791
1792 pub fn apply_additional_edits_for_completion(
1793 &self,
1794 buffer_handle: ModelHandle<Buffer>,
1795 completion: Completion,
1796 push_to_history: bool,
1797 cx: &mut ModelContext<Self>,
1798 ) -> Task<Result<Option<Transaction>>> {
1799 let buffer = buffer_handle.read(cx);
1800 let buffer_id = buffer.remote_id();
1801
1802 if self.is_local() {
1803 let lang_server = if let Some(language_server) = buffer.language_server() {
1804 language_server.clone()
1805 } else {
1806 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1807 };
1808
1809 cx.spawn(|_, mut cx| async move {
1810 let resolved_completion = lang_server
1811 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1812 .await?;
1813 if let Some(edits) = resolved_completion.additional_text_edits {
1814 let edits = buffer_handle
1815 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1816 .await?;
1817 buffer_handle.update(&mut cx, |buffer, cx| {
1818 buffer.finalize_last_transaction();
1819 buffer.start_transaction();
1820 for (range, text) in edits {
1821 buffer.edit([range], text, cx);
1822 }
1823 let transaction = if buffer.end_transaction(cx).is_some() {
1824 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1825 if !push_to_history {
1826 buffer.forget_transaction(transaction.id);
1827 }
1828 Some(transaction)
1829 } else {
1830 None
1831 };
1832 Ok(transaction)
1833 })
1834 } else {
1835 Ok(None)
1836 }
1837 })
1838 } else if let Some(project_id) = self.remote_id() {
1839 let client = self.client.clone();
1840 cx.spawn(|_, mut cx| async move {
1841 let response = client
1842 .request(proto::ApplyCompletionAdditionalEdits {
1843 project_id,
1844 buffer_id,
1845 completion: Some(language::proto::serialize_completion(&completion)),
1846 })
1847 .await?;
1848
1849 if let Some(transaction) = response.transaction {
1850 let transaction = language::proto::deserialize_transaction(transaction)?;
1851 buffer_handle
1852 .update(&mut cx, |buffer, _| {
1853 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1854 })
1855 .await;
1856 if push_to_history {
1857 buffer_handle.update(&mut cx, |buffer, _| {
1858 buffer.push_transaction(transaction.clone(), Instant::now());
1859 });
1860 }
1861 Ok(Some(transaction))
1862 } else {
1863 Ok(None)
1864 }
1865 })
1866 } else {
1867 Task::ready(Err(anyhow!("project does not have a remote id")))
1868 }
1869 }
1870
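    /// Fetches code actions for the given range from the buffer's language
    /// server, or from the host via `GetCodeActions` when the project is
    /// remote.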
1871 pub fn code_actions<T: ToOffset>(
1872 &self,
1873 buffer_handle: &ModelHandle<Buffer>,
1874 range: Range<T>,
1875 cx: &mut ModelContext<Self>,
1876 ) -> Task<Result<Vec<CodeAction>>> {
1877 let buffer_handle = buffer_handle.clone();
1878 let buffer = buffer_handle.read(cx);
1879 let buffer_id = buffer.remote_id();
1880 let worktree;
1881 let buffer_abs_path;
1882 if let Some(file) = File::from_dyn(buffer.file()) {
1883 worktree = file.worktree.clone();
1884 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1885 } else {
1886 return Task::ready(Ok(Default::default()));
1887 };
1888 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1889
1890 if worktree.read(cx).as_local().is_some() {
1891 let buffer_abs_path = buffer_abs_path.unwrap();
1892 let lang_name;
1893 let lang_server;
1894 if let Some(lang) = buffer.language() {
1895 lang_name = lang.name().to_string();
1896 if let Some(server) = self
1897 .language_servers
1898 .get(&(worktree.read(cx).id(), lang_name.clone()))
1899 {
1900 lang_server = server.clone();
1901 } else {
1902 return Task::ready(Ok(Default::default()));
1903 };
1904 } else {
1905 return Task::ready(Ok(Default::default()));
1906 }
1907
1908 let lsp_range = lsp::Range::new(
1909 range.start.to_point_utf16(buffer).to_lsp_position(),
1910 range.end.to_point_utf16(buffer).to_lsp_position(),
1911 );
1912 cx.foreground().spawn(async move {
1913 if !lang_server
1914 .capabilities()
1915 .await
1916 .map_or(false, |capabilities| {
1917 capabilities.code_action_provider.is_some()
1918 })
1919 {
1920 return Ok(Default::default());
1921 }
1922
1923 Ok(lang_server
1924 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1925 text_document: lsp::TextDocumentIdentifier::new(
1926 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1927 ),
1928 range: lsp_range,
1929 work_done_progress_params: Default::default(),
1930 partial_result_params: Default::default(),
1931 context: lsp::CodeActionContext {
1932 diagnostics: Default::default(),
1933 only: Some(vec![
1934 lsp::CodeActionKind::QUICKFIX,
1935 lsp::CodeActionKind::REFACTOR,
1936 lsp::CodeActionKind::REFACTOR_EXTRACT,
1937 ]),
1938 },
1939 })
1940 .await?
1941 .unwrap_or_default()
1942 .into_iter()
1943 .filter_map(|entry| {
1944 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1945 Some(CodeAction {
1946 range: range.clone(),
1947 lsp_action,
1948 })
1949 } else {
1950 None
1951 }
1952 })
1953 .collect())
1954 })
1955 } else if let Some(project_id) = self.remote_id() {
1956 let rpc = self.client.clone();
1957 let version = buffer.version();
1958 cx.spawn_weak(|_, mut cx| async move {
1959 let response = rpc
1960 .request(proto::GetCodeActions {
1961 project_id,
1962 buffer_id,
1963 start: Some(language::proto::serialize_anchor(&range.start)),
1964 end: Some(language::proto::serialize_anchor(&range.end)),
1965 version: serialize_version(&version),
1966 })
1967 .await?;
1968
1969 buffer_handle
1970 .update(&mut cx, |buffer, _| {
1971 buffer.wait_for_version(deserialize_version(response.version))
1972 })
1973 .await;
1974
1975 response
1976 .actions
1977 .into_iter()
1978 .map(language::proto::deserialize_code_action)
1979 .collect()
1980 })
1981 } else {
1982 Task::ready(Ok(Default::default()))
1983 }
1984 }
1985
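    // Applies a code action. Locally the action is resolved first: if the server attached
    // `data`, the stored range is refreshed and the action is resolved via codeAction/resolve;
    // otherwise code actions are re-requested and matched by title. The resolved action's
    // workspace edit is then materialized through `deserialize_workspace_edit`. Remotely the
    // action is sent to the host, which replies with a project transaction to replay.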
1986 pub fn apply_code_action(
1987 &self,
1988 buffer_handle: ModelHandle<Buffer>,
1989 mut action: CodeAction,
1990 push_to_history: bool,
1991 cx: &mut ModelContext<Self>,
1992 ) -> Task<Result<ProjectTransaction>> {
1993 if self.is_local() {
1994 let buffer = buffer_handle.read(cx);
1995 let lang_name = if let Some(lang) = buffer.language() {
1996 lang.name().to_string()
1997 } else {
1998 return Task::ready(Ok(Default::default()));
1999 };
2000 let lang_server = if let Some(language_server) = buffer.language_server() {
2001 language_server.clone()
2002 } else {
2003 return Task::ready(Err(anyhow!("buffer does not have a language server")));
2004 };
2005 let range = action.range.to_point_utf16(buffer);
2006
2007 cx.spawn(|this, mut cx| async move {
2008 if let Some(lsp_range) = action
2009 .lsp_action
2010 .data
2011 .as_mut()
2012 .and_then(|d| d.get_mut("codeActionParams"))
2013 .and_then(|d| d.get_mut("range"))
2014 {
2015 *lsp_range = serde_json::to_value(&lsp::Range::new(
2016 range.start.to_lsp_position(),
2017 range.end.to_lsp_position(),
2018 ))
2019 .unwrap();
2020 action.lsp_action = lang_server
2021 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
2022 .await?;
2023 } else {
2024 let actions = this
2025 .update(&mut cx, |this, cx| {
2026 this.code_actions(&buffer_handle, action.range, cx)
2027 })
2028 .await?;
2029 action.lsp_action = actions
2030 .into_iter()
2031 .find(|a| a.lsp_action.title == action.lsp_action.title)
2032 .ok_or_else(|| anyhow!("code action is outdated"))?
2033 .lsp_action;
2034 }
2035
2036 if let Some(edit) = action.lsp_action.edit {
2037 Self::deserialize_workspace_edit(
2038 this,
2039 edit,
2040 push_to_history,
2041 lang_name,
2042 lang_server,
2043 &mut cx,
2044 )
2045 .await
2046 } else {
2047 Ok(ProjectTransaction::default())
2048 }
2049 })
2050 } else if let Some(project_id) = self.remote_id() {
2051 let client = self.client.clone();
2052 let request = proto::ApplyCodeAction {
2053 project_id,
2054 buffer_id: buffer_handle.read(cx).remote_id(),
2055 action: Some(language::proto::serialize_code_action(&action)),
2056 };
2057 cx.spawn(|this, mut cx| async move {
2058 let response = client
2059 .request(request)
2060 .await?
2061 .transaction
2062 .ok_or_else(|| anyhow!("missing transaction"))?;
2063 this.update(&mut cx, |this, cx| {
2064 this.deserialize_project_transaction(response, push_to_history, cx)
2065 })
2066 .await
2067 })
2068 } else {
2069 Task::ready(Err(anyhow!("project does not have a remote id")))
2070 }
2071 }
2072
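    // Converts an LSP workspace edit into local effects, handling both the `document_changes`
    // and legacy `changes` forms. Resource operations (create, rename, delete) are executed
    // against the filesystem, and text document edits are applied to the corresponding buffers,
    // each finalized as its own transaction. The per-buffer transactions are collected into the
    // returned `ProjectTransaction`.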
2073 async fn deserialize_workspace_edit(
2074 this: ModelHandle<Self>,
2075 edit: lsp::WorkspaceEdit,
2076 push_to_history: bool,
2077 language_name: String,
2078 language_server: Arc<LanguageServer>,
2079 cx: &mut AsyncAppContext,
2080 ) -> Result<ProjectTransaction> {
2081 let fs = this.read_with(cx, |this, _| this.fs.clone());
2082 let mut operations = Vec::new();
2083 if let Some(document_changes) = edit.document_changes {
2084 match document_changes {
2085 lsp::DocumentChanges::Edits(edits) => {
2086 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2087 }
2088 lsp::DocumentChanges::Operations(ops) => operations = ops,
2089 }
2090 } else if let Some(changes) = edit.changes {
2091 operations.extend(changes.into_iter().map(|(uri, edits)| {
2092 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2093 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2094 uri,
2095 version: None,
2096 },
2097 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2098 })
2099 }));
2100 }
2101
2102 let mut project_transaction = ProjectTransaction::default();
2103 for operation in operations {
2104 match operation {
2105 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2106 let abs_path = op
2107 .uri
2108 .to_file_path()
2109 .map_err(|_| anyhow!("can't convert URI to path"))?;
2110
2111 if let Some(parent_path) = abs_path.parent() {
2112 fs.create_dir(parent_path).await?;
2113 }
2114 if abs_path.ends_with("/") {
2115 fs.create_dir(&abs_path).await?;
2116 } else {
2117 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2118 .await?;
2119 }
2120 }
2121 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2122 let source_abs_path = op
2123 .old_uri
2124 .to_file_path()
2125 .map_err(|_| anyhow!("can't convert URI to path"))?;
2126 let target_abs_path = op
2127 .new_uri
2128 .to_file_path()
2129 .map_err(|_| anyhow!("can't convert URI to path"))?;
2130 fs.rename(
2131 &source_abs_path,
2132 &target_abs_path,
2133 op.options.map(Into::into).unwrap_or_default(),
2134 )
2135 .await?;
2136 }
2137 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2138 let abs_path = op
2139 .uri
2140 .to_file_path()
2141 .map_err(|_| anyhow!("can't convert URI to path"))?;
2142 let options = op.options.map(Into::into).unwrap_or_default();
2143 if abs_path.ends_with("/") {
2144 fs.remove_dir(&abs_path, options).await?;
2145 } else {
2146 fs.remove_file(&abs_path, options).await?;
2147 }
2148 }
2149 lsp::DocumentChangeOperation::Edit(op) => {
2150 let buffer_to_edit = this
2151 .update(cx, |this, cx| {
2152 this.open_local_buffer_via_lsp(
2153 op.text_document.uri,
2154 language_name.clone(),
2155 language_server.clone(),
2156 cx,
2157 )
2158 })
2159 .await?;
2160
2161 let edits = buffer_to_edit
2162 .update(cx, |buffer, cx| {
2163 let edits = op.edits.into_iter().map(|edit| match edit {
2164 lsp::OneOf::Left(edit) => edit,
2165 lsp::OneOf::Right(edit) => edit.text_edit,
2166 });
2167 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2168 })
2169 .await?;
2170
2171 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2172 buffer.finalize_last_transaction();
2173 buffer.start_transaction();
2174 for (range, text) in edits {
2175 buffer.edit([range], text, cx);
2176 }
2177 let transaction = if buffer.end_transaction(cx).is_some() {
2178 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2179 if !push_to_history {
2180 buffer.forget_transaction(transaction.id);
2181 }
2182 Some(transaction)
2183 } else {
2184 None
2185 };
2186
2187 transaction
2188 });
2189 if let Some(transaction) = transaction {
2190 project_transaction.0.insert(buffer_to_edit, transaction);
2191 }
2192 }
2193 }
2194 }
2195
2196 Ok(project_transaction)
2197 }
2198
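    // LSP rename support: `prepare_rename` resolves the range that would be renamed at
    // `position`, and `perform_rename` applies the rename across the project, returning the
    // resulting per-buffer transactions.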
2199 pub fn prepare_rename<T: ToPointUtf16>(
2200 &self,
2201 buffer: ModelHandle<Buffer>,
2202 position: T,
2203 cx: &mut ModelContext<Self>,
2204 ) -> Task<Result<Option<Range<Anchor>>>> {
2205 let position = position.to_point_utf16(buffer.read(cx));
2206 self.request_lsp(buffer, PrepareRename { position }, cx)
2207 }
2208
2209 pub fn perform_rename<T: ToPointUtf16>(
2210 &self,
2211 buffer: ModelHandle<Buffer>,
2212 position: T,
2213 new_name: String,
2214 push_to_history: bool,
2215 cx: &mut ModelContext<Self>,
2216 ) -> Task<Result<ProjectTransaction>> {
2217 let position = position.to_point_utf16(buffer.read(cx));
2218 self.request_lsp(
2219 buffer,
2220 PerformRename {
2221 position,
2222 new_name,
2223 push_to_history,
2224 },
2225 cx,
2226 )
2227 }
2228
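    // Project-wide search. Locally this fans out over background threads: worker tasks scan the
    // visible worktrees' files for candidate paths, candidate buffers are opened (buffers that
    // are already open are searched as well, and duplicates are skipped), and their snapshots
    // are searched in parallel to produce anchor ranges per buffer. For remote projects the
    // query is forwarded to the host and the returned locations are deserialized into local
    // buffers.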
2229 pub fn search(
2230 &self,
2231 query: SearchQuery,
2232 cx: &mut ModelContext<Self>,
2233 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2234 if self.is_local() {
2235 let snapshots = self
2236 .visible_worktrees(cx)
2237 .filter_map(|tree| {
2238 let tree = tree.read(cx).as_local()?;
2239 Some(tree.snapshot())
2240 })
2241 .collect::<Vec<_>>();
2242
2243 let background = cx.background().clone();
2244 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2245 if path_count == 0 {
2246 return Task::ready(Ok(Default::default()));
2247 }
2248 let workers = background.num_cpus().min(path_count);
2249 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2250 cx.background()
2251 .spawn({
2252 let fs = self.fs.clone();
2253 let background = cx.background().clone();
2254 let query = query.clone();
2255 async move {
2256 let fs = &fs;
2257 let query = &query;
2258 let matching_paths_tx = &matching_paths_tx;
2259 let paths_per_worker = (path_count + workers - 1) / workers;
2260 let snapshots = &snapshots;
2261 background
2262 .scoped(|scope| {
2263 for worker_ix in 0..workers {
2264 let worker_start_ix = worker_ix * paths_per_worker;
2265 let worker_end_ix = worker_start_ix + paths_per_worker;
2266 scope.spawn(async move {
2267 let mut snapshot_start_ix = 0;
2268 let mut abs_path = PathBuf::new();
2269 for snapshot in snapshots {
2270 let snapshot_end_ix =
2271 snapshot_start_ix + snapshot.visible_file_count();
2272 if worker_end_ix <= snapshot_start_ix {
2273 break;
2274 } else if worker_start_ix > snapshot_end_ix {
2275 snapshot_start_ix = snapshot_end_ix;
2276 continue;
2277 } else {
2278 let start_in_snapshot = worker_start_ix
2279 .saturating_sub(snapshot_start_ix);
2280 let end_in_snapshot =
2281 cmp::min(worker_end_ix, snapshot_end_ix)
2282 - snapshot_start_ix;
2283
2284 for entry in snapshot
2285 .files(false, start_in_snapshot)
2286 .take(end_in_snapshot - start_in_snapshot)
2287 {
2288 if matching_paths_tx.is_closed() {
2289 break;
2290 }
2291
2292 abs_path.clear();
2293 abs_path.push(&snapshot.abs_path());
2294 abs_path.push(&entry.path);
2295 let matches = if let Some(file) =
2296 fs.open_sync(&abs_path).await.log_err()
2297 {
2298 query.detect(file).unwrap_or(false)
2299 } else {
2300 false
2301 };
2302
2303 if matches {
2304 let project_path =
2305 (snapshot.id(), entry.path.clone());
2306 if matching_paths_tx
2307 .send(project_path)
2308 .await
2309 .is_err()
2310 {
2311 break;
2312 }
2313 }
2314 }
2315
2316 snapshot_start_ix = snapshot_end_ix;
2317 }
2318 }
2319 });
2320 }
2321 })
2322 .await;
2323 }
2324 })
2325 .detach();
2326
2327 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2328 let open_buffers = self
2329 .opened_buffers
2330 .values()
2331 .filter_map(|b| b.upgrade(cx))
2332 .collect::<HashSet<_>>();
2333 cx.spawn(|this, cx| async move {
2334 for buffer in &open_buffers {
2335 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2336 buffers_tx.send((buffer.clone(), snapshot)).await?;
2337 }
2338
2339 let open_buffers = Rc::new(RefCell::new(open_buffers));
2340 while let Some(project_path) = matching_paths_rx.next().await {
2341 if buffers_tx.is_closed() {
2342 break;
2343 }
2344
2345 let this = this.clone();
2346 let open_buffers = open_buffers.clone();
2347 let buffers_tx = buffers_tx.clone();
2348 cx.spawn(|mut cx| async move {
2349 if let Some(buffer) = this
2350 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2351 .await
2352 .log_err()
2353 {
2354 if open_buffers.borrow_mut().insert(buffer.clone()) {
2355 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2356 buffers_tx.send((buffer, snapshot)).await?;
2357 }
2358 }
2359
2360 Ok::<_, anyhow::Error>(())
2361 })
2362 .detach();
2363 }
2364
2365 Ok::<_, anyhow::Error>(())
2366 })
2367 .detach_and_log_err(cx);
2368
2369 let background = cx.background().clone();
2370 cx.background().spawn(async move {
2371 let query = &query;
2372 let mut matched_buffers = Vec::new();
2373 for _ in 0..workers {
2374 matched_buffers.push(HashMap::default());
2375 }
2376 background
2377 .scoped(|scope| {
2378 for worker_matched_buffers in matched_buffers.iter_mut() {
2379 let mut buffers_rx = buffers_rx.clone();
2380 scope.spawn(async move {
2381 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2382 let buffer_matches = query
2383 .search(snapshot.as_rope())
2384 .await
2385 .iter()
2386 .map(|range| {
2387 snapshot.anchor_before(range.start)
2388 ..snapshot.anchor_after(range.end)
2389 })
2390 .collect::<Vec<_>>();
2391 if !buffer_matches.is_empty() {
2392 worker_matched_buffers
2393 .insert(buffer.clone(), buffer_matches);
2394 }
2395 }
2396 });
2397 }
2398 })
2399 .await;
2400 Ok(matched_buffers.into_iter().flatten().collect())
2401 })
2402 } else if let Some(project_id) = self.remote_id() {
2403 let request = self.client.request(query.to_proto(project_id));
2404 cx.spawn(|this, mut cx| async move {
2405 let response = request.await?;
2406 let mut result = HashMap::default();
2407 for location in response.locations {
2408 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2409 let target_buffer = this
2410 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2411 .await?;
2412 let start = location
2413 .start
2414 .and_then(deserialize_anchor)
2415 .ok_or_else(|| anyhow!("missing target start"))?;
2416 let end = location
2417 .end
2418 .and_then(deserialize_anchor)
2419 .ok_or_else(|| anyhow!("missing target end"))?;
2420 result
2421 .entry(target_buffer)
2422                        .or_default()
2423                        .push(start..end);
2424 }
2425 Ok(result)
2426 })
2427 } else {
2428 Task::ready(Ok(Default::default()))
2429 }
2430 }
2431
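    // Generic entry point for `LspCommand`s: local buffers send the request to their language
    // server (after checking its capabilities), while remote projects proxy the request to the
    // host over RPC and convert the proto response back into the command's result type.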
2432 fn request_lsp<R: LspCommand>(
2433 &self,
2434 buffer_handle: ModelHandle<Buffer>,
2435 request: R,
2436 cx: &mut ModelContext<Self>,
2437 ) -> Task<Result<R::Response>>
2438 where
2439 <R::LspRequest as lsp::request::Request>::Result: Send,
2440 {
2441 let buffer = buffer_handle.read(cx);
2442 if self.is_local() {
2443 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2444 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2445 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2446 return cx.spawn(|this, cx| async move {
2447 if !language_server
2448 .capabilities()
2449 .await
2450 .map_or(false, |capabilities| {
2451 request.check_capabilities(&capabilities)
2452 })
2453 {
2454 return Ok(Default::default());
2455 }
2456
2457 let response = language_server
2458 .request::<R::LspRequest>(lsp_params)
2459 .await
2460 .context("lsp request failed")?;
2461 request
2462 .response_from_lsp(response, this, buffer_handle, cx)
2463 .await
2464 });
2465 }
2466 } else if let Some(project_id) = self.remote_id() {
2467 let rpc = self.client.clone();
2468 let message = request.to_proto(project_id, buffer);
2469 return cx.spawn(|this, cx| async move {
2470 let response = rpc.request(message).await?;
2471 request
2472 .response_from_proto(response, this, buffer_handle, cx)
2473 .await
2474 });
2475 }
2476 Task::ready(Ok(Default::default()))
2477 }
2478
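    // Returns the worktree containing `abs_path` along with the path relative to its root,
    // creating a new local worktree if no existing worktree contains it.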
2479 pub fn find_or_create_local_worktree(
2480 &mut self,
2481 abs_path: impl AsRef<Path>,
2482 visible: bool,
2483 cx: &mut ModelContext<Self>,
2484 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2485 let abs_path = abs_path.as_ref();
2486 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2487 Task::ready(Ok((tree.clone(), relative_path.into())))
2488 } else {
2489 let worktree = self.create_local_worktree(abs_path, visible, cx);
2490 cx.foreground()
2491 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2492 }
2493 }
2494
2495 pub fn find_local_worktree(
2496 &self,
2497 abs_path: &Path,
2498 cx: &AppContext,
2499 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2500 for tree in self.worktrees(cx) {
2501 if let Some(relative_path) = tree
2502 .read(cx)
2503 .as_local()
2504 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2505 {
2506 return Some((tree.clone(), relative_path.into()));
2507 }
2508 }
2509 None
2510 }
2511
2512 pub fn is_shared(&self) -> bool {
2513 match &self.client_state {
2514 ProjectClientState::Local { is_shared, .. } => *is_shared,
2515 ProjectClientState::Remote { .. } => false,
2516 }
2517 }
2518
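    // Creates a local worktree for `abs_path`. Concurrent requests for the same path share a
    // single loading task via `loading_local_worktrees`; once loaded, the worktree is added to
    // the project and, if the project has a remote id, registered with the server or shared if
    // the project is currently shared.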
2519 fn create_local_worktree(
2520 &mut self,
2521 abs_path: impl AsRef<Path>,
2522 visible: bool,
2523 cx: &mut ModelContext<Self>,
2524 ) -> Task<Result<ModelHandle<Worktree>>> {
2525 let fs = self.fs.clone();
2526 let client = self.client.clone();
2527 let path: Arc<Path> = abs_path.as_ref().into();
2528 let task = self
2529 .loading_local_worktrees
2530 .entry(path.clone())
2531 .or_insert_with(|| {
2532 cx.spawn(|project, mut cx| {
2533 async move {
2534 let worktree =
2535 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2536 .await;
2537 project.update(&mut cx, |project, _| {
2538 project.loading_local_worktrees.remove(&path);
2539 });
2540 let worktree = worktree?;
2541
2542 let (remote_project_id, is_shared) =
2543 project.update(&mut cx, |project, cx| {
2544 project.add_worktree(&worktree, cx);
2545 (project.remote_id(), project.is_shared())
2546 });
2547
2548 if let Some(project_id) = remote_project_id {
2549 if is_shared {
2550 worktree
2551 .update(&mut cx, |worktree, cx| {
2552 worktree.as_local_mut().unwrap().share(project_id, cx)
2553 })
2554 .await?;
2555 } else {
2556 worktree
2557 .update(&mut cx, |worktree, cx| {
2558 worktree.as_local_mut().unwrap().register(project_id, cx)
2559 })
2560 .await?;
2561 }
2562 }
2563
2564 Ok(worktree)
2565 }
2566                    .map_err(Arc::new)
2567 })
2568 .shared()
2569 })
2570 .clone();
2571 cx.foreground().spawn(async move {
2572 match task.await {
2573 Ok(worktree) => Ok(worktree),
2574 Err(err) => Err(anyhow!("{}", err)),
2575 }
2576 })
2577 }
2578
2579 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2580 self.worktrees.retain(|worktree| {
2581 worktree
2582 .upgrade(cx)
2583 .map_or(false, |w| w.read(cx).id() != id)
2584 });
2585 cx.notify();
2586 }
2587
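    // Adds a worktree to the project, observing it for changes and, for local worktrees,
    // subscribing so open buffers can be kept in sync. Visible, remote, or shared worktrees are
    // held strongly; otherwise only a weak handle is kept and the entry is pruned when the
    // worktree is released.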
2588 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2589 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2590 if worktree.read(cx).is_local() {
2591 cx.subscribe(&worktree, |this, worktree, _, cx| {
2592 this.update_local_worktree_buffers(worktree, cx);
2593 })
2594 .detach();
2595 }
2596
2597 let push_strong_handle = {
2598 let worktree = worktree.read(cx);
2599 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2600 };
2601 if push_strong_handle {
2602 self.worktrees
2603 .push(WorktreeHandle::Strong(worktree.clone()));
2604 } else {
2605 cx.observe_release(&worktree, |this, cx| {
2606 this.worktrees
2607 .retain(|worktree| worktree.upgrade(cx).is_some());
2608 cx.notify();
2609 })
2610 .detach();
2611 self.worktrees
2612 .push(WorktreeHandle::Weak(worktree.downgrade()));
2613 }
2614 cx.notify();
2615 }
2616
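    // Called when a local worktree's snapshot changes: re-resolves the `File` for every open
    // buffer in that worktree (by entry id first, then by path, falling back to a file with no
    // entry), notifies remote collaborators of the new file metadata, and drops entries for
    // buffers whose handles are gone.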
2617 fn update_local_worktree_buffers(
2618 &mut self,
2619 worktree_handle: ModelHandle<Worktree>,
2620 cx: &mut ModelContext<Self>,
2621 ) {
2622 let snapshot = worktree_handle.read(cx).snapshot();
2623 let mut buffers_to_delete = Vec::new();
2624 for (buffer_id, buffer) in &self.opened_buffers {
2625 if let Some(buffer) = buffer.upgrade(cx) {
2626 buffer.update(cx, |buffer, cx| {
2627 if let Some(old_file) = File::from_dyn(buffer.file()) {
2628 if old_file.worktree != worktree_handle {
2629 return;
2630 }
2631
2632 let new_file = if let Some(entry) = old_file
2633 .entry_id
2634 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2635 {
2636 File {
2637 is_local: true,
2638 entry_id: Some(entry.id),
2639 mtime: entry.mtime,
2640 path: entry.path.clone(),
2641 worktree: worktree_handle.clone(),
2642 }
2643 } else if let Some(entry) =
2644 snapshot.entry_for_path(old_file.path().as_ref())
2645 {
2646 File {
2647 is_local: true,
2648 entry_id: Some(entry.id),
2649 mtime: entry.mtime,
2650 path: entry.path.clone(),
2651 worktree: worktree_handle.clone(),
2652 }
2653 } else {
2654 File {
2655 is_local: true,
2656 entry_id: None,
2657 path: old_file.path().clone(),
2658 mtime: old_file.mtime(),
2659 worktree: worktree_handle.clone(),
2660 }
2661 };
2662
2663 if let Some(project_id) = self.remote_id() {
2664 self.client
2665 .send(proto::UpdateBufferFile {
2666 project_id,
2667 buffer_id: *buffer_id as u64,
2668 file: Some(new_file.to_proto()),
2669 })
2670 .log_err();
2671 }
2672 buffer.file_updated(Box::new(new_file), cx).detach();
2673 }
2674 });
2675 } else {
2676 buffers_to_delete.push(*buffer_id);
2677 }
2678 }
2679
2680 for buffer_id in buffers_to_delete {
2681 self.opened_buffers.remove(&buffer_id);
2682 }
2683 }
2684
2685 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2686 let new_active_entry = entry.and_then(|project_path| {
2687 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2688 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2689 Some(ProjectEntry {
2690 worktree_id: project_path.worktree_id,
2691 entry_id: entry.id,
2692 })
2693 });
2694 if new_active_entry != self.active_entry {
2695 self.active_entry = new_active_entry;
2696 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2697 }
2698 }
2699
2700 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2701 self.language_servers_with_diagnostics_running > 0
2702 }
2703
2704 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2705 let mut summary = DiagnosticSummary::default();
2706 for (_, path_summary) in self.diagnostic_summaries(cx) {
2707 summary.error_count += path_summary.error_count;
2708 summary.warning_count += path_summary.warning_count;
2709 summary.info_count += path_summary.info_count;
2710 summary.hint_count += path_summary.hint_count;
2711 }
2712 summary
2713 }
2714
2715 pub fn diagnostic_summaries<'a>(
2716 &'a self,
2717 cx: &'a AppContext,
2718 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2719 self.worktrees(cx).flat_map(move |worktree| {
2720 let worktree = worktree.read(cx);
2721 let worktree_id = worktree.id();
2722 worktree
2723 .diagnostic_summaries()
2724 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2725 })
2726 }
2727
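    // Disk-based diagnostic passes are tracked with a running count of language servers:
    // `DiskBasedDiagnosticsStarted` is emitted when the first server begins a pass,
    // `DiskBasedDiagnosticsUpdated` after every completed pass, and
    // `DiskBasedDiagnosticsFinished` once the last running server completes.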
2728 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2729 self.language_servers_with_diagnostics_running += 1;
2730 if self.language_servers_with_diagnostics_running == 1 {
2731 cx.emit(Event::DiskBasedDiagnosticsStarted);
2732 }
2733 }
2734
2735 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2736 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2737 self.language_servers_with_diagnostics_running -= 1;
2738 if self.language_servers_with_diagnostics_running == 0 {
2739 cx.emit(Event::DiskBasedDiagnosticsFinished);
2740 }
2741 }
2742
2743 pub fn active_entry(&self) -> Option<ProjectEntry> {
2744 self.active_entry
2745 }
2746
2747 // RPC message handlers
2748
2749 async fn handle_unshare_project(
2750 this: ModelHandle<Self>,
2751 _: TypedEnvelope<proto::UnshareProject>,
2752 _: Arc<Client>,
2753 mut cx: AsyncAppContext,
2754 ) -> Result<()> {
2755 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2756 Ok(())
2757 }
2758
2759 async fn handle_add_collaborator(
2760 this: ModelHandle<Self>,
2761 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2762 _: Arc<Client>,
2763 mut cx: AsyncAppContext,
2764 ) -> Result<()> {
2765 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2766 let collaborator = envelope
2767 .payload
2768 .collaborator
2769 .take()
2770 .ok_or_else(|| anyhow!("empty collaborator"))?;
2771
2772 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2773 this.update(&mut cx, |this, cx| {
2774 this.collaborators
2775 .insert(collaborator.peer_id, collaborator);
2776 cx.notify();
2777 });
2778
2779 Ok(())
2780 }
2781
2782 async fn handle_remove_collaborator(
2783 this: ModelHandle<Self>,
2784 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2785 _: Arc<Client>,
2786 mut cx: AsyncAppContext,
2787 ) -> Result<()> {
2788 this.update(&mut cx, |this, cx| {
2789 let peer_id = PeerId(envelope.payload.peer_id);
2790 let replica_id = this
2791 .collaborators
2792 .remove(&peer_id)
2793 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2794 .replica_id;
2795 for (_, buffer) in &this.opened_buffers {
2796 if let Some(buffer) = buffer.upgrade(cx) {
2797 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2798 }
2799 }
2800 cx.notify();
2801 Ok(())
2802 })
2803 }
2804
2805 async fn handle_register_worktree(
2806 this: ModelHandle<Self>,
2807 envelope: TypedEnvelope<proto::RegisterWorktree>,
2808 client: Arc<Client>,
2809 mut cx: AsyncAppContext,
2810 ) -> Result<()> {
2811 this.update(&mut cx, |this, cx| {
2812 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2813 let replica_id = this.replica_id();
2814 let worktree = proto::Worktree {
2815 id: envelope.payload.worktree_id,
2816 root_name: envelope.payload.root_name,
2817 entries: Default::default(),
2818 diagnostic_summaries: Default::default(),
2819 visible: envelope.payload.visible,
2820 };
2821 let (worktree, load_task) =
2822 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2823 this.add_worktree(&worktree, cx);
2824 load_task.detach();
2825 Ok(())
2826 })
2827 }
2828
2829 async fn handle_unregister_worktree(
2830 this: ModelHandle<Self>,
2831 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2832 _: Arc<Client>,
2833 mut cx: AsyncAppContext,
2834 ) -> Result<()> {
2835 this.update(&mut cx, |this, cx| {
2836 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2837 this.remove_worktree(worktree_id, cx);
2838 Ok(())
2839 })
2840 }
2841
2842 async fn handle_update_worktree(
2843 this: ModelHandle<Self>,
2844 envelope: TypedEnvelope<proto::UpdateWorktree>,
2845 _: Arc<Client>,
2846 mut cx: AsyncAppContext,
2847 ) -> Result<()> {
2848 this.update(&mut cx, |this, cx| {
2849 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2850 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2851 worktree.update(cx, |worktree, _| {
2852 let worktree = worktree.as_remote_mut().unwrap();
2853 worktree.update_from_remote(envelope)
2854 })?;
2855 }
2856 Ok(())
2857 })
2858 }
2859
2860 async fn handle_update_diagnostic_summary(
2861 this: ModelHandle<Self>,
2862 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2863 _: Arc<Client>,
2864 mut cx: AsyncAppContext,
2865 ) -> Result<()> {
2866 this.update(&mut cx, |this, cx| {
2867 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2868 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2869 if let Some(summary) = envelope.payload.summary {
2870 let project_path = ProjectPath {
2871 worktree_id,
2872 path: Path::new(&summary.path).into(),
2873 };
2874 worktree.update(cx, |worktree, _| {
2875 worktree
2876 .as_remote_mut()
2877 .unwrap()
2878 .update_diagnostic_summary(project_path.path.clone(), &summary);
2879 });
2880 cx.emit(Event::DiagnosticsUpdated(project_path));
2881 }
2882 }
2883 Ok(())
2884 })
2885 }
2886
2887 async fn handle_disk_based_diagnostics_updating(
2888 this: ModelHandle<Self>,
2889 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2890 _: Arc<Client>,
2891 mut cx: AsyncAppContext,
2892 ) -> Result<()> {
2893 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2894 Ok(())
2895 }
2896
2897 async fn handle_disk_based_diagnostics_updated(
2898 this: ModelHandle<Self>,
2899 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2900 _: Arc<Client>,
2901 mut cx: AsyncAppContext,
2902 ) -> Result<()> {
2903 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2904 Ok(())
2905 }
2906
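    // Applies buffer operations received from a peer. Operations for buffers that are still
    // loading, or that have not been opened yet, are queued in `OpenBuffer::Loading` so they can
    // be applied once the buffer exists.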
2907 async fn handle_update_buffer(
2908 this: ModelHandle<Self>,
2909 envelope: TypedEnvelope<proto::UpdateBuffer>,
2910 _: Arc<Client>,
2911 mut cx: AsyncAppContext,
2912 ) -> Result<()> {
2913 this.update(&mut cx, |this, cx| {
2914 let payload = envelope.payload.clone();
2915 let buffer_id = payload.buffer_id;
2916 let ops = payload
2917 .operations
2918 .into_iter()
2919                .map(language::proto::deserialize_operation)
2920 .collect::<Result<Vec<_>, _>>()?;
2921 match this.opened_buffers.entry(buffer_id) {
2922 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2923 OpenBuffer::Strong(buffer) => {
2924 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2925 }
2926 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2927 OpenBuffer::Weak(_) => {}
2928 },
2929 hash_map::Entry::Vacant(e) => {
2930 e.insert(OpenBuffer::Loading(ops));
2931 }
2932 }
2933 Ok(())
2934 })
2935 }
2936
2937 async fn handle_update_buffer_file(
2938 this: ModelHandle<Self>,
2939 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2940 _: Arc<Client>,
2941 mut cx: AsyncAppContext,
2942 ) -> Result<()> {
2943 this.update(&mut cx, |this, cx| {
2944 let payload = envelope.payload.clone();
2945 let buffer_id = payload.buffer_id;
2946 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2947 let worktree = this
2948 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2949 .ok_or_else(|| anyhow!("no such worktree"))?;
2950 let file = File::from_proto(file, worktree.clone(), cx)?;
2951 let buffer = this
2952 .opened_buffers
2953 .get_mut(&buffer_id)
2954 .and_then(|b| b.upgrade(cx))
2955 .ok_or_else(|| anyhow!("no such buffer"))?;
2956 buffer.update(cx, |buffer, cx| {
2957 buffer.file_updated(Box::new(file), cx).detach();
2958 });
2959 Ok(())
2960 })
2961 }
2962
2963 async fn handle_save_buffer(
2964 this: ModelHandle<Self>,
2965 envelope: TypedEnvelope<proto::SaveBuffer>,
2966 _: Arc<Client>,
2967 mut cx: AsyncAppContext,
2968 ) -> Result<proto::BufferSaved> {
2969 let buffer_id = envelope.payload.buffer_id;
2970 let requested_version = deserialize_version(envelope.payload.version);
2971
2972 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2973 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2974 let buffer = this
2975 .opened_buffers
2976 .get(&buffer_id)
2977 .map(|buffer| buffer.upgrade(cx).unwrap())
2978 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2979 Ok::<_, anyhow::Error>((project_id, buffer))
2980 })?;
2981 buffer
2982 .update(&mut cx, |buffer, _| {
2983 buffer.wait_for_version(requested_version)
2984 })
2985 .await;
2986
2987 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2988 Ok(proto::BufferSaved {
2989 project_id,
2990 buffer_id,
2991 version: serialize_version(&saved_version),
2992 mtime: Some(mtime.into()),
2993 })
2994 }
2995
2996 async fn handle_format_buffers(
2997 this: ModelHandle<Self>,
2998 envelope: TypedEnvelope<proto::FormatBuffers>,
2999 _: Arc<Client>,
3000 mut cx: AsyncAppContext,
3001 ) -> Result<proto::FormatBuffersResponse> {
3002 let sender_id = envelope.original_sender_id()?;
3003 let format = this.update(&mut cx, |this, cx| {
3004 let mut buffers = HashSet::default();
3005 for buffer_id in &envelope.payload.buffer_ids {
3006 buffers.insert(
3007 this.opened_buffers
3008 .get(buffer_id)
3009 .map(|buffer| buffer.upgrade(cx).unwrap())
3010 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
3011 );
3012 }
3013 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
3014 })?;
3015
3016 let project_transaction = format.await?;
3017 let project_transaction = this.update(&mut cx, |this, cx| {
3018 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3019 });
3020 Ok(proto::FormatBuffersResponse {
3021 transaction: Some(project_transaction),
3022 })
3023 }
3024
3025 async fn handle_get_completions(
3026 this: ModelHandle<Self>,
3027 envelope: TypedEnvelope<proto::GetCompletions>,
3028 _: Arc<Client>,
3029 mut cx: AsyncAppContext,
3030 ) -> Result<proto::GetCompletionsResponse> {
3031 let position = envelope
3032 .payload
3033 .position
3034 .and_then(language::proto::deserialize_anchor)
3035 .ok_or_else(|| anyhow!("invalid position"))?;
3036 let version = deserialize_version(envelope.payload.version);
3037 let buffer = this.read_with(&cx, |this, cx| {
3038 this.opened_buffers
3039 .get(&envelope.payload.buffer_id)
3040 .map(|buffer| buffer.upgrade(cx).unwrap())
3041 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3042 })?;
3043 buffer
3044 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3045 .await;
3046 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3047 let completions = this
3048 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3049 .await?;
3050
3051 Ok(proto::GetCompletionsResponse {
3052 completions: completions
3053 .iter()
3054 .map(language::proto::serialize_completion)
3055 .collect(),
3056 version: serialize_version(&version),
3057 })
3058 }
3059
3060 async fn handle_apply_additional_edits_for_completion(
3061 this: ModelHandle<Self>,
3062 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3063 _: Arc<Client>,
3064 mut cx: AsyncAppContext,
3065 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3066 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3067 let buffer = this
3068 .opened_buffers
3069 .get(&envelope.payload.buffer_id)
3070 .map(|buffer| buffer.upgrade(cx).unwrap())
3071 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3072 let language = buffer.read(cx).language();
3073 let completion = language::proto::deserialize_completion(
3074 envelope
3075 .payload
3076 .completion
3077 .ok_or_else(|| anyhow!("invalid completion"))?,
3078 language,
3079 )?;
3080 Ok::<_, anyhow::Error>(
3081 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3082 )
3083 })?;
3084
3085 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3086 transaction: apply_additional_edits
3087 .await?
3088 .as_ref()
3089 .map(language::proto::serialize_transaction),
3090 })
3091 }
3092
3093 async fn handle_get_code_actions(
3094 this: ModelHandle<Self>,
3095 envelope: TypedEnvelope<proto::GetCodeActions>,
3096 _: Arc<Client>,
3097 mut cx: AsyncAppContext,
3098 ) -> Result<proto::GetCodeActionsResponse> {
3099 let start = envelope
3100 .payload
3101 .start
3102 .and_then(language::proto::deserialize_anchor)
3103 .ok_or_else(|| anyhow!("invalid start"))?;
3104 let end = envelope
3105 .payload
3106 .end
3107 .and_then(language::proto::deserialize_anchor)
3108 .ok_or_else(|| anyhow!("invalid end"))?;
3109 let buffer = this.update(&mut cx, |this, cx| {
3110 this.opened_buffers
3111 .get(&envelope.payload.buffer_id)
3112 .map(|buffer| buffer.upgrade(cx).unwrap())
3113 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3114 })?;
3115 buffer
3116 .update(&mut cx, |buffer, _| {
3117 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3118 })
3119 .await;
3120
3121 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3122 let code_actions = this.update(&mut cx, |this, cx| {
3123 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3124 })?;
3125
3126 Ok(proto::GetCodeActionsResponse {
3127 actions: code_actions
3128 .await?
3129 .iter()
3130 .map(language::proto::serialize_code_action)
3131 .collect(),
3132 version: serialize_version(&version),
3133 })
3134 }
3135
3136 async fn handle_apply_code_action(
3137 this: ModelHandle<Self>,
3138 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3139 _: Arc<Client>,
3140 mut cx: AsyncAppContext,
3141 ) -> Result<proto::ApplyCodeActionResponse> {
3142 let sender_id = envelope.original_sender_id()?;
3143 let action = language::proto::deserialize_code_action(
3144 envelope
3145 .payload
3146 .action
3147 .ok_or_else(|| anyhow!("invalid action"))?,
3148 )?;
3149 let apply_code_action = this.update(&mut cx, |this, cx| {
3150 let buffer = this
3151 .opened_buffers
3152 .get(&envelope.payload.buffer_id)
3153 .map(|buffer| buffer.upgrade(cx).unwrap())
3154 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3155 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3156 })?;
3157
3158 let project_transaction = apply_code_action.await?;
3159 let project_transaction = this.update(&mut cx, |this, cx| {
3160 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3161 });
3162 Ok(proto::ApplyCodeActionResponse {
3163 transaction: Some(project_transaction),
3164 })
3165 }
3166
3167 async fn handle_lsp_command<T: LspCommand>(
3168 this: ModelHandle<Self>,
3169 envelope: TypedEnvelope<T::ProtoRequest>,
3170 _: Arc<Client>,
3171 mut cx: AsyncAppContext,
3172 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3173 where
3174 <T::LspRequest as lsp::request::Request>::Result: Send,
3175 {
3176 let sender_id = envelope.original_sender_id()?;
3177 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3178 let buffer_handle = this.read_with(&cx, |this, _| {
3179 this.opened_buffers
3180 .get(&buffer_id)
3181 .map(|buffer| buffer.upgrade(&cx).unwrap())
3182 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3183 })?;
3184 let request = T::from_proto(
3185 envelope.payload,
3186 this.clone(),
3187 buffer_handle.clone(),
3188 cx.clone(),
3189 )
3190 .await?;
3191 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3192 let response = this
3193 .update(&mut cx, |this, cx| {
3194 this.request_lsp(buffer_handle, request, cx)
3195 })
3196 .await?;
3197 this.update(&mut cx, |this, cx| {
3198 Ok(T::response_to_proto(
3199 response,
3200 this,
3201 sender_id,
3202 &buffer_version,
3203 cx,
3204 ))
3205 })
3206 }
3207
3208 async fn handle_get_project_symbols(
3209 this: ModelHandle<Self>,
3210 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3211 _: Arc<Client>,
3212 mut cx: AsyncAppContext,
3213 ) -> Result<proto::GetProjectSymbolsResponse> {
3214 let symbols = this
3215 .update(&mut cx, |this, cx| {
3216 this.symbols(&envelope.payload.query, cx)
3217 })
3218 .await?;
3219
3220 Ok(proto::GetProjectSymbolsResponse {
3221 symbols: symbols.iter().map(serialize_symbol).collect(),
3222 })
3223 }
3224
3225 async fn handle_search_project(
3226 this: ModelHandle<Self>,
3227 envelope: TypedEnvelope<proto::SearchProject>,
3228 _: Arc<Client>,
3229 mut cx: AsyncAppContext,
3230 ) -> Result<proto::SearchProjectResponse> {
3231 let peer_id = envelope.original_sender_id()?;
3232 let query = SearchQuery::from_proto(envelope.payload)?;
3233 let result = this
3234 .update(&mut cx, |this, cx| this.search(query, cx))
3235 .await?;
3236
3237 this.update(&mut cx, |this, cx| {
3238 let mut locations = Vec::new();
3239 for (buffer, ranges) in result {
3240 for range in ranges {
3241 let start = serialize_anchor(&range.start);
3242 let end = serialize_anchor(&range.end);
3243 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3244 locations.push(proto::Location {
3245 buffer: Some(buffer),
3246 start: Some(start),
3247 end: Some(end),
3248 });
3249 }
3250 }
3251 Ok(proto::SearchProjectResponse { locations })
3252 })
3253 }
3254
3255 async fn handle_open_buffer_for_symbol(
3256 this: ModelHandle<Self>,
3257 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3258 _: Arc<Client>,
3259 mut cx: AsyncAppContext,
3260 ) -> Result<proto::OpenBufferForSymbolResponse> {
3261 let peer_id = envelope.original_sender_id()?;
3262 let symbol = envelope
3263 .payload
3264 .symbol
3265 .ok_or_else(|| anyhow!("invalid symbol"))?;
3266 let symbol = this.read_with(&cx, |this, _| {
3267 let symbol = this.deserialize_symbol(symbol)?;
3268 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3269 if signature == symbol.signature {
3270 Ok(symbol)
3271 } else {
3272 Err(anyhow!("invalid symbol signature"))
3273 }
3274 })?;
3275 let buffer = this
3276 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3277 .await?;
3278
3279 Ok(proto::OpenBufferForSymbolResponse {
3280 buffer: Some(this.update(&mut cx, |this, cx| {
3281 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3282 })),
3283 })
3284 }
3285
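    // Produces a keyed digest of a symbol's worktree id and path using this project's private
    // nonce. `handle_open_buffer_for_symbol` recomputes it to verify that a symbol received from
    // a peer refers to a path this project actually reported.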
3286 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3287 let mut hasher = Sha256::new();
3288 hasher.update(worktree_id.to_proto().to_be_bytes());
3289 hasher.update(path.to_string_lossy().as_bytes());
3290 hasher.update(self.nonce.to_be_bytes());
3291 hasher.finalize().as_slice().try_into().unwrap()
3292 }
3293
3294 async fn handle_open_buffer(
3295 this: ModelHandle<Self>,
3296 envelope: TypedEnvelope<proto::OpenBuffer>,
3297 _: Arc<Client>,
3298 mut cx: AsyncAppContext,
3299 ) -> Result<proto::OpenBufferResponse> {
3300 let peer_id = envelope.original_sender_id()?;
3301 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3302 let open_buffer = this.update(&mut cx, |this, cx| {
3303 this.open_buffer(
3304 ProjectPath {
3305 worktree_id,
3306 path: PathBuf::from(envelope.payload.path).into(),
3307 },
3308 cx,
3309 )
3310 });
3311
3312 let buffer = open_buffer.await?;
3313 this.update(&mut cx, |this, cx| {
3314 Ok(proto::OpenBufferResponse {
3315 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3316 })
3317 })
3318 }
3319
3320 fn serialize_project_transaction_for_peer(
3321 &mut self,
3322 project_transaction: ProjectTransaction,
3323 peer_id: PeerId,
3324 cx: &AppContext,
3325 ) -> proto::ProjectTransaction {
3326 let mut serialized_transaction = proto::ProjectTransaction {
3327 buffers: Default::default(),
3328 transactions: Default::default(),
3329 };
3330 for (buffer, transaction) in project_transaction.0 {
3331 serialized_transaction
3332 .buffers
3333 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3334 serialized_transaction
3335 .transactions
3336 .push(language::proto::serialize_transaction(&transaction));
3337 }
3338 serialized_transaction
3339 }
3340
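    // Reconstructs a `ProjectTransaction` received from the host: each buffer is deserialized,
    // the local replica waits for the transaction's edits to arrive, and the transaction is
    // optionally pushed onto the buffer's undo history.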
3341 fn deserialize_project_transaction(
3342 &mut self,
3343 message: proto::ProjectTransaction,
3344 push_to_history: bool,
3345 cx: &mut ModelContext<Self>,
3346 ) -> Task<Result<ProjectTransaction>> {
3347 cx.spawn(|this, mut cx| async move {
3348 let mut project_transaction = ProjectTransaction::default();
3349 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3350 let buffer = this
3351 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3352 .await?;
3353 let transaction = language::proto::deserialize_transaction(transaction)?;
3354 project_transaction.0.insert(buffer, transaction);
3355 }
3356
3357 for (buffer, transaction) in &project_transaction.0 {
3358 buffer
3359 .update(&mut cx, |buffer, _| {
3360 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3361 })
3362 .await;
3363
3364 if push_to_history {
3365 buffer.update(&mut cx, |buffer, _| {
3366 buffer.push_transaction(transaction.clone(), Instant::now());
3367 });
3368 }
3369 }
3370
3371 Ok(project_transaction)
3372 })
3373 }
3374
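    // The first time a buffer is sent to a given peer its full state is serialized; afterwards
    // only its id is sent, since the peer is expected to already hold the state (tracked in
    // `shared_buffers`).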
3375 fn serialize_buffer_for_peer(
3376 &mut self,
3377 buffer: &ModelHandle<Buffer>,
3378 peer_id: PeerId,
3379 cx: &AppContext,
3380 ) -> proto::Buffer {
3381 let buffer_id = buffer.read(cx).remote_id();
3382 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3383 if shared_buffers.insert(buffer_id) {
3384 proto::Buffer {
3385 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3386 }
3387 } else {
3388 proto::Buffer {
3389 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3390 }
3391 }
3392 }
3393
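    // Inverse of `serialize_buffer_for_peer`: the id variant waits (via the `opened_buffer`
    // watch) until a buffer with that id has been opened locally, while the state variant
    // constructs a new buffer model, resolves its file against the matching worktree, and
    // registers it with the project.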
3394 fn deserialize_buffer(
3395 &mut self,
3396 buffer: proto::Buffer,
3397 cx: &mut ModelContext<Self>,
3398 ) -> Task<Result<ModelHandle<Buffer>>> {
3399 let replica_id = self.replica_id();
3400
3401 let opened_buffer_tx = self.opened_buffer.0.clone();
3402 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3403 cx.spawn(|this, mut cx| async move {
3404 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3405 proto::buffer::Variant::Id(id) => {
3406 let buffer = loop {
3407 let buffer = this.read_with(&cx, |this, cx| {
3408 this.opened_buffers
3409 .get(&id)
3410 .and_then(|buffer| buffer.upgrade(cx))
3411 });
3412 if let Some(buffer) = buffer {
3413 break buffer;
3414 }
3415 opened_buffer_rx
3416 .next()
3417 .await
3418 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3419 };
3420 Ok(buffer)
3421 }
3422 proto::buffer::Variant::State(mut buffer) => {
3423 let mut buffer_worktree = None;
3424 let mut buffer_file = None;
3425 if let Some(file) = buffer.file.take() {
3426 this.read_with(&cx, |this, cx| {
3427 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3428 let worktree =
3429 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3430 anyhow!("no worktree found for id {}", file.worktree_id)
3431 })?;
3432 buffer_file =
3433 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3434 as Box<dyn language::File>);
3435 buffer_worktree = Some(worktree);
3436 Ok::<_, anyhow::Error>(())
3437 })?;
3438 }
3439
3440 let buffer = cx.add_model(|cx| {
3441 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3442 });
3443
3444 this.update(&mut cx, |this, cx| {
3445 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3446 })?;
3447
3448 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3449 Ok(buffer)
3450 }
3451 }
3452 })
3453 }
3454
3455 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3456 let language = self
3457 .languages
3458 .get_language(&serialized_symbol.language_name);
3459 let start = serialized_symbol
3460 .start
3461 .ok_or_else(|| anyhow!("invalid start"))?;
3462 let end = serialized_symbol
3463 .end
3464 .ok_or_else(|| anyhow!("invalid end"))?;
3465 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3466 Ok(Symbol {
3467 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3468 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3469 language_name: serialized_symbol.language_name.clone(),
3470 label: language
3471 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3472 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3473 name: serialized_symbol.name,
3474 path: PathBuf::from(serialized_symbol.path),
3475 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3476 kind,
3477 signature: serialized_symbol
3478 .signature
3479 .try_into()
3480 .map_err(|_| anyhow!("invalid signature"))?,
3481 })
3482 }
3483
3484 async fn handle_buffer_saved(
3485 this: ModelHandle<Self>,
3486 envelope: TypedEnvelope<proto::BufferSaved>,
3487 _: Arc<Client>,
3488 mut cx: AsyncAppContext,
3489 ) -> Result<()> {
3490 let version = deserialize_version(envelope.payload.version);
3491 let mtime = envelope
3492 .payload
3493 .mtime
3494 .ok_or_else(|| anyhow!("missing mtime"))?
3495 .into();
3496
3497 this.update(&mut cx, |this, cx| {
3498 let buffer = this
3499 .opened_buffers
3500 .get(&envelope.payload.buffer_id)
3501 .and_then(|buffer| buffer.upgrade(cx));
3502 if let Some(buffer) = buffer {
3503 buffer.update(cx, |buffer, cx| {
3504 buffer.did_save(version, mtime, None, cx);
3505 });
3506 }
3507 Ok(())
3508 })
3509 }
3510
3511 async fn handle_buffer_reloaded(
3512 this: ModelHandle<Self>,
3513 envelope: TypedEnvelope<proto::BufferReloaded>,
3514 _: Arc<Client>,
3515 mut cx: AsyncAppContext,
3516 ) -> Result<()> {
3517 let payload = envelope.payload.clone();
3518 let version = deserialize_version(payload.version);
3519 let mtime = payload
3520 .mtime
3521 .ok_or_else(|| anyhow!("missing mtime"))?
3522 .into();
3523 this.update(&mut cx, |this, cx| {
3524 let buffer = this
3525 .opened_buffers
3526 .get(&payload.buffer_id)
3527 .and_then(|buffer| buffer.upgrade(cx));
3528 if let Some(buffer) = buffer {
3529 buffer.update(cx, |buffer, cx| {
3530 buffer.did_reload(version, mtime, cx);
3531 });
3532 }
3533 Ok(())
3534 })
3535 }
3536
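    // Fuzzy-matches `query` against the paths of all visible worktrees on the background
    // executor, including the worktree root name in each candidate's prefix when more than one
    // worktree is visible.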
3537 pub fn match_paths<'a>(
3538 &self,
3539 query: &'a str,
3540 include_ignored: bool,
3541 smart_case: bool,
3542 max_results: usize,
3543 cancel_flag: &'a AtomicBool,
3544 cx: &AppContext,
3545 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3546 let worktrees = self
3547 .worktrees(cx)
3548 .filter(|worktree| worktree.read(cx).is_visible())
3549 .collect::<Vec<_>>();
3550 let include_root_name = worktrees.len() > 1;
3551 let candidate_sets = worktrees
3552 .into_iter()
3553 .map(|worktree| CandidateSet {
3554 snapshot: worktree.read(cx).snapshot(),
3555 include_ignored,
3556 include_root_name,
3557 })
3558 .collect::<Vec<_>>();
3559
3560 let background = cx.background().clone();
3561 async move {
3562 fuzzy::match_paths(
3563 candidate_sets.as_slice(),
3564 query,
3565 smart_case,
3566 max_results,
3567 cancel_flag,
3568 background,
3569 )
3570 .await
3571 }
3572 }
3573}
3574
3575impl WorktreeHandle {
3576 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3577 match self {
3578 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3579 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3580 }
3581 }
3582}
3583
3584impl OpenBuffer {
3585 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3586 match self {
3587 OpenBuffer::Strong(handle) => Some(handle.clone()),
3588 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3589 OpenBuffer::Loading(_) => None,
3590 }
3591 }
3592}
3593
3594struct CandidateSet {
3595 snapshot: Snapshot,
3596 include_ignored: bool,
3597 include_root_name: bool,
3598}
3599
3600impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3601 type Candidates = CandidateSetIter<'a>;
3602
3603 fn id(&self) -> usize {
3604 self.snapshot.id().to_usize()
3605 }
3606
3607 fn len(&self) -> usize {
3608 if self.include_ignored {
3609 self.snapshot.file_count()
3610 } else {
3611 self.snapshot.visible_file_count()
3612 }
3613 }
3614
3615 fn prefix(&self) -> Arc<str> {
3616 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3617 self.snapshot.root_name().into()
3618 } else if self.include_root_name {
3619 format!("{}/", self.snapshot.root_name()).into()
3620 } else {
3621 "".into()
3622 }
3623 }
3624
3625 fn candidates(&'a self, start: usize) -> Self::Candidates {
3626 CandidateSetIter {
3627 traversal: self.snapshot.files(self.include_ignored, start),
3628 }
3629 }
3630}
3631
3632struct CandidateSetIter<'a> {
3633 traversal: Traversal<'a>,
3634}
3635
3636impl<'a> Iterator for CandidateSetIter<'a> {
3637 type Item = PathMatchCandidate<'a>;
3638
3639 fn next(&mut self) -> Option<Self::Item> {
3640 self.traversal.next().map(|entry| {
3641 if let EntryKind::File(char_bag) = entry.kind {
3642 PathMatchCandidate {
3643 path: &entry.path,
3644 char_bag,
3645 }
3646 } else {
3647 unreachable!()
3648 }
3649 })
3650 }
3651}
3652
3653impl Entity for Project {
3654 type Event = Event;
3655
3656 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3657 match &self.client_state {
3658 ProjectClientState::Local { remote_id_rx, .. } => {
3659 if let Some(project_id) = *remote_id_rx.borrow() {
3660 self.client
3661 .send(proto::UnregisterProject { project_id })
3662 .log_err();
3663 }
3664 }
3665 ProjectClientState::Remote { remote_id, .. } => {
3666 self.client
3667 .send(proto::LeaveProject {
3668 project_id: *remote_id,
3669 })
3670 .log_err();
3671 }
3672 }
3673 }
3674
3675 fn app_will_quit(
3676 &mut self,
3677 _: &mut MutableAppContext,
3678 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3679 let shutdown_futures = self
3680 .language_servers
3681 .drain()
3682 .filter_map(|(_, server)| server.shutdown())
3683 .collect::<Vec<_>>();
3684 Some(
3685 async move {
3686 futures::future::join_all(shutdown_futures).await;
3687 }
3688 .boxed(),
3689 )
3690 }
3691}
3692
3693impl Collaborator {
3694 fn from_proto(
3695 message: proto::Collaborator,
3696 user_store: &ModelHandle<UserStore>,
3697 cx: &mut AsyncAppContext,
3698 ) -> impl Future<Output = Result<Self>> {
3699 let user = user_store.update(cx, |user_store, cx| {
3700 user_store.fetch_user(message.user_id, cx)
3701 });
3702
3703 async move {
3704 Ok(Self {
3705 peer_id: PeerId(message.peer_id),
3706 user: user.await?,
3707 replica_id: message.replica_id as ReplicaId,
3708 })
3709 }
3710 }
3711}
3712
3713impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3714 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3715 Self {
3716 worktree_id,
3717 path: path.as_ref().into(),
3718 }
3719 }
3720}
3721
3722impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3723 fn from(options: lsp::CreateFileOptions) -> Self {
3724 Self {
3725 overwrite: options.overwrite.unwrap_or(false),
3726 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3727 }
3728 }
3729}
3730
3731impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3732 fn from(options: lsp::RenameFileOptions) -> Self {
3733 Self {
3734 overwrite: options.overwrite.unwrap_or(false),
3735 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3736 }
3737 }
3738}
3739
3740impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3741 fn from(options: lsp::DeleteFileOptions) -> Self {
3742 Self {
3743 recursive: options.recursive.unwrap_or(false),
3744 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3745 }
3746 }
3747}
3748
3749fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3750 proto::Symbol {
3751 source_worktree_id: symbol.source_worktree_id.to_proto(),
3752 worktree_id: symbol.worktree_id.to_proto(),
3753 language_name: symbol.language_name.clone(),
3754 name: symbol.name.clone(),
3755 kind: unsafe { mem::transmute(symbol.kind) },
3756 path: symbol.path.to_string_lossy().to_string(),
3757 start: Some(proto::Point {
3758 row: symbol.range.start.row,
3759 column: symbol.range.start.column,
3760 }),
3761 end: Some(proto::Point {
3762 row: symbol.range.end.row,
3763 column: symbol.range.end.column,
3764 }),
3765 signature: symbol.signature.to_vec(),
3766 }
3767}
3768
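// Computes `path` relative to `base`, emitting `..` components where the two paths diverge.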
3769fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3770 let mut path_components = path.components();
3771 let mut base_components = base.components();
3772 let mut components: Vec<Component> = Vec::new();
3773 loop {
3774 match (path_components.next(), base_components.next()) {
3775 (None, None) => break,
3776 (Some(a), None) => {
3777 components.push(a);
3778 components.extend(path_components.by_ref());
3779 break;
3780 }
3781 (None, _) => components.push(Component::ParentDir),
3782 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3783 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3784 (Some(a), Some(_)) => {
3785 components.push(Component::ParentDir);
3786 for _ in base_components {
3787 components.push(Component::ParentDir);
3788 }
3789 components.push(a);
3790 components.extend(path_components.by_ref());
3791 break;
3792 }
3793 }
3794 }
3795 components.iter().map(|c| c.as_os_str()).collect()
3796}
3797
3798#[cfg(test)]
3799mod tests {
3800 use super::{Event, *};
3801 use fs::RealFs;
3802 use futures::StreamExt;
3803 use gpui::test::subscribe;
3804 use language::{
3805 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3806 };
3807 use lsp::Url;
3808 use serde_json::json;
3809 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3810 use unindent::Unindent as _;
3811 use util::test::temp_tree;
3812 use worktree::WorktreeHandle as _;
3813
3814 #[gpui::test]
3815 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3816 let dir = temp_tree(json!({
3817 "root": {
3818 "apple": "",
3819 "banana": {
3820 "carrot": {
3821 "date": "",
3822 "endive": "",
3823 }
3824 },
3825 "fennel": {
3826 "grape": "",
3827 }
3828 }
3829 }));
3830
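        // Create a symlink to the root directory, plus a symlink inside the tree,
        // so that the worktree scan has to follow links.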
3831 let root_link_path = dir.path().join("root_link");
3832 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3833 unix::fs::symlink(
3834 &dir.path().join("root/fennel"),
3835 &dir.path().join("root/finnochio"),
3836 )
3837 .unwrap();
3838
3839 let project = Project::test(Arc::new(RealFs), cx);
3840
3841 let (tree, _) = project
3842 .update(cx, |project, cx| {
3843 project.find_or_create_local_worktree(&root_link_path, true, cx)
3844 })
3845 .await
3846 .unwrap();
3847
3848 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3849 .await;
3850 cx.read(|cx| {
3851 let tree = tree.read(cx);
3852 assert_eq!(tree.file_count(), 5);
3853 assert_eq!(
3854 tree.inode_for_path("fennel/grape"),
3855 tree.inode_for_path("finnochio/grape")
3856 );
3857 });
3858
3859 let cancel_flag = Default::default();
3860 let results = project
3861 .read_with(cx, |project, cx| {
3862 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3863 })
3864 .await;
3865 assert_eq!(
3866 results
3867 .into_iter()
3868 .map(|result| result.path)
3869 .collect::<Vec<Arc<Path>>>(),
3870 vec![
3871 PathBuf::from("banana/carrot/date").into(),
3872 PathBuf::from("banana/carrot/endive").into(),
3873 ]
3874 );
3875 }
3876
3877 #[gpui::test]
3878 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3879 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3880 let progress_token = language_server_config
3881 .disk_based_diagnostics_progress_token
3882 .clone()
3883 .unwrap();
3884
3885 let language = Arc::new(Language::new(
3886 LanguageConfig {
3887 name: "Rust".into(),
3888 path_suffixes: vec!["rs".to_string()],
3889 language_server: Some(language_server_config),
3890 ..Default::default()
3891 },
3892 Some(tree_sitter_rust::language()),
3893 ));
3894
3895 let fs = FakeFs::new(cx.background());
3896 fs.insert_tree(
3897 "/dir",
3898 json!({
3899 "a.rs": "fn a() { A }",
3900 "b.rs": "const y: i32 = 1",
3901 }),
3902 )
3903 .await;
3904
3905 let project = Project::test(fs, cx);
3906 project.update(cx, |project, _| {
3907 Arc::get_mut(&mut project.languages).unwrap().add(language);
3908 });
3909
3910 let (tree, _) = project
3911 .update(cx, |project, cx| {
3912 project.find_or_create_local_worktree("/dir", true, cx)
3913 })
3914 .await
3915 .unwrap();
3916 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3917
3918 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3919 .await;
3920
3921 // Cause worktree to start the fake language server
3922 let _buffer = project
3923 .update(cx, |project, cx| {
3924 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3925 })
3926 .await
3927 .unwrap();
3928
3929 let mut events = subscribe(&project, cx);
3930
3931 let mut fake_server = fake_servers.next().await.unwrap();
3932 fake_server.start_progress(&progress_token).await;
3933 assert_eq!(
3934 events.next().await.unwrap(),
3935 Event::DiskBasedDiagnosticsStarted
3936 );
3937
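        // Issue overlapping progress notifications; the "updated" and "finished"
        // events should only be emitted once every outstanding token has ended.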
3938 fake_server.start_progress(&progress_token).await;
3939 fake_server.end_progress(&progress_token).await;
3940 fake_server.start_progress(&progress_token).await;
3941
3942 fake_server
3943 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3944 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3945 version: None,
3946 diagnostics: vec![lsp::Diagnostic {
3947 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3948 severity: Some(lsp::DiagnosticSeverity::ERROR),
3949 message: "undefined variable 'A'".to_string(),
3950 ..Default::default()
3951 }],
3952 })
3953 .await;
3954 assert_eq!(
3955 events.next().await.unwrap(),
3956 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3957 );
3958
3959 fake_server.end_progress(&progress_token).await;
3960 fake_server.end_progress(&progress_token).await;
3961 assert_eq!(
3962 events.next().await.unwrap(),
3963 Event::DiskBasedDiagnosticsUpdated
3964 );
3965 assert_eq!(
3966 events.next().await.unwrap(),
3967 Event::DiskBasedDiagnosticsFinished
3968 );
3969
3970 let buffer = project
3971 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3972 .await
3973 .unwrap();
3974
3975 buffer.read_with(cx, |buffer, _| {
3976 let snapshot = buffer.snapshot();
3977 let diagnostics = snapshot
3978 .diagnostics_in_range::<_, Point>(0..buffer.len())
3979 .collect::<Vec<_>>();
3980 assert_eq!(
3981 diagnostics,
3982 &[DiagnosticEntry {
3983 range: Point::new(0, 9)..Point::new(0, 10),
3984 diagnostic: Diagnostic {
3985 severity: lsp::DiagnosticSeverity::ERROR,
3986 message: "undefined variable 'A'".to_string(),
3987 group_id: 0,
3988 is_primary: true,
3989 ..Default::default()
3990 }
3991 }]
3992 )
3993 });
3994 }
3995
3996 #[gpui::test]
3997 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3998 let dir = temp_tree(json!({
3999 "root": {
4000 "dir1": {},
4001 "dir2": {
4002 "dir3": {}
4003 }
4004 }
4005 }));
4006
4007 let project = Project::test(Arc::new(RealFs), cx);
4008 let (tree, _) = project
4009 .update(cx, |project, cx| {
4010 project.find_or_create_local_worktree(&dir.path(), true, cx)
4011 })
4012 .await
4013 .unwrap();
4014
4015 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4016 .await;
4017
4018 let cancel_flag = Default::default();
4019 let results = project
4020 .read_with(cx, |project, cx| {
4021 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
4022 })
4023 .await;
4024
4025 assert!(results.is_empty());
4026 }
4027
4028 #[gpui::test]
4029 async fn test_definition(cx: &mut gpui::TestAppContext) {
4030 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4031 let language = Arc::new(Language::new(
4032 LanguageConfig {
4033 name: "Rust".into(),
4034 path_suffixes: vec!["rs".to_string()],
4035 language_server: Some(language_server_config),
4036 ..Default::default()
4037 },
4038 Some(tree_sitter_rust::language()),
4039 ));
4040
4041 let fs = FakeFs::new(cx.background());
4042 fs.insert_tree(
4043 "/dir",
4044 json!({
4045 "a.rs": "const fn a() { A }",
4046 "b.rs": "const y: i32 = crate::a()",
4047 }),
4048 )
4049 .await;
4050
4051 let project = Project::test(fs, cx);
4052 project.update(cx, |project, _| {
4053 Arc::get_mut(&mut project.languages).unwrap().add(language);
4054 });
4055
4056 let (tree, _) = project
4057 .update(cx, |project, cx| {
4058 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
4059 })
4060 .await
4061 .unwrap();
4062 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4063 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4064 .await;
4065
4066 let buffer = project
4067 .update(cx, |project, cx| {
4068 project.open_buffer(
4069 ProjectPath {
4070 worktree_id,
4071 path: Path::new("").into(),
4072 },
4073 cx,
4074 )
4075 })
4076 .await
4077 .unwrap();
4078
4079 let mut fake_server = fake_servers.next().await.unwrap();
4080 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
4081 let params = params.text_document_position_params;
4082 assert_eq!(
4083 params.text_document.uri.to_file_path().unwrap(),
4084 Path::new("/dir/b.rs"),
4085 );
4086 assert_eq!(params.position, lsp::Position::new(0, 22));
4087
4088 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
4089 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4090 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4091 )))
4092 });
4093
4094 let mut definitions = project
4095 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
4096 .await
4097 .unwrap();
4098
4099 assert_eq!(definitions.len(), 1);
4100 let definition = definitions.pop().unwrap();
4101 cx.update(|cx| {
4102 let target_buffer = definition.buffer.read(cx);
4103 assert_eq!(
4104 target_buffer
4105 .file()
4106 .unwrap()
4107 .as_local()
4108 .unwrap()
4109 .abs_path(cx),
4110 Path::new("/dir/a.rs"),
4111 );
4112 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
4113 assert_eq!(
4114 list_worktrees(&project, cx),
4115 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4116 );
4117
4118 drop(definition);
4119 });
4120 cx.read(|cx| {
4121 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4122 });
4123
4124 fn list_worktrees<'a>(
4125 project: &'a ModelHandle<Project>,
4126 cx: &'a AppContext,
4127 ) -> Vec<(&'a Path, bool)> {
4128 project
4129 .read(cx)
4130 .worktrees(cx)
4131 .map(|worktree| {
4132 let worktree = worktree.read(cx);
4133 (
4134 worktree.as_local().unwrap().abs_path().as_ref(),
4135 worktree.is_visible(),
4136 )
4137 })
4138 .collect::<Vec<_>>()
4139 }
4140 }
4141
4142 #[gpui::test]
4143 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4144 let fs = FakeFs::new(cx.background());
4145 fs.insert_tree(
4146 "/dir",
4147 json!({
4148 "file1": "the old contents",
4149 }),
4150 )
4151 .await;
4152
4153 let project = Project::test(fs.clone(), cx);
4154 let worktree_id = project
4155 .update(cx, |p, cx| {
4156 p.find_or_create_local_worktree("/dir", true, cx)
4157 })
4158 .await
4159 .unwrap()
4160 .0
4161 .read_with(cx, |tree, _| tree.id());
4162
4163 let buffer = project
4164 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4165 .await
4166 .unwrap();
4167 buffer
4168 .update(cx, |buffer, cx| {
4169 assert_eq!(buffer.text(), "the old contents");
4170 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4171 buffer.save(cx)
4172 })
4173 .await
4174 .unwrap();
4175
4176 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4177 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4178 }
4179
4180 #[gpui::test]
4181 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4182 let fs = FakeFs::new(cx.background());
4183 fs.insert_tree(
4184 "/dir",
4185 json!({
4186 "file1": "the old contents",
4187 }),
4188 )
4189 .await;
4190
4191 let project = Project::test(fs.clone(), cx);
4192 let worktree_id = project
4193 .update(cx, |p, cx| {
4194 p.find_or_create_local_worktree("/dir/file1", true, cx)
4195 })
4196 .await
4197 .unwrap()
4198 .0
4199 .read_with(cx, |tree, _| tree.id());
4200
4201 let buffer = project
4202 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4203 .await
4204 .unwrap();
4205 buffer
4206 .update(cx, |buffer, cx| {
4207 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4208 buffer.save(cx)
4209 })
4210 .await
4211 .unwrap();
4212
4213 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4214 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4215 }
4216
4217 #[gpui::test]
4218 async fn test_save_as(cx: &mut gpui::TestAppContext) {
4219 let fs = FakeFs::new(cx.background());
4220 fs.insert_tree("/dir", json!({})).await;
4221
4222 let project = Project::test(fs.clone(), cx);
4223 let (worktree, _) = project
4224 .update(cx, |project, cx| {
4225 project.find_or_create_local_worktree("/dir", true, cx)
4226 })
4227 .await
4228 .unwrap();
4229 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4230
4231 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
4232 buffer.update(cx, |buffer, cx| {
4233 buffer.edit([0..0], "abc", cx);
4234 assert!(buffer.is_dirty());
4235 assert!(!buffer.has_conflict());
4236 });
4237 project
4238 .update(cx, |project, cx| {
4239 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
4240 })
4241 .await
4242 .unwrap();
4243 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
4244 buffer.read_with(cx, |buffer, cx| {
4245 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
4246 assert!(!buffer.is_dirty());
4247 assert!(!buffer.has_conflict());
4248 });
4249
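        // Opening the path that the buffer was just saved to should yield the
        // same buffer.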
4250 let opened_buffer = project
4251 .update(cx, |project, cx| {
4252 project.open_buffer((worktree_id, "file1"), cx)
4253 })
4254 .await
4255 .unwrap();
4256 assert_eq!(opened_buffer, buffer);
4257 }
4258
4259 #[gpui::test(retries = 5)]
4260 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4261 let dir = temp_tree(json!({
4262 "a": {
4263 "file1": "",
4264 "file2": "",
4265 "file3": "",
4266 },
4267 "b": {
4268 "c": {
4269 "file4": "",
4270 "file5": "",
4271 }
4272 }
4273 }));
4274
4275 let project = Project::test(Arc::new(RealFs), cx);
4276 let rpc = project.read_with(cx, |p, _| p.client.clone());
4277
4278 let (tree, _) = project
4279 .update(cx, |p, cx| {
4280 p.find_or_create_local_worktree(dir.path(), true, cx)
4281 })
4282 .await
4283 .unwrap();
4284 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4285
4286 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4287 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4288 async move { buffer.await.unwrap() }
4289 };
4290 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4291 tree.read_with(cx, |tree, _| {
4292 tree.entry_for_path(path)
4293 .expect(&format!("no entry for path {}", path))
4294 .id
4295 })
4296 };
4297
4298 let buffer2 = buffer_for_path("a/file2", cx).await;
4299 let buffer3 = buffer_for_path("a/file3", cx).await;
4300 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4301 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4302
4303 let file2_id = id_for_path("a/file2", &cx);
4304 let file3_id = id_for_path("a/file3", &cx);
4305 let file4_id = id_for_path("b/c/file4", &cx);
4306
4307 // Wait for the initial scan.
4308 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4309 .await;
4310
4311 // Create a remote copy of this worktree.
4312 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4313 let (remote, load_task) = cx.update(|cx| {
4314 Worktree::remote(
4315 1,
4316 1,
4317 initial_snapshot.to_proto(&Default::default(), true),
4318 rpc.clone(),
4319 cx,
4320 )
4321 });
4322 load_task.await;
4323
4324 cx.read(|cx| {
4325 assert!(!buffer2.read(cx).is_dirty());
4326 assert!(!buffer3.read(cx).is_dirty());
4327 assert!(!buffer4.read(cx).is_dirty());
4328 assert!(!buffer5.read(cx).is_dirty());
4329 });
4330
4331 // Rename and delete files and directories.
4332 tree.flush_fs_events(&cx).await;
4333 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4334 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4335 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4336 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4337 tree.flush_fs_events(&cx).await;
4338
4339 let expected_paths = vec![
4340 "a",
4341 "a/file1",
4342 "a/file2.new",
4343 "b",
4344 "d",
4345 "d/file3",
4346 "d/file4",
4347 ];
4348
4349 cx.read(|app| {
4350 assert_eq!(
4351 tree.read(app)
4352 .paths()
4353 .map(|p| p.to_str().unwrap())
4354 .collect::<Vec<_>>(),
4355 expected_paths
4356 );
4357
4358 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4359 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4360 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4361
4362 assert_eq!(
4363 buffer2.read(app).file().unwrap().path().as_ref(),
4364 Path::new("a/file2.new")
4365 );
4366 assert_eq!(
4367 buffer3.read(app).file().unwrap().path().as_ref(),
4368 Path::new("d/file3")
4369 );
4370 assert_eq!(
4371 buffer4.read(app).file().unwrap().path().as_ref(),
4372 Path::new("d/file4")
4373 );
4374 assert_eq!(
4375 buffer5.read(app).file().unwrap().path().as_ref(),
4376 Path::new("b/c/file5")
4377 );
4378
4379 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4380 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4381 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4382 assert!(buffer5.read(app).file().unwrap().is_deleted());
4383 });
4384
4385 // Update the remote worktree. Check that it becomes consistent with the
4386 // local worktree.
4387 remote.update(cx, |remote, cx| {
4388 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4389 &initial_snapshot,
4390 1,
4391 1,
4392 true,
4393 );
4394 remote
4395 .as_remote_mut()
4396 .unwrap()
4397 .snapshot
4398 .apply_remote_update(update_message)
4399 .unwrap();
4400
4401 assert_eq!(
4402 remote
4403 .paths()
4404 .map(|p| p.to_str().unwrap())
4405 .collect::<Vec<_>>(),
4406 expected_paths
4407 );
4408 });
4409 }
4410
4411 #[gpui::test]
4412 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4413 let fs = FakeFs::new(cx.background());
4414 fs.insert_tree(
4415 "/the-dir",
4416 json!({
4417 "a.txt": "a-contents",
4418 "b.txt": "b-contents",
4419 }),
4420 )
4421 .await;
4422
4423 let project = Project::test(fs.clone(), cx);
4424 let worktree_id = project
4425 .update(cx, |p, cx| {
4426 p.find_or_create_local_worktree("/the-dir", true, cx)
4427 })
4428 .await
4429 .unwrap()
4430 .0
4431 .read_with(cx, |tree, _| tree.id());
4432
4433 // Spawn multiple tasks to open paths, repeating some paths.
4434 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4435 (
4436 p.open_buffer((worktree_id, "a.txt"), cx),
4437 p.open_buffer((worktree_id, "b.txt"), cx),
4438 p.open_buffer((worktree_id, "a.txt"), cx),
4439 )
4440 });
4441
4442 let buffer_a_1 = buffer_a_1.await.unwrap();
4443 let buffer_a_2 = buffer_a_2.await.unwrap();
4444 let buffer_b = buffer_b.await.unwrap();
4445 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4446 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4447
4448 // There is only one buffer per path.
4449 let buffer_a_id = buffer_a_1.id();
4450 assert_eq!(buffer_a_2.id(), buffer_a_id);
4451
4452 // Open the same path again while it is still open.
4453 drop(buffer_a_1);
4454 let buffer_a_3 = project
4455 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4456 .await
4457 .unwrap();
4458
4459 // There's still only one buffer per path.
4460 assert_eq!(buffer_a_3.id(), buffer_a_id);
4461 }
4462
4463 #[gpui::test]
4464 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4465 use std::fs;
4466
4467 let dir = temp_tree(json!({
4468 "file1": "abc",
4469 "file2": "def",
4470 "file3": "ghi",
4471 }));
4472
4473 let project = Project::test(Arc::new(RealFs), cx);
4474 let (worktree, _) = project
4475 .update(cx, |p, cx| {
4476 p.find_or_create_local_worktree(dir.path(), true, cx)
4477 })
4478 .await
4479 .unwrap();
4480 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4481
4482 worktree.flush_fs_events(&cx).await;
4483 worktree
4484 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4485 .await;
4486
4487 let buffer1 = project
4488 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4489 .await
4490 .unwrap();
4491 let events = Rc::new(RefCell::new(Vec::new()));
4492
4493 // initially, the buffer isn't dirty.
4494 buffer1.update(cx, |buffer, cx| {
4495 cx.subscribe(&buffer1, {
4496 let events = events.clone();
4497 move |_, _, event, _| match event {
4498 BufferEvent::Operation(_) => {}
4499 _ => events.borrow_mut().push(event.clone()),
4500 }
4501 })
4502 .detach();
4503
4504 assert!(!buffer.is_dirty());
4505 assert!(events.borrow().is_empty());
4506
4507 buffer.edit(vec![1..2], "", cx);
4508 });
4509
4510 // after the first edit, the buffer is dirty, and emits a dirtied event.
4511 buffer1.update(cx, |buffer, cx| {
4512 assert!(buffer.text() == "ac");
4513 assert!(buffer.is_dirty());
4514 assert_eq!(
4515 *events.borrow(),
4516 &[language::Event::Edited, language::Event::Dirtied]
4517 );
4518 events.borrow_mut().clear();
4519 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4520 });
4521
4522 // after saving, the buffer is not dirty, and emits a saved event.
4523 buffer1.update(cx, |buffer, cx| {
4524 assert!(!buffer.is_dirty());
4525 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4526 events.borrow_mut().clear();
4527
4528 buffer.edit(vec![1..1], "B", cx);
4529 buffer.edit(vec![2..2], "D", cx);
4530 });
4531
4532 // after editing again, the buffer is dirty, and emits another dirty event.
4533 buffer1.update(cx, |buffer, cx| {
4534 assert!(buffer.text() == "aBDc");
4535 assert!(buffer.is_dirty());
4536 assert_eq!(
4537 *events.borrow(),
4538 &[
4539 language::Event::Edited,
4540 language::Event::Dirtied,
4541 language::Event::Edited,
4542 ],
4543 );
4544 events.borrow_mut().clear();
4545
            // TODO - currently, after restoring the buffer to its
            // previously-saved state, the buffer is still considered dirty.
4548 buffer.edit([1..3], "", cx);
4549 assert!(buffer.text() == "ac");
4550 assert!(buffer.is_dirty());
4551 });
4552
4553 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4554
4555 // When a file is deleted, the buffer is considered dirty.
4556 let events = Rc::new(RefCell::new(Vec::new()));
4557 let buffer2 = project
4558 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4559 .await
4560 .unwrap();
4561 buffer2.update(cx, |_, cx| {
4562 cx.subscribe(&buffer2, {
4563 let events = events.clone();
4564 move |_, _, event, _| events.borrow_mut().push(event.clone())
4565 })
4566 .detach();
4567 });
4568
4569 fs::remove_file(dir.path().join("file2")).unwrap();
4570 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4571 assert_eq!(
4572 *events.borrow(),
4573 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4574 );
4575
4576 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4577 let events = Rc::new(RefCell::new(Vec::new()));
4578 let buffer3 = project
4579 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4580 .await
4581 .unwrap();
4582 buffer3.update(cx, |_, cx| {
4583 cx.subscribe(&buffer3, {
4584 let events = events.clone();
4585 move |_, _, event, _| events.borrow_mut().push(event.clone())
4586 })
4587 .detach();
4588 });
4589
4590 worktree.flush_fs_events(&cx).await;
4591 buffer3.update(cx, |buffer, cx| {
4592 buffer.edit(Some(0..0), "x", cx);
4593 });
4594 events.borrow_mut().clear();
4595 fs::remove_file(dir.path().join("file3")).unwrap();
4596 buffer3
4597 .condition(&cx, |_, _| !events.borrow().is_empty())
4598 .await;
4599 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4600 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4601 }
4602
4603 #[gpui::test]
4604 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4605 use std::fs;
4606
4607 let initial_contents = "aaa\nbbbbb\nc\n";
4608 let dir = temp_tree(json!({ "the-file": initial_contents }));
4609
4610 let project = Project::test(Arc::new(RealFs), cx);
4611 let (worktree, _) = project
4612 .update(cx, |p, cx| {
4613 p.find_or_create_local_worktree(dir.path(), true, cx)
4614 })
4615 .await
4616 .unwrap();
4617 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4618
4619 worktree
4620 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4621 .await;
4622
4623 let abs_path = dir.path().join("the-file");
4624 let buffer = project
4625 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4626 .await
4627 .unwrap();
4628
4629 // TODO
4630 // Add a cursor on each row.
4631 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4632 // assert!(!buffer.is_dirty());
4633 // buffer.add_selection_set(
4634 // &(0..3)
4635 // .map(|row| Selection {
4636 // id: row as usize,
4637 // start: Point::new(row, 1),
4638 // end: Point::new(row, 1),
4639 // reversed: false,
4640 // goal: SelectionGoal::None,
4641 // })
4642 // .collect::<Vec<_>>(),
4643 // cx,
4644 // )
4645 // });
4646
4647 // Change the file on disk, adding two new lines of text, and removing
4648 // one line.
4649 buffer.read_with(cx, |buffer, _| {
4650 assert!(!buffer.is_dirty());
4651 assert!(!buffer.has_conflict());
4652 });
4653 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4654 fs::write(&abs_path, new_contents).unwrap();
4655
4656 // Because the buffer was not modified, it is reloaded from disk. Its
4657 // contents are edited according to the diff between the old and new
4658 // file contents.
4659 buffer
4660 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4661 .await;
4662
4663 buffer.update(cx, |buffer, _| {
4664 assert_eq!(buffer.text(), new_contents);
4665 assert!(!buffer.is_dirty());
4666 assert!(!buffer.has_conflict());
4667
4668 // TODO
4669 // let cursor_positions = buffer
4670 // .selection_set(selection_set_id)
4671 // .unwrap()
4672 // .selections::<Point>(&*buffer)
4673 // .map(|selection| {
4674 // assert_eq!(selection.start, selection.end);
4675 // selection.start
4676 // })
4677 // .collect::<Vec<_>>();
4678 // assert_eq!(
4679 // cursor_positions,
4680 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4681 // );
4682 });
4683
4684 // Modify the buffer
4685 buffer.update(cx, |buffer, cx| {
4686 buffer.edit(vec![0..0], " ", cx);
4687 assert!(buffer.is_dirty());
4688 assert!(!buffer.has_conflict());
4689 });
4690
4691 // Change the file on disk again, adding blank lines to the beginning.
4692 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4693
4694 // Because the buffer is modified, it doesn't reload from disk, but is
4695 // marked as having a conflict.
4696 buffer
4697 .condition(&cx, |buffer, _| buffer.has_conflict())
4698 .await;
4699 }
4700
4701 #[gpui::test]
4702 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4703 let fs = FakeFs::new(cx.background());
4704 fs.insert_tree(
4705 "/the-dir",
4706 json!({
4707 "a.rs": "
4708 fn foo(mut v: Vec<usize>) {
4709 for x in &v {
4710 v.push(1);
4711 }
4712 }
4713 "
4714 .unindent(),
4715 }),
4716 )
4717 .await;
4718
4719 let project = Project::test(fs.clone(), cx);
4720 let (worktree, _) = project
4721 .update(cx, |p, cx| {
4722 p.find_or_create_local_worktree("/the-dir", true, cx)
4723 })
4724 .await
4725 .unwrap();
4726 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4727
4728 let buffer = project
4729 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4730 .await
4731 .unwrap();
4732
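        // Publish diagnostics whose `related_information` ties the hint entries to
        // their primary diagnostics, so that they end up in the same groups below.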
4733 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4734 let message = lsp::PublishDiagnosticsParams {
4735 uri: buffer_uri.clone(),
4736 diagnostics: vec![
4737 lsp::Diagnostic {
4738 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4739 severity: Some(DiagnosticSeverity::WARNING),
4740 message: "error 1".to_string(),
4741 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4742 location: lsp::Location {
4743 uri: buffer_uri.clone(),
4744 range: lsp::Range::new(
4745 lsp::Position::new(1, 8),
4746 lsp::Position::new(1, 9),
4747 ),
4748 },
4749 message: "error 1 hint 1".to_string(),
4750 }]),
4751 ..Default::default()
4752 },
4753 lsp::Diagnostic {
4754 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4755 severity: Some(DiagnosticSeverity::HINT),
4756 message: "error 1 hint 1".to_string(),
4757 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4758 location: lsp::Location {
4759 uri: buffer_uri.clone(),
4760 range: lsp::Range::new(
4761 lsp::Position::new(1, 8),
4762 lsp::Position::new(1, 9),
4763 ),
4764 },
4765 message: "original diagnostic".to_string(),
4766 }]),
4767 ..Default::default()
4768 },
4769 lsp::Diagnostic {
4770 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4771 severity: Some(DiagnosticSeverity::ERROR),
4772 message: "error 2".to_string(),
4773 related_information: Some(vec![
4774 lsp::DiagnosticRelatedInformation {
4775 location: lsp::Location {
4776 uri: buffer_uri.clone(),
4777 range: lsp::Range::new(
4778 lsp::Position::new(1, 13),
4779 lsp::Position::new(1, 15),
4780 ),
4781 },
4782 message: "error 2 hint 1".to_string(),
4783 },
4784 lsp::DiagnosticRelatedInformation {
4785 location: lsp::Location {
4786 uri: buffer_uri.clone(),
4787 range: lsp::Range::new(
4788 lsp::Position::new(1, 13),
4789 lsp::Position::new(1, 15),
4790 ),
4791 },
4792 message: "error 2 hint 2".to_string(),
4793 },
4794 ]),
4795 ..Default::default()
4796 },
4797 lsp::Diagnostic {
4798 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4799 severity: Some(DiagnosticSeverity::HINT),
4800 message: "error 2 hint 1".to_string(),
4801 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4802 location: lsp::Location {
4803 uri: buffer_uri.clone(),
4804 range: lsp::Range::new(
4805 lsp::Position::new(2, 8),
4806 lsp::Position::new(2, 17),
4807 ),
4808 },
4809 message: "original diagnostic".to_string(),
4810 }]),
4811 ..Default::default()
4812 },
4813 lsp::Diagnostic {
4814 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4815 severity: Some(DiagnosticSeverity::HINT),
4816 message: "error 2 hint 2".to_string(),
4817 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4818 location: lsp::Location {
4819 uri: buffer_uri.clone(),
4820 range: lsp::Range::new(
4821 lsp::Position::new(2, 8),
4822 lsp::Position::new(2, 17),
4823 ),
4824 },
4825 message: "original diagnostic".to_string(),
4826 }]),
4827 ..Default::default()
4828 },
4829 ],
4830 version: None,
4831 };
4832
4833 project
4834 .update(cx, |p, cx| {
4835 p.update_diagnostics(message, &Default::default(), cx)
4836 })
4837 .unwrap();
4838 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4839
4840 assert_eq!(
4841 buffer
4842 .diagnostics_in_range::<_, Point>(0..buffer.len())
4843 .collect::<Vec<_>>(),
4844 &[
4845 DiagnosticEntry {
4846 range: Point::new(1, 8)..Point::new(1, 9),
4847 diagnostic: Diagnostic {
4848 severity: DiagnosticSeverity::WARNING,
4849 message: "error 1".to_string(),
4850 group_id: 0,
4851 is_primary: true,
4852 ..Default::default()
4853 }
4854 },
4855 DiagnosticEntry {
4856 range: Point::new(1, 8)..Point::new(1, 9),
4857 diagnostic: Diagnostic {
4858 severity: DiagnosticSeverity::HINT,
4859 message: "error 1 hint 1".to_string(),
4860 group_id: 0,
4861 is_primary: false,
4862 ..Default::default()
4863 }
4864 },
4865 DiagnosticEntry {
4866 range: Point::new(1, 13)..Point::new(1, 15),
4867 diagnostic: Diagnostic {
4868 severity: DiagnosticSeverity::HINT,
4869 message: "error 2 hint 1".to_string(),
4870 group_id: 1,
4871 is_primary: false,
4872 ..Default::default()
4873 }
4874 },
4875 DiagnosticEntry {
4876 range: Point::new(1, 13)..Point::new(1, 15),
4877 diagnostic: Diagnostic {
4878 severity: DiagnosticSeverity::HINT,
4879 message: "error 2 hint 2".to_string(),
4880 group_id: 1,
4881 is_primary: false,
4882 ..Default::default()
4883 }
4884 },
4885 DiagnosticEntry {
4886 range: Point::new(2, 8)..Point::new(2, 17),
4887 diagnostic: Diagnostic {
4888 severity: DiagnosticSeverity::ERROR,
4889 message: "error 2".to_string(),
4890 group_id: 1,
4891 is_primary: true,
4892 ..Default::default()
4893 }
4894 }
4895 ]
4896 );
4897
4898 assert_eq!(
4899 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4900 &[
4901 DiagnosticEntry {
4902 range: Point::new(1, 8)..Point::new(1, 9),
4903 diagnostic: Diagnostic {
4904 severity: DiagnosticSeverity::WARNING,
4905 message: "error 1".to_string(),
4906 group_id: 0,
4907 is_primary: true,
4908 ..Default::default()
4909 }
4910 },
4911 DiagnosticEntry {
4912 range: Point::new(1, 8)..Point::new(1, 9),
4913 diagnostic: Diagnostic {
4914 severity: DiagnosticSeverity::HINT,
4915 message: "error 1 hint 1".to_string(),
4916 group_id: 0,
4917 is_primary: false,
4918 ..Default::default()
4919 }
4920 },
4921 ]
4922 );
4923 assert_eq!(
4924 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4925 &[
4926 DiagnosticEntry {
4927 range: Point::new(1, 13)..Point::new(1, 15),
4928 diagnostic: Diagnostic {
4929 severity: DiagnosticSeverity::HINT,
4930 message: "error 2 hint 1".to_string(),
4931 group_id: 1,
4932 is_primary: false,
4933 ..Default::default()
4934 }
4935 },
4936 DiagnosticEntry {
4937 range: Point::new(1, 13)..Point::new(1, 15),
4938 diagnostic: Diagnostic {
4939 severity: DiagnosticSeverity::HINT,
4940 message: "error 2 hint 2".to_string(),
4941 group_id: 1,
4942 is_primary: false,
4943 ..Default::default()
4944 }
4945 },
4946 DiagnosticEntry {
4947 range: Point::new(2, 8)..Point::new(2, 17),
4948 diagnostic: Diagnostic {
4949 severity: DiagnosticSeverity::ERROR,
4950 message: "error 2".to_string(),
4951 group_id: 1,
4952 is_primary: true,
4953 ..Default::default()
4954 }
4955 }
4956 ]
4957 );
4958 }
4959
4960 #[gpui::test]
4961 async fn test_rename(cx: &mut gpui::TestAppContext) {
4962 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4963 let language = Arc::new(Language::new(
4964 LanguageConfig {
4965 name: "Rust".into(),
4966 path_suffixes: vec!["rs".to_string()],
4967 language_server: Some(language_server_config),
4968 ..Default::default()
4969 },
4970 Some(tree_sitter_rust::language()),
4971 ));
4972
4973 let fs = FakeFs::new(cx.background());
4974 fs.insert_tree(
4975 "/dir",
4976 json!({
4977 "one.rs": "const ONE: usize = 1;",
4978 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4979 }),
4980 )
4981 .await;
4982
4983 let project = Project::test(fs.clone(), cx);
4984 project.update(cx, |project, _| {
4985 Arc::get_mut(&mut project.languages).unwrap().add(language);
4986 });
4987
4988 let (tree, _) = project
4989 .update(cx, |project, cx| {
4990 project.find_or_create_local_worktree("/dir", true, cx)
4991 })
4992 .await
4993 .unwrap();
4994 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4995 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4996 .await;
4997
4998 let buffer = project
4999 .update(cx, |project, cx| {
5000 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
5001 })
5002 .await
5003 .unwrap();
5004
5005 let mut fake_server = fake_servers.next().await.unwrap();
5006
5007 let response = project.update(cx, |project, cx| {
5008 project.prepare_rename(buffer.clone(), 7, cx)
5009 });
5010 fake_server
5011 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
5012 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
5013 assert_eq!(params.position, lsp::Position::new(0, 7));
5014 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5015 lsp::Position::new(0, 6),
5016 lsp::Position::new(0, 9),
5017 )))
5018 })
5019 .next()
5020 .await
5021 .unwrap();
5022 let range = response.await.unwrap().unwrap();
5023 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
5024 assert_eq!(range, 6..9);
5025
5026 let response = project.update(cx, |project, cx| {
5027 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
5028 });
5029 fake_server
5030 .handle_request::<lsp::request::Rename, _>(|params, _| {
5031 assert_eq!(
5032 params.text_document_position.text_document.uri.as_str(),
5033 "file:///dir/one.rs"
5034 );
5035 assert_eq!(
5036 params.text_document_position.position,
5037 lsp::Position::new(0, 7)
5038 );
5039 assert_eq!(params.new_name, "THREE");
5040 Some(lsp::WorkspaceEdit {
5041 changes: Some(
5042 [
5043 (
5044 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
5045 vec![lsp::TextEdit::new(
5046 lsp::Range::new(
5047 lsp::Position::new(0, 6),
5048 lsp::Position::new(0, 9),
5049 ),
5050 "THREE".to_string(),
5051 )],
5052 ),
5053 (
5054 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
5055 vec![
5056 lsp::TextEdit::new(
5057 lsp::Range::new(
5058 lsp::Position::new(0, 24),
5059 lsp::Position::new(0, 27),
5060 ),
5061 "THREE".to_string(),
5062 ),
5063 lsp::TextEdit::new(
5064 lsp::Range::new(
5065 lsp::Position::new(0, 35),
5066 lsp::Position::new(0, 38),
5067 ),
5068 "THREE".to_string(),
5069 ),
5070 ],
5071 ),
5072 ]
5073 .into_iter()
5074 .collect(),
5075 ),
5076 ..Default::default()
5077 })
5078 })
5079 .next()
5080 .await
5081 .unwrap();
5082 let mut transaction = response.await.unwrap().0;
5083 assert_eq!(transaction.len(), 2);
5084 assert_eq!(
5085 transaction
5086 .remove_entry(&buffer)
5087 .unwrap()
5088 .0
5089 .read_with(cx, |buffer, _| buffer.text()),
5090 "const THREE: usize = 1;"
5091 );
5092 assert_eq!(
5093 transaction
5094 .into_keys()
5095 .next()
5096 .unwrap()
5097 .read_with(cx, |buffer, _| buffer.text()),
5098 "const TWO: usize = one::THREE + one::THREE;"
5099 );
5100 }
5101
5102 #[gpui::test]
5103 async fn test_search(cx: &mut gpui::TestAppContext) {
5104 let fs = FakeFs::new(cx.background());
5105 fs.insert_tree(
5106 "/dir",
5107 json!({
5108 "one.rs": "const ONE: usize = 1;",
5109 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5110 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5111 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5112 }),
5113 )
5114 .await;
5115 let project = Project::test(fs.clone(), cx);
5116 let (tree, _) = project
5117 .update(cx, |project, cx| {
5118 project.find_or_create_local_worktree("/dir", true, cx)
5119 })
5120 .await
5121 .unwrap();
5122 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5123 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5124 .await;
5125
5126 assert_eq!(
5127 search(&project, SearchQuery::text("TWO", false, true), cx)
5128 .await
5129 .unwrap(),
5130 HashMap::from_iter([
5131 ("two.rs".to_string(), vec![6..9]),
5132 ("three.rs".to_string(), vec![37..40])
5133 ])
5134 );
5135
5136 let buffer_4 = project
5137 .update(cx, |project, cx| {
5138 project.open_buffer((worktree_id, "four.rs"), cx)
5139 })
5140 .await
5141 .unwrap();
5142 buffer_4.update(cx, |buffer, cx| {
5143 buffer.edit([20..28, 31..43], "two::TWO", cx);
5144 });
5145
5146 assert_eq!(
5147 search(&project, SearchQuery::text("TWO", false, true), cx)
5148 .await
5149 .unwrap(),
5150 HashMap::from_iter([
5151 ("two.rs".to_string(), vec![6..9]),
5152 ("three.rs".to_string(), vec![37..40]),
5153 ("four.rs".to_string(), vec![25..28, 36..39])
5154 ])
5155 );
5156
5157 async fn search(
5158 project: &ModelHandle<Project>,
5159 query: SearchQuery,
5160 cx: &mut gpui::TestAppContext,
5161 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5162 let results = project
5163 .update(cx, |project, cx| project.search(query, cx))
5164 .await?;
5165
5166 Ok(results
5167 .into_iter()
5168 .map(|(buffer, ranges)| {
5169 buffer.read_with(cx, |buffer, _| {
5170 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5171 let ranges = ranges
5172 .into_iter()
5173 .map(|range| range.to_offset(buffer))
5174 .collect::<Vec<_>>();
5175 (path, ranges)
5176 })
5177 })
5178 .collect())
5179 }
5180 }
5181}