1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, deserialize_version, serialize_anchor, serialize_version},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, Event as BufferEvent, File as _, Language, LanguageRegistry,
21 Operation, PointUtf16, ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
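/// Central model for a workspace's files, buffers, language servers, and
/// collaboration state. A `Project` is either local (backed by worktrees on
/// this machine) or remote (a replica of a project another peer has shared).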
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 loading_local_worktrees:
68 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
69 opened_buffers: HashMap<u64, OpenBuffer>,
70 nonce: u128,
71}
72
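/// How the project holds on to an open buffer:
/// - `Strong` keeps the buffer alive (used while the project is shared or remote),
/// - `Weak` lets the buffer drop once nothing else references it,
/// - `Loading` accumulates operations that arrive before the buffer has finished opening.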
73enum OpenBuffer {
74 Strong(ModelHandle<Buffer>),
75 Weak(WeakModelHandle<Buffer>),
76 Loading(Vec<Operation>),
77}
78
79enum WorktreeHandle {
80 Strong(ModelHandle<Worktree>),
81 Weak(WeakModelHandle<Worktree>),
82}
83
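/// Client-side state of the project: `Local` owns the worktrees and may be
/// shared with collaborators, while `Remote` mirrors a project shared by
/// another peer and becomes read-only once sharing stops.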
84enum ProjectClientState {
85 Local {
86 is_shared: bool,
87 remote_id_tx: watch::Sender<Option<u64>>,
88 remote_id_rx: watch::Receiver<Option<u64>>,
89 _maintain_remote_id_task: Task<Option<()>>,
90 },
91 Remote {
92 sharing_has_stopped: bool,
93 remote_id: u64,
94 replica_id: ReplicaId,
95 _detect_unshare_task: Task<Option<()>>,
96 },
97}
98
99#[derive(Clone, Debug)]
100pub struct Collaborator {
101 pub user: Arc<User>,
102 pub peer_id: PeerId,
103 pub replica_id: ReplicaId,
104}
105
106#[derive(Clone, Debug, PartialEq)]
107pub enum Event {
108 ActiveEntryChanged(Option<ProjectEntry>),
109 WorktreeRemoved(WorktreeId),
110 DiskBasedDiagnosticsStarted,
111 DiskBasedDiagnosticsUpdated,
112 DiskBasedDiagnosticsFinished,
113 DiagnosticsUpdated(ProjectPath),
114}
115
116#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
117pub struct ProjectPath {
118 pub worktree_id: WorktreeId,
119 pub path: Arc<Path>,
120}
121
122#[derive(Clone, Debug, Default, PartialEq)]
123pub struct DiagnosticSummary {
124 pub error_count: usize,
125 pub warning_count: usize,
126 pub info_count: usize,
127 pub hint_count: usize,
128}
129
130#[derive(Debug)]
131pub struct Location {
132 pub buffer: ModelHandle<Buffer>,
133 pub range: Range<language::Anchor>,
134}
135
136#[derive(Debug)]
137pub struct DocumentHighlight {
138 pub range: Range<language::Anchor>,
139 pub kind: DocumentHighlightKind,
140}
141
142#[derive(Clone, Debug)]
143pub struct Symbol {
144 pub source_worktree_id: WorktreeId,
145 pub worktree_id: WorktreeId,
146 pub language_name: String,
147 pub path: PathBuf,
148 pub label: CodeLabel,
149 pub name: String,
150 pub kind: lsp::SymbolKind,
151 pub range: Range<PointUtf16>,
152 pub signature: [u8; 32],
153}
154
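/// The buffer transactions produced by a single project-wide operation (such
/// as a multi-buffer format or an applied code action), keyed by the buffer
/// they belong to.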
155#[derive(Default)]
156pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
157
158impl DiagnosticSummary {
159 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
160 let mut this = Self {
161 error_count: 0,
162 warning_count: 0,
163 info_count: 0,
164 hint_count: 0,
165 };
166
167 for entry in diagnostics {
168 if entry.diagnostic.is_primary {
169 match entry.diagnostic.severity {
170 DiagnosticSeverity::ERROR => this.error_count += 1,
171 DiagnosticSeverity::WARNING => this.warning_count += 1,
172 DiagnosticSeverity::INFORMATION => this.info_count += 1,
173 DiagnosticSeverity::HINT => this.hint_count += 1,
174 _ => {}
175 }
176 }
177 }
178
179 this
180 }
181
182 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
183 proto::DiagnosticSummary {
184 path: path.to_string_lossy().to_string(),
185 error_count: self.error_count as u32,
186 warning_count: self.warning_count as u32,
187 info_count: self.info_count as u32,
188 hint_count: self.hint_count as u32,
189 }
190 }
191}
192
193#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
194pub struct ProjectEntry {
195 pub worktree_id: WorktreeId,
196 pub entry_id: usize,
197}
198
199impl Project {
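    /// Registers this model's RPC message and request handlers with the
    /// client, so incoming collaboration messages are routed to the matching
    /// `handle_*` methods.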
200 pub fn init(client: &Arc<Client>) {
201 client.add_entity_message_handler(Self::handle_add_collaborator);
202 client.add_entity_message_handler(Self::handle_buffer_reloaded);
203 client.add_entity_message_handler(Self::handle_buffer_saved);
204 client.add_entity_message_handler(Self::handle_close_buffer);
205 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
206 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
207 client.add_entity_message_handler(Self::handle_remove_collaborator);
208 client.add_entity_message_handler(Self::handle_register_worktree);
209 client.add_entity_message_handler(Self::handle_unregister_worktree);
210 client.add_entity_message_handler(Self::handle_unshare_project);
211 client.add_entity_message_handler(Self::handle_update_buffer_file);
212 client.add_entity_message_handler(Self::handle_update_buffer);
213 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
214 client.add_entity_message_handler(Self::handle_update_worktree);
215 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
216 client.add_entity_request_handler(Self::handle_apply_code_action);
217 client.add_entity_request_handler(Self::handle_format_buffers);
218 client.add_entity_request_handler(Self::handle_get_code_actions);
219 client.add_entity_request_handler(Self::handle_get_completions);
220 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
224 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
225 client.add_entity_request_handler(Self::handle_search_project);
226 client.add_entity_request_handler(Self::handle_get_project_symbols);
227 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
228 client.add_entity_request_handler(Self::handle_open_buffer);
229 client.add_entity_request_handler(Self::handle_save_buffer);
230 }
231
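    /// Creates a local project. A background task watches the client's
    /// connection status, registering the project and its worktrees with the
    /// server whenever a connection is established and clearing the remote id
    /// when the connection drops.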
232 pub fn local(
233 client: Arc<Client>,
234 user_store: ModelHandle<UserStore>,
235 languages: Arc<LanguageRegistry>,
236 fs: Arc<dyn Fs>,
237 cx: &mut MutableAppContext,
238 ) -> ModelHandle<Self> {
239 cx.add_model(|cx: &mut ModelContext<Self>| {
240 let (remote_id_tx, remote_id_rx) = watch::channel();
241 let _maintain_remote_id_task = cx.spawn_weak({
242 let rpc = client.clone();
243 move |this, mut cx| {
244 async move {
245 let mut status = rpc.status();
246 while let Some(status) = status.next().await {
247 if let Some(this) = this.upgrade(&cx) {
248 let remote_id = if status.is_connected() {
249 let response = rpc.request(proto::RegisterProject {}).await?;
250 Some(response.project_id)
251 } else {
252 None
253 };
254
255 if let Some(project_id) = remote_id {
256 let mut registrations = Vec::new();
257 this.update(&mut cx, |this, cx| {
258 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
259 registrations.push(worktree.update(
260 cx,
261 |worktree, cx| {
262 let worktree = worktree.as_local_mut().unwrap();
263 worktree.register(project_id, cx)
264 },
265 ));
266 }
267 });
268 for registration in registrations {
269 registration.await?;
270 }
271 }
272 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
273 }
274 }
275 Ok(())
276 }
277 .log_err()
278 }
279 });
280
281 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
282 Self {
283 worktrees: Default::default(),
284 collaborators: Default::default(),
285 opened_buffers: Default::default(),
286 shared_buffers: Default::default(),
287 loading_buffers: Default::default(),
288 loading_local_worktrees: Default::default(),
289 client_state: ProjectClientState::Local {
290 is_shared: false,
291 remote_id_tx,
292 remote_id_rx,
293 _maintain_remote_id_task,
294 },
295 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
296 subscriptions: Vec::new(),
297 active_entry: None,
298 languages,
299 client,
300 user_store,
301 fs,
302 language_servers_with_diagnostics_running: 0,
303 language_servers: Default::default(),
304 started_language_servers: Default::default(),
305 nonce: StdRng::from_entropy().gen(),
306 }
307 })
308 }
309
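    /// Joins a project that another peer has shared: authenticates and
    /// connects the client, sends a `JoinProject` request, builds remote
    /// worktrees from the response, and loads the current set of
    /// collaborators.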
310 pub async fn remote(
311 remote_id: u64,
312 client: Arc<Client>,
313 user_store: ModelHandle<UserStore>,
314 languages: Arc<LanguageRegistry>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncAppContext,
317 ) -> Result<ModelHandle<Self>> {
318 client.authenticate_and_connect(&cx).await?;
319
320 let response = client
321 .request(proto::JoinProject {
322 project_id: remote_id,
323 })
324 .await?;
325
326 let replica_id = response.replica_id as ReplicaId;
327
328 let mut worktrees = Vec::new();
329 for worktree in response.worktrees {
330 let (worktree, load_task) = cx
331 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
332 worktrees.push(worktree);
333 load_task.detach();
334 }
335
336 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
337 let this = cx.add_model(|cx: &mut ModelContext<Self>| {
338 let mut this = Self {
339 worktrees: Vec::new(),
340 loading_buffers: Default::default(),
341 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
342 shared_buffers: Default::default(),
343 loading_local_worktrees: Default::default(),
344 active_entry: None,
345 collaborators: Default::default(),
346 languages,
347 user_store: user_store.clone(),
348 fs,
349 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
350 client: client.clone(),
351 client_state: ProjectClientState::Remote {
352 sharing_has_stopped: false,
353 remote_id,
354 replica_id,
355 _detect_unshare_task: cx.spawn_weak(move |this, mut cx| {
356 async move {
357 let mut status = client.status();
358 let is_connected =
359 status.next().await.map_or(false, |s| s.is_connected());
                            // Even if we were initially connected, any later status change means the connection was lost at some point.
361 if !is_connected || status.next().await.is_some() {
362 if let Some(this) = this.upgrade(&cx) {
363 this.update(&mut cx, |this, cx| this.project_unshared(cx))
364 }
365 }
366 Ok(())
367 }
368 .log_err()
369 }),
370 },
371 language_servers_with_diagnostics_running: 0,
372 language_servers: Default::default(),
373 started_language_servers: Default::default(),
374 opened_buffers: Default::default(),
375 nonce: StdRng::from_entropy().gen(),
376 };
377 for worktree in worktrees {
378 this.add_worktree(&worktree, cx);
379 }
380 this
381 });
382
383 let user_ids = response
384 .collaborators
385 .iter()
386 .map(|peer| peer.user_id)
387 .collect();
388 user_store
389 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
390 .await?;
391 let mut collaborators = HashMap::default();
392 for message in response.collaborators {
393 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
394 collaborators.insert(collaborator.peer_id, collaborator);
395 }
396
397 this.update(cx, |this, _| {
398 this.collaborators = collaborators;
399 });
400
401 Ok(this)
402 }
403
404 #[cfg(any(test, feature = "test-support"))]
405 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
406 let languages = Arc::new(LanguageRegistry::test());
407 let http_client = client::test::FakeHttpClient::with_404_response();
408 let client = client::Client::new(http_client.clone());
409 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
410 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
411 }
412
413 #[cfg(any(test, feature = "test-support"))]
414 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
415 self.opened_buffers
416 .get(&remote_id)
417 .and_then(|buffer| buffer.upgrade(cx))
418 }
419
420 #[cfg(any(test, feature = "test-support"))]
421 pub fn languages(&self) -> &Arc<LanguageRegistry> {
422 &self.languages
423 }
424
425 #[cfg(any(test, feature = "test-support"))]
426 pub fn check_invariants(&self, cx: &AppContext) {
427 if self.is_local() {
428 let mut worktree_root_paths = HashMap::default();
429 for worktree in self.worktrees(cx) {
430 let worktree = worktree.read(cx);
431 let abs_path = worktree.as_local().unwrap().abs_path().clone();
432 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
433 assert_eq!(
434 prev_worktree_id,
435 None,
436 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
437 abs_path,
438 worktree.id(),
439 prev_worktree_id
440 )
441 }
442 } else {
443 let replica_id = self.replica_id();
444 for buffer in self.opened_buffers.values() {
445 if let Some(buffer) = buffer.upgrade(cx) {
446 let buffer = buffer.read(cx);
447 assert_eq!(
448 buffer.deferred_ops_len(),
449 0,
450 "replica {}, buffer {} has deferred operations",
451 replica_id,
452 buffer.remote_id()
453 );
454 }
455 }
456 }
457 }
458
459 #[cfg(any(test, feature = "test-support"))]
460 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
461 let path = path.into();
462 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
463 self.opened_buffers.iter().any(|(_, buffer)| {
464 if let Some(buffer) = buffer.upgrade(cx) {
465 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
466 if file.worktree == worktree && file.path() == &path.path {
467 return true;
468 }
469 }
470 }
471 false
472 })
473 } else {
474 false
475 }
476 }
477
478 pub fn fs(&self) -> &Arc<dyn Fs> {
479 &self.fs
480 }
481
482 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
483 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
484 *remote_id_tx.borrow_mut() = remote_id;
485 }
486
487 self.subscriptions.clear();
488 if let Some(remote_id) = remote_id {
489 self.subscriptions
490 .push(self.client.add_model_for_remote_entity(remote_id, cx));
491 }
492 }
493
494 pub fn remote_id(&self) -> Option<u64> {
495 match &self.client_state {
496 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
497 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
498 }
499 }
500
501 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
502 let mut id = None;
503 let mut watch = None;
504 match &self.client_state {
505 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
506 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
507 }
508
509 async move {
510 if let Some(id) = id {
511 return id;
512 }
513 let mut watch = watch.unwrap();
514 loop {
515 let id = *watch.borrow();
516 if let Some(id) = id {
517 return id;
518 }
519 watch.next().await;
520 }
521 }
522 }
523
524 pub fn replica_id(&self) -> ReplicaId {
525 match &self.client_state {
526 ProjectClientState::Local { .. } => 0,
527 ProjectClientState::Remote { replica_id, .. } => *replica_id,
528 }
529 }
530
531 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
532 &self.collaborators
533 }
534
535 pub fn worktrees<'a>(
536 &'a self,
537 cx: &'a AppContext,
538 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
539 self.worktrees
540 .iter()
541 .filter_map(move |worktree| worktree.upgrade(cx))
542 }
543
544 pub fn visible_worktrees<'a>(
545 &'a self,
546 cx: &'a AppContext,
547 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
548 self.worktrees.iter().filter_map(|worktree| {
549 worktree.upgrade(cx).and_then(|worktree| {
550 if worktree.read(cx).is_visible() {
551 Some(worktree)
552 } else {
553 None
554 }
555 })
556 })
557 }
558
559 pub fn worktree_for_id(
560 &self,
561 id: WorktreeId,
562 cx: &AppContext,
563 ) -> Option<ModelHandle<Worktree>> {
564 self.worktrees(cx)
565 .find(|worktree| worktree.read(cx).id() == id)
566 }
567
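    /// Shares a local project with collaborators. Open buffers and worktrees
    /// are upgraded to strong handles so they stay alive for guests, then each
    /// worktree is shared under the project's remote id.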
568 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
569 let rpc = self.client.clone();
570 cx.spawn(|this, mut cx| async move {
571 let project_id = this.update(&mut cx, |this, cx| {
572 if let ProjectClientState::Local {
573 is_shared,
574 remote_id_rx,
575 ..
576 } = &mut this.client_state
577 {
578 *is_shared = true;
579
580 for open_buffer in this.opened_buffers.values_mut() {
581 match open_buffer {
582 OpenBuffer::Strong(_) => {}
583 OpenBuffer::Weak(buffer) => {
584 if let Some(buffer) = buffer.upgrade(cx) {
585 *open_buffer = OpenBuffer::Strong(buffer);
586 }
587 }
588 OpenBuffer::Loading(_) => unreachable!(),
589 }
590 }
591
592 for worktree_handle in this.worktrees.iter_mut() {
593 match worktree_handle {
594 WorktreeHandle::Strong(_) => {}
595 WorktreeHandle::Weak(worktree) => {
596 if let Some(worktree) = worktree.upgrade(cx) {
597 *worktree_handle = WorktreeHandle::Strong(worktree);
598 }
599 }
600 }
601 }
602
603 remote_id_rx
604 .borrow()
605 .ok_or_else(|| anyhow!("no project id"))
606 } else {
607 Err(anyhow!("can't share a remote project"))
608 }
609 })?;
610
611 rpc.request(proto::ShareProject { project_id }).await?;
612
613 let mut tasks = Vec::new();
614 this.update(&mut cx, |this, cx| {
615 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
616 worktree.update(cx, |worktree, cx| {
617 let worktree = worktree.as_local_mut().unwrap();
618 tasks.push(worktree.share(project_id, cx));
619 });
620 }
621 });
622 for task in tasks {
623 task.await?;
624 }
625 this.update(&mut cx, |_, cx| cx.notify());
626 Ok(())
627 })
628 }
629
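    /// Stops sharing a local project: buffers are downgraded to weak handles,
    /// worktrees that aren't visible are released, collaborator and
    /// shared-buffer bookkeeping is cleared, and the server is notified.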
630 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
631 let rpc = self.client.clone();
632 cx.spawn(|this, mut cx| async move {
633 let project_id = this.update(&mut cx, |this, cx| {
634 if let ProjectClientState::Local {
635 is_shared,
636 remote_id_rx,
637 ..
638 } = &mut this.client_state
639 {
640 *is_shared = false;
641
642 for open_buffer in this.opened_buffers.values_mut() {
643 match open_buffer {
644 OpenBuffer::Strong(buffer) => {
645 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
646 }
647 _ => {}
648 }
649 }
650
651 for worktree_handle in this.worktrees.iter_mut() {
652 match worktree_handle {
653 WorktreeHandle::Strong(worktree) => {
654 if !worktree.read(cx).is_visible() {
655 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
656 }
657 }
658 _ => {}
659 }
660 }
661
662 remote_id_rx
663 .borrow()
664 .ok_or_else(|| anyhow!("no project id"))
665 } else {
666 Err(anyhow!("can't share a remote project"))
667 }
668 })?;
669
670 rpc.send(proto::UnshareProject { project_id })?;
671 this.update(&mut cx, |this, cx| {
672 this.collaborators.clear();
673 this.shared_buffers.clear();
674 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
675 worktree.update(cx, |worktree, _| {
676 worktree.as_local_mut().unwrap().unshare();
677 });
678 }
679 cx.notify()
680 });
681 Ok(())
682 })
683 }
684
685 fn project_unshared(&mut self, cx: &mut ModelContext<Self>) {
686 if let ProjectClientState::Remote {
687 sharing_has_stopped,
688 ..
689 } = &mut self.client_state
690 {
691 *sharing_has_stopped = true;
692 self.collaborators.clear();
693 cx.notify();
694 }
695 }
696
697 pub fn is_read_only(&self) -> bool {
698 match &self.client_state {
699 ProjectClientState::Local { .. } => false,
700 ProjectClientState::Remote {
701 sharing_has_stopped,
702 ..
703 } => *sharing_has_stopped,
704 }
705 }
706
707 pub fn is_local(&self) -> bool {
708 match &self.client_state {
709 ProjectClientState::Local { .. } => true,
710 ProjectClientState::Remote { .. } => false,
711 }
712 }
713
714 pub fn is_remote(&self) -> bool {
715 !self.is_local()
716 }
717
718 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
719 if self.is_remote() {
720 return Err(anyhow!("creating buffers as a guest is not supported yet"));
721 }
722
723 let buffer = cx.add_model(|cx| {
724 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
725 });
726 self.register_buffer(&buffer, None, cx)?;
727 Ok(buffer)
728 }
729
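    /// Opens a buffer for the given path, returning the existing buffer if the
    /// path is already open. Concurrent requests for the same path are
    /// deduplicated through `loading_buffers`: only the first caller performs
    /// the load, and every waiter receives the same result.
    ///
    /// A minimal usage sketch (the `project`, `worktree_id`, and `cx` bindings
    /// here are assumptions for illustration, not part of this file):
    ///
    /// ```ignore
    /// let open_task = project.update(cx, |project, cx| {
    ///     project.open_buffer(
    ///         ProjectPath {
    ///             worktree_id,
    ///             path: Path::new("src/main.rs").into(),
    ///         },
    ///         cx,
    ///     )
    /// });
    /// let buffer = open_task.await?;
    /// ```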
730 pub fn open_buffer(
731 &mut self,
732 path: impl Into<ProjectPath>,
733 cx: &mut ModelContext<Self>,
734 ) -> Task<Result<ModelHandle<Buffer>>> {
735 let project_path = path.into();
736 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
737 worktree
738 } else {
739 return Task::ready(Err(anyhow!("no such worktree")));
740 };
741
742 // If there is already a buffer for the given path, then return it.
743 let existing_buffer = self.get_open_buffer(&project_path, cx);
744 if let Some(existing_buffer) = existing_buffer {
745 return Task::ready(Ok(existing_buffer));
746 }
747
748 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
749 // If the given path is already being loaded, then wait for that existing
750 // task to complete and return the same buffer.
751 hash_map::Entry::Occupied(e) => e.get().clone(),
752
753 // Otherwise, record the fact that this path is now being loaded.
754 hash_map::Entry::Vacant(entry) => {
755 let (mut tx, rx) = postage::watch::channel();
756 entry.insert(rx.clone());
757
758 let load_buffer = if worktree.read(cx).is_local() {
759 self.open_local_buffer(&project_path.path, &worktree, cx)
760 } else {
761 self.open_remote_buffer(&project_path.path, &worktree, cx)
762 };
763
764 cx.spawn(move |this, mut cx| async move {
765 let load_result = load_buffer.await;
766 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
767 // Record the fact that the buffer is no longer loading.
768 this.loading_buffers.remove(&project_path);
769 let buffer = load_result.map_err(Arc::new)?;
770 Ok(buffer)
771 }));
772 })
773 .detach();
774 rx
775 }
776 };
777
778 cx.foreground().spawn(async move {
779 loop {
780 if let Some(result) = loading_watch.borrow().as_ref() {
781 match result {
782 Ok(buffer) => return Ok(buffer.clone()),
783 Err(error) => return Err(anyhow!("{}", error)),
784 }
785 }
786 loading_watch.next().await;
787 }
788 })
789 }
790
791 fn open_local_buffer(
792 &mut self,
793 path: &Arc<Path>,
794 worktree: &ModelHandle<Worktree>,
795 cx: &mut ModelContext<Self>,
796 ) -> Task<Result<ModelHandle<Buffer>>> {
797 let load_buffer = worktree.update(cx, |worktree, cx| {
798 let worktree = worktree.as_local_mut().unwrap();
799 worktree.load_buffer(path, cx)
800 });
801 let worktree = worktree.downgrade();
802 cx.spawn(|this, mut cx| async move {
803 let buffer = load_buffer.await?;
804 let worktree = worktree
805 .upgrade(&cx)
806 .ok_or_else(|| anyhow!("worktree was removed"))?;
807 this.update(&mut cx, |this, cx| {
808 this.register_buffer(&buffer, Some(&worktree), cx)
809 })?;
810 Ok(buffer)
811 })
812 }
813
814 fn open_remote_buffer(
815 &mut self,
816 path: &Arc<Path>,
817 worktree: &ModelHandle<Worktree>,
818 cx: &mut ModelContext<Self>,
819 ) -> Task<Result<ModelHandle<Buffer>>> {
820 let rpc = self.client.clone();
821 let project_id = self.remote_id().unwrap();
822 let remote_worktree_id = worktree.read(cx).id();
823 let path = path.clone();
824 let path_string = path.to_string_lossy().to_string();
825 cx.spawn(|this, mut cx| async move {
826 let response = rpc
827 .request(proto::OpenBuffer {
828 project_id,
829 worktree_id: remote_worktree_id.to_proto(),
830 path: path_string,
831 })
832 .await?;
833 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
834 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
835 .await
836 })
837 }
838
839 fn open_local_buffer_via_lsp(
840 &mut self,
841 abs_path: lsp::Url,
842 lang_name: String,
843 lang_server: Arc<LanguageServer>,
844 cx: &mut ModelContext<Self>,
845 ) -> Task<Result<ModelHandle<Buffer>>> {
846 cx.spawn(|this, mut cx| async move {
847 let abs_path = abs_path
848 .to_file_path()
849 .map_err(|_| anyhow!("can't convert URI to path"))?;
850 let (worktree, relative_path) = if let Some(result) =
851 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
852 {
853 result
854 } else {
855 let worktree = this
856 .update(&mut cx, |this, cx| {
857 this.create_local_worktree(&abs_path, false, cx)
858 })
859 .await?;
860 this.update(&mut cx, |this, cx| {
861 this.language_servers
862 .insert((worktree.read(cx).id(), lang_name), lang_server);
863 });
864 (worktree, PathBuf::new())
865 };
866
867 let project_path = ProjectPath {
868 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
869 path: relative_path.into(),
870 };
871 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
872 .await
873 })
874 }
875
876 pub fn save_buffer_as(
877 &mut self,
878 buffer: ModelHandle<Buffer>,
879 abs_path: PathBuf,
880 cx: &mut ModelContext<Project>,
881 ) -> Task<Result<()>> {
882 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
883 cx.spawn(|this, mut cx| async move {
884 let (worktree, path) = worktree_task.await?;
885 worktree
886 .update(&mut cx, |worktree, cx| {
887 worktree
888 .as_local_mut()
889 .unwrap()
890 .save_buffer_as(buffer.clone(), path, cx)
891 })
892 .await?;
893 this.update(&mut cx, |this, cx| {
894 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
895 });
896 Ok(())
897 })
898 }
899
900 pub fn get_open_buffer(
901 &mut self,
902 path: &ProjectPath,
903 cx: &mut ModelContext<Self>,
904 ) -> Option<ModelHandle<Buffer>> {
905 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
906 self.opened_buffers.values().find_map(|buffer| {
907 let buffer = buffer.upgrade(cx)?;
908 let file = File::from_dyn(buffer.read(cx).file())?;
909 if file.worktree == worktree && file.path() == &path.path {
910 Some(buffer)
911 } else {
912 None
913 }
914 })
915 }
916
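    /// Records a newly opened buffer in `opened_buffers`, applying any
    /// operations that were queued while it was loading, subscribes to its
    /// events, and assigns a language (and language server) based on its path.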
917 fn register_buffer(
918 &mut self,
919 buffer: &ModelHandle<Buffer>,
920 worktree: Option<&ModelHandle<Worktree>>,
921 cx: &mut ModelContext<Self>,
922 ) -> Result<()> {
923 let remote_id = buffer.read(cx).remote_id();
924 let open_buffer = if self.is_remote() || self.is_shared() {
925 OpenBuffer::Strong(buffer.clone())
926 } else {
927 OpenBuffer::Weak(buffer.downgrade())
928 };
929
930 match self.opened_buffers.insert(remote_id, open_buffer) {
931 None => {}
932 Some(OpenBuffer::Loading(operations)) => {
933 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
934 }
935 Some(OpenBuffer::Weak(existing_handle)) => {
936 if existing_handle.upgrade(cx).is_some() {
937 Err(anyhow!(
938 "already registered buffer with remote id {}",
939 remote_id
940 ))?
941 }
942 }
943 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
944 "already registered buffer with remote id {}",
945 remote_id
946 ))?,
947 }
        cx.subscribe(buffer, Self::on_buffer_event).detach();
949 self.assign_language_to_buffer(buffer, worktree, cx);
950
951 Ok(())
952 }
953
954 fn on_buffer_event(
955 &mut self,
956 buffer: ModelHandle<Buffer>,
957 event: BufferEvent,
958 cx: &mut ModelContext<Self>,
959 ) {
960 match event {
961 BufferEvent::Operation(operation) => {
962 if let Some(project_id) = self.remote_id() {
963 let request = self.client.request(proto::UpdateBuffer {
964 project_id,
965 buffer_id: buffer.read(cx).remote_id(),
966 operations: vec![language::proto::serialize_operation(&operation)],
967 });
968 cx.foreground()
969 .spawn(async move {
970 request.await.log_err();
971 })
972 .detach();
973 }
974 }
975 _ => {}
976 }
977 }
978
979 fn assign_language_to_buffer(
980 &mut self,
981 buffer: &ModelHandle<Buffer>,
982 worktree: Option<&ModelHandle<Worktree>>,
983 cx: &mut ModelContext<Self>,
984 ) -> Option<()> {
985 let (path, full_path) = {
986 let file = buffer.read(cx).file()?;
987 (file.path().clone(), file.full_path(cx))
988 };
989
        // If a language matches the buffer's file, set it on the buffer and start (or reuse) a language server for the worktree.
991 if let Some(language) = self.languages.select_language(&full_path) {
992 buffer.update(cx, |buffer, cx| {
993 buffer.set_language(Some(language.clone()), cx);
994 });
995
996 // For local worktrees, start a language server if needed.
997 // Also assign the language server and any previously stored diagnostics to the buffer.
998 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
999 let worktree_id = local_worktree.id();
1000 let worktree_abs_path = local_worktree.abs_path().clone();
1001 let buffer = buffer.downgrade();
1002 let language_server =
1003 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
1004
1005 cx.spawn_weak(|_, mut cx| async move {
1006 if let Some(language_server) = language_server.await {
1007 if let Some(buffer) = buffer.upgrade(&cx) {
1008 buffer.update(&mut cx, |buffer, cx| {
1009 buffer.set_language_server(Some(language_server), cx);
1010 });
1011 }
1012 }
1013 })
1014 .detach();
1015 }
1016 }
1017
1018 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
1019 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
1020 buffer.update(cx, |buffer, cx| {
1021 buffer.update_diagnostics(diagnostics, None, cx).log_err();
1022 });
1023 }
1024 }
1025
1026 None
1027 }
1028
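    /// Starts (or returns the in-flight task for) the language server for the
    /// given worktree and language. The server's `PublishDiagnostics` and
    /// `Progress` notifications are funneled through a channel and processed
    /// on the foreground, updating diagnostics and broadcasting disk-based
    /// diagnostic start/finish events to collaborators.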
1029 fn start_language_server(
1030 &mut self,
1031 worktree_id: WorktreeId,
1032 worktree_path: Arc<Path>,
1033 language: Arc<Language>,
1034 cx: &mut ModelContext<Self>,
1035 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
1036 enum LspEvent {
1037 DiagnosticsStart,
1038 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
1039 DiagnosticsFinish,
1040 }
1041
1042 let key = (worktree_id, language.name().to_string());
1043 self.started_language_servers
1044 .entry(key.clone())
1045 .or_insert_with(|| {
1046 let language_server = self.languages.start_language_server(
1047 language.clone(),
1048 worktree_path,
1049 self.client.http_client(),
1050 cx,
1051 );
1052 let rpc = self.client.clone();
1053 cx.spawn_weak(|this, mut cx| async move {
1054 let language_server = language_server?.await.log_err()?;
1055 if let Some(this) = this.upgrade(&cx) {
1056 this.update(&mut cx, |this, _| {
1057 this.language_servers.insert(key, language_server.clone());
1058 });
1059 }
1060
1061 let disk_based_sources = language
1062 .disk_based_diagnostic_sources()
1063 .cloned()
1064 .unwrap_or_default();
1065 let disk_based_diagnostics_progress_token =
1066 language.disk_based_diagnostics_progress_token().cloned();
1067 let has_disk_based_diagnostic_progress_token =
1068 disk_based_diagnostics_progress_token.is_some();
1069 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1070
1071 // Listen for `PublishDiagnostics` notifications.
1072 language_server
1073 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1074 let diagnostics_tx = diagnostics_tx.clone();
1075 move |params| {
1076 if !has_disk_based_diagnostic_progress_token {
1077 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1078 }
1079 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1080 .ok();
1081 if !has_disk_based_diagnostic_progress_token {
1082 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1083 }
1084 }
1085 })
1086 .detach();
1087
1088 // Listen for `Progress` notifications. Send an event when the language server
1089 // transitions between running jobs and not running any jobs.
1090 let mut running_jobs_for_this_server: i32 = 0;
1091 language_server
1092 .on_notification::<lsp::notification::Progress, _>(move |params| {
1093 let token = match params.token {
1094 lsp::NumberOrString::Number(_) => None,
1095 lsp::NumberOrString::String(token) => Some(token),
1096 };
1097
1098 if token == disk_based_diagnostics_progress_token {
1099 match params.value {
1100 lsp::ProgressParamsValue::WorkDone(progress) => {
1101 match progress {
1102 lsp::WorkDoneProgress::Begin(_) => {
1103 running_jobs_for_this_server += 1;
1104 if running_jobs_for_this_server == 1 {
1105 block_on(
1106 diagnostics_tx
1107 .send(LspEvent::DiagnosticsStart),
1108 )
1109 .ok();
1110 }
1111 }
1112 lsp::WorkDoneProgress::End(_) => {
1113 running_jobs_for_this_server -= 1;
1114 if running_jobs_for_this_server == 0 {
1115 block_on(
1116 diagnostics_tx
1117 .send(LspEvent::DiagnosticsFinish),
1118 )
1119 .ok();
1120 }
1121 }
1122 _ => {}
1123 }
1124 }
1125 }
1126 }
1127 })
1128 .detach();
1129
1130 // Process all the LSP events.
1131 cx.spawn(|mut cx| async move {
1132 while let Ok(message) = diagnostics_rx.recv().await {
1133 let this = this.upgrade(&cx)?;
1134 match message {
1135 LspEvent::DiagnosticsStart => {
1136 this.update(&mut cx, |this, cx| {
1137 this.disk_based_diagnostics_started(cx);
1138 if let Some(project_id) = this.remote_id() {
1139 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1140 project_id,
1141 })
1142 .log_err();
1143 }
1144 });
1145 }
1146 LspEvent::DiagnosticsUpdate(mut params) => {
1147 language.process_diagnostics(&mut params);
1148 this.update(&mut cx, |this, cx| {
1149 this.update_diagnostics(params, &disk_based_sources, cx)
1150 .log_err();
1151 });
1152 }
1153 LspEvent::DiagnosticsFinish => {
1154 this.update(&mut cx, |this, cx| {
1155 this.disk_based_diagnostics_finished(cx);
1156 if let Some(project_id) = this.remote_id() {
1157 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1158 project_id,
1159 })
1160 .log_err();
1161 }
1162 });
1163 }
1164 }
1165 }
1166 Some(())
1167 })
1168 .detach();
1169
1170 Some(language_server)
1171 })
1172 .shared()
1173 })
1174 .clone()
1175 }
1176
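    /// Converts an LSP `PublishDiagnostics` notification into grouped
    /// diagnostic entries. Each primary diagnostic gets its own group id, and
    /// related-information entries that point back at an existing primary are
    /// attached to that group as supporting diagnostics, possibly overriding
    /// their severity.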
1177 pub fn update_diagnostics(
1178 &mut self,
1179 params: lsp::PublishDiagnosticsParams,
1180 disk_based_sources: &HashSet<String>,
1181 cx: &mut ModelContext<Self>,
1182 ) -> Result<()> {
1183 let abs_path = params
1184 .uri
1185 .to_file_path()
1186 .map_err(|_| anyhow!("URI is not a file"))?;
1187 let mut next_group_id = 0;
1188 let mut diagnostics = Vec::default();
1189 let mut primary_diagnostic_group_ids = HashMap::default();
1190 let mut sources_by_group_id = HashMap::default();
1191 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1193 let source = diagnostic.source.as_ref();
1194 let code = diagnostic.code.as_ref().map(|code| match code {
1195 lsp::NumberOrString::Number(code) => code.to_string(),
1196 lsp::NumberOrString::String(code) => code.clone(),
1197 });
1198 let range = range_from_lsp(diagnostic.range);
1199 let is_supporting = diagnostic
1200 .related_information
1201 .as_ref()
1202 .map_or(false, |infos| {
1203 infos.iter().any(|info| {
1204 primary_diagnostic_group_ids.contains_key(&(
1205 source,
1206 code.clone(),
1207 range_from_lsp(info.location.range),
1208 ))
1209 })
1210 });
1211
1212 if is_supporting {
1213 if let Some(severity) = diagnostic.severity {
1214 supporting_diagnostic_severities
1215 .insert((source, code.clone(), range), severity);
1216 }
1217 } else {
1218 let group_id = post_inc(&mut next_group_id);
1219 let is_disk_based =
1220 source.map_or(false, |source| disk_based_sources.contains(source));
1221
1222 sources_by_group_id.insert(group_id, source);
1223 primary_diagnostic_group_ids
1224 .insert((source, code.clone(), range.clone()), group_id);
1225
1226 diagnostics.push(DiagnosticEntry {
1227 range,
1228 diagnostic: Diagnostic {
1229 code: code.clone(),
1230 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1231 message: diagnostic.message.clone(),
1232 group_id,
1233 is_primary: true,
1234 is_valid: true,
1235 is_disk_based,
1236 },
1237 });
1238 if let Some(infos) = &diagnostic.related_information {
1239 for info in infos {
1240 if info.location.uri == params.uri && !info.message.is_empty() {
1241 let range = range_from_lsp(info.location.range);
1242 diagnostics.push(DiagnosticEntry {
1243 range,
1244 diagnostic: Diagnostic {
1245 code: code.clone(),
1246 severity: DiagnosticSeverity::INFORMATION,
1247 message: info.message.clone(),
1248 group_id,
1249 is_primary: false,
1250 is_valid: true,
1251 is_disk_based,
1252 },
1253 });
1254 }
1255 }
1256 }
1257 }
1258 }
1259
1260 for entry in &mut diagnostics {
1261 let diagnostic = &mut entry.diagnostic;
1262 if !diagnostic.is_primary {
1263 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1264 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1265 source,
1266 diagnostic.code.clone(),
1267 entry.range.clone(),
1268 )) {
1269 diagnostic.severity = severity;
1270 }
1271 }
1272 }
1273
1274 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1275 Ok(())
1276 }
1277
1278 pub fn update_diagnostic_entries(
1279 &mut self,
1280 abs_path: PathBuf,
1281 version: Option<i32>,
1282 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1283 cx: &mut ModelContext<Project>,
1284 ) -> Result<(), anyhow::Error> {
1285 let (worktree, relative_path) = self
1286 .find_local_worktree(&abs_path, cx)
1287 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1288 if !worktree.read(cx).is_visible() {
1289 return Ok(());
1290 }
1291
1292 let project_path = ProjectPath {
1293 worktree_id: worktree.read(cx).id(),
1294 path: relative_path.into(),
1295 };
1296
1297 for buffer in self.opened_buffers.values() {
1298 if let Some(buffer) = buffer.upgrade(cx) {
1299 if buffer
1300 .read(cx)
1301 .file()
1302 .map_or(false, |file| *file.path() == project_path.path)
1303 {
1304 buffer.update(cx, |buffer, cx| {
1305 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1306 })?;
1307 break;
1308 }
1309 }
1310 }
1311 worktree.update(cx, |worktree, cx| {
1312 worktree
1313 .as_local_mut()
1314 .ok_or_else(|| anyhow!("not a local worktree"))?
1315 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1316 })?;
1317 cx.emit(Event::DiagnosticsUpdated(project_path));
1318 Ok(())
1319 }
1320
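    /// Formats a set of buffers. Buffers that live in remote worktrees are
    /// formatted via a `FormatBuffers` request to the host; local buffers are
    /// formatted by their language server, preferring whole-document
    /// formatting and falling back to range formatting over the entire buffer.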
1321 pub fn format(
1322 &self,
1323 buffers: HashSet<ModelHandle<Buffer>>,
1324 push_to_history: bool,
1325 cx: &mut ModelContext<Project>,
1326 ) -> Task<Result<ProjectTransaction>> {
1327 let mut local_buffers = Vec::new();
1328 let mut remote_buffers = None;
1329 for buffer_handle in buffers {
1330 let buffer = buffer_handle.read(cx);
1331 let worktree;
1332 if let Some(file) = File::from_dyn(buffer.file()) {
1333 worktree = file.worktree.clone();
1334 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1335 let lang_server;
1336 if let Some(lang) = buffer.language() {
1337 if let Some(server) = self
1338 .language_servers
1339 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1340 {
1341 lang_server = server.clone();
1342 } else {
1343 return Task::ready(Ok(Default::default()));
1344 };
1345 } else {
1346 return Task::ready(Ok(Default::default()));
1347 }
1348
1349 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1350 } else {
1351 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1352 }
1353 } else {
1354 return Task::ready(Ok(Default::default()));
1355 }
1356 }
1357
1358 let remote_buffers = self.remote_id().zip(remote_buffers);
1359 let client = self.client.clone();
1360
1361 cx.spawn(|this, mut cx| async move {
1362 let mut project_transaction = ProjectTransaction::default();
1363
1364 if let Some((project_id, remote_buffers)) = remote_buffers {
1365 let response = client
1366 .request(proto::FormatBuffers {
1367 project_id,
1368 buffer_ids: remote_buffers
1369 .iter()
1370 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1371 .collect(),
1372 })
1373 .await?
1374 .transaction
1375 .ok_or_else(|| anyhow!("missing transaction"))?;
1376 project_transaction = this
1377 .update(&mut cx, |this, cx| {
1378 this.deserialize_project_transaction(response, push_to_history, cx)
1379 })
1380 .await?;
1381 }
1382
1383 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1384 let capabilities = if let Some(capabilities) = lang_server.capabilities().await {
1385 capabilities
1386 } else {
1387 continue;
1388 };
1389
1390 let text_document = lsp::TextDocumentIdentifier::new(
1391 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1392 );
1393 let lsp_edits = if capabilities
1394 .document_formatting_provider
1395 .map_or(false, |provider| provider != lsp::OneOf::Left(false))
1396 {
1397 lang_server
1398 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1399 text_document,
1400 options: Default::default(),
1401 work_done_progress_params: Default::default(),
1402 })
1403 .await?
1404 } else if capabilities
1405 .document_range_formatting_provider
1406 .map_or(false, |provider| provider != lsp::OneOf::Left(false))
1407 {
1408 let buffer_start = lsp::Position::new(0, 0);
1409 let buffer_end = buffer
1410 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1411 .to_lsp_position();
1412 lang_server
1413 .request::<lsp::request::RangeFormatting>(
1414 lsp::DocumentRangeFormattingParams {
1415 text_document,
1416 range: lsp::Range::new(buffer_start, buffer_end),
1417 options: Default::default(),
1418 work_done_progress_params: Default::default(),
1419 },
1420 )
1421 .await?
1422 } else {
1423 continue;
1424 };
1425
1426 if let Some(lsp_edits) = lsp_edits {
1427 let edits = buffer
1428 .update(&mut cx, |buffer, cx| {
1429 buffer.edits_from_lsp(lsp_edits, None, cx)
1430 })
1431 .await?;
1432 buffer.update(&mut cx, |buffer, cx| {
1433 buffer.finalize_last_transaction();
1434 buffer.start_transaction();
1435 for (range, text) in edits {
1436 buffer.edit([range], text, cx);
1437 }
1438 if buffer.end_transaction(cx).is_some() {
1439 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1440 if !push_to_history {
1441 buffer.forget_transaction(transaction.id);
1442 }
1443 project_transaction.0.insert(cx.handle(), transaction);
1444 }
1445 });
1446 }
1447 }
1448
1449 Ok(project_transaction)
1450 })
1451 }
1452
1453 pub fn definition<T: ToPointUtf16>(
1454 &self,
1455 buffer: &ModelHandle<Buffer>,
1456 position: T,
1457 cx: &mut ModelContext<Self>,
1458 ) -> Task<Result<Vec<Location>>> {
1459 let position = position.to_point_utf16(buffer.read(cx));
1460 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1461 }
1462
1463 pub fn references<T: ToPointUtf16>(
1464 &self,
1465 buffer: &ModelHandle<Buffer>,
1466 position: T,
1467 cx: &mut ModelContext<Self>,
1468 ) -> Task<Result<Vec<Location>>> {
1469 let position = position.to_point_utf16(buffer.read(cx));
1470 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1471 }
1472
1473 pub fn document_highlights<T: ToPointUtf16>(
1474 &self,
1475 buffer: &ModelHandle<Buffer>,
1476 position: T,
1477 cx: &mut ModelContext<Self>,
1478 ) -> Task<Result<Vec<DocumentHighlight>>> {
1479 let position = position.to_point_utf16(buffer.read(cx));
1480
1481 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1482 }
1483
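    /// Searches workspace symbols across every started language server. For
    /// local projects the results are resolved back to worktree-relative
    /// paths; for remote projects the query is forwarded to the host.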
1484 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1485 if self.is_local() {
1486 let mut language_servers = HashMap::default();
1487 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1488 if let Some((worktree, language)) = self
1489 .worktree_for_id(*worktree_id, cx)
1490 .and_then(|worktree| worktree.read(cx).as_local())
1491 .zip(self.languages.get_language(language_name))
1492 {
1493 language_servers
1494 .entry(Arc::as_ptr(language_server))
1495 .or_insert((
1496 language_server.clone(),
1497 *worktree_id,
1498 worktree.abs_path().clone(),
1499 language.clone(),
1500 ));
1501 }
1502 }
1503
1504 let mut requests = Vec::new();
1505 for (language_server, _, _, _) in language_servers.values() {
1506 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1507 lsp::WorkspaceSymbolParams {
1508 query: query.to_string(),
1509 ..Default::default()
1510 },
1511 ));
1512 }
1513
1514 cx.spawn_weak(|this, cx| async move {
1515 let responses = futures::future::try_join_all(requests).await?;
1516
1517 let mut symbols = Vec::new();
1518 if let Some(this) = this.upgrade(&cx) {
1519 this.read_with(&cx, |this, cx| {
1520 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1521 language_servers.into_values().zip(responses)
1522 {
1523 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1524 |lsp_symbol| {
1525 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1526 let mut worktree_id = source_worktree_id;
1527 let path;
1528 if let Some((worktree, rel_path)) =
1529 this.find_local_worktree(&abs_path, cx)
1530 {
1531 worktree_id = worktree.read(cx).id();
1532 path = rel_path;
1533 } else {
1534 path = relativize_path(&worktree_abs_path, &abs_path);
1535 }
1536
1537 let label = language
1538 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1539 .unwrap_or_else(|| {
1540 CodeLabel::plain(lsp_symbol.name.clone(), None)
1541 });
1542 let signature = this.symbol_signature(worktree_id, &path);
1543
1544 Some(Symbol {
1545 source_worktree_id,
1546 worktree_id,
1547 language_name: language.name().to_string(),
1548 name: lsp_symbol.name,
1549 kind: lsp_symbol.kind,
1550 label,
1551 path,
1552 range: range_from_lsp(lsp_symbol.location.range),
1553 signature,
1554 })
1555 },
1556 ));
1557 }
1558 })
1559 }
1560
1561 Ok(symbols)
1562 })
1563 } else if let Some(project_id) = self.remote_id() {
1564 let request = self.client.request(proto::GetProjectSymbols {
1565 project_id,
1566 query: query.to_string(),
1567 });
1568 cx.spawn_weak(|this, cx| async move {
1569 let response = request.await?;
1570 let mut symbols = Vec::new();
1571 if let Some(this) = this.upgrade(&cx) {
1572 this.read_with(&cx, |this, _| {
1573 symbols.extend(
1574 response
1575 .symbols
1576 .into_iter()
1577 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1578 );
1579 })
1580 }
1581 Ok(symbols)
1582 })
1583 } else {
1584 Task::ready(Ok(Default::default()))
1585 }
1586 }
1587
1588 pub fn open_buffer_for_symbol(
1589 &mut self,
1590 symbol: &Symbol,
1591 cx: &mut ModelContext<Self>,
1592 ) -> Task<Result<ModelHandle<Buffer>>> {
1593 if self.is_local() {
1594 let language_server = if let Some(server) = self
1595 .language_servers
1596 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1597 {
1598 server.clone()
1599 } else {
1600 return Task::ready(Err(anyhow!(
1601 "language server for worktree and language not found"
1602 )));
1603 };
1604
1605 let worktree_abs_path = if let Some(worktree_abs_path) = self
1606 .worktree_for_id(symbol.worktree_id, cx)
1607 .and_then(|worktree| worktree.read(cx).as_local())
1608 .map(|local_worktree| local_worktree.abs_path())
1609 {
1610 worktree_abs_path
1611 } else {
1612 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1613 };
1614 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1615 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1616 uri
1617 } else {
1618 return Task::ready(Err(anyhow!("invalid symbol path")));
1619 };
1620
1621 self.open_local_buffer_via_lsp(
1622 symbol_uri,
1623 symbol.language_name.clone(),
1624 language_server,
1625 cx,
1626 )
1627 } else if let Some(project_id) = self.remote_id() {
1628 let request = self.client.request(proto::OpenBufferForSymbol {
1629 project_id,
1630 symbol: Some(serialize_symbol(symbol)),
1631 });
1632 cx.spawn(|this, mut cx| async move {
1633 let response = request.await?;
1634 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1635 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1636 .await
1637 })
1638 } else {
1639 Task::ready(Err(anyhow!("project does not have a remote id")))
1640 }
1641 }
1642
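    /// Requests completions at the given position, either from the buffer's
    /// local language server or, for remote projects, from the host via a
    /// `GetCompletions` request. Completions whose replacement range would be
    /// clipped by the current buffer contents are dropped.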
1643 pub fn completions<T: ToPointUtf16>(
1644 &self,
1645 source_buffer_handle: &ModelHandle<Buffer>,
1646 position: T,
1647 cx: &mut ModelContext<Self>,
1648 ) -> Task<Result<Vec<Completion>>> {
1649 let source_buffer_handle = source_buffer_handle.clone();
1650 let source_buffer = source_buffer_handle.read(cx);
1651 let buffer_id = source_buffer.remote_id();
1652 let language = source_buffer.language().cloned();
1653 let worktree;
1654 let buffer_abs_path;
1655 if let Some(file) = File::from_dyn(source_buffer.file()) {
1656 worktree = file.worktree.clone();
1657 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1658 } else {
1659 return Task::ready(Ok(Default::default()));
1660 };
1661
1662 let position = position.to_point_utf16(source_buffer);
1663 let anchor = source_buffer.anchor_after(position);
1664
1665 if worktree.read(cx).as_local().is_some() {
1666 let buffer_abs_path = buffer_abs_path.unwrap();
1667 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1668 server
1669 } else {
1670 return Task::ready(Ok(Default::default()));
1671 };
1672
1673 cx.spawn(|_, cx| async move {
1674 let completions = lang_server
1675 .request::<lsp::request::Completion>(lsp::CompletionParams {
1676 text_document_position: lsp::TextDocumentPositionParams::new(
1677 lsp::TextDocumentIdentifier::new(
1678 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1679 ),
1680 position.to_lsp_position(),
1681 ),
1682 context: Default::default(),
1683 work_done_progress_params: Default::default(),
1684 partial_result_params: Default::default(),
1685 })
1686 .await
1687 .context("lsp completion request failed")?;
1688
1689 let completions = if let Some(completions) = completions {
1690 match completions {
1691 lsp::CompletionResponse::Array(completions) => completions,
1692 lsp::CompletionResponse::List(list) => list.items,
1693 }
1694 } else {
1695 Default::default()
1696 };
1697
1698 source_buffer_handle.read_with(&cx, |this, _| {
1699 Ok(completions
1700 .into_iter()
1701 .filter_map(|lsp_completion| {
1702 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1703 lsp::CompletionTextEdit::Edit(edit) => {
1704 (range_from_lsp(edit.range), edit.new_text.clone())
1705 }
1706 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1707 log::info!("unsupported insert/replace completion");
1708 return None;
1709 }
1710 };
1711
1712 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1713 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1714 if clipped_start == old_range.start && clipped_end == old_range.end {
1715 Some(Completion {
1716 old_range: this.anchor_before(old_range.start)
1717 ..this.anchor_after(old_range.end),
1718 new_text,
1719 label: language
1720 .as_ref()
1721 .and_then(|l| l.label_for_completion(&lsp_completion))
1722 .unwrap_or_else(|| {
1723 CodeLabel::plain(
1724 lsp_completion.label.clone(),
1725 lsp_completion.filter_text.as_deref(),
1726 )
1727 }),
1728 lsp_completion,
1729 })
1730 } else {
1731 None
1732 }
1733 })
1734 .collect())
1735 })
1736 })
1737 } else if let Some(project_id) = self.remote_id() {
1738 let rpc = self.client.clone();
1739 let message = proto::GetCompletions {
1740 project_id,
1741 buffer_id,
1742 position: Some(language::proto::serialize_anchor(&anchor)),
1743 version: serialize_version(&source_buffer.version()),
1744 };
1745 cx.spawn_weak(|_, mut cx| async move {
1746 let response = rpc.request(message).await?;
1747
1748 source_buffer_handle
1749 .update(&mut cx, |buffer, _| {
1750 buffer.wait_for_version(deserialize_version(response.version))
1751 })
1752 .await;
1753
1754 response
1755 .completions
1756 .into_iter()
1757 .map(|completion| {
1758 language::proto::deserialize_completion(completion, language.as_ref())
1759 })
1760 .collect()
1761 })
1762 } else {
1763 Task::ready(Ok(Default::default()))
1764 }
1765 }
1766
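    /// Applies a completion's additional text edits (for example, auto-added
    /// imports) by resolving the completion item with the language server, or
    /// by asking the host to do so for remote projects. Returns the resulting
    /// transaction, which is kept out of the undo history unless
    /// `push_to_history` is set.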
1767 pub fn apply_additional_edits_for_completion(
1768 &self,
1769 buffer_handle: ModelHandle<Buffer>,
1770 completion: Completion,
1771 push_to_history: bool,
1772 cx: &mut ModelContext<Self>,
1773 ) -> Task<Result<Option<Transaction>>> {
1774 let buffer = buffer_handle.read(cx);
1775 let buffer_id = buffer.remote_id();
1776
1777 if self.is_local() {
1778 let lang_server = if let Some(language_server) = buffer.language_server() {
1779 language_server.clone()
1780 } else {
1781 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1782 };
1783
1784 cx.spawn(|_, mut cx| async move {
1785 let resolved_completion = lang_server
1786 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1787 .await?;
1788 if let Some(edits) = resolved_completion.additional_text_edits {
1789 let edits = buffer_handle
1790 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1791 .await?;
1792 buffer_handle.update(&mut cx, |buffer, cx| {
1793 buffer.finalize_last_transaction();
1794 buffer.start_transaction();
1795 for (range, text) in edits {
1796 buffer.edit([range], text, cx);
1797 }
1798 let transaction = if buffer.end_transaction(cx).is_some() {
1799 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1800 if !push_to_history {
1801 buffer.forget_transaction(transaction.id);
1802 }
1803 Some(transaction)
1804 } else {
1805 None
1806 };
1807 Ok(transaction)
1808 })
1809 } else {
1810 Ok(None)
1811 }
1812 })
1813 } else if let Some(project_id) = self.remote_id() {
1814 let client = self.client.clone();
1815 cx.spawn(|_, mut cx| async move {
1816 let response = client
1817 .request(proto::ApplyCompletionAdditionalEdits {
1818 project_id,
1819 buffer_id,
1820 completion: Some(language::proto::serialize_completion(&completion)),
1821 })
1822 .await?;
1823
1824 if let Some(transaction) = response.transaction {
1825 let transaction = language::proto::deserialize_transaction(transaction)?;
1826 buffer_handle
1827 .update(&mut cx, |buffer, _| {
1828 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1829 })
1830 .await;
1831 if push_to_history {
1832 buffer_handle.update(&mut cx, |buffer, _| {
1833 buffer.push_transaction(transaction.clone(), Instant::now());
1834 });
1835 }
1836 Ok(Some(transaction))
1837 } else {
1838 Ok(None)
1839 }
1840 })
1841 } else {
1842 Task::ready(Err(anyhow!("project does not have a remote id")))
1843 }
1844 }
1845
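    /// Fetches code actions for the given range, limited to the quickfix,
    /// refactor, and refactor-extract kinds. Falls back to a `GetCodeActions`
    /// request for remote projects.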
1846 pub fn code_actions<T: ToOffset>(
1847 &self,
1848 buffer_handle: &ModelHandle<Buffer>,
1849 range: Range<T>,
1850 cx: &mut ModelContext<Self>,
1851 ) -> Task<Result<Vec<CodeAction>>> {
1852 let buffer_handle = buffer_handle.clone();
1853 let buffer = buffer_handle.read(cx);
1854 let buffer_id = buffer.remote_id();
1855 let worktree;
1856 let buffer_abs_path;
1857 if let Some(file) = File::from_dyn(buffer.file()) {
1858 worktree = file.worktree.clone();
1859 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1860 } else {
1861 return Task::ready(Ok(Default::default()));
1862 };
1863 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1864
1865 if worktree.read(cx).as_local().is_some() {
1866 let buffer_abs_path = buffer_abs_path.unwrap();
1867 let lang_name;
1868 let lang_server;
1869 if let Some(lang) = buffer.language() {
1870 lang_name = lang.name().to_string();
1871 if let Some(server) = self
1872 .language_servers
1873 .get(&(worktree.read(cx).id(), lang_name.clone()))
1874 {
1875 lang_server = server.clone();
1876 } else {
1877 return Task::ready(Ok(Default::default()));
1878 };
1879 } else {
1880 return Task::ready(Ok(Default::default()));
1881 }
1882
1883 let lsp_range = lsp::Range::new(
1884 range.start.to_point_utf16(buffer).to_lsp_position(),
1885 range.end.to_point_utf16(buffer).to_lsp_position(),
1886 );
1887 cx.foreground().spawn(async move {
1888 if !lang_server
1889 .capabilities()
1890 .await
1891 .map_or(false, |capabilities| {
1892 capabilities.code_action_provider.is_some()
1893 })
1894 {
1895 return Ok(Default::default());
1896 }
1897
1898 Ok(lang_server
1899 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1900 text_document: lsp::TextDocumentIdentifier::new(
1901 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1902 ),
1903 range: lsp_range,
1904 work_done_progress_params: Default::default(),
1905 partial_result_params: Default::default(),
1906 context: lsp::CodeActionContext {
1907 diagnostics: Default::default(),
1908 only: Some(vec![
1909 lsp::CodeActionKind::QUICKFIX,
1910 lsp::CodeActionKind::REFACTOR,
1911 lsp::CodeActionKind::REFACTOR_EXTRACT,
1912 ]),
1913 },
1914 })
1915 .await?
1916 .unwrap_or_default()
1917 .into_iter()
1918 .filter_map(|entry| {
1919 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1920 Some(CodeAction {
1921 range: range.clone(),
1922 lsp_action,
1923 })
1924 } else {
1925 None
1926 }
1927 })
1928 .collect())
1929 })
1930 } else if let Some(project_id) = self.remote_id() {
1931 let rpc = self.client.clone();
1932 let version = buffer.version();
1933 cx.spawn_weak(|_, mut cx| async move {
1934 let response = rpc
1935 .request(proto::GetCodeActions {
1936 project_id,
1937 buffer_id,
1938 start: Some(language::proto::serialize_anchor(&range.start)),
1939 end: Some(language::proto::serialize_anchor(&range.end)),
1940 version: serialize_version(&version),
1941 })
1942 .await?;
1943
1944 buffer_handle
1945 .update(&mut cx, |buffer, _| {
1946 buffer.wait_for_version(deserialize_version(response.version))
1947 })
1948 .await;
1949
1950 response
1951 .actions
1952 .into_iter()
1953 .map(language::proto::deserialize_code_action)
1954 .collect()
1955 })
1956 } else {
1957 Task::ready(Ok(Default::default()))
1958 }
1959 }
1960
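/// Applies `action` to the project, returning the transaction that was
/// applied to each affected buffer.
///
/// For local projects the action is first resolved: if the server attached
/// `data` to it, the stored code action params are refreshed with the
/// current range and the action is resolved via `codeAction/resolve`;
/// otherwise code actions are re-requested for the same range and matched by
/// title. The resolved workspace edit, if any, is then applied through
/// `deserialize_workspace_edit`. For remote projects the action is forwarded
/// to the host and the returned project transaction is deserialized,
/// optionally pushing each buffer's transaction onto its undo history.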
1961 pub fn apply_code_action(
1962 &self,
1963 buffer_handle: ModelHandle<Buffer>,
1964 mut action: CodeAction,
1965 push_to_history: bool,
1966 cx: &mut ModelContext<Self>,
1967 ) -> Task<Result<ProjectTransaction>> {
1968 if self.is_local() {
1969 let buffer = buffer_handle.read(cx);
1970 let lang_name = if let Some(lang) = buffer.language() {
1971 lang.name().to_string()
1972 } else {
1973 return Task::ready(Ok(Default::default()));
1974 };
1975 let lang_server = if let Some(language_server) = buffer.language_server() {
1976 language_server.clone()
1977 } else {
1978 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1979 };
1980 let range = action.range.to_point_utf16(buffer);
1981
1982 cx.spawn(|this, mut cx| async move {
1983 if let Some(lsp_range) = action
1984 .lsp_action
1985 .data
1986 .as_mut()
1987 .and_then(|d| d.get_mut("codeActionParams"))
1988 .and_then(|d| d.get_mut("range"))
1989 {
1990 *lsp_range = serde_json::to_value(&lsp::Range::new(
1991 range.start.to_lsp_position(),
1992 range.end.to_lsp_position(),
1993 ))
1994 .unwrap();
1995 action.lsp_action = lang_server
1996 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1997 .await?;
1998 } else {
1999 let actions = this
2000 .update(&mut cx, |this, cx| {
2001 this.code_actions(&buffer_handle, action.range, cx)
2002 })
2003 .await?;
2004 action.lsp_action = actions
2005 .into_iter()
2006 .find(|a| a.lsp_action.title == action.lsp_action.title)
2007 .ok_or_else(|| anyhow!("code action is outdated"))?
2008 .lsp_action;
2009 }
2010
2011 if let Some(edit) = action.lsp_action.edit {
2012 Self::deserialize_workspace_edit(
2013 this,
2014 edit,
2015 push_to_history,
2016 lang_name,
2017 lang_server,
2018 &mut cx,
2019 )
2020 .await
2021 } else {
2022 Ok(ProjectTransaction::default())
2023 }
2024 })
2025 } else if let Some(project_id) = self.remote_id() {
2026 let client = self.client.clone();
2027 let request = proto::ApplyCodeAction {
2028 project_id,
2029 buffer_id: buffer_handle.read(cx).remote_id(),
2030 action: Some(language::proto::serialize_code_action(&action)),
2031 };
2032 cx.spawn(|this, mut cx| async move {
2033 let response = client
2034 .request(request)
2035 .await?
2036 .transaction
2037 .ok_or_else(|| anyhow!("missing transaction"))?;
2038 this.update(&mut cx, |this, cx| {
2039 this.deserialize_project_transaction(response, push_to_history, cx)
2040 })
2041 .await
2042 })
2043 } else {
2044 Task::ready(Err(anyhow!("project does not have a remote id")))
2045 }
2046 }
2047
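/// Applies an LSP workspace edit on the local side, performing file create,
/// rename, and delete operations through the project's `Fs` and applying
/// text edits to the affected buffers.
///
/// Both the `changes` and `document_changes` encodings are normalized into a
/// single list of document change operations. Each edited document is opened
/// (or looked up) via `open_local_buffer_via_lsp`, and the per-buffer
/// transactions are collected into the returned `ProjectTransaction`. When
/// `push_to_history` is false the transactions are dropped from each
/// buffer's undo history, but they are still reported to the caller.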
2048 async fn deserialize_workspace_edit(
2049 this: ModelHandle<Self>,
2050 edit: lsp::WorkspaceEdit,
2051 push_to_history: bool,
2052 language_name: String,
2053 language_server: Arc<LanguageServer>,
2054 cx: &mut AsyncAppContext,
2055 ) -> Result<ProjectTransaction> {
2056 let fs = this.read_with(cx, |this, _| this.fs.clone());
2057 let mut operations = Vec::new();
2058 if let Some(document_changes) = edit.document_changes {
2059 match document_changes {
2060 lsp::DocumentChanges::Edits(edits) => {
2061 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2062 }
2063 lsp::DocumentChanges::Operations(ops) => operations = ops,
2064 }
2065 } else if let Some(changes) = edit.changes {
2066 operations.extend(changes.into_iter().map(|(uri, edits)| {
2067 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2068 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2069 uri,
2070 version: None,
2071 },
2072 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2073 })
2074 }));
2075 }
2076
2077 let mut project_transaction = ProjectTransaction::default();
2078 for operation in operations {
2079 match operation {
2080 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2081 let abs_path = op
2082 .uri
2083 .to_file_path()
2084 .map_err(|_| anyhow!("can't convert URI to path"))?;
2085
2086 if let Some(parent_path) = abs_path.parent() {
2087 fs.create_dir(parent_path).await?;
2088 }
// A trailing slash on the URI denotes a directory rather than a file.
2089 if op.uri.as_str().ends_with('/') {
2090 fs.create_dir(&abs_path).await?;
2091 } else {
2092 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2093 .await?;
2094 }
2095 }
2096 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2097 let source_abs_path = op
2098 .old_uri
2099 .to_file_path()
2100 .map_err(|_| anyhow!("can't convert URI to path"))?;
2101 let target_abs_path = op
2102 .new_uri
2103 .to_file_path()
2104 .map_err(|_| anyhow!("can't convert URI to path"))?;
2105 fs.rename(
2106 &source_abs_path,
2107 &target_abs_path,
2108 op.options.map(Into::into).unwrap_or_default(),
2109 )
2110 .await?;
2111 }
2112 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2113 let abs_path = op
2114 .uri
2115 .to_file_path()
2116 .map_err(|_| anyhow!("can't convert URI to path"))?;
2117 let options = op.options.map(Into::into).unwrap_or_default();
// As with creation, a trailing slash on the URI denotes a directory.
2118 if op.uri.as_str().ends_with('/') {
2119 fs.remove_dir(&abs_path, options).await?;
2120 } else {
2121 fs.remove_file(&abs_path, options).await?;
2122 }
2123 }
2124 lsp::DocumentChangeOperation::Edit(op) => {
2125 let buffer_to_edit = this
2126 .update(cx, |this, cx| {
2127 this.open_local_buffer_via_lsp(
2128 op.text_document.uri,
2129 language_name.clone(),
2130 language_server.clone(),
2131 cx,
2132 )
2133 })
2134 .await?;
2135
2136 let edits = buffer_to_edit
2137 .update(cx, |buffer, cx| {
2138 let edits = op.edits.into_iter().map(|edit| match edit {
2139 lsp::OneOf::Left(edit) => edit,
2140 lsp::OneOf::Right(edit) => edit.text_edit,
2141 });
2142 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2143 })
2144 .await?;
2145
2146 let transaction = buffer_to_edit.update(cx, |buffer, cx| {
2147 buffer.finalize_last_transaction();
2148 buffer.start_transaction();
2149 for (range, text) in edits {
2150 buffer.edit([range], text, cx);
2151 }
2152 let transaction = if buffer.end_transaction(cx).is_some() {
2153 let transaction = buffer.finalize_last_transaction().unwrap().clone();
2154 if !push_to_history {
2155 buffer.forget_transaction(transaction.id);
2156 }
2157 Some(transaction)
2158 } else {
2159 None
2160 };
2161
2162 transaction
2163 });
2164 if let Some(transaction) = transaction {
2165 project_transaction.0.insert(buffer_to_edit, transaction);
2166 }
2167 }
2168 }
2169 }
2170
2171 Ok(project_transaction)
2172 }
2173
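/// Returns the range of the symbol at `position` that a rename would apply
/// to, or `None` if the language server reports that nothing at that
/// position can be renamed.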
2174 pub fn prepare_rename<T: ToPointUtf16>(
2175 &self,
2176 buffer: ModelHandle<Buffer>,
2177 position: T,
2178 cx: &mut ModelContext<Self>,
2179 ) -> Task<Result<Option<Range<Anchor>>>> {
2180 let position = position.to_point_utf16(buffer.read(cx));
2181 self.request_lsp(buffer, PrepareRename { position }, cx)
2182 }
2183
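/// Renames the symbol at `position` to `new_name`, returning the resulting
/// edits to each affected buffer as a `ProjectTransaction`.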
2184 pub fn perform_rename<T: ToPointUtf16>(
2185 &self,
2186 buffer: ModelHandle<Buffer>,
2187 position: T,
2188 new_name: String,
2189 push_to_history: bool,
2190 cx: &mut ModelContext<Self>,
2191 ) -> Task<Result<ProjectTransaction>> {
2192 let position = position.to_point_utf16(buffer.read(cx));
2193 self.request_lsp(
2194 buffer,
2195 PerformRename {
2196 position,
2197 new_name,
2198 push_to_history,
2199 },
2200 cx,
2201 )
2202 }
2203
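/// Searches the project for `query`, returning the matching ranges grouped
/// by buffer.
///
/// Local search runs in two stages: the visible worktrees are first split
/// across background workers that scan files on disk and emit the paths
/// whose contents may match, and those paths (together with every buffer
/// that is already open) are then opened and searched in parallel, each
/// match being converted into an anchor range in the buffer's snapshot. For
/// remote projects the query is sent to the host and the returned locations
/// are resolved into local buffer handles.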
2204 pub fn search(
2205 &self,
2206 query: SearchQuery,
2207 cx: &mut ModelContext<Self>,
2208 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2209 if self.is_local() {
2210 let snapshots = self
2211 .visible_worktrees(cx)
2212 .filter_map(|tree| {
2213 let tree = tree.read(cx).as_local()?;
2214 Some(tree.snapshot())
2215 })
2216 .collect::<Vec<_>>();
2217
2218 let background = cx.background().clone();
2219 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2220 if path_count == 0 {
2221 return Task::ready(Ok(Default::default()));
2222 }
2223 let workers = background.num_cpus().min(path_count);
2224 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2225 cx.background()
2226 .spawn({
2227 let fs = self.fs.clone();
2228 let background = cx.background().clone();
2229 let query = query.clone();
2230 async move {
2231 let fs = &fs;
2232 let query = &query;
2233 let matching_paths_tx = &matching_paths_tx;
2234 let paths_per_worker = (path_count + workers - 1) / workers;
2235 let snapshots = &snapshots;
2236 background
2237 .scoped(|scope| {
2238 for worker_ix in 0..workers {
2239 let worker_start_ix = worker_ix * paths_per_worker;
2240 let worker_end_ix = worker_start_ix + paths_per_worker;
2241 scope.spawn(async move {
2242 let mut snapshot_start_ix = 0;
2243 let mut abs_path = PathBuf::new();
2244 for snapshot in snapshots {
2245 let snapshot_end_ix =
2246 snapshot_start_ix + snapshot.visible_file_count();
2247 if worker_end_ix <= snapshot_start_ix {
2248 break;
2249 } else if worker_start_ix > snapshot_end_ix {
2250 snapshot_start_ix = snapshot_end_ix;
2251 continue;
2252 } else {
2253 let start_in_snapshot = worker_start_ix
2254 .saturating_sub(snapshot_start_ix);
2255 let end_in_snapshot =
2256 cmp::min(worker_end_ix, snapshot_end_ix)
2257 - snapshot_start_ix;
2258
2259 for entry in snapshot
2260 .files(false, start_in_snapshot)
2261 .take(end_in_snapshot - start_in_snapshot)
2262 {
2263 if matching_paths_tx.is_closed() {
2264 break;
2265 }
2266
2267 abs_path.clear();
2268 abs_path.push(&snapshot.abs_path());
2269 abs_path.push(&entry.path);
2270 let matches = if let Some(file) =
2271 fs.open_sync(&abs_path).await.log_err()
2272 {
2273 query.detect(file).unwrap_or(false)
2274 } else {
2275 false
2276 };
2277
2278 if matches {
2279 let project_path =
2280 (snapshot.id(), entry.path.clone());
2281 if matching_paths_tx
2282 .send(project_path)
2283 .await
2284 .is_err()
2285 {
2286 break;
2287 }
2288 }
2289 }
2290
2291 snapshot_start_ix = snapshot_end_ix;
2292 }
2293 }
2294 });
2295 }
2296 })
2297 .await;
2298 }
2299 })
2300 .detach();
2301
2302 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2303 let open_buffers = self
2304 .opened_buffers
2305 .values()
2306 .filter_map(|b| b.upgrade(cx))
2307 .collect::<HashSet<_>>();
2308 cx.spawn(|this, cx| async move {
2309 for buffer in &open_buffers {
2310 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2311 buffers_tx.send((buffer.clone(), snapshot)).await?;
2312 }
2313
2314 let open_buffers = Rc::new(RefCell::new(open_buffers));
2315 while let Some(project_path) = matching_paths_rx.next().await {
2316 if buffers_tx.is_closed() {
2317 break;
2318 }
2319
2320 let this = this.clone();
2321 let open_buffers = open_buffers.clone();
2322 let buffers_tx = buffers_tx.clone();
2323 cx.spawn(|mut cx| async move {
2324 if let Some(buffer) = this
2325 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2326 .await
2327 .log_err()
2328 {
2329 if open_buffers.borrow_mut().insert(buffer.clone()) {
2330 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2331 buffers_tx.send((buffer, snapshot)).await?;
2332 }
2333 }
2334
2335 Ok::<_, anyhow::Error>(())
2336 })
2337 .detach();
2338 }
2339
2340 Ok::<_, anyhow::Error>(())
2341 })
2342 .detach_and_log_err(cx);
2343
2344 let background = cx.background().clone();
2345 cx.background().spawn(async move {
2346 let query = &query;
2347 let mut matched_buffers = Vec::new();
2348 for _ in 0..workers {
2349 matched_buffers.push(HashMap::default());
2350 }
2351 background
2352 .scoped(|scope| {
2353 for worker_matched_buffers in matched_buffers.iter_mut() {
2354 let mut buffers_rx = buffers_rx.clone();
2355 scope.spawn(async move {
2356 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2357 let buffer_matches = query
2358 .search(snapshot.as_rope())
2359 .await
2360 .iter()
2361 .map(|range| {
2362 snapshot.anchor_before(range.start)
2363 ..snapshot.anchor_after(range.end)
2364 })
2365 .collect::<Vec<_>>();
2366 if !buffer_matches.is_empty() {
2367 worker_matched_buffers
2368 .insert(buffer.clone(), buffer_matches);
2369 }
2370 }
2371 });
2372 }
2373 })
2374 .await;
2375 Ok(matched_buffers.into_iter().flatten().collect())
2376 })
2377 } else if let Some(project_id) = self.remote_id() {
2378 let request = self.client.request(query.to_proto(project_id));
2379 cx.spawn(|this, mut cx| async move {
2380 let response = request.await?;
2381 let mut result = HashMap::default();
2382 for location in response.locations {
2383 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2384 let target_buffer = this
2385 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2386 .await?;
2387 let start = location
2388 .start
2389 .and_then(deserialize_anchor)
2390 .ok_or_else(|| anyhow!("missing target start"))?;
2391 let end = location
2392 .end
2393 .and_then(deserialize_anchor)
2394 .ok_or_else(|| anyhow!("missing target end"))?;
2395 result
2396 .entry(target_buffer)
2397 .or_insert(Vec::new())
2398 .push(start..end)
2399 }
2400 Ok(result)
2401 })
2402 } else {
2403 Task::ready(Ok(Default::default()))
2404 }
2405 }
2406
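/// Dispatches a typed LSP command for the given buffer.
///
/// For local buffers the request is translated to its LSP form and sent to
/// the buffer's language server, short-circuiting with a default response
/// when the server does not advertise the required capability. For remote
/// projects the request is serialized to its protobuf form and sent to the
/// host. Buffers without a local file or language server also resolve to the
/// default response.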
2407 fn request_lsp<R: LspCommand>(
2408 &self,
2409 buffer_handle: ModelHandle<Buffer>,
2410 request: R,
2411 cx: &mut ModelContext<Self>,
2412 ) -> Task<Result<R::Response>>
2413 where
2414 <R::LspRequest as lsp::request::Request>::Result: Send,
2415 {
2416 let buffer = buffer_handle.read(cx);
2417 if self.is_local() {
2418 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2419 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2420 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2421 return cx.spawn(|this, cx| async move {
2422 if !language_server
2423 .capabilities()
2424 .await
2425 .map_or(false, |capabilities| {
2426 request.check_capabilities(&capabilities)
2427 })
2428 {
2429 return Ok(Default::default());
2430 }
2431
2432 let response = language_server
2433 .request::<R::LspRequest>(lsp_params)
2434 .await
2435 .context("lsp request failed")?;
2436 request
2437 .response_from_lsp(response, this, buffer_handle, cx)
2438 .await
2439 });
2440 }
2441 } else if let Some(project_id) = self.remote_id() {
2442 let rpc = self.client.clone();
2443 let message = request.to_proto(project_id, buffer);
2444 return cx.spawn(|this, cx| async move {
2445 let response = rpc.request(message).await?;
2446 request
2447 .response_from_proto(response, this, buffer_handle, cx)
2448 .await
2449 });
2450 }
2451 Task::ready(Ok(Default::default()))
2452 }
2453
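/// Returns the worktree containing `abs_path` together with the path
/// relative to that worktree's root, creating a new local worktree when no
/// existing one contains the path.
///
/// A minimal usage sketch, mirroring the call in `test_populate_and_search`
/// at the bottom of this file (assumes a `project` handle and a test app
/// context named `cx`):
///
/// ```ignore
/// let (tree, relative_path) = project
///     .update(cx, |project, cx| {
///         project.find_or_create_local_worktree("/some/dir", true, cx)
///     })
///     .await
///     .unwrap();
/// ```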
2454 pub fn find_or_create_local_worktree(
2455 &mut self,
2456 abs_path: impl AsRef<Path>,
2457 visible: bool,
2458 cx: &mut ModelContext<Self>,
2459 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2460 let abs_path = abs_path.as_ref();
2461 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2462 Task::ready(Ok((tree.clone(), relative_path.into())))
2463 } else {
2464 let worktree = self.create_local_worktree(abs_path, visible, cx);
2465 cx.foreground()
2466 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2467 }
2468 }
2469
2470 pub fn find_local_worktree(
2471 &self,
2472 abs_path: &Path,
2473 cx: &AppContext,
2474 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2475 for tree in self.worktrees(cx) {
2476 if let Some(relative_path) = tree
2477 .read(cx)
2478 .as_local()
2479 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2480 {
2481 return Some((tree.clone(), relative_path.into()));
2482 }
2483 }
2484 None
2485 }
2486
2487 pub fn is_shared(&self) -> bool {
2488 match &self.client_state {
2489 ProjectClientState::Local { is_shared, .. } => *is_shared,
2490 ProjectClientState::Remote { .. } => false,
2491 }
2492 }
2493
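/// Creates a local worktree rooted at `abs_path`.
///
/// Concurrent calls for the same path share a single loading task via
/// `loading_local_worktrees`. Once loaded, the worktree is added to the
/// project and, if the project has a remote id, it is either shared with or
/// registered on the server depending on whether the project is currently
/// shared.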
2494 fn create_local_worktree(
2495 &mut self,
2496 abs_path: impl AsRef<Path>,
2497 visible: bool,
2498 cx: &mut ModelContext<Self>,
2499 ) -> Task<Result<ModelHandle<Worktree>>> {
2500 let fs = self.fs.clone();
2501 let client = self.client.clone();
2502 let path: Arc<Path> = abs_path.as_ref().into();
2503 let task = self
2504 .loading_local_worktrees
2505 .entry(path.clone())
2506 .or_insert_with(|| {
2507 cx.spawn(|project, mut cx| {
2508 async move {
2509 let worktree =
2510 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2511 .await;
2512 project.update(&mut cx, |project, _| {
2513 project.loading_local_worktrees.remove(&path);
2514 });
2515 let worktree = worktree?;
2516
2517 let (remote_project_id, is_shared) =
2518 project.update(&mut cx, |project, cx| {
2519 project.add_worktree(&worktree, cx);
2520 (project.remote_id(), project.is_shared())
2521 });
2522
2523 if let Some(project_id) = remote_project_id {
2524 if is_shared {
2525 worktree
2526 .update(&mut cx, |worktree, cx| {
2527 worktree.as_local_mut().unwrap().share(project_id, cx)
2528 })
2529 .await?;
2530 } else {
2531 worktree
2532 .update(&mut cx, |worktree, cx| {
2533 worktree.as_local_mut().unwrap().register(project_id, cx)
2534 })
2535 .await?;
2536 }
2537 }
2538
2539 Ok(worktree)
2540 }
2541 .map_err(Arc::new)
2542 })
2543 .shared()
2544 })
2545 .clone();
2546 cx.foreground().spawn(async move {
2547 match task.await {
2548 Ok(worktree) => Ok(worktree),
2549 Err(err) => Err(anyhow!("{}", err)),
2550 }
2551 })
2552 }
2553
2554 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2555 self.worktrees.retain(|worktree| {
2556 worktree
2557 .upgrade(cx)
2558 .map_or(false, |w| w.read(cx).id() != id)
2559 });
2560 cx.notify();
2561 }
2562
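/// Adds `worktree` to the project and subscribes to its updates.
///
/// Visible and remote worktrees (and every worktree of a shared project) are
/// retained with strong handles; all other worktrees are held weakly and
/// pruned from the list when they are released.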
2563 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2564 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2565 if worktree.read(cx).is_local() {
2566 cx.subscribe(&worktree, |this, worktree, _, cx| {
2567 this.update_local_worktree_buffers(worktree, cx);
2568 })
2569 .detach();
2570 }
2571
2572 let push_strong_handle = {
2573 let worktree = worktree.read(cx);
2574 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2575 };
2576 if push_strong_handle {
2577 self.worktrees
2578 .push(WorktreeHandle::Strong(worktree.clone()));
2579 } else {
2580 cx.observe_release(&worktree, |this, cx| {
2581 this.worktrees
2582 .retain(|worktree| worktree.upgrade(cx).is_some());
2583 cx.notify();
2584 })
2585 .detach();
2586 self.worktrees
2587 .push(WorktreeHandle::Weak(worktree.downgrade()));
2588 }
2589 cx.notify();
2590 }
2591
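/// Reconciles open buffers with a local worktree after its snapshot changes,
/// rebinding each buffer's file to the entry that now backs it, or to a
/// detached file with no entry when the path no longer exists in the
/// snapshot, and notifying collaborators of the updated file metadata.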
2592 fn update_local_worktree_buffers(
2593 &mut self,
2594 worktree_handle: ModelHandle<Worktree>,
2595 cx: &mut ModelContext<Self>,
2596 ) {
2597 let snapshot = worktree_handle.read(cx).snapshot();
2598 let mut buffers_to_delete = Vec::new();
2599 for (buffer_id, buffer) in &self.opened_buffers {
2600 if let Some(buffer) = buffer.upgrade(cx) {
2601 buffer.update(cx, |buffer, cx| {
2602 if let Some(old_file) = File::from_dyn(buffer.file()) {
2603 if old_file.worktree != worktree_handle {
2604 return;
2605 }
2606
2607 let new_file = if let Some(entry) = old_file
2608 .entry_id
2609 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2610 {
2611 File {
2612 is_local: true,
2613 entry_id: Some(entry.id),
2614 mtime: entry.mtime,
2615 path: entry.path.clone(),
2616 worktree: worktree_handle.clone(),
2617 }
2618 } else if let Some(entry) =
2619 snapshot.entry_for_path(old_file.path().as_ref())
2620 {
2621 File {
2622 is_local: true,
2623 entry_id: Some(entry.id),
2624 mtime: entry.mtime,
2625 path: entry.path.clone(),
2626 worktree: worktree_handle.clone(),
2627 }
2628 } else {
2629 File {
2630 is_local: true,
2631 entry_id: None,
2632 path: old_file.path().clone(),
2633 mtime: old_file.mtime(),
2634 worktree: worktree_handle.clone(),
2635 }
2636 };
2637
2638 if let Some(project_id) = self.remote_id() {
2639 self.client
2640 .send(proto::UpdateBufferFile {
2641 project_id,
2642 buffer_id: *buffer_id as u64,
2643 file: Some(new_file.to_proto()),
2644 })
2645 .log_err();
2646 }
2647 buffer.file_updated(Box::new(new_file), cx).detach();
2648 }
2649 });
2650 } else {
2651 buffers_to_delete.push(*buffer_id);
2652 }
2653 }
2654
2655 for buffer_id in buffers_to_delete {
2656 self.opened_buffers.remove(&buffer_id);
2657 }
2658 }
2659
2660 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2661 let new_active_entry = entry.and_then(|project_path| {
2662 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2663 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2664 Some(ProjectEntry {
2665 worktree_id: project_path.worktree_id,
2666 entry_id: entry.id,
2667 })
2668 });
2669 if new_active_entry != self.active_entry {
2670 self.active_entry = new_active_entry;
2671 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2672 }
2673 }
2674
2675 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2676 self.language_servers_with_diagnostics_running > 0
2677 }
2678
2679 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2680 let mut summary = DiagnosticSummary::default();
2681 for (_, path_summary) in self.diagnostic_summaries(cx) {
2682 summary.error_count += path_summary.error_count;
2683 summary.warning_count += path_summary.warning_count;
2684 summary.info_count += path_summary.info_count;
2685 summary.hint_count += path_summary.hint_count;
2686 }
2687 summary
2688 }
2689
2690 pub fn diagnostic_summaries<'a>(
2691 &'a self,
2692 cx: &'a AppContext,
2693 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2694 self.worktrees(cx).flat_map(move |worktree| {
2695 let worktree = worktree.read(cx);
2696 let worktree_id = worktree.id();
2697 worktree
2698 .diagnostic_summaries()
2699 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2700 })
2701 }
2702
2703 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2704 self.language_servers_with_diagnostics_running += 1;
2705 if self.language_servers_with_diagnostics_running == 1 {
2706 cx.emit(Event::DiskBasedDiagnosticsStarted);
2707 }
2708 }
2709
2710 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2711 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2712 self.language_servers_with_diagnostics_running -= 1;
2713 if self.language_servers_with_diagnostics_running == 0 {
2714 cx.emit(Event::DiskBasedDiagnosticsFinished);
2715 }
2716 }
2717
2718 pub fn active_entry(&self) -> Option<ProjectEntry> {
2719 self.active_entry
2720 }
2721
2722 // RPC message handlers
2723
2724 async fn handle_unshare_project(
2725 this: ModelHandle<Self>,
2726 _: TypedEnvelope<proto::UnshareProject>,
2727 _: Arc<Client>,
2728 mut cx: AsyncAppContext,
2729 ) -> Result<()> {
2730 this.update(&mut cx, |this, cx| this.project_unshared(cx));
2731 Ok(())
2732 }
2733
2734 async fn handle_add_collaborator(
2735 this: ModelHandle<Self>,
2736 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2737 _: Arc<Client>,
2738 mut cx: AsyncAppContext,
2739 ) -> Result<()> {
2740 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2741 let collaborator = envelope
2742 .payload
2743 .collaborator
2744 .take()
2745 .ok_or_else(|| anyhow!("empty collaborator"))?;
2746
2747 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2748 this.update(&mut cx, |this, cx| {
2749 this.collaborators
2750 .insert(collaborator.peer_id, collaborator);
2751 cx.notify();
2752 });
2753
2754 Ok(())
2755 }
2756
2757 async fn handle_remove_collaborator(
2758 this: ModelHandle<Self>,
2759 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2760 _: Arc<Client>,
2761 mut cx: AsyncAppContext,
2762 ) -> Result<()> {
2763 this.update(&mut cx, |this, cx| {
2764 let peer_id = PeerId(envelope.payload.peer_id);
2765 let replica_id = this
2766 .collaborators
2767 .remove(&peer_id)
2768 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2769 .replica_id;
2770 for buffer in this.opened_buffers.values() {
2771 if let Some(buffer) = buffer.upgrade(cx) {
2772 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2773 }
2774 }
2775 cx.notify();
2776 Ok(())
2777 })
2778 }
2779
2780 async fn handle_register_worktree(
2781 this: ModelHandle<Self>,
2782 envelope: TypedEnvelope<proto::RegisterWorktree>,
2783 client: Arc<Client>,
2784 mut cx: AsyncAppContext,
2785 ) -> Result<()> {
2786 this.update(&mut cx, |this, cx| {
2787 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2788 let replica_id = this.replica_id();
2789 let worktree = proto::Worktree {
2790 id: envelope.payload.worktree_id,
2791 root_name: envelope.payload.root_name,
2792 entries: Default::default(),
2793 diagnostic_summaries: Default::default(),
2794 visible: envelope.payload.visible,
2795 };
2796 let (worktree, load_task) =
2797 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2798 this.add_worktree(&worktree, cx);
2799 load_task.detach();
2800 Ok(())
2801 })
2802 }
2803
2804 async fn handle_unregister_worktree(
2805 this: ModelHandle<Self>,
2806 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2807 _: Arc<Client>,
2808 mut cx: AsyncAppContext,
2809 ) -> Result<()> {
2810 this.update(&mut cx, |this, cx| {
2811 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2812 this.remove_worktree(worktree_id, cx);
2813 Ok(())
2814 })
2815 }
2816
2817 async fn handle_update_worktree(
2818 this: ModelHandle<Self>,
2819 envelope: TypedEnvelope<proto::UpdateWorktree>,
2820 _: Arc<Client>,
2821 mut cx: AsyncAppContext,
2822 ) -> Result<()> {
2823 this.update(&mut cx, |this, cx| {
2824 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2825 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2826 worktree.update(cx, |worktree, _| {
2827 let worktree = worktree.as_remote_mut().unwrap();
2828 worktree.update_from_remote(envelope)
2829 })?;
2830 }
2831 Ok(())
2832 })
2833 }
2834
2835 async fn handle_update_diagnostic_summary(
2836 this: ModelHandle<Self>,
2837 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2838 _: Arc<Client>,
2839 mut cx: AsyncAppContext,
2840 ) -> Result<()> {
2841 this.update(&mut cx, |this, cx| {
2842 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2843 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2844 if let Some(summary) = envelope.payload.summary {
2845 let project_path = ProjectPath {
2846 worktree_id,
2847 path: Path::new(&summary.path).into(),
2848 };
2849 worktree.update(cx, |worktree, _| {
2850 worktree
2851 .as_remote_mut()
2852 .unwrap()
2853 .update_diagnostic_summary(project_path.path.clone(), &summary);
2854 });
2855 cx.emit(Event::DiagnosticsUpdated(project_path));
2856 }
2857 }
2858 Ok(())
2859 })
2860 }
2861
2862 async fn handle_disk_based_diagnostics_updating(
2863 this: ModelHandle<Self>,
2864 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2865 _: Arc<Client>,
2866 mut cx: AsyncAppContext,
2867 ) -> Result<()> {
2868 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2869 Ok(())
2870 }
2871
2872 async fn handle_disk_based_diagnostics_updated(
2873 this: ModelHandle<Self>,
2874 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2875 _: Arc<Client>,
2876 mut cx: AsyncAppContext,
2877 ) -> Result<()> {
2878 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2879 Ok(())
2880 }
2881
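/// Applies buffer operations received from a peer. Operations that arrive
/// before the corresponding buffer has been opened locally are accumulated
/// in an `OpenBuffer::Loading` entry; operations for weakly-held buffers are
/// dropped.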
2882 async fn handle_update_buffer(
2883 this: ModelHandle<Self>,
2884 envelope: TypedEnvelope<proto::UpdateBuffer>,
2885 _: Arc<Client>,
2886 mut cx: AsyncAppContext,
2887 ) -> Result<()> {
2888 this.update(&mut cx, |this, cx| {
2889 let payload = envelope.payload.clone();
2890 let buffer_id = payload.buffer_id;
2891 let ops = payload
2892 .operations
2893 .into_iter()
2894 .map(|op| language::proto::deserialize_operation(op))
2895 .collect::<Result<Vec<_>, _>>()?;
2896 match this.opened_buffers.entry(buffer_id) {
2897 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2898 OpenBuffer::Strong(buffer) => {
2899 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2900 }
2901 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2902 OpenBuffer::Weak(_) => {}
2903 },
2904 hash_map::Entry::Vacant(e) => {
2905 e.insert(OpenBuffer::Loading(ops));
2906 }
2907 }
2908 Ok(())
2909 })
2910 }
2911
2912 async fn handle_update_buffer_file(
2913 this: ModelHandle<Self>,
2914 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2915 _: Arc<Client>,
2916 mut cx: AsyncAppContext,
2917 ) -> Result<()> {
2918 this.update(&mut cx, |this, cx| {
2919 let payload = envelope.payload.clone();
2920 let buffer_id = payload.buffer_id;
2921 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2922 let worktree = this
2923 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2924 .ok_or_else(|| anyhow!("no such worktree"))?;
2925 let file = File::from_proto(file, worktree.clone(), cx)?;
2926 let buffer = this
2927 .opened_buffers
2928 .get_mut(&buffer_id)
2929 .and_then(|b| b.upgrade(cx))
2930 .ok_or_else(|| anyhow!("no such buffer"))?;
2931 buffer.update(cx, |buffer, cx| {
2932 buffer.file_updated(Box::new(file), cx).detach();
2933 });
2934 Ok(())
2935 })
2936 }
2937
2938 async fn handle_save_buffer(
2939 this: ModelHandle<Self>,
2940 envelope: TypedEnvelope<proto::SaveBuffer>,
2941 _: Arc<Client>,
2942 mut cx: AsyncAppContext,
2943 ) -> Result<proto::BufferSaved> {
2944 let buffer_id = envelope.payload.buffer_id;
2945 let requested_version = deserialize_version(envelope.payload.version);
2946
2947 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2948 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2949 let buffer = this
2950 .opened_buffers
2951 .get(&buffer_id)
2952 .map(|buffer| buffer.upgrade(cx).unwrap())
2953 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2954 Ok::<_, anyhow::Error>((project_id, buffer))
2955 })?;
2956 buffer
2957 .update(&mut cx, |buffer, _| {
2958 buffer.wait_for_version(requested_version)
2959 })
2960 .await;
2961
2962 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2963 Ok(proto::BufferSaved {
2964 project_id,
2965 buffer_id,
2966 version: serialize_version(&saved_version),
2967 mtime: Some(mtime.into()),
2968 })
2969 }
2970
2971 async fn handle_format_buffers(
2972 this: ModelHandle<Self>,
2973 envelope: TypedEnvelope<proto::FormatBuffers>,
2974 _: Arc<Client>,
2975 mut cx: AsyncAppContext,
2976 ) -> Result<proto::FormatBuffersResponse> {
2977 let sender_id = envelope.original_sender_id()?;
2978 let format = this.update(&mut cx, |this, cx| {
2979 let mut buffers = HashSet::default();
2980 for buffer_id in &envelope.payload.buffer_ids {
2981 buffers.insert(
2982 this.opened_buffers
2983 .get(buffer_id)
2984 .map(|buffer| buffer.upgrade(cx).unwrap())
2985 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2986 );
2987 }
2988 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2989 })?;
2990
2991 let project_transaction = format.await?;
2992 let project_transaction = this.update(&mut cx, |this, cx| {
2993 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2994 });
2995 Ok(proto::FormatBuffersResponse {
2996 transaction: Some(project_transaction),
2997 })
2998 }
2999
3000 async fn handle_get_completions(
3001 this: ModelHandle<Self>,
3002 envelope: TypedEnvelope<proto::GetCompletions>,
3003 _: Arc<Client>,
3004 mut cx: AsyncAppContext,
3005 ) -> Result<proto::GetCompletionsResponse> {
3006 let position = envelope
3007 .payload
3008 .position
3009 .and_then(language::proto::deserialize_anchor)
3010 .ok_or_else(|| anyhow!("invalid position"))?;
3011 let version = deserialize_version(envelope.payload.version);
3012 let buffer = this.read_with(&cx, |this, cx| {
3013 this.opened_buffers
3014 .get(&envelope.payload.buffer_id)
3015 .map(|buffer| buffer.upgrade(cx).unwrap())
3016 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3017 })?;
3018 buffer
3019 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
3020 .await;
3021 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3022 let completions = this
3023 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
3024 .await?;
3025
3026 Ok(proto::GetCompletionsResponse {
3027 completions: completions
3028 .iter()
3029 .map(language::proto::serialize_completion)
3030 .collect(),
3031 version: serialize_version(&version),
3032 })
3033 }
3034
3035 async fn handle_apply_additional_edits_for_completion(
3036 this: ModelHandle<Self>,
3037 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
3038 _: Arc<Client>,
3039 mut cx: AsyncAppContext,
3040 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
3041 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3042 let buffer = this
3043 .opened_buffers
3044 .get(&envelope.payload.buffer_id)
3045 .map(|buffer| buffer.upgrade(cx).unwrap())
3046 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3047 let language = buffer.read(cx).language();
3048 let completion = language::proto::deserialize_completion(
3049 envelope
3050 .payload
3051 .completion
3052 .ok_or_else(|| anyhow!("invalid completion"))?,
3053 language,
3054 )?;
3055 Ok::<_, anyhow::Error>(
3056 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3057 )
3058 })?;
3059
3060 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3061 transaction: apply_additional_edits
3062 .await?
3063 .as_ref()
3064 .map(language::proto::serialize_transaction),
3065 })
3066 }
3067
3068 async fn handle_get_code_actions(
3069 this: ModelHandle<Self>,
3070 envelope: TypedEnvelope<proto::GetCodeActions>,
3071 _: Arc<Client>,
3072 mut cx: AsyncAppContext,
3073 ) -> Result<proto::GetCodeActionsResponse> {
3074 let start = envelope
3075 .payload
3076 .start
3077 .and_then(language::proto::deserialize_anchor)
3078 .ok_or_else(|| anyhow!("invalid start"))?;
3079 let end = envelope
3080 .payload
3081 .end
3082 .and_then(language::proto::deserialize_anchor)
3083 .ok_or_else(|| anyhow!("invalid end"))?;
3084 let buffer = this.update(&mut cx, |this, cx| {
3085 this.opened_buffers
3086 .get(&envelope.payload.buffer_id)
3087 .map(|buffer| buffer.upgrade(cx).unwrap())
3088 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3089 })?;
3090 buffer
3091 .update(&mut cx, |buffer, _| {
3092 buffer.wait_for_version(deserialize_version(envelope.payload.version))
3093 })
3094 .await;
3095
3096 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3097 let code_actions = this.update(&mut cx, |this, cx| {
3098 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3099 })?;
3100
3101 Ok(proto::GetCodeActionsResponse {
3102 actions: code_actions
3103 .await?
3104 .iter()
3105 .map(language::proto::serialize_code_action)
3106 .collect(),
3107 version: serialize_version(&version),
3108 })
3109 }
3110
3111 async fn handle_apply_code_action(
3112 this: ModelHandle<Self>,
3113 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3114 _: Arc<Client>,
3115 mut cx: AsyncAppContext,
3116 ) -> Result<proto::ApplyCodeActionResponse> {
3117 let sender_id = envelope.original_sender_id()?;
3118 let action = language::proto::deserialize_code_action(
3119 envelope
3120 .payload
3121 .action
3122 .ok_or_else(|| anyhow!("invalid action"))?,
3123 )?;
3124 let apply_code_action = this.update(&mut cx, |this, cx| {
3125 let buffer = this
3126 .opened_buffers
3127 .get(&envelope.payload.buffer_id)
3128 .map(|buffer| buffer.upgrade(cx).unwrap())
3129 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3130 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3131 })?;
3132
3133 let project_transaction = apply_code_action.await?;
3134 let project_transaction = this.update(&mut cx, |this, cx| {
3135 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3136 });
3137 Ok(proto::ApplyCodeActionResponse {
3138 transaction: Some(project_transaction),
3139 })
3140 }
3141
3142 async fn handle_lsp_command<T: LspCommand>(
3143 this: ModelHandle<Self>,
3144 envelope: TypedEnvelope<T::ProtoRequest>,
3145 _: Arc<Client>,
3146 mut cx: AsyncAppContext,
3147 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3148 where
3149 <T::LspRequest as lsp::request::Request>::Result: Send,
3150 {
3151 let sender_id = envelope.original_sender_id()?;
3152 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3153 let buffer_handle = this.read_with(&cx, |this, _| {
3154 this.opened_buffers
3155 .get(&buffer_id)
3156 .map(|buffer| buffer.upgrade(&cx).unwrap())
3157 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3158 })?;
3159 let request = T::from_proto(
3160 envelope.payload,
3161 this.clone(),
3162 buffer_handle.clone(),
3163 cx.clone(),
3164 )
3165 .await?;
3166 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3167 let response = this
3168 .update(&mut cx, |this, cx| {
3169 this.request_lsp(buffer_handle, request, cx)
3170 })
3171 .await?;
3172 this.update(&mut cx, |this, cx| {
3173 Ok(T::response_to_proto(
3174 response,
3175 this,
3176 sender_id,
3177 &buffer_version,
3178 cx,
3179 ))
3180 })
3181 }
3182
3183 async fn handle_get_project_symbols(
3184 this: ModelHandle<Self>,
3185 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3186 _: Arc<Client>,
3187 mut cx: AsyncAppContext,
3188 ) -> Result<proto::GetProjectSymbolsResponse> {
3189 let symbols = this
3190 .update(&mut cx, |this, cx| {
3191 this.symbols(&envelope.payload.query, cx)
3192 })
3193 .await?;
3194
3195 Ok(proto::GetProjectSymbolsResponse {
3196 symbols: symbols.iter().map(serialize_symbol).collect(),
3197 })
3198 }
3199
3200 async fn handle_search_project(
3201 this: ModelHandle<Self>,
3202 envelope: TypedEnvelope<proto::SearchProject>,
3203 _: Arc<Client>,
3204 mut cx: AsyncAppContext,
3205 ) -> Result<proto::SearchProjectResponse> {
3206 let peer_id = envelope.original_sender_id()?;
3207 let query = SearchQuery::from_proto(envelope.payload)?;
3208 let result = this
3209 .update(&mut cx, |this, cx| this.search(query, cx))
3210 .await?;
3211
3212 this.update(&mut cx, |this, cx| {
3213 let mut locations = Vec::new();
3214 for (buffer, ranges) in result {
3215 for range in ranges {
3216 let start = serialize_anchor(&range.start);
3217 let end = serialize_anchor(&range.end);
3218 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3219 locations.push(proto::Location {
3220 buffer: Some(buffer),
3221 start: Some(start),
3222 end: Some(end),
3223 });
3224 }
3225 }
3226 Ok(proto::SearchProjectResponse { locations })
3227 })
3228 }
3229
3230 async fn handle_open_buffer_for_symbol(
3231 this: ModelHandle<Self>,
3232 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3233 _: Arc<Client>,
3234 mut cx: AsyncAppContext,
3235 ) -> Result<proto::OpenBufferForSymbolResponse> {
3236 let peer_id = envelope.original_sender_id()?;
3237 let symbol = envelope
3238 .payload
3239 .symbol
3240 .ok_or_else(|| anyhow!("invalid symbol"))?;
3241 let symbol = this.read_with(&cx, |this, _| {
3242 let symbol = this.deserialize_symbol(symbol)?;
3243 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3244 if signature == symbol.signature {
3245 Ok(symbol)
3246 } else {
3247 Err(anyhow!("invalid symbol signature"))
3248 }
3249 })?;
3250 let buffer = this
3251 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3252 .await?;
3253
3254 Ok(proto::OpenBufferForSymbolResponse {
3255 buffer: Some(this.update(&mut cx, |this, cx| {
3256 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3257 })),
3258 })
3259 }
3260
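/// Computes an opaque signature for a symbol's location, mixing in the
/// project's private nonce so that `OpenBufferForSymbol` requests can be
/// validated against symbols this project actually produced.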
3261 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3262 let mut hasher = Sha256::new();
3263 hasher.update(worktree_id.to_proto().to_be_bytes());
3264 hasher.update(path.to_string_lossy().as_bytes());
3265 hasher.update(self.nonce.to_be_bytes());
3266 hasher.finalize().as_slice().try_into().unwrap()
3267 }
3268
3269 async fn handle_open_buffer(
3270 this: ModelHandle<Self>,
3271 envelope: TypedEnvelope<proto::OpenBuffer>,
3272 _: Arc<Client>,
3273 mut cx: AsyncAppContext,
3274 ) -> Result<proto::OpenBufferResponse> {
3275 let peer_id = envelope.original_sender_id()?;
3276 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3277 let open_buffer = this.update(&mut cx, |this, cx| {
3278 this.open_buffer(
3279 ProjectPath {
3280 worktree_id,
3281 path: PathBuf::from(envelope.payload.path).into(),
3282 },
3283 cx,
3284 )
3285 });
3286
3287 let buffer = open_buffer.await?;
3288 this.update(&mut cx, |this, cx| {
3289 Ok(proto::OpenBufferResponse {
3290 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3291 })
3292 })
3293 }
3294
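/// Converts a `ProjectTransaction` into its protobuf form for `peer_id`,
/// serializing each affected buffer for that peer so the receiving side can
/// resolve buffers it has not seen before.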
3295 fn serialize_project_transaction_for_peer(
3296 &mut self,
3297 project_transaction: ProjectTransaction,
3298 peer_id: PeerId,
3299 cx: &AppContext,
3300 ) -> proto::ProjectTransaction {
3301 let mut serialized_transaction = proto::ProjectTransaction {
3302 buffers: Default::default(),
3303 transactions: Default::default(),
3304 };
3305 for (buffer, transaction) in project_transaction.0 {
3306 serialized_transaction
3307 .buffers
3308 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3309 serialized_transaction
3310 .transactions
3311 .push(language::proto::serialize_transaction(&transaction));
3312 }
3313 serialized_transaction
3314 }
3315
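/// Reconstructs a `ProjectTransaction` received over RPC: each serialized
/// buffer is resolved into a local handle, the transaction's edits are
/// awaited, and the transactions are optionally pushed onto the buffers'
/// undo histories.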
3316 fn deserialize_project_transaction(
3317 &mut self,
3318 message: proto::ProjectTransaction,
3319 push_to_history: bool,
3320 cx: &mut ModelContext<Self>,
3321 ) -> Task<Result<ProjectTransaction>> {
3322 cx.spawn(|this, mut cx| async move {
3323 let mut project_transaction = ProjectTransaction::default();
3324 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3325 let buffer = this
3326 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3327 .await?;
3328 let transaction = language::proto::deserialize_transaction(transaction)?;
3329 project_transaction.0.insert(buffer, transaction);
3330 }
3331
3332 for (buffer, transaction) in &project_transaction.0 {
3333 buffer
3334 .update(&mut cx, |buffer, _| {
3335 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3336 })
3337 .await;
3338
3339 if push_to_history {
3340 buffer.update(&mut cx, |buffer, _| {
3341 buffer.push_transaction(transaction.clone(), Instant::now());
3342 });
3343 }
3344 }
3345
3346 Ok(project_transaction)
3347 })
3348 }
3349
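/// Serializes a buffer for transmission to `peer_id`, sending the full
/// buffer state the first time a buffer is shared with that peer and only
/// the buffer id on subsequent sends.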
3350 fn serialize_buffer_for_peer(
3351 &mut self,
3352 buffer: &ModelHandle<Buffer>,
3353 peer_id: PeerId,
3354 cx: &AppContext,
3355 ) -> proto::Buffer {
3356 let buffer_id = buffer.read(cx).remote_id();
3357 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3358 if shared_buffers.insert(buffer_id) {
3359 proto::Buffer {
3360 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3361 }
3362 } else {
3363 proto::Buffer {
3364 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3365 }
3366 }
3367 }
3368
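/// Resolves a buffer received over RPC into a local model handle.
///
/// An id-only message waits until the corresponding buffer has been opened
/// locally, while a full state message constructs a new buffer, attaches its
/// file and worktree when present, and registers it with the project.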
3369 fn deserialize_buffer(
3370 &mut self,
3371 buffer: proto::Buffer,
3372 cx: &mut ModelContext<Self>,
3373 ) -> Task<Result<ModelHandle<Buffer>>> {
3374 let replica_id = self.replica_id();
3375
3376 let opened_buffer_tx = self.opened_buffer.0.clone();
3377 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3378 cx.spawn(|this, mut cx| async move {
3379 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3380 proto::buffer::Variant::Id(id) => {
3381 let buffer = loop {
3382 let buffer = this.read_with(&cx, |this, cx| {
3383 this.opened_buffers
3384 .get(&id)
3385 .and_then(|buffer| buffer.upgrade(cx))
3386 });
3387 if let Some(buffer) = buffer {
3388 break buffer;
3389 }
3390 opened_buffer_rx
3391 .next()
3392 .await
3393 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3394 };
3395 Ok(buffer)
3396 }
3397 proto::buffer::Variant::State(mut buffer) => {
3398 let mut buffer_worktree = None;
3399 let mut buffer_file = None;
3400 if let Some(file) = buffer.file.take() {
3401 this.read_with(&cx, |this, cx| {
3402 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3403 let worktree =
3404 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3405 anyhow!("no worktree found for id {}", file.worktree_id)
3406 })?;
3407 buffer_file =
3408 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3409 as Box<dyn language::File>);
3410 buffer_worktree = Some(worktree);
3411 Ok::<_, anyhow::Error>(())
3412 })?;
3413 }
3414
3415 let buffer = cx.add_model(|cx| {
3416 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3417 });
3418
3419 this.update(&mut cx, |this, cx| {
3420 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3421 })?;
3422
3423 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3424 Ok(buffer)
3425 }
3426 }
3427 })
3428 }
3429
3430 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3431 let language = self
3432 .languages
3433 .get_language(&serialized_symbol.language_name);
3434 let start = serialized_symbol
3435 .start
3436 .ok_or_else(|| anyhow!("invalid start"))?;
3437 let end = serialized_symbol
3438 .end
3439 .ok_or_else(|| anyhow!("invalid end"))?;
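// `kind` travels over the wire as the raw integer value of `lsp::SymbolKind`;
// this transmute assumes the proto field and the enum share a representation.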
3440 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3441 Ok(Symbol {
3442 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3443 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3444 language_name: serialized_symbol.language_name.clone(),
3445 label: language
3446 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3447 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3448 name: serialized_symbol.name,
3449 path: PathBuf::from(serialized_symbol.path),
3450 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3451 kind,
3452 signature: serialized_symbol
3453 .signature
3454 .try_into()
3455 .map_err(|_| anyhow!("invalid signature"))?,
3456 })
3457 }
3458
3459 async fn handle_close_buffer(
3460 _: ModelHandle<Self>,
3461 _: TypedEnvelope<proto::CloseBuffer>,
3462 _: Arc<Client>,
3463 _: AsyncAppContext,
3464 ) -> Result<()> {
3465 // TODO: use this for following
3466 Ok(())
3467 }
3468
3469 async fn handle_buffer_saved(
3470 this: ModelHandle<Self>,
3471 envelope: TypedEnvelope<proto::BufferSaved>,
3472 _: Arc<Client>,
3473 mut cx: AsyncAppContext,
3474 ) -> Result<()> {
3475 let version = deserialize_version(envelope.payload.version);
3476 let mtime = envelope
3477 .payload
3478 .mtime
3479 .ok_or_else(|| anyhow!("missing mtime"))?
3480 .into();
3481
3482 this.update(&mut cx, |this, cx| {
3483 let buffer = this
3484 .opened_buffers
3485 .get(&envelope.payload.buffer_id)
3486 .and_then(|buffer| buffer.upgrade(cx));
3487 if let Some(buffer) = buffer {
3488 buffer.update(cx, |buffer, cx| {
3489 buffer.did_save(version, mtime, None, cx);
3490 });
3491 }
3492 Ok(())
3493 })
3494 }
3495
3496 async fn handle_buffer_reloaded(
3497 this: ModelHandle<Self>,
3498 envelope: TypedEnvelope<proto::BufferReloaded>,
3499 _: Arc<Client>,
3500 mut cx: AsyncAppContext,
3501 ) -> Result<()> {
3502 let payload = envelope.payload.clone();
3503 let version = deserialize_version(payload.version);
3504 let mtime = payload
3505 .mtime
3506 .ok_or_else(|| anyhow!("missing mtime"))?
3507 .into();
3508 this.update(&mut cx, |this, cx| {
3509 let buffer = this
3510 .opened_buffers
3511 .get(&payload.buffer_id)
3512 .and_then(|buffer| buffer.upgrade(cx));
3513 if let Some(buffer) = buffer {
3514 buffer.update(cx, |buffer, cx| {
3515 buffer.did_reload(version, mtime, cx);
3516 });
3517 }
3518 Ok(())
3519 })
3520 }
3521
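/// Fuzzy-matches `query` against the paths of all visible worktrees,
/// prefixing candidates with the worktree root name when more than one
/// worktree is visible.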
3522 pub fn match_paths<'a>(
3523 &self,
3524 query: &'a str,
3525 include_ignored: bool,
3526 smart_case: bool,
3527 max_results: usize,
3528 cancel_flag: &'a AtomicBool,
3529 cx: &AppContext,
3530 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3531 let worktrees = self
3532 .worktrees(cx)
3533 .filter(|worktree| worktree.read(cx).is_visible())
3534 .collect::<Vec<_>>();
3535 let include_root_name = worktrees.len() > 1;
3536 let candidate_sets = worktrees
3537 .into_iter()
3538 .map(|worktree| CandidateSet {
3539 snapshot: worktree.read(cx).snapshot(),
3540 include_ignored,
3541 include_root_name,
3542 })
3543 .collect::<Vec<_>>();
3544
3545 let background = cx.background().clone();
3546 async move {
3547 fuzzy::match_paths(
3548 candidate_sets.as_slice(),
3549 query,
3550 smart_case,
3551 max_results,
3552 cancel_flag,
3553 background,
3554 )
3555 .await
3556 }
3557 }
3558}
3559
3560impl WorktreeHandle {
3561 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3562 match self {
3563 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3564 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3565 }
3566 }
3567}
3568
3569impl OpenBuffer {
3570 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3571 match self {
3572 OpenBuffer::Strong(handle) => Some(handle.clone()),
3573 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3574 OpenBuffer::Loading(_) => None,
3575 }
3576 }
3577}
3578
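/// Adapter that exposes a worktree snapshot as a candidate set for
/// `fuzzy::match_paths`.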
3579struct CandidateSet {
3580 snapshot: Snapshot,
3581 include_ignored: bool,
3582 include_root_name: bool,
3583}
3584
3585impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3586 type Candidates = CandidateSetIter<'a>;
3587
3588 fn id(&self) -> usize {
3589 self.snapshot.id().to_usize()
3590 }
3591
3592 fn len(&self) -> usize {
3593 if self.include_ignored {
3594 self.snapshot.file_count()
3595 } else {
3596 self.snapshot.visible_file_count()
3597 }
3598 }
3599
3600 fn prefix(&self) -> Arc<str> {
3601 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3602 self.snapshot.root_name().into()
3603 } else if self.include_root_name {
3604 format!("{}/", self.snapshot.root_name()).into()
3605 } else {
3606 "".into()
3607 }
3608 }
3609
3610 fn candidates(&'a self, start: usize) -> Self::Candidates {
3611 CandidateSetIter {
3612 traversal: self.snapshot.files(self.include_ignored, start),
3613 }
3614 }
3615}
3616
3617struct CandidateSetIter<'a> {
3618 traversal: Traversal<'a>,
3619}
3620
3621impl<'a> Iterator for CandidateSetIter<'a> {
3622 type Item = PathMatchCandidate<'a>;
3623
3624 fn next(&mut self) -> Option<Self::Item> {
3625 self.traversal.next().map(|entry| {
3626 if let EntryKind::File(char_bag) = entry.kind {
3627 PathMatchCandidate {
3628 path: &entry.path,
3629 char_bag,
3630 }
3631 } else {
3632 unreachable!()
3633 }
3634 })
3635 }
3636}
3637
3638impl Entity for Project {
3639 type Event = Event;
3640
3641 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3642 match &self.client_state {
3643 ProjectClientState::Local { remote_id_rx, .. } => {
3644 if let Some(project_id) = *remote_id_rx.borrow() {
3645 self.client
3646 .send(proto::UnregisterProject { project_id })
3647 .log_err();
3648 }
3649 }
3650 ProjectClientState::Remote { remote_id, .. } => {
3651 self.client
3652 .send(proto::LeaveProject {
3653 project_id: *remote_id,
3654 })
3655 .log_err();
3656 }
3657 }
3658 }
3659
3660 fn app_will_quit(
3661 &mut self,
3662 _: &mut MutableAppContext,
3663 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3664 let shutdown_futures = self
3665 .language_servers
3666 .drain()
3667 .filter_map(|(_, server)| server.shutdown())
3668 .collect::<Vec<_>>();
3669 Some(
3670 async move {
3671 futures::future::join_all(shutdown_futures).await;
3672 }
3673 .boxed(),
3674 )
3675 }
3676}
3677
3678impl Collaborator {
3679 fn from_proto(
3680 message: proto::Collaborator,
3681 user_store: &ModelHandle<UserStore>,
3682 cx: &mut AsyncAppContext,
3683 ) -> impl Future<Output = Result<Self>> {
3684 let user = user_store.update(cx, |user_store, cx| {
3685 user_store.fetch_user(message.user_id, cx)
3686 });
3687
3688 async move {
3689 Ok(Self {
3690 peer_id: PeerId(message.peer_id),
3691 user: user.await?,
3692 replica_id: message.replica_id as ReplicaId,
3693 })
3694 }
3695 }
3696}
3697
3698impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3699 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3700 Self {
3701 worktree_id,
3702 path: path.as_ref().into(),
3703 }
3704 }
3705}
3706
3707impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3708 fn from(options: lsp::CreateFileOptions) -> Self {
3709 Self {
3710 overwrite: options.overwrite.unwrap_or(false),
3711 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3712 }
3713 }
3714}
3715
3716impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3717 fn from(options: lsp::RenameFileOptions) -> Self {
3718 Self {
3719 overwrite: options.overwrite.unwrap_or(false),
3720 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3721 }
3722 }
3723}
3724
3725impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3726 fn from(options: lsp::DeleteFileOptions) -> Self {
3727 Self {
3728 recursive: options.recursive.unwrap_or(false),
3729 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3730 }
3731 }
3732}
3733
3734fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3735 proto::Symbol {
3736 source_worktree_id: symbol.source_worktree_id.to_proto(),
3737 worktree_id: symbol.worktree_id.to_proto(),
3738 language_name: symbol.language_name.clone(),
3739 name: symbol.name.clone(),
3740 kind: unsafe { mem::transmute(symbol.kind) },
3741 path: symbol.path.to_string_lossy().to_string(),
3742 start: Some(proto::Point {
3743 row: symbol.range.start.row,
3744 column: symbol.range.start.column,
3745 }),
3746 end: Some(proto::Point {
3747 row: symbol.range.end.row,
3748 column: symbol.range.end.column,
3749 }),
3750 signature: symbol.signature.to_vec(),
3751 }
3752}
3753
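/// Computes a relative path from `base` to `path` by walking both sets of
/// components; for example, relativizing `/a/c/d` against `/a/b` yields
/// `../c/d`.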
3754fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3755 let mut path_components = path.components();
3756 let mut base_components = base.components();
3757 let mut components: Vec<Component> = Vec::new();
3758 loop {
3759 match (path_components.next(), base_components.next()) {
3760 (None, None) => break,
3761 (Some(a), None) => {
3762 components.push(a);
3763 components.extend(path_components.by_ref());
3764 break;
3765 }
3766 (None, _) => components.push(Component::ParentDir),
3767 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3768 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3769 (Some(a), Some(_)) => {
3770 components.push(Component::ParentDir);
3771 for _ in base_components {
3772 components.push(Component::ParentDir);
3773 }
3774 components.push(a);
3775 components.extend(path_components.by_ref());
3776 break;
3777 }
3778 }
3779 }
3780 components.iter().map(|c| c.as_os_str()).collect()
3781}
3782
3783#[cfg(test)]
3784mod tests {
3785 use super::{Event, *};
3786 use fs::RealFs;
3787 use futures::StreamExt;
3788 use gpui::test::subscribe;
3789 use language::{
3790 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3791 };
3792 use lsp::Url;
3793 use serde_json::json;
3794 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3795 use unindent::Unindent as _;
3796 use util::test::temp_tree;
3797 use worktree::WorktreeHandle as _;
3798
3799 #[gpui::test]
3800 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3801 let dir = temp_tree(json!({
3802 "root": {
3803 "apple": "",
3804 "banana": {
3805 "carrot": {
3806 "date": "",
3807 "endive": "",
3808 }
3809 },
3810 "fennel": {
3811 "grape": "",
3812 }
3813 }
3814 }));
3815
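// Create a symlink to the root (which the worktree is opened through) and a
// symlinked directory inside it, so the scan has to resolve links.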
3816 let root_link_path = dir.path().join("root_link");
3817 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3818 unix::fs::symlink(
3819 &dir.path().join("root/fennel"),
3820 &dir.path().join("root/finnochio"),
3821 )
3822 .unwrap();
3823
3824 let project = Project::test(Arc::new(RealFs), cx);
3825
3826 let (tree, _) = project
3827 .update(cx, |project, cx| {
3828 project.find_or_create_local_worktree(&root_link_path, true, cx)
3829 })
3830 .await
3831 .unwrap();
3832
3833 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3834 .await;
3835 cx.read(|cx| {
3836 let tree = tree.read(cx);
3837 assert_eq!(tree.file_count(), 5);
3838 assert_eq!(
3839 tree.inode_for_path("fennel/grape"),
3840 tree.inode_for_path("finnochio/grape")
3841 );
3842 });
3843
3844 let cancel_flag = Default::default();
3845 let results = project
3846 .read_with(cx, |project, cx| {
3847 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3848 })
3849 .await;
3850 assert_eq!(
3851 results
3852 .into_iter()
3853 .map(|result| result.path)
3854 .collect::<Vec<Arc<Path>>>(),
3855 vec![
3856 PathBuf::from("banana/carrot/date").into(),
3857 PathBuf::from("banana/carrot/endive").into(),
3858 ]
3859 );
3860 }
3861
3862 #[gpui::test]
3863 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3864 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3865 let progress_token = language_server_config
3866 .disk_based_diagnostics_progress_token
3867 .clone()
3868 .unwrap();
3869
3870 let language = Arc::new(Language::new(
3871 LanguageConfig {
3872 name: "Rust".into(),
3873 path_suffixes: vec!["rs".to_string()],
3874 language_server: Some(language_server_config),
3875 ..Default::default()
3876 },
3877 Some(tree_sitter_rust::language()),
3878 ));
3879
3880 let fs = FakeFs::new(cx.background());
3881 fs.insert_tree(
3882 "/dir",
3883 json!({
3884 "a.rs": "fn a() { A }",
3885 "b.rs": "const y: i32 = 1",
3886 }),
3887 )
3888 .await;
3889
3890 let project = Project::test(fs, cx);
3891 project.update(cx, |project, _| {
3892 Arc::get_mut(&mut project.languages).unwrap().add(language);
3893 });
3894
3895 let (tree, _) = project
3896 .update(cx, |project, cx| {
3897 project.find_or_create_local_worktree("/dir", true, cx)
3898 })
3899 .await
3900 .unwrap();
3901 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3902
3903 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3904 .await;
3905
3906 // Cause worktree to start the fake language server
3907 let _buffer = project
3908 .update(cx, |project, cx| {
3909 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3910 })
3911 .await
3912 .unwrap();
3913
3914 let mut events = subscribe(&project, cx);
3915
3916 let mut fake_server = fake_servers.next().await.unwrap();
3917 fake_server.start_progress(&progress_token).await;
3918 assert_eq!(
3919 events.next().await.unwrap(),
3920 Event::DiskBasedDiagnosticsStarted
3921 );
3922
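// Simulate overlapping progress for the same token; nested starts and ends should
// not emit additional events, and only the final end should finish the pass.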
3923 fake_server.start_progress(&progress_token).await;
3924 fake_server.end_progress(&progress_token).await;
3925 fake_server.start_progress(&progress_token).await;
3926
3927 fake_server
3928 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3929 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3930 version: None,
3931 diagnostics: vec![lsp::Diagnostic {
3932 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3933 severity: Some(lsp::DiagnosticSeverity::ERROR),
3934 message: "undefined variable 'A'".to_string(),
3935 ..Default::default()
3936 }],
3937 })
3938 .await;
3939 assert_eq!(
3940 events.next().await.unwrap(),
3941 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3942 );
3943
3944 fake_server.end_progress(&progress_token).await;
3945 fake_server.end_progress(&progress_token).await;
3946 assert_eq!(
3947 events.next().await.unwrap(),
3948 Event::DiskBasedDiagnosticsUpdated
3949 );
3950 assert_eq!(
3951 events.next().await.unwrap(),
3952 Event::DiskBasedDiagnosticsFinished
3953 );
3954
3955 let buffer = project
3956 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3957 .await
3958 .unwrap();
3959
3960 buffer.read_with(cx, |buffer, _| {
3961 let snapshot = buffer.snapshot();
3962 let diagnostics = snapshot
3963 .diagnostics_in_range::<_, Point>(0..buffer.len())
3964 .collect::<Vec<_>>();
3965 assert_eq!(
3966 diagnostics,
3967 &[DiagnosticEntry {
3968 range: Point::new(0, 9)..Point::new(0, 10),
3969 diagnostic: Diagnostic {
3970 severity: lsp::DiagnosticSeverity::ERROR,
3971 message: "undefined variable 'A'".to_string(),
3972 group_id: 0,
3973 is_primary: true,
3974 ..Default::default()
3975 }
3976 }]
3977 )
3978 });
3979 }
3980
3981 #[gpui::test]
3982 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3983 let dir = temp_tree(json!({
3984 "root": {
3985 "dir1": {},
3986 "dir2": {
3987 "dir3": {}
3988 }
3989 }
3990 }));
3991
3992 let project = Project::test(Arc::new(RealFs), cx);
3993 let (tree, _) = project
3994 .update(cx, |project, cx| {
3995 project.find_or_create_local_worktree(&dir.path(), true, cx)
3996 })
3997 .await
3998 .unwrap();
3999
4000 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4001 .await;
4002
4003 let cancel_flag = Default::default();
4004 let results = project
4005 .read_with(cx, |project, cx| {
4006 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
4007 })
4008 .await;
4009
4010 assert!(results.is_empty());
4011 }
4012
4013 #[gpui::test]
4014 async fn test_definition(cx: &mut gpui::TestAppContext) {
4015 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4016 let language = Arc::new(Language::new(
4017 LanguageConfig {
4018 name: "Rust".into(),
4019 path_suffixes: vec!["rs".to_string()],
4020 language_server: Some(language_server_config),
4021 ..Default::default()
4022 },
4023 Some(tree_sitter_rust::language()),
4024 ));
4025
4026 let fs = FakeFs::new(cx.background());
4027 fs.insert_tree(
4028 "/dir",
4029 json!({
4030 "a.rs": "const fn a() { A }",
4031 "b.rs": "const y: i32 = crate::a()",
4032 }),
4033 )
4034 .await;
4035
4036 let project = Project::test(fs, cx);
4037 project.update(cx, |project, _| {
4038 Arc::get_mut(&mut project.languages).unwrap().add(language);
4039 });
4040
4041 let (tree, _) = project
4042 .update(cx, |project, cx| {
4043 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
4044 })
4045 .await
4046 .unwrap();
4047 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4048 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4049 .await;
4050
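// The worktree wraps a single file, so the buffer is addressed with an empty relative path.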
4051 let buffer = project
4052 .update(cx, |project, cx| {
4053 project.open_buffer(
4054 ProjectPath {
4055 worktree_id,
4056 path: Path::new("").into(),
4057 },
4058 cx,
4059 )
4060 })
4061 .await
4062 .unwrap();
4063
4064 let mut fake_server = fake_servers.next().await.unwrap();
4065 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
4066 let params = params.text_document_position_params;
4067 assert_eq!(
4068 params.text_document.uri.to_file_path().unwrap(),
4069 Path::new("/dir/b.rs"),
4070 );
4071 assert_eq!(params.position, lsp::Position::new(0, 22));
4072
4073 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
4074 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4075 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4076 )))
4077 });
4078
4079 let mut definitions = project
4080 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
4081 .await
4082 .unwrap();
4083
4084 assert_eq!(definitions.len(), 1);
4085 let definition = definitions.pop().unwrap();
4086 cx.update(|cx| {
4087 let target_buffer = definition.buffer.read(cx);
4088 assert_eq!(
4089 target_buffer
4090 .file()
4091 .unwrap()
4092 .as_local()
4093 .unwrap()
4094 .abs_path(cx),
4095 Path::new("/dir/a.rs"),
4096 );
4097 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
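// Navigating to the definition should have added a hidden (non-visible) worktree for the target file.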
4098 assert_eq!(
4099 list_worktrees(&project, cx),
4100 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4101 );
4102
4103 drop(definition);
4104 });
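// Dropping the definition releases the last handle to the target buffer, so its
// invisible worktree should be removed.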
4105 cx.read(|cx| {
4106 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4107 });
4108
4109 fn list_worktrees<'a>(
4110 project: &'a ModelHandle<Project>,
4111 cx: &'a AppContext,
4112 ) -> Vec<(&'a Path, bool)> {
4113 project
4114 .read(cx)
4115 .worktrees(cx)
4116 .map(|worktree| {
4117 let worktree = worktree.read(cx);
4118 (
4119 worktree.as_local().unwrap().abs_path().as_ref(),
4120 worktree.is_visible(),
4121 )
4122 })
4123 .collect::<Vec<_>>()
4124 }
4125 }
4126
4127 #[gpui::test]
4128 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4129 let fs = FakeFs::new(cx.background());
4130 fs.insert_tree(
4131 "/dir",
4132 json!({
4133 "file1": "the old contents",
4134 }),
4135 )
4136 .await;
4137
4138 let project = Project::test(fs.clone(), cx);
4139 let worktree_id = project
4140 .update(cx, |p, cx| {
4141 p.find_or_create_local_worktree("/dir", true, cx)
4142 })
4143 .await
4144 .unwrap()
4145 .0
4146 .read_with(cx, |tree, _| tree.id());
4147
4148 let buffer = project
4149 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4150 .await
4151 .unwrap();
4152 buffer
4153 .update(cx, |buffer, cx| {
4154 assert_eq!(buffer.text(), "the old contents");
4155 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4156 buffer.save(cx)
4157 })
4158 .await
4159 .unwrap();
4160
4161 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4162 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4163 }
4164
4165 #[gpui::test]
4166 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4167 let fs = FakeFs::new(cx.background());
4168 fs.insert_tree(
4169 "/dir",
4170 json!({
4171 "file1": "the old contents",
4172 }),
4173 )
4174 .await;
4175
4176 let project = Project::test(fs.clone(), cx);
4177 let worktree_id = project
4178 .update(cx, |p, cx| {
4179 p.find_or_create_local_worktree("/dir/file1", true, cx)
4180 })
4181 .await
4182 .unwrap()
4183 .0
4184 .read_with(cx, |tree, _| tree.id());
4185
4186 let buffer = project
4187 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4188 .await
4189 .unwrap();
4190 buffer
4191 .update(cx, |buffer, cx| {
4192 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4193 buffer.save(cx)
4194 })
4195 .await
4196 .unwrap();
4197
4198 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4199 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4200 }
4201
4202 #[gpui::test]
4203 async fn test_save_as(cx: &mut gpui::TestAppContext) {
4204 let fs = FakeFs::new(cx.background());
4205 fs.insert_tree("/dir", json!({})).await;
4206
4207 let project = Project::test(fs.clone(), cx);
4208 let (worktree, _) = project
4209 .update(cx, |project, cx| {
4210 project.find_or_create_local_worktree("/dir", true, cx)
4211 })
4212 .await
4213 .unwrap();
4214 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4215
4216 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
4217 buffer.update(cx, |buffer, cx| {
4218 buffer.edit([0..0], "abc", cx);
4219 assert!(buffer.is_dirty());
4220 assert!(!buffer.has_conflict());
4221 });
4222 project
4223 .update(cx, |project, cx| {
4224 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
4225 })
4226 .await
4227 .unwrap();
4228 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
4229 buffer.read_with(cx, |buffer, cx| {
4230 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
4231 assert!(!buffer.is_dirty());
4232 assert!(!buffer.has_conflict());
4233 });
4234
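// Re-opening the path that was just saved should return the same buffer rather than creating a new one.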
4235 let opened_buffer = project
4236 .update(cx, |project, cx| {
4237 project.open_buffer((worktree_id, "file1"), cx)
4238 })
4239 .await
4240 .unwrap();
4241 assert_eq!(opened_buffer, buffer);
4242 }
4243
4244 #[gpui::test(retries = 5)]
4245 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4246 let dir = temp_tree(json!({
4247 "a": {
4248 "file1": "",
4249 "file2": "",
4250 "file3": "",
4251 },
4252 "b": {
4253 "c": {
4254 "file4": "",
4255 "file5": "",
4256 }
4257 }
4258 }));
4259
4260 let project = Project::test(Arc::new(RealFs), cx);
4261 let rpc = project.read_with(cx, |p, _| p.client.clone());
4262
4263 let (tree, _) = project
4264 .update(cx, |p, cx| {
4265 p.find_or_create_local_worktree(dir.path(), true, cx)
4266 })
4267 .await
4268 .unwrap();
4269 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4270
4271 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4272 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4273 async move { buffer.await.unwrap() }
4274 };
4275 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4276 tree.read_with(cx, |tree, _| {
4277 tree.entry_for_path(path)
4278 .unwrap_or_else(|| panic!("no entry for path {}", path))
4279 .id
4280 })
4281 };
4282
4283 let buffer2 = buffer_for_path("a/file2", cx).await;
4284 let buffer3 = buffer_for_path("a/file3", cx).await;
4285 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4286 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4287
4288 let file2_id = id_for_path("a/file2", &cx);
4289 let file3_id = id_for_path("a/file3", &cx);
4290 let file4_id = id_for_path("b/c/file4", &cx);
4291
4292 // Wait for the initial scan.
4293 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4294 .await;
4295
4296 // Create a remote copy of this worktree.
4297 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4298 let (remote, load_task) = cx.update(|cx| {
4299 Worktree::remote(
4300 1,
4301 1,
4302 initial_snapshot.to_proto(&Default::default(), true),
4303 rpc.clone(),
4304 cx,
4305 )
4306 });
4307 load_task.await;
4308
4309 cx.read(|cx| {
4310 assert!(!buffer2.read(cx).is_dirty());
4311 assert!(!buffer3.read(cx).is_dirty());
4312 assert!(!buffer4.read(cx).is_dirty());
4313 assert!(!buffer5.read(cx).is_dirty());
4314 });
4315
4316 // Rename and delete files and directories.
4317 tree.flush_fs_events(&cx).await;
4318 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4319 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4320 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4321 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4322 tree.flush_fs_events(&cx).await;
4323
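// After the renames and removals above, both the rescanned local worktree and the
// updated remote worktree should report exactly these paths.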
4324 let expected_paths = vec![
4325 "a",
4326 "a/file1",
4327 "a/file2.new",
4328 "b",
4329 "d",
4330 "d/file3",
4331 "d/file4",
4332 ];
4333
4334 cx.read(|app| {
4335 assert_eq!(
4336 tree.read(app)
4337 .paths()
4338 .map(|p| p.to_str().unwrap())
4339 .collect::<Vec<_>>(),
4340 expected_paths
4341 );
4342
4343 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4344 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4345 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4346
4347 assert_eq!(
4348 buffer2.read(app).file().unwrap().path().as_ref(),
4349 Path::new("a/file2.new")
4350 );
4351 assert_eq!(
4352 buffer3.read(app).file().unwrap().path().as_ref(),
4353 Path::new("d/file3")
4354 );
4355 assert_eq!(
4356 buffer4.read(app).file().unwrap().path().as_ref(),
4357 Path::new("d/file4")
4358 );
4359 assert_eq!(
4360 buffer5.read(app).file().unwrap().path().as_ref(),
4361 Path::new("b/c/file5")
4362 );
4363
4364 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4365 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4366 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4367 assert!(buffer5.read(app).file().unwrap().is_deleted());
4368 });
4369
4370 // Update the remote worktree. Check that it becomes consistent with the
4371 // local worktree.
4372 remote.update(cx, |remote, cx| {
4373 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4374 &initial_snapshot,
4375 1,
4376 1,
4377 true,
4378 );
4379 remote
4380 .as_remote_mut()
4381 .unwrap()
4382 .snapshot
4383 .apply_remote_update(update_message)
4384 .unwrap();
4385
4386 assert_eq!(
4387 remote
4388 .paths()
4389 .map(|p| p.to_str().unwrap())
4390 .collect::<Vec<_>>(),
4391 expected_paths
4392 );
4393 });
4394 }
4395
4396 #[gpui::test]
4397 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4398 let fs = FakeFs::new(cx.background());
4399 fs.insert_tree(
4400 "/the-dir",
4401 json!({
4402 "a.txt": "a-contents",
4403 "b.txt": "b-contents",
4404 }),
4405 )
4406 .await;
4407
4408 let project = Project::test(fs.clone(), cx);
4409 let worktree_id = project
4410 .update(cx, |p, cx| {
4411 p.find_or_create_local_worktree("/the-dir", true, cx)
4412 })
4413 .await
4414 .unwrap()
4415 .0
4416 .read_with(cx, |tree, _| tree.id());
4417
4418 // Spawn multiple tasks to open paths, repeating some paths.
4419 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4420 (
4421 p.open_buffer((worktree_id, "a.txt"), cx),
4422 p.open_buffer((worktree_id, "b.txt"), cx),
4423 p.open_buffer((worktree_id, "a.txt"), cx),
4424 )
4425 });
4426
4427 let buffer_a_1 = buffer_a_1.await.unwrap();
4428 let buffer_a_2 = buffer_a_2.await.unwrap();
4429 let buffer_b = buffer_b.await.unwrap();
4430 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4431 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4432
4433 // There is only one buffer per path.
4434 let buffer_a_id = buffer_a_1.id();
4435 assert_eq!(buffer_a_2.id(), buffer_a_id);
4436
4437 // Drop one handle, then open the same path again while another handle still keeps the buffer open.
4438 drop(buffer_a_1);
4439 let buffer_a_3 = project
4440 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4441 .await
4442 .unwrap();
4443
4444 // There's still only one buffer per path.
4445 assert_eq!(buffer_a_3.id(), buffer_a_id);
4446 }
4447
4448 #[gpui::test]
4449 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4450 use std::fs;
4451
4452 let dir = temp_tree(json!({
4453 "file1": "abc",
4454 "file2": "def",
4455 "file3": "ghi",
4456 }));
4457
4458 let project = Project::test(Arc::new(RealFs), cx);
4459 let (worktree, _) = project
4460 .update(cx, |p, cx| {
4461 p.find_or_create_local_worktree(dir.path(), true, cx)
4462 })
4463 .await
4464 .unwrap();
4465 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4466
4467 worktree.flush_fs_events(&cx).await;
4468 worktree
4469 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4470 .await;
4471
4472 let buffer1 = project
4473 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4474 .await
4475 .unwrap();
4476 let events = Rc::new(RefCell::new(Vec::new()));
4477
4478 // initially, the buffer isn't dirty.
4479 buffer1.update(cx, |buffer, cx| {
4480 cx.subscribe(&buffer1, {
4481 let events = events.clone();
4482 move |_, _, event, _| match event {
4483 BufferEvent::Operation(_) => {}
4484 _ => events.borrow_mut().push(event.clone()),
4485 }
4486 })
4487 .detach();
4488
4489 assert!(!buffer.is_dirty());
4490 assert!(events.borrow().is_empty());
4491
4492 buffer.edit(vec![1..2], "", cx);
4493 });
4494
4495 // after the first edit, the buffer is dirty, and emits a dirtied event.
4496 buffer1.update(cx, |buffer, cx| {
4497 assert_eq!(buffer.text(), "ac");
4498 assert!(buffer.is_dirty());
4499 assert_eq!(
4500 *events.borrow(),
4501 &[language::Event::Edited, language::Event::Dirtied]
4502 );
4503 events.borrow_mut().clear();
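// Mark the buffer as saved at its current version; the test drives `did_save`
// directly instead of saving through the project.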
4504 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4505 });
4506
4507 // after saving, the buffer is not dirty, and emits a saved event.
4508 buffer1.update(cx, |buffer, cx| {
4509 assert!(!buffer.is_dirty());
4510 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4511 events.borrow_mut().clear();
4512
4513 buffer.edit(vec![1..1], "B", cx);
4514 buffer.edit(vec![2..2], "D", cx);
4515 });
4516
4517 // after editing again, the buffer is dirty, and emits another dirty event.
4518 buffer1.update(cx, |buffer, cx| {
4519 assert_eq!(buffer.text(), "aBDc");
4520 assert!(buffer.is_dirty());
4521 assert_eq!(
4522 *events.borrow(),
4523 &[
4524 language::Event::Edited,
4525 language::Event::Dirtied,
4526 language::Event::Edited,
4527 ],
4528 );
4529 events.borrow_mut().clear();
4530
4531 // TODO - currently, after restoring the buffer to its
4532 // previously-saved state, the buffer is still considered dirty.
4533 buffer.edit([1..3], "", cx);
4534 assert_eq!(buffer.text(), "ac");
4535 assert!(buffer.is_dirty());
4536 });
4537
4538 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4539
4540 // When a file is deleted, the buffer is considered dirty.
4541 let events = Rc::new(RefCell::new(Vec::new()));
4542 let buffer2 = project
4543 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4544 .await
4545 .unwrap();
4546 buffer2.update(cx, |_, cx| {
4547 cx.subscribe(&buffer2, {
4548 let events = events.clone();
4549 move |_, _, event, _| events.borrow_mut().push(event.clone())
4550 })
4551 .detach();
4552 });
4553
4554 fs::remove_file(dir.path().join("file2")).unwrap();
4555 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4556 assert_eq!(
4557 *events.borrow(),
4558 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4559 );
4560
4561 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4562 let events = Rc::new(RefCell::new(Vec::new()));
4563 let buffer3 = project
4564 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4565 .await
4566 .unwrap();
4567 buffer3.update(cx, |_, cx| {
4568 cx.subscribe(&buffer3, {
4569 let events = events.clone();
4570 move |_, _, event, _| events.borrow_mut().push(event.clone())
4571 })
4572 .detach();
4573 });
4574
4575 worktree.flush_fs_events(&cx).await;
4576 buffer3.update(cx, |buffer, cx| {
4577 buffer.edit(Some(0..0), "x", cx);
4578 });
4579 events.borrow_mut().clear();
4580 fs::remove_file(dir.path().join("file3")).unwrap();
4581 buffer3
4582 .condition(&cx, |_, _| !events.borrow().is_empty())
4583 .await;
4584 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4585 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4586 }
4587
4588 #[gpui::test]
4589 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4590 use std::fs;
4591
4592 let initial_contents = "aaa\nbbbbb\nc\n";
4593 let dir = temp_tree(json!({ "the-file": initial_contents }));
4594
4595 let project = Project::test(Arc::new(RealFs), cx);
4596 let (worktree, _) = project
4597 .update(cx, |p, cx| {
4598 p.find_or_create_local_worktree(dir.path(), true, cx)
4599 })
4600 .await
4601 .unwrap();
4602 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4603
4604 worktree
4605 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4606 .await;
4607
4608 let abs_path = dir.path().join("the-file");
4609 let buffer = project
4610 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4611 .await
4612 .unwrap();
4613
4614 // TODO
4615 // Add a cursor on each row.
4616 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4617 // assert!(!buffer.is_dirty());
4618 // buffer.add_selection_set(
4619 // &(0..3)
4620 // .map(|row| Selection {
4621 // id: row as usize,
4622 // start: Point::new(row, 1),
4623 // end: Point::new(row, 1),
4624 // reversed: false,
4625 // goal: SelectionGoal::None,
4626 // })
4627 // .collect::<Vec<_>>(),
4628 // cx,
4629 // )
4630 // });
4631
4632 // Change the file on disk, adding two new lines of text, and removing
4633 // one line.
4634 buffer.read_with(cx, |buffer, _| {
4635 assert!(!buffer.is_dirty());
4636 assert!(!buffer.has_conflict());
4637 });
4638 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4639 fs::write(&abs_path, new_contents).unwrap();
4640
4641 // Because the buffer was not modified, it is reloaded from disk. Its
4642 // contents are edited according to the diff between the old and new
4643 // file contents.
4644 buffer
4645 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4646 .await;
4647
4648 buffer.update(cx, |buffer, _| {
4649 assert_eq!(buffer.text(), new_contents);
4650 assert!(!buffer.is_dirty());
4651 assert!(!buffer.has_conflict());
4652
4653 // TODO
4654 // let cursor_positions = buffer
4655 // .selection_set(selection_set_id)
4656 // .unwrap()
4657 // .selections::<Point>(&*buffer)
4658 // .map(|selection| {
4659 // assert_eq!(selection.start, selection.end);
4660 // selection.start
4661 // })
4662 // .collect::<Vec<_>>();
4663 // assert_eq!(
4664 // cursor_positions,
4665 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4666 // );
4667 });
4668
4669 // Modify the buffer
4670 buffer.update(cx, |buffer, cx| {
4671 buffer.edit(vec![0..0], " ", cx);
4672 assert!(buffer.is_dirty());
4673 assert!(!buffer.has_conflict());
4674 });
4675
4676 // Change the file on disk again, adding blank lines to the beginning.
4677 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4678
4679 // Because the buffer is modified, it doesn't reload from disk, but is
4680 // marked as having a conflict.
4681 buffer
4682 .condition(&cx, |buffer, _| buffer.has_conflict())
4683 .await;
4684 }
4685
4686 #[gpui::test]
4687 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4688 let fs = FakeFs::new(cx.background());
4689 fs.insert_tree(
4690 "/the-dir",
4691 json!({
4692 "a.rs": "
4693 fn foo(mut v: Vec<usize>) {
4694 for x in &v {
4695 v.push(1);
4696 }
4697 }
4698 "
4699 .unindent(),
4700 }),
4701 )
4702 .await;
4703
4704 let project = Project::test(fs.clone(), cx);
4705 let (worktree, _) = project
4706 .update(cx, |p, cx| {
4707 p.find_or_create_local_worktree("/the-dir", true, cx)
4708 })
4709 .await
4710 .unwrap();
4711 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4712
4713 let buffer = project
4714 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4715 .await
4716 .unwrap();
4717
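// Publish a group of related diagnostics: hint entries reference their primary
// diagnostic via relatedInformation, so they should share the primary's group_id.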
4718 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4719 let message = lsp::PublishDiagnosticsParams {
4720 uri: buffer_uri.clone(),
4721 diagnostics: vec![
4722 lsp::Diagnostic {
4723 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4724 severity: Some(DiagnosticSeverity::WARNING),
4725 message: "error 1".to_string(),
4726 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4727 location: lsp::Location {
4728 uri: buffer_uri.clone(),
4729 range: lsp::Range::new(
4730 lsp::Position::new(1, 8),
4731 lsp::Position::new(1, 9),
4732 ),
4733 },
4734 message: "error 1 hint 1".to_string(),
4735 }]),
4736 ..Default::default()
4737 },
4738 lsp::Diagnostic {
4739 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4740 severity: Some(DiagnosticSeverity::HINT),
4741 message: "error 1 hint 1".to_string(),
4742 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4743 location: lsp::Location {
4744 uri: buffer_uri.clone(),
4745 range: lsp::Range::new(
4746 lsp::Position::new(1, 8),
4747 lsp::Position::new(1, 9),
4748 ),
4749 },
4750 message: "original diagnostic".to_string(),
4751 }]),
4752 ..Default::default()
4753 },
4754 lsp::Diagnostic {
4755 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4756 severity: Some(DiagnosticSeverity::ERROR),
4757 message: "error 2".to_string(),
4758 related_information: Some(vec![
4759 lsp::DiagnosticRelatedInformation {
4760 location: lsp::Location {
4761 uri: buffer_uri.clone(),
4762 range: lsp::Range::new(
4763 lsp::Position::new(1, 13),
4764 lsp::Position::new(1, 15),
4765 ),
4766 },
4767 message: "error 2 hint 1".to_string(),
4768 },
4769 lsp::DiagnosticRelatedInformation {
4770 location: lsp::Location {
4771 uri: buffer_uri.clone(),
4772 range: lsp::Range::new(
4773 lsp::Position::new(1, 13),
4774 lsp::Position::new(1, 15),
4775 ),
4776 },
4777 message: "error 2 hint 2".to_string(),
4778 },
4779 ]),
4780 ..Default::default()
4781 },
4782 lsp::Diagnostic {
4783 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4784 severity: Some(DiagnosticSeverity::HINT),
4785 message: "error 2 hint 1".to_string(),
4786 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4787 location: lsp::Location {
4788 uri: buffer_uri.clone(),
4789 range: lsp::Range::new(
4790 lsp::Position::new(2, 8),
4791 lsp::Position::new(2, 17),
4792 ),
4793 },
4794 message: "original diagnostic".to_string(),
4795 }]),
4796 ..Default::default()
4797 },
4798 lsp::Diagnostic {
4799 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4800 severity: Some(DiagnosticSeverity::HINT),
4801 message: "error 2 hint 2".to_string(),
4802 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4803 location: lsp::Location {
4804 uri: buffer_uri.clone(),
4805 range: lsp::Range::new(
4806 lsp::Position::new(2, 8),
4807 lsp::Position::new(2, 17),
4808 ),
4809 },
4810 message: "original diagnostic".to_string(),
4811 }]),
4812 ..Default::default()
4813 },
4814 ],
4815 version: None,
4816 };
4817
4818 project
4819 .update(cx, |p, cx| {
4820 p.update_diagnostics(message, &Default::default(), cx)
4821 })
4822 .unwrap();
4823 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4824
4825 assert_eq!(
4826 buffer
4827 .diagnostics_in_range::<_, Point>(0..buffer.len())
4828 .collect::<Vec<_>>(),
4829 &[
4830 DiagnosticEntry {
4831 range: Point::new(1, 8)..Point::new(1, 9),
4832 diagnostic: Diagnostic {
4833 severity: DiagnosticSeverity::WARNING,
4834 message: "error 1".to_string(),
4835 group_id: 0,
4836 is_primary: true,
4837 ..Default::default()
4838 }
4839 },
4840 DiagnosticEntry {
4841 range: Point::new(1, 8)..Point::new(1, 9),
4842 diagnostic: Diagnostic {
4843 severity: DiagnosticSeverity::HINT,
4844 message: "error 1 hint 1".to_string(),
4845 group_id: 0,
4846 is_primary: false,
4847 ..Default::default()
4848 }
4849 },
4850 DiagnosticEntry {
4851 range: Point::new(1, 13)..Point::new(1, 15),
4852 diagnostic: Diagnostic {
4853 severity: DiagnosticSeverity::HINT,
4854 message: "error 2 hint 1".to_string(),
4855 group_id: 1,
4856 is_primary: false,
4857 ..Default::default()
4858 }
4859 },
4860 DiagnosticEntry {
4861 range: Point::new(1, 13)..Point::new(1, 15),
4862 diagnostic: Diagnostic {
4863 severity: DiagnosticSeverity::HINT,
4864 message: "error 2 hint 2".to_string(),
4865 group_id: 1,
4866 is_primary: false,
4867 ..Default::default()
4868 }
4869 },
4870 DiagnosticEntry {
4871 range: Point::new(2, 8)..Point::new(2, 17),
4872 diagnostic: Diagnostic {
4873 severity: DiagnosticSeverity::ERROR,
4874 message: "error 2".to_string(),
4875 group_id: 1,
4876 is_primary: true,
4877 ..Default::default()
4878 }
4879 }
4880 ]
4881 );
4882
4883 assert_eq!(
4884 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4885 &[
4886 DiagnosticEntry {
4887 range: Point::new(1, 8)..Point::new(1, 9),
4888 diagnostic: Diagnostic {
4889 severity: DiagnosticSeverity::WARNING,
4890 message: "error 1".to_string(),
4891 group_id: 0,
4892 is_primary: true,
4893 ..Default::default()
4894 }
4895 },
4896 DiagnosticEntry {
4897 range: Point::new(1, 8)..Point::new(1, 9),
4898 diagnostic: Diagnostic {
4899 severity: DiagnosticSeverity::HINT,
4900 message: "error 1 hint 1".to_string(),
4901 group_id: 0,
4902 is_primary: false,
4903 ..Default::default()
4904 }
4905 },
4906 ]
4907 );
4908 assert_eq!(
4909 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4910 &[
4911 DiagnosticEntry {
4912 range: Point::new(1, 13)..Point::new(1, 15),
4913 diagnostic: Diagnostic {
4914 severity: DiagnosticSeverity::HINT,
4915 message: "error 2 hint 1".to_string(),
4916 group_id: 1,
4917 is_primary: false,
4918 ..Default::default()
4919 }
4920 },
4921 DiagnosticEntry {
4922 range: Point::new(1, 13)..Point::new(1, 15),
4923 diagnostic: Diagnostic {
4924 severity: DiagnosticSeverity::HINT,
4925 message: "error 2 hint 2".to_string(),
4926 group_id: 1,
4927 is_primary: false,
4928 ..Default::default()
4929 }
4930 },
4931 DiagnosticEntry {
4932 range: Point::new(2, 8)..Point::new(2, 17),
4933 diagnostic: Diagnostic {
4934 severity: DiagnosticSeverity::ERROR,
4935 message: "error 2".to_string(),
4936 group_id: 1,
4937 is_primary: true,
4938 ..Default::default()
4939 }
4940 }
4941 ]
4942 );
4943 }
4944
4945 #[gpui::test]
4946 async fn test_rename(cx: &mut gpui::TestAppContext) {
4947 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4948 let language = Arc::new(Language::new(
4949 LanguageConfig {
4950 name: "Rust".into(),
4951 path_suffixes: vec!["rs".to_string()],
4952 language_server: Some(language_server_config),
4953 ..Default::default()
4954 },
4955 Some(tree_sitter_rust::language()),
4956 ));
4957
4958 let fs = FakeFs::new(cx.background());
4959 fs.insert_tree(
4960 "/dir",
4961 json!({
4962 "one.rs": "const ONE: usize = 1;",
4963 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4964 }),
4965 )
4966 .await;
4967
4968 let project = Project::test(fs.clone(), cx);
4969 project.update(cx, |project, _| {
4970 Arc::get_mut(&mut project.languages).unwrap().add(language);
4971 });
4972
4973 let (tree, _) = project
4974 .update(cx, |project, cx| {
4975 project.find_or_create_local_worktree("/dir", true, cx)
4976 })
4977 .await
4978 .unwrap();
4979 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4980 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4981 .await;
4982
4983 let buffer = project
4984 .update(cx, |project, cx| {
4985 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4986 })
4987 .await
4988 .unwrap();
4989
4990 let mut fake_server = fake_servers.next().await.unwrap();
4991
4992 let response = project.update(cx, |project, cx| {
4993 project.prepare_rename(buffer.clone(), 7, cx)
4994 });
4995 fake_server
4996 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4997 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4998 assert_eq!(params.position, lsp::Position::new(0, 7));
4999 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5000 lsp::Position::new(0, 6),
5001 lsp::Position::new(0, 9),
5002 )))
5003 })
5004 .next()
5005 .await
5006 .unwrap();
5007 let range = response.await.unwrap().unwrap();
5008 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
5009 assert_eq!(range, 6..9);
5010
5011 let response = project.update(cx, |project, cx| {
5012 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
5013 });
5014 fake_server
5015 .handle_request::<lsp::request::Rename, _>(|params, _| {
5016 assert_eq!(
5017 params.text_document_position.text_document.uri.as_str(),
5018 "file:///dir/one.rs"
5019 );
5020 assert_eq!(
5021 params.text_document_position.position,
5022 lsp::Position::new(0, 7)
5023 );
5024 assert_eq!(params.new_name, "THREE");
5025 Some(lsp::WorkspaceEdit {
5026 changes: Some(
5027 [
5028 (
5029 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
5030 vec![lsp::TextEdit::new(
5031 lsp::Range::new(
5032 lsp::Position::new(0, 6),
5033 lsp::Position::new(0, 9),
5034 ),
5035 "THREE".to_string(),
5036 )],
5037 ),
5038 (
5039 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
5040 vec![
5041 lsp::TextEdit::new(
5042 lsp::Range::new(
5043 lsp::Position::new(0, 24),
5044 lsp::Position::new(0, 27),
5045 ),
5046 "THREE".to_string(),
5047 ),
5048 lsp::TextEdit::new(
5049 lsp::Range::new(
5050 lsp::Position::new(0, 35),
5051 lsp::Position::new(0, 38),
5052 ),
5053 "THREE".to_string(),
5054 ),
5055 ],
5056 ),
5057 ]
5058 .into_iter()
5059 .collect(),
5060 ),
5061 ..Default::default()
5062 })
5063 })
5064 .next()
5065 .await
5066 .unwrap();
5067 let mut transaction = response.await.unwrap().0;
5068 assert_eq!(transaction.len(), 2);
5069 assert_eq!(
5070 transaction
5071 .remove_entry(&buffer)
5072 .unwrap()
5073 .0
5074 .read_with(cx, |buffer, _| buffer.text()),
5075 "const THREE: usize = 1;"
5076 );
5077 assert_eq!(
5078 transaction
5079 .into_keys()
5080 .next()
5081 .unwrap()
5082 .read_with(cx, |buffer, _| buffer.text()),
5083 "const TWO: usize = one::THREE + one::THREE;"
5084 );
5085 }
5086
5087 #[gpui::test]
5088 async fn test_search(cx: &mut gpui::TestAppContext) {
5089 let fs = FakeFs::new(cx.background());
5090 fs.insert_tree(
5091 "/dir",
5092 json!({
5093 "one.rs": "const ONE: usize = 1;",
5094 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5095 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5096 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5097 }),
5098 )
5099 .await;
5100 let project = Project::test(fs.clone(), cx);
5101 let (tree, _) = project
5102 .update(cx, |project, cx| {
5103 project.find_or_create_local_worktree("/dir", true, cx)
5104 })
5105 .await
5106 .unwrap();
5107 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5108 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5109 .await;
5110
5111 assert_eq!(
5112 search(&project, SearchQuery::text("TWO", false, true), cx)
5113 .await
5114 .unwrap(),
5115 HashMap::from_iter([
5116 ("two.rs".to_string(), vec![6..9]),
5117 ("three.rs".to_string(), vec![37..40])
5118 ])
5119 );
5120
5121 let buffer_4 = project
5122 .update(cx, |project, cx| {
5123 project.open_buffer((worktree_id, "four.rs"), cx)
5124 })
5125 .await
5126 .unwrap();
5127 buffer_4.update(cx, |buffer, cx| {
5128 buffer.edit([20..28, 31..43], "two::TWO", cx);
5129 });
5130
5131 assert_eq!(
5132 search(&project, SearchQuery::text("TWO", false, true), cx)
5133 .await
5134 .unwrap(),
5135 HashMap::from_iter([
5136 ("two.rs".to_string(), vec![6..9]),
5137 ("three.rs".to_string(), vec![37..40]),
5138 ("four.rs".to_string(), vec![25..28, 36..39])
5139 ])
5140 );
5141
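// Runs the query across the project and flattens the matches into
// (file path, byte ranges) pairs for easier assertions.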
5142 async fn search(
5143 project: &ModelHandle<Project>,
5144 query: SearchQuery,
5145 cx: &mut gpui::TestAppContext,
5146 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5147 let results = project
5148 .update(cx, |project, cx| project.search(query, cx))
5149 .await?;
5150
5151 Ok(results
5152 .into_iter()
5153 .map(|(buffer, ranges)| {
5154 buffer.read_with(cx, |buffer, _| {
5155 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5156 let ranges = ranges
5157 .into_iter()
5158 .map(|range| range.to_offset(buffer))
5159 .collect::<Vec<_>>();
5160 (path, ranges)
5161 })
5162 })
5163 .collect())
5164 }
5165 }
5166}