1pub mod fs;
2mod ignore;
3mod lsp_command;
4pub mod search;
5pub mod worktree;
6
7use anyhow::{anyhow, Context, Result};
8use client::{proto, Client, PeerId, TypedEnvelope, User, UserStore};
9use clock::ReplicaId;
10use collections::{hash_map, HashMap, HashSet};
11use futures::{future::Shared, Future, FutureExt, StreamExt, TryFutureExt};
12use fuzzy::{PathMatch, PathMatchCandidate, PathMatchCandidateSet};
13use gpui::{
14 AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext, Task,
15 UpgradeModelHandle, WeakModelHandle,
16};
17use language::{
18 proto::{deserialize_anchor, serialize_anchor},
19 range_from_lsp, Anchor, AnchorRangeExt, Bias, Buffer, CodeAction, CodeLabel, Completion,
20 Diagnostic, DiagnosticEntry, File as _, Language, LanguageRegistry, Operation, PointUtf16,
21 ToLspPosition, ToOffset, ToPointUtf16, Transaction,
22};
23use lsp::{DiagnosticSeverity, DocumentHighlightKind, LanguageServer};
24use lsp_command::*;
25use postage::watch;
26use rand::prelude::*;
27use search::SearchQuery;
28use sha2::{Digest, Sha256};
29use smol::block_on;
30use std::{
31 cell::RefCell,
32 cmp,
33 convert::TryInto,
34 hash::Hash,
35 mem,
36 ops::Range,
37 path::{Component, Path, PathBuf},
38 rc::Rc,
39 sync::{atomic::AtomicBool, Arc},
40 time::Instant,
41};
42use util::{post_inc, ResultExt, TryFutureExt as _};
43
44pub use fs::*;
45pub use worktree::*;
46
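/// The state of an open project: its worktrees, open buffers, running language
/// servers, collaborators, and the client state used to host or join it over RPC.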
47pub struct Project {
48 worktrees: Vec<WorktreeHandle>,
49 active_entry: Option<ProjectEntry>,
50 languages: Arc<LanguageRegistry>,
51 language_servers: HashMap<(WorktreeId, String), Arc<LanguageServer>>,
52 started_language_servers:
53 HashMap<(WorktreeId, String), Shared<Task<Option<Arc<LanguageServer>>>>>,
54 client: Arc<client::Client>,
55 user_store: ModelHandle<UserStore>,
56 fs: Arc<dyn Fs>,
57 client_state: ProjectClientState,
58 collaborators: HashMap<PeerId, Collaborator>,
59 subscriptions: Vec<client::Subscription>,
60 language_servers_with_diagnostics_running: isize,
61 opened_buffer: (Rc<RefCell<watch::Sender<()>>>, watch::Receiver<()>),
62 shared_buffers: HashMap<PeerId, HashSet<u64>>,
63 loading_buffers: HashMap<
64 ProjectPath,
65 postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
66 >,
67 loading_local_worktrees:
68 HashMap<Arc<Path>, Shared<Task<Result<ModelHandle<Worktree>, Arc<anyhow::Error>>>>>,
69 opened_buffers: HashMap<u64, OpenBuffer>,
70 nonce: u128,
71}
72
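/// A buffer known to the project. Buffers are held strongly while the project is
/// shared or remote and weakly otherwise; `Loading` accumulates operations that
/// arrive before the buffer itself has been registered.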
73enum OpenBuffer {
74 Strong(ModelHandle<Buffer>),
75 Weak(WeakModelHandle<Buffer>),
76 Loading(Vec<Operation>),
77}
78
79enum WorktreeHandle {
80 Strong(ModelHandle<Worktree>),
81 Weak(WeakModelHandle<Worktree>),
82}
83
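/// Whether this project is hosted locally or was joined from a remote peer, plus
/// the connection state specific to each mode.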
84enum ProjectClientState {
85 Local {
86 is_shared: bool,
87 remote_id_tx: watch::Sender<Option<u64>>,
88 remote_id_rx: watch::Receiver<Option<u64>>,
89 _maintain_remote_id_task: Task<Option<()>>,
90 },
91 Remote {
92 sharing_has_stopped: bool,
93 remote_id: u64,
94 replica_id: ReplicaId,
95 },
96}
97
98#[derive(Clone, Debug)]
99pub struct Collaborator {
100 pub user: Arc<User>,
101 pub peer_id: PeerId,
102 pub replica_id: ReplicaId,
103}
104
105#[derive(Clone, Debug, PartialEq)]
106pub enum Event {
107 ActiveEntryChanged(Option<ProjectEntry>),
108 WorktreeRemoved(WorktreeId),
109 DiskBasedDiagnosticsStarted,
110 DiskBasedDiagnosticsUpdated,
111 DiskBasedDiagnosticsFinished,
112 DiagnosticsUpdated(ProjectPath),
113}
114
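/// A path to an entry within the project, identified by its worktree and the
/// path relative to that worktree's root.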
115#[derive(Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord)]
116pub struct ProjectPath {
117 pub worktree_id: WorktreeId,
118 pub path: Arc<Path>,
119}
120
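/// Counts of primary diagnostics for a path, broken down by severity.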
121#[derive(Clone, Debug, Default, PartialEq)]
122pub struct DiagnosticSummary {
123 pub error_count: usize,
124 pub warning_count: usize,
125 pub info_count: usize,
126 pub hint_count: usize,
127}
128
129#[derive(Debug)]
130pub struct Location {
131 pub buffer: ModelHandle<Buffer>,
132 pub range: Range<language::Anchor>,
133}
134
135#[derive(Debug)]
136pub struct DocumentHighlight {
137 pub range: Range<language::Anchor>,
138 pub kind: DocumentHighlightKind,
139}
140
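/// A symbol produced by a project-wide symbol search, including the worktree it
/// resolves to and a signature derived from its worktree and path.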
141#[derive(Clone, Debug)]
142pub struct Symbol {
143 pub source_worktree_id: WorktreeId,
144 pub worktree_id: WorktreeId,
145 pub language_name: String,
146 pub path: PathBuf,
147 pub label: CodeLabel,
148 pub name: String,
149 pub kind: lsp::SymbolKind,
150 pub range: Range<PointUtf16>,
151 pub signature: [u8; 32],
152}
153
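/// The per-buffer transactions produced by an operation that can touch multiple
/// buffers, such as formatting or applying a code action.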
154#[derive(Default)]
155pub struct ProjectTransaction(pub HashMap<ModelHandle<Buffer>, language::Transaction>);
156
157impl DiagnosticSummary {
158 fn new<'a, T: 'a>(diagnostics: impl IntoIterator<Item = &'a DiagnosticEntry<T>>) -> Self {
159 let mut this = Self {
160 error_count: 0,
161 warning_count: 0,
162 info_count: 0,
163 hint_count: 0,
164 };
165
166 for entry in diagnostics {
167 if entry.diagnostic.is_primary {
168 match entry.diagnostic.severity {
169 DiagnosticSeverity::ERROR => this.error_count += 1,
170 DiagnosticSeverity::WARNING => this.warning_count += 1,
171 DiagnosticSeverity::INFORMATION => this.info_count += 1,
172 DiagnosticSeverity::HINT => this.hint_count += 1,
173 _ => {}
174 }
175 }
176 }
177
178 this
179 }
180
181 pub fn to_proto(&self, path: &Path) -> proto::DiagnosticSummary {
182 proto::DiagnosticSummary {
183 path: path.to_string_lossy().to_string(),
184 error_count: self.error_count as u32,
185 warning_count: self.warning_count as u32,
186 info_count: self.info_count as u32,
187 hint_count: self.hint_count as u32,
188 }
189 }
190}
191
192#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
193pub struct ProjectEntry {
194 pub worktree_id: WorktreeId,
195 pub entry_id: usize,
196}
197
198impl Project {
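    /// Registers the RPC message and request handlers through which a project
    /// responds to collaborators.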
199 pub fn init(client: &Arc<Client>) {
200 client.add_entity_message_handler(Self::handle_add_collaborator);
201 client.add_entity_message_handler(Self::handle_buffer_reloaded);
202 client.add_entity_message_handler(Self::handle_buffer_saved);
203 client.add_entity_message_handler(Self::handle_close_buffer);
204 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updated);
205 client.add_entity_message_handler(Self::handle_disk_based_diagnostics_updating);
206 client.add_entity_message_handler(Self::handle_remove_collaborator);
207 client.add_entity_message_handler(Self::handle_register_worktree);
208 client.add_entity_message_handler(Self::handle_unregister_worktree);
209 client.add_entity_message_handler(Self::handle_unshare_project);
210 client.add_entity_message_handler(Self::handle_update_buffer_file);
211 client.add_entity_message_handler(Self::handle_update_buffer);
212 client.add_entity_message_handler(Self::handle_update_diagnostic_summary);
213 client.add_entity_message_handler(Self::handle_update_worktree);
214 client.add_entity_request_handler(Self::handle_apply_additional_edits_for_completion);
215 client.add_entity_request_handler(Self::handle_apply_code_action);
216 client.add_entity_request_handler(Self::handle_format_buffers);
217 client.add_entity_request_handler(Self::handle_get_code_actions);
218 client.add_entity_request_handler(Self::handle_get_completions);
219 client.add_entity_request_handler(Self::handle_lsp_command::<GetDefinition>);
220 client.add_entity_request_handler(Self::handle_lsp_command::<GetDocumentHighlights>);
221 client.add_entity_request_handler(Self::handle_lsp_command::<GetReferences>);
222 client.add_entity_request_handler(Self::handle_lsp_command::<PrepareRename>);
223 client.add_entity_request_handler(Self::handle_lsp_command::<PerformRename>);
224 client.add_entity_request_handler(Self::handle_search_project);
225 client.add_entity_request_handler(Self::handle_get_project_symbols);
226 client.add_entity_request_handler(Self::handle_open_buffer_for_symbol);
227 client.add_entity_request_handler(Self::handle_open_buffer);
228 client.add_entity_request_handler(Self::handle_save_buffer);
229 }
230
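    /// Creates a local project backed by the given file system, spawning a task
    /// that registers the project and its worktrees with the server whenever the
    /// client is connected.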
231 pub fn local(
232 client: Arc<Client>,
233 user_store: ModelHandle<UserStore>,
234 languages: Arc<LanguageRegistry>,
235 fs: Arc<dyn Fs>,
236 cx: &mut MutableAppContext,
237 ) -> ModelHandle<Self> {
238 cx.add_model(|cx: &mut ModelContext<Self>| {
239 let (remote_id_tx, remote_id_rx) = watch::channel();
240 let _maintain_remote_id_task = cx.spawn_weak({
241 let rpc = client.clone();
242 move |this, mut cx| {
243 async move {
244 let mut status = rpc.status();
245 while let Some(status) = status.next().await {
246 if let Some(this) = this.upgrade(&cx) {
247 let remote_id = if let client::Status::Connected { .. } = status {
248 let response = rpc.request(proto::RegisterProject {}).await?;
249 Some(response.project_id)
250 } else {
251 None
252 };
253
254 if let Some(project_id) = remote_id {
255 let mut registrations = Vec::new();
256 this.update(&mut cx, |this, cx| {
257 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
258 registrations.push(worktree.update(
259 cx,
260 |worktree, cx| {
261 let worktree = worktree.as_local_mut().unwrap();
262 worktree.register(project_id, cx)
263 },
264 ));
265 }
266 });
267 for registration in registrations {
268 registration.await?;
269 }
270 }
271 this.update(&mut cx, |this, cx| this.set_remote_id(remote_id, cx));
272 }
273 }
274 Ok(())
275 }
276 .log_err()
277 }
278 });
279
280 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
281 Self {
282 worktrees: Default::default(),
283 collaborators: Default::default(),
284 opened_buffers: Default::default(),
285 shared_buffers: Default::default(),
286 loading_buffers: Default::default(),
287 loading_local_worktrees: Default::default(),
288 client_state: ProjectClientState::Local {
289 is_shared: false,
290 remote_id_tx,
291 remote_id_rx,
292 _maintain_remote_id_task,
293 },
294 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
295 subscriptions: Vec::new(),
296 active_entry: None,
297 languages,
298 client,
299 user_store,
300 fs,
301 language_servers_with_diagnostics_running: 0,
302 language_servers: Default::default(),
303 started_language_servers: Default::default(),
304 nonce: StdRng::from_entropy().gen(),
305 }
306 })
307 }
308
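    /// Joins the project with the given remote id as a guest, loading its
    /// worktrees and collaborators from the host's response.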
309 pub async fn remote(
310 remote_id: u64,
311 client: Arc<Client>,
312 user_store: ModelHandle<UserStore>,
313 languages: Arc<LanguageRegistry>,
314 fs: Arc<dyn Fs>,
315 cx: &mut AsyncAppContext,
316 ) -> Result<ModelHandle<Self>> {
317 client.authenticate_and_connect(&cx).await?;
318
319 let response = client
320 .request(proto::JoinProject {
321 project_id: remote_id,
322 })
323 .await?;
324
325 let replica_id = response.replica_id as ReplicaId;
326
327 let mut worktrees = Vec::new();
328 for worktree in response.worktrees {
329 let (worktree, load_task) = cx
330 .update(|cx| Worktree::remote(remote_id, replica_id, worktree, client.clone(), cx));
331 worktrees.push(worktree);
332 load_task.detach();
333 }
334
335 let (opened_buffer_tx, opened_buffer_rx) = watch::channel();
336 let this = cx.add_model(|cx| {
337 let mut this = Self {
338 worktrees: Vec::new(),
339 loading_buffers: Default::default(),
340 opened_buffer: (Rc::new(RefCell::new(opened_buffer_tx)), opened_buffer_rx),
341 shared_buffers: Default::default(),
342 loading_local_worktrees: Default::default(),
343 active_entry: None,
344 collaborators: Default::default(),
345 languages,
346 user_store: user_store.clone(),
347 fs,
348 subscriptions: vec![client.add_model_for_remote_entity(remote_id, cx)],
349 client,
350 client_state: ProjectClientState::Remote {
351 sharing_has_stopped: false,
352 remote_id,
353 replica_id,
354 },
355 language_servers_with_diagnostics_running: 0,
356 language_servers: Default::default(),
357 started_language_servers: Default::default(),
358 opened_buffers: Default::default(),
359 nonce: StdRng::from_entropy().gen(),
360 };
361 for worktree in worktrees {
362 this.add_worktree(&worktree, cx);
363 }
364 this
365 });
366
367 let user_ids = response
368 .collaborators
369 .iter()
370 .map(|peer| peer.user_id)
371 .collect();
372 user_store
373 .update(cx, |user_store, cx| user_store.load_users(user_ids, cx))
374 .await?;
375 let mut collaborators = HashMap::default();
376 for message in response.collaborators {
377 let collaborator = Collaborator::from_proto(message, &user_store, cx).await?;
378 collaborators.insert(collaborator.peer_id, collaborator);
379 }
380
381 this.update(cx, |this, _| {
382 this.collaborators = collaborators;
383 });
384
385 Ok(this)
386 }
387
388 #[cfg(any(test, feature = "test-support"))]
389 pub fn test(fs: Arc<dyn Fs>, cx: &mut gpui::TestAppContext) -> ModelHandle<Project> {
390 let languages = Arc::new(LanguageRegistry::new());
391 let http_client = client::test::FakeHttpClient::with_404_response();
392 let client = client::Client::new(http_client.clone());
393 let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
394 cx.update(|cx| Project::local(client, user_store, languages, fs, cx))
395 }
396
397 #[cfg(any(test, feature = "test-support"))]
398 pub fn buffer_for_id(&self, remote_id: u64, cx: &AppContext) -> Option<ModelHandle<Buffer>> {
399 self.opened_buffers
400 .get(&remote_id)
401 .and_then(|buffer| buffer.upgrade(cx))
402 }
403
404 #[cfg(any(test, feature = "test-support"))]
405 pub fn languages(&self) -> &Arc<LanguageRegistry> {
406 &self.languages
407 }
408
409 #[cfg(any(test, feature = "test-support"))]
410 pub fn check_invariants(&self, cx: &AppContext) {
411 if self.is_local() {
412 let mut worktree_root_paths = HashMap::default();
413 for worktree in self.worktrees(cx) {
414 let worktree = worktree.read(cx);
415 let abs_path = worktree.as_local().unwrap().abs_path().clone();
416 let prev_worktree_id = worktree_root_paths.insert(abs_path.clone(), worktree.id());
417 assert_eq!(
418 prev_worktree_id,
419 None,
420 "abs path {:?} for worktree {:?} is not unique ({:?} was already registered with the same path)",
421 abs_path,
422 worktree.id(),
423 prev_worktree_id
424 )
425 }
426 } else {
427 let replica_id = self.replica_id();
428 for buffer in self.opened_buffers.values() {
429 if let Some(buffer) = buffer.upgrade(cx) {
430 let buffer = buffer.read(cx);
431 assert_eq!(
432 buffer.deferred_ops_len(),
433 0,
434 "replica {}, buffer {} has deferred operations",
435 replica_id,
436 buffer.remote_id()
437 );
438 }
439 }
440 }
441 }
442
443 #[cfg(any(test, feature = "test-support"))]
444 pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &AppContext) -> bool {
445 let path = path.into();
446 if let Some(worktree) = self.worktree_for_id(path.worktree_id, cx) {
447 self.opened_buffers.iter().any(|(_, buffer)| {
448 if let Some(buffer) = buffer.upgrade(cx) {
449 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
450 if file.worktree == worktree && file.path() == &path.path {
451 return true;
452 }
453 }
454 }
455 false
456 })
457 } else {
458 false
459 }
460 }
461
462 pub fn fs(&self) -> &Arc<dyn Fs> {
463 &self.fs
464 }
465
466 fn set_remote_id(&mut self, remote_id: Option<u64>, cx: &mut ModelContext<Self>) {
467 if let ProjectClientState::Local { remote_id_tx, .. } = &mut self.client_state {
468 *remote_id_tx.borrow_mut() = remote_id;
469 }
470
471 self.subscriptions.clear();
472 if let Some(remote_id) = remote_id {
473 self.subscriptions
474 .push(self.client.add_model_for_remote_entity(remote_id, cx));
475 }
476 }
477
478 pub fn remote_id(&self) -> Option<u64> {
479 match &self.client_state {
480 ProjectClientState::Local { remote_id_rx, .. } => *remote_id_rx.borrow(),
481 ProjectClientState::Remote { remote_id, .. } => Some(*remote_id),
482 }
483 }
484
485 pub fn next_remote_id(&self) -> impl Future<Output = u64> {
486 let mut id = None;
487 let mut watch = None;
488 match &self.client_state {
489 ProjectClientState::Local { remote_id_rx, .. } => watch = Some(remote_id_rx.clone()),
490 ProjectClientState::Remote { remote_id, .. } => id = Some(*remote_id),
491 }
492
493 async move {
494 if let Some(id) = id {
495 return id;
496 }
497 let mut watch = watch.unwrap();
498 loop {
499 let id = *watch.borrow();
500 if let Some(id) = id {
501 return id;
502 }
503 watch.next().await;
504 }
505 }
506 }
507
508 pub fn replica_id(&self) -> ReplicaId {
509 match &self.client_state {
510 ProjectClientState::Local { .. } => 0,
511 ProjectClientState::Remote { replica_id, .. } => *replica_id,
512 }
513 }
514
515 pub fn collaborators(&self) -> &HashMap<PeerId, Collaborator> {
516 &self.collaborators
517 }
518
519 pub fn worktrees<'a>(
520 &'a self,
521 cx: &'a AppContext,
522 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
523 self.worktrees
524 .iter()
525 .filter_map(move |worktree| worktree.upgrade(cx))
526 }
527
528 pub fn visible_worktrees<'a>(
529 &'a self,
530 cx: &'a AppContext,
531 ) -> impl 'a + Iterator<Item = ModelHandle<Worktree>> {
532 self.worktrees.iter().filter_map(|worktree| {
533 worktree.upgrade(cx).and_then(|worktree| {
534 if worktree.read(cx).is_visible() {
535 Some(worktree)
536 } else {
537 None
538 }
539 })
540 })
541 }
542
543 pub fn worktree_for_id(
544 &self,
545 id: WorktreeId,
546 cx: &AppContext,
547 ) -> Option<ModelHandle<Worktree>> {
548 self.worktrees(cx)
549 .find(|worktree| worktree.read(cx).id() == id)
550 }
551
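    /// Shares this local project: upgrades buffer and worktree handles to strong
    /// references so they stay alive, then registers the share with the server
    /// and shares each worktree.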
552 pub fn share(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
553 let rpc = self.client.clone();
554 cx.spawn(|this, mut cx| async move {
555 let project_id = this.update(&mut cx, |this, cx| {
556 if let ProjectClientState::Local {
557 is_shared,
558 remote_id_rx,
559 ..
560 } = &mut this.client_state
561 {
562 *is_shared = true;
563
564 for open_buffer in this.opened_buffers.values_mut() {
565 match open_buffer {
566 OpenBuffer::Strong(_) => {}
567 OpenBuffer::Weak(buffer) => {
568 if let Some(buffer) = buffer.upgrade(cx) {
569 *open_buffer = OpenBuffer::Strong(buffer);
570 }
571 }
572 OpenBuffer::Loading(_) => unreachable!(),
573 }
574 }
575
576 for worktree_handle in this.worktrees.iter_mut() {
577 match worktree_handle {
578 WorktreeHandle::Strong(_) => {}
579 WorktreeHandle::Weak(worktree) => {
580 if let Some(worktree) = worktree.upgrade(cx) {
581 *worktree_handle = WorktreeHandle::Strong(worktree);
582 }
583 }
584 }
585 }
586
587 remote_id_rx
588 .borrow()
589 .ok_or_else(|| anyhow!("no project id"))
590 } else {
591 Err(anyhow!("can't share a remote project"))
592 }
593 })?;
594
595 rpc.request(proto::ShareProject { project_id }).await?;
596
597 let mut tasks = Vec::new();
598 this.update(&mut cx, |this, cx| {
599 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
600 worktree.update(cx, |worktree, cx| {
601 let worktree = worktree.as_local_mut().unwrap();
602 tasks.push(worktree.share(project_id, cx));
603 });
604 }
605 });
606 for task in tasks {
607 task.await?;
608 }
609 this.update(&mut cx, |_, cx| cx.notify());
610 Ok(())
611 })
612 }
613
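    /// Stops sharing this local project: notifies the server, clears collaborator
    /// and shared-buffer state, and downgrades handles that no longer need to be
    /// kept alive.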
614 pub fn unshare(&self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
615 let rpc = self.client.clone();
616 cx.spawn(|this, mut cx| async move {
617 let project_id = this.update(&mut cx, |this, cx| {
618 if let ProjectClientState::Local {
619 is_shared,
620 remote_id_rx,
621 ..
622 } = &mut this.client_state
623 {
624 *is_shared = false;
625
626 for open_buffer in this.opened_buffers.values_mut() {
627 match open_buffer {
628 OpenBuffer::Strong(buffer) => {
629 *open_buffer = OpenBuffer::Weak(buffer.downgrade());
630 }
631 _ => {}
632 }
633 }
634
635 for worktree_handle in this.worktrees.iter_mut() {
636 match worktree_handle {
637 WorktreeHandle::Strong(worktree) => {
638 if !worktree.read(cx).is_visible() {
639 *worktree_handle = WorktreeHandle::Weak(worktree.downgrade());
640 }
641 }
642 _ => {}
643 }
644 }
645
646 remote_id_rx
647 .borrow()
648 .ok_or_else(|| anyhow!("no project id"))
649 } else {
650 Err(anyhow!("can't share a remote project"))
651 }
652 })?;
653
654 rpc.send(proto::UnshareProject { project_id })?;
655 this.update(&mut cx, |this, cx| {
656 this.collaborators.clear();
657 this.shared_buffers.clear();
658 for worktree in this.worktrees(cx).collect::<Vec<_>>() {
659 worktree.update(cx, |worktree, _| {
660 worktree.as_local_mut().unwrap().unshare();
661 });
662 }
663 cx.notify()
664 });
665 Ok(())
666 })
667 }
668
669 pub fn is_read_only(&self) -> bool {
670 match &self.client_state {
671 ProjectClientState::Local { .. } => false,
672 ProjectClientState::Remote {
673 sharing_has_stopped,
674 ..
675 } => *sharing_has_stopped,
676 }
677 }
678
679 pub fn is_local(&self) -> bool {
680 match &self.client_state {
681 ProjectClientState::Local { .. } => true,
682 ProjectClientState::Remote { .. } => false,
683 }
684 }
685
686 pub fn is_remote(&self) -> bool {
687 !self.is_local()
688 }
689
690 pub fn create_buffer(&mut self, cx: &mut ModelContext<Self>) -> Result<ModelHandle<Buffer>> {
691 if self.is_remote() {
692 return Err(anyhow!("creating buffers as a guest is not supported yet"));
693 }
694
695 let buffer = cx.add_model(|cx| {
696 Buffer::new(self.replica_id(), "", cx).with_language(language::PLAIN_TEXT.clone(), cx)
697 });
698 self.register_buffer(&buffer, None, cx)?;
699 Ok(buffer)
700 }
701
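    /// Opens the buffer at the given project path, reusing an already-open buffer
    /// or an in-flight load for the same path when possible.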
702 pub fn open_buffer(
703 &mut self,
704 path: impl Into<ProjectPath>,
705 cx: &mut ModelContext<Self>,
706 ) -> Task<Result<ModelHandle<Buffer>>> {
707 let project_path = path.into();
708 let worktree = if let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) {
709 worktree
710 } else {
711 return Task::ready(Err(anyhow!("no such worktree")));
712 };
713
714 // If there is already a buffer for the given path, then return it.
715 let existing_buffer = self.get_open_buffer(&project_path, cx);
716 if let Some(existing_buffer) = existing_buffer {
717 return Task::ready(Ok(existing_buffer));
718 }
719
720 let mut loading_watch = match self.loading_buffers.entry(project_path.clone()) {
721 // If the given path is already being loaded, then wait for that existing
722 // task to complete and return the same buffer.
723 hash_map::Entry::Occupied(e) => e.get().clone(),
724
725 // Otherwise, record the fact that this path is now being loaded.
726 hash_map::Entry::Vacant(entry) => {
727 let (mut tx, rx) = postage::watch::channel();
728 entry.insert(rx.clone());
729
730 let load_buffer = if worktree.read(cx).is_local() {
731 self.open_local_buffer(&project_path.path, &worktree, cx)
732 } else {
733 self.open_remote_buffer(&project_path.path, &worktree, cx)
734 };
735
736 cx.spawn(move |this, mut cx| async move {
737 let load_result = load_buffer.await;
738 *tx.borrow_mut() = Some(this.update(&mut cx, |this, _| {
739 // Record the fact that the buffer is no longer loading.
740 this.loading_buffers.remove(&project_path);
741 let buffer = load_result.map_err(Arc::new)?;
742 Ok(buffer)
743 }));
744 })
745 .detach();
746 rx
747 }
748 };
749
750 cx.foreground().spawn(async move {
751 loop {
752 if let Some(result) = loading_watch.borrow().as_ref() {
753 match result {
754 Ok(buffer) => return Ok(buffer.clone()),
755 Err(error) => return Err(anyhow!("{}", error)),
756 }
757 }
758 loading_watch.next().await;
759 }
760 })
761 }
762
763 fn open_local_buffer(
764 &mut self,
765 path: &Arc<Path>,
766 worktree: &ModelHandle<Worktree>,
767 cx: &mut ModelContext<Self>,
768 ) -> Task<Result<ModelHandle<Buffer>>> {
769 let load_buffer = worktree.update(cx, |worktree, cx| {
770 let worktree = worktree.as_local_mut().unwrap();
771 worktree.load_buffer(path, cx)
772 });
773 let worktree = worktree.downgrade();
774 cx.spawn(|this, mut cx| async move {
775 let buffer = load_buffer.await?;
776 let worktree = worktree
777 .upgrade(&cx)
778 .ok_or_else(|| anyhow!("worktree was removed"))?;
779 this.update(&mut cx, |this, cx| {
780 this.register_buffer(&buffer, Some(&worktree), cx)
781 })?;
782 Ok(buffer)
783 })
784 }
785
786 fn open_remote_buffer(
787 &mut self,
788 path: &Arc<Path>,
789 worktree: &ModelHandle<Worktree>,
790 cx: &mut ModelContext<Self>,
791 ) -> Task<Result<ModelHandle<Buffer>>> {
792 let rpc = self.client.clone();
793 let project_id = self.remote_id().unwrap();
794 let remote_worktree_id = worktree.read(cx).id();
795 let path = path.clone();
796 let path_string = path.to_string_lossy().to_string();
797 cx.spawn(|this, mut cx| async move {
798 let response = rpc
799 .request(proto::OpenBuffer {
800 project_id,
801 worktree_id: remote_worktree_id.to_proto(),
802 path: path_string,
803 })
804 .await?;
805 let buffer = response.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
806 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
807 .await
808 })
809 }
810
811 fn open_local_buffer_via_lsp(
812 &mut self,
813 abs_path: lsp::Url,
814 lang_name: String,
815 lang_server: Arc<LanguageServer>,
816 cx: &mut ModelContext<Self>,
817 ) -> Task<Result<ModelHandle<Buffer>>> {
818 cx.spawn(|this, mut cx| async move {
819 let abs_path = abs_path
820 .to_file_path()
821 .map_err(|_| anyhow!("can't convert URI to path"))?;
822 let (worktree, relative_path) = if let Some(result) =
823 this.read_with(&cx, |this, cx| this.find_local_worktree(&abs_path, cx))
824 {
825 result
826 } else {
827 let worktree = this
828 .update(&mut cx, |this, cx| {
829 this.create_local_worktree(&abs_path, false, cx)
830 })
831 .await?;
832 this.update(&mut cx, |this, cx| {
833 this.language_servers
834 .insert((worktree.read(cx).id(), lang_name), lang_server);
835 });
836 (worktree, PathBuf::new())
837 };
838
839 let project_path = ProjectPath {
840 worktree_id: worktree.read_with(&cx, |worktree, _| worktree.id()),
841 path: relative_path.into(),
842 };
843 this.update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
844 .await
845 })
846 }
847
848 pub fn save_buffer_as(
849 &mut self,
850 buffer: ModelHandle<Buffer>,
851 abs_path: PathBuf,
852 cx: &mut ModelContext<Project>,
853 ) -> Task<Result<()>> {
854 let worktree_task = self.find_or_create_local_worktree(&abs_path, true, cx);
855 cx.spawn(|this, mut cx| async move {
856 let (worktree, path) = worktree_task.await?;
857 worktree
858 .update(&mut cx, |worktree, cx| {
859 worktree
860 .as_local_mut()
861 .unwrap()
862 .save_buffer_as(buffer.clone(), path, cx)
863 })
864 .await?;
865 this.update(&mut cx, |this, cx| {
866 this.assign_language_to_buffer(&buffer, Some(&worktree), cx);
867 });
868 Ok(())
869 })
870 }
871
872 pub fn get_open_buffer(
873 &mut self,
874 path: &ProjectPath,
875 cx: &mut ModelContext<Self>,
876 ) -> Option<ModelHandle<Buffer>> {
877 let worktree = self.worktree_for_id(path.worktree_id, cx)?;
878 self.opened_buffers.values().find_map(|buffer| {
879 let buffer = buffer.upgrade(cx)?;
880 let file = File::from_dyn(buffer.read(cx).file())?;
881 if file.worktree == worktree && file.path() == &path.path {
882 Some(buffer)
883 } else {
884 None
885 }
886 })
887 }
888
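    /// Records a newly opened buffer, applying any operations that arrived while
    /// it was still loading, and assigns it a language and language server.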
889 fn register_buffer(
890 &mut self,
891 buffer: &ModelHandle<Buffer>,
892 worktree: Option<&ModelHandle<Worktree>>,
893 cx: &mut ModelContext<Self>,
894 ) -> Result<()> {
895 let remote_id = buffer.read(cx).remote_id();
896 let open_buffer = if self.is_remote() || self.is_shared() {
897 OpenBuffer::Strong(buffer.clone())
898 } else {
899 OpenBuffer::Weak(buffer.downgrade())
900 };
901
902 match self.opened_buffers.insert(remote_id, open_buffer) {
903 None => {}
904 Some(OpenBuffer::Loading(operations)) => {
905 buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?
906 }
907 Some(OpenBuffer::Weak(existing_handle)) => {
908 if existing_handle.upgrade(cx).is_some() {
909 Err(anyhow!(
910 "already registered buffer with remote id {}",
911 remote_id
912 ))?
913 }
914 }
915 Some(OpenBuffer::Strong(_)) => Err(anyhow!(
916 "already registered buffer with remote id {}",
917 remote_id
918 ))?,
919 }
920 self.assign_language_to_buffer(&buffer, worktree, cx);
921 Ok(())
922 }
923
924 fn assign_language_to_buffer(
925 &mut self,
926 buffer: &ModelHandle<Buffer>,
927 worktree: Option<&ModelHandle<Worktree>>,
928 cx: &mut ModelContext<Self>,
929 ) -> Option<()> {
930 let (path, full_path) = {
931 let file = buffer.read(cx).file()?;
932 (file.path().clone(), file.full_path(cx))
933 };
934
935 // If the buffer has a language, set it and start/assign the language server
936 if let Some(language) = self.languages.select_language(&full_path) {
937 buffer.update(cx, |buffer, cx| {
938 buffer.set_language(Some(language.clone()), cx);
939 });
940
941 // For local worktrees, start a language server if needed.
942 // Also assign the language server and any previously stored diagnostics to the buffer.
943 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
944 let worktree_id = local_worktree.id();
945 let worktree_abs_path = local_worktree.abs_path().clone();
946 let buffer = buffer.downgrade();
947 let language_server =
948 self.start_language_server(worktree_id, worktree_abs_path, language, cx);
949
950 cx.spawn_weak(|_, mut cx| async move {
951 if let Some(language_server) = language_server.await {
952 if let Some(buffer) = buffer.upgrade(&cx) {
953 buffer.update(&mut cx, |buffer, cx| {
954 buffer.set_language_server(Some(language_server), cx);
955 });
956 }
957 }
958 })
959 .detach();
960 }
961 }
962
963 if let Some(local_worktree) = worktree.and_then(|w| w.read(cx).as_local()) {
964 if let Some(diagnostics) = local_worktree.diagnostics_for_path(&path) {
965 buffer.update(cx, |buffer, cx| {
966 buffer.update_diagnostics(diagnostics, None, cx).log_err();
967 });
968 }
969 }
970
971 None
972 }
973
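    /// Starts (or reuses) the language server for the given worktree and language,
    /// forwarding its diagnostic and progress notifications into the project.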
974 fn start_language_server(
975 &mut self,
976 worktree_id: WorktreeId,
977 worktree_path: Arc<Path>,
978 language: Arc<Language>,
979 cx: &mut ModelContext<Self>,
980 ) -> Shared<Task<Option<Arc<LanguageServer>>>> {
981 enum LspEvent {
982 DiagnosticsStart,
983 DiagnosticsUpdate(lsp::PublishDiagnosticsParams),
984 DiagnosticsFinish,
985 }
986
987 let key = (worktree_id, language.name().to_string());
988 self.started_language_servers
989 .entry(key.clone())
990 .or_insert_with(|| {
991 let language_server = self.languages.start_language_server(
992 &language,
993 worktree_path,
994 self.client.http_client(),
995 cx,
996 );
997 let rpc = self.client.clone();
998 cx.spawn_weak(|this, mut cx| async move {
999 let language_server = language_server?.await.log_err()?;
1000 if let Some(this) = this.upgrade(&cx) {
1001 this.update(&mut cx, |this, _| {
1002 this.language_servers.insert(key, language_server.clone());
1003 });
1004 }
1005
1006 let disk_based_sources = language
1007 .disk_based_diagnostic_sources()
1008 .cloned()
1009 .unwrap_or_default();
1010 let disk_based_diagnostics_progress_token =
1011 language.disk_based_diagnostics_progress_token().cloned();
1012 let has_disk_based_diagnostic_progress_token =
1013 disk_based_diagnostics_progress_token.is_some();
1014 let (diagnostics_tx, diagnostics_rx) = smol::channel::unbounded();
1015
1016 // Listen for `PublishDiagnostics` notifications.
1017 language_server
1018 .on_notification::<lsp::notification::PublishDiagnostics, _>({
1019 let diagnostics_tx = diagnostics_tx.clone();
1020 move |params| {
1021 if !has_disk_based_diagnostic_progress_token {
1022 block_on(diagnostics_tx.send(LspEvent::DiagnosticsStart)).ok();
1023 }
1024 block_on(diagnostics_tx.send(LspEvent::DiagnosticsUpdate(params)))
1025 .ok();
1026 if !has_disk_based_diagnostic_progress_token {
1027 block_on(diagnostics_tx.send(LspEvent::DiagnosticsFinish)).ok();
1028 }
1029 }
1030 })
1031 .detach();
1032
1033 // Listen for `Progress` notifications. Send an event when the language server
1034 // transitions between running jobs and not running any jobs.
1035 let mut running_jobs_for_this_server: i32 = 0;
1036 language_server
1037 .on_notification::<lsp::notification::Progress, _>(move |params| {
1038 let token = match params.token {
1039 lsp::NumberOrString::Number(_) => None,
1040 lsp::NumberOrString::String(token) => Some(token),
1041 };
1042
1043 if token == disk_based_diagnostics_progress_token {
1044 match params.value {
1045 lsp::ProgressParamsValue::WorkDone(progress) => {
1046 match progress {
1047 lsp::WorkDoneProgress::Begin(_) => {
1048 running_jobs_for_this_server += 1;
1049 if running_jobs_for_this_server == 1 {
1050 block_on(
1051 diagnostics_tx
1052 .send(LspEvent::DiagnosticsStart),
1053 )
1054 .ok();
1055 }
1056 }
1057 lsp::WorkDoneProgress::End(_) => {
1058 running_jobs_for_this_server -= 1;
1059 if running_jobs_for_this_server == 0 {
1060 block_on(
1061 diagnostics_tx
1062 .send(LspEvent::DiagnosticsFinish),
1063 )
1064 .ok();
1065 }
1066 }
1067 _ => {}
1068 }
1069 }
1070 }
1071 }
1072 })
1073 .detach();
1074
1075 // Process all the LSP events.
1076 cx.spawn(|mut cx| async move {
1077 while let Ok(message) = diagnostics_rx.recv().await {
1078 let this = this.upgrade(&cx)?;
1079 match message {
1080 LspEvent::DiagnosticsStart => {
1081 this.update(&mut cx, |this, cx| {
1082 this.disk_based_diagnostics_started(cx);
1083 if let Some(project_id) = this.remote_id() {
1084 rpc.send(proto::DiskBasedDiagnosticsUpdating {
1085 project_id,
1086 })
1087 .log_err();
1088 }
1089 });
1090 }
1091 LspEvent::DiagnosticsUpdate(mut params) => {
1092 language.process_diagnostics(&mut params);
1093 this.update(&mut cx, |this, cx| {
1094 this.update_diagnostics(params, &disk_based_sources, cx)
1095 .log_err();
1096 });
1097 }
1098 LspEvent::DiagnosticsFinish => {
1099 this.update(&mut cx, |this, cx| {
1100 this.disk_based_diagnostics_finished(cx);
1101 if let Some(project_id) = this.remote_id() {
1102 rpc.send(proto::DiskBasedDiagnosticsUpdated {
1103 project_id,
1104 })
1105 .log_err();
1106 }
1107 });
1108 }
1109 }
1110 }
1111 Some(())
1112 })
1113 .detach();
1114
1115 Some(language_server)
1116 })
1117 .shared()
1118 })
1119 .clone()
1120 }
1121
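    /// Converts an LSP `PublishDiagnostics` notification into grouped diagnostic
    /// entries, associating related information with its primary diagnostic, and
    /// stores the result on the worktree and any matching open buffer.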
1122 pub fn update_diagnostics(
1123 &mut self,
1124 params: lsp::PublishDiagnosticsParams,
1125 disk_based_sources: &HashSet<String>,
1126 cx: &mut ModelContext<Self>,
1127 ) -> Result<()> {
1128 let abs_path = params
1129 .uri
1130 .to_file_path()
1131 .map_err(|_| anyhow!("URI is not a file"))?;
1132 let mut next_group_id = 0;
1133 let mut diagnostics = Vec::default();
1134 let mut primary_diagnostic_group_ids = HashMap::default();
1135 let mut sources_by_group_id = HashMap::default();
1136 let mut supporting_diagnostic_severities = HashMap::default();
        for diagnostic in &params.diagnostics {
1138 let source = diagnostic.source.as_ref();
1139 let code = diagnostic.code.as_ref().map(|code| match code {
1140 lsp::NumberOrString::Number(code) => code.to_string(),
1141 lsp::NumberOrString::String(code) => code.clone(),
1142 });
1143 let range = range_from_lsp(diagnostic.range);
1144 let is_supporting = diagnostic
1145 .related_information
1146 .as_ref()
1147 .map_or(false, |infos| {
1148 infos.iter().any(|info| {
1149 primary_diagnostic_group_ids.contains_key(&(
1150 source,
1151 code.clone(),
1152 range_from_lsp(info.location.range),
1153 ))
1154 })
1155 });
1156
1157 if is_supporting {
1158 if let Some(severity) = diagnostic.severity {
1159 supporting_diagnostic_severities
1160 .insert((source, code.clone(), range), severity);
1161 }
1162 } else {
1163 let group_id = post_inc(&mut next_group_id);
1164 let is_disk_based =
1165 source.map_or(false, |source| disk_based_sources.contains(source));
1166
1167 sources_by_group_id.insert(group_id, source);
1168 primary_diagnostic_group_ids
1169 .insert((source, code.clone(), range.clone()), group_id);
1170
1171 diagnostics.push(DiagnosticEntry {
1172 range,
1173 diagnostic: Diagnostic {
1174 code: code.clone(),
1175 severity: diagnostic.severity.unwrap_or(DiagnosticSeverity::ERROR),
1176 message: diagnostic.message.clone(),
1177 group_id,
1178 is_primary: true,
1179 is_valid: true,
1180 is_disk_based,
1181 },
1182 });
1183 if let Some(infos) = &diagnostic.related_information {
1184 for info in infos {
1185 if info.location.uri == params.uri && !info.message.is_empty() {
1186 let range = range_from_lsp(info.location.range);
1187 diagnostics.push(DiagnosticEntry {
1188 range,
1189 diagnostic: Diagnostic {
1190 code: code.clone(),
1191 severity: DiagnosticSeverity::INFORMATION,
1192 message: info.message.clone(),
1193 group_id,
1194 is_primary: false,
1195 is_valid: true,
1196 is_disk_based,
1197 },
1198 });
1199 }
1200 }
1201 }
1202 }
1203 }
1204
1205 for entry in &mut diagnostics {
1206 let diagnostic = &mut entry.diagnostic;
1207 if !diagnostic.is_primary {
1208 let source = *sources_by_group_id.get(&diagnostic.group_id).unwrap();
1209 if let Some(&severity) = supporting_diagnostic_severities.get(&(
1210 source,
1211 diagnostic.code.clone(),
1212 entry.range.clone(),
1213 )) {
1214 diagnostic.severity = severity;
1215 }
1216 }
1217 }
1218
1219 self.update_diagnostic_entries(abs_path, params.version, diagnostics, cx)?;
1220 Ok(())
1221 }
1222
1223 pub fn update_diagnostic_entries(
1224 &mut self,
1225 abs_path: PathBuf,
1226 version: Option<i32>,
1227 diagnostics: Vec<DiagnosticEntry<PointUtf16>>,
1228 cx: &mut ModelContext<Project>,
1229 ) -> Result<(), anyhow::Error> {
1230 let (worktree, relative_path) = self
1231 .find_local_worktree(&abs_path, cx)
1232 .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
1233 if !worktree.read(cx).is_visible() {
1234 return Ok(());
1235 }
1236
1237 let project_path = ProjectPath {
1238 worktree_id: worktree.read(cx).id(),
1239 path: relative_path.into(),
1240 };
1241
1242 for buffer in self.opened_buffers.values() {
1243 if let Some(buffer) = buffer.upgrade(cx) {
1244 if buffer
1245 .read(cx)
1246 .file()
1247 .map_or(false, |file| *file.path() == project_path.path)
1248 {
1249 buffer.update(cx, |buffer, cx| {
1250 buffer.update_diagnostics(diagnostics.clone(), version, cx)
1251 })?;
1252 break;
1253 }
1254 }
1255 }
1256 worktree.update(cx, |worktree, cx| {
1257 worktree
1258 .as_local_mut()
1259 .ok_or_else(|| anyhow!("not a local worktree"))?
1260 .update_diagnostics(project_path.path.clone(), diagnostics, cx)
1261 })?;
1262 cx.emit(Event::DiagnosticsUpdated(project_path));
1263 Ok(())
1264 }
1265
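    /// Formats the given buffers, forwarding remote buffers to the host and using
    /// each local buffer's language server's document or range formatting, and
    /// collects the edits into a single `ProjectTransaction`.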
1266 pub fn format(
1267 &self,
1268 buffers: HashSet<ModelHandle<Buffer>>,
1269 push_to_history: bool,
1270 cx: &mut ModelContext<Project>,
1271 ) -> Task<Result<ProjectTransaction>> {
1272 let mut local_buffers = Vec::new();
1273 let mut remote_buffers = None;
1274 for buffer_handle in buffers {
1275 let buffer = buffer_handle.read(cx);
1276 let worktree;
1277 if let Some(file) = File::from_dyn(buffer.file()) {
1278 worktree = file.worktree.clone();
1279 if let Some(buffer_abs_path) = file.as_local().map(|f| f.abs_path(cx)) {
1280 let lang_server;
1281 if let Some(lang) = buffer.language() {
1282 if let Some(server) = self
1283 .language_servers
1284 .get(&(worktree.read(cx).id(), lang.name().to_string()))
1285 {
1286 lang_server = server.clone();
1287 } else {
1288 return Task::ready(Ok(Default::default()));
1289 };
1290 } else {
1291 return Task::ready(Ok(Default::default()));
1292 }
1293
1294 local_buffers.push((buffer_handle, buffer_abs_path, lang_server));
1295 } else {
1296 remote_buffers.get_or_insert(Vec::new()).push(buffer_handle);
1297 }
1298 } else {
1299 return Task::ready(Ok(Default::default()));
1300 }
1301 }
1302
1303 let remote_buffers = self.remote_id().zip(remote_buffers);
1304 let client = self.client.clone();
1305
1306 cx.spawn(|this, mut cx| async move {
1307 let mut project_transaction = ProjectTransaction::default();
1308
1309 if let Some((project_id, remote_buffers)) = remote_buffers {
1310 let response = client
1311 .request(proto::FormatBuffers {
1312 project_id,
1313 buffer_ids: remote_buffers
1314 .iter()
1315 .map(|buffer| buffer.read_with(&cx, |buffer, _| buffer.remote_id()))
1316 .collect(),
1317 })
1318 .await?
1319 .transaction
1320 .ok_or_else(|| anyhow!("missing transaction"))?;
1321 project_transaction = this
1322 .update(&mut cx, |this, cx| {
1323 this.deserialize_project_transaction(response, push_to_history, cx)
1324 })
1325 .await?;
1326 }
1327
1328 for (buffer, buffer_abs_path, lang_server) in local_buffers {
1329 let capabilities = if let Some(capabilities) = lang_server.capabilities().await {
1330 capabilities
1331 } else {
1332 continue;
1333 };
1334
1335 let text_document = lsp::TextDocumentIdentifier::new(
1336 lsp::Url::from_file_path(&buffer_abs_path).unwrap(),
1337 );
1338 let lsp_edits = if capabilities
1339 .document_formatting_provider
1340 .map_or(false, |provider| provider != lsp::OneOf::Left(false))
1341 {
1342 lang_server
1343 .request::<lsp::request::Formatting>(lsp::DocumentFormattingParams {
1344 text_document,
1345 options: Default::default(),
1346 work_done_progress_params: Default::default(),
1347 })
1348 .await?
1349 } else if capabilities
1350 .document_range_formatting_provider
1351 .map_or(false, |provider| provider != lsp::OneOf::Left(false))
1352 {
1353 let buffer_start = lsp::Position::new(0, 0);
1354 let buffer_end = buffer
1355 .read_with(&cx, |buffer, _| buffer.max_point_utf16())
1356 .to_lsp_position();
1357 lang_server
1358 .request::<lsp::request::RangeFormatting>(
1359 lsp::DocumentRangeFormattingParams {
1360 text_document,
1361 range: lsp::Range::new(buffer_start, buffer_end),
1362 options: Default::default(),
1363 work_done_progress_params: Default::default(),
1364 },
1365 )
1366 .await?
1367 } else {
1368 continue;
1369 };
1370
1371 if let Some(lsp_edits) = lsp_edits {
1372 let edits = buffer
1373 .update(&mut cx, |buffer, cx| {
1374 buffer.edits_from_lsp(lsp_edits, None, cx)
1375 })
1376 .await?;
1377 buffer.update(&mut cx, |buffer, cx| {
1378 buffer.finalize_last_transaction();
1379 buffer.start_transaction();
1380 for (range, text) in edits {
1381 buffer.edit([range], text, cx);
1382 }
1383 if buffer.end_transaction(cx).is_some() {
1384 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1385 if !push_to_history {
1386 buffer.forget_transaction(transaction.id);
1387 }
1388 project_transaction.0.insert(cx.handle(), transaction);
1389 }
1390 });
1391 }
1392 }
1393
1394 Ok(project_transaction)
1395 })
1396 }
1397
1398 pub fn definition<T: ToPointUtf16>(
1399 &self,
1400 buffer: &ModelHandle<Buffer>,
1401 position: T,
1402 cx: &mut ModelContext<Self>,
1403 ) -> Task<Result<Vec<Location>>> {
1404 let position = position.to_point_utf16(buffer.read(cx));
1405 self.request_lsp(buffer.clone(), GetDefinition { position }, cx)
1406 }
1407
1408 pub fn references<T: ToPointUtf16>(
1409 &self,
1410 buffer: &ModelHandle<Buffer>,
1411 position: T,
1412 cx: &mut ModelContext<Self>,
1413 ) -> Task<Result<Vec<Location>>> {
1414 let position = position.to_point_utf16(buffer.read(cx));
1415 self.request_lsp(buffer.clone(), GetReferences { position }, cx)
1416 }
1417
1418 pub fn document_highlights<T: ToPointUtf16>(
1419 &self,
1420 buffer: &ModelHandle<Buffer>,
1421 position: T,
1422 cx: &mut ModelContext<Self>,
1423 ) -> Task<Result<Vec<DocumentHighlight>>> {
1424 let position = position.to_point_utf16(buffer.read(cx));
1425
1426 self.request_lsp(buffer.clone(), GetDocumentHighlights { position }, cx)
1427 }
1428
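    /// Searches every running language server (or asks the remote host) for
    /// workspace symbols matching the query.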
1429 pub fn symbols(&self, query: &str, cx: &mut ModelContext<Self>) -> Task<Result<Vec<Symbol>>> {
1430 if self.is_local() {
1431 let mut language_servers = HashMap::default();
1432 for ((worktree_id, language_name), language_server) in self.language_servers.iter() {
1433 if let Some((worktree, language)) = self
1434 .worktree_for_id(*worktree_id, cx)
1435 .and_then(|worktree| worktree.read(cx).as_local())
1436 .zip(self.languages.get_language(language_name))
1437 {
1438 language_servers
1439 .entry(Arc::as_ptr(language_server))
1440 .or_insert((
1441 language_server.clone(),
1442 *worktree_id,
1443 worktree.abs_path().clone(),
1444 language.clone(),
1445 ));
1446 }
1447 }
1448
1449 let mut requests = Vec::new();
1450 for (language_server, _, _, _) in language_servers.values() {
1451 requests.push(language_server.request::<lsp::request::WorkspaceSymbol>(
1452 lsp::WorkspaceSymbolParams {
1453 query: query.to_string(),
1454 ..Default::default()
1455 },
1456 ));
1457 }
1458
1459 cx.spawn_weak(|this, cx| async move {
1460 let responses = futures::future::try_join_all(requests).await?;
1461
1462 let mut symbols = Vec::new();
1463 if let Some(this) = this.upgrade(&cx) {
1464 this.read_with(&cx, |this, cx| {
1465 for ((_, source_worktree_id, worktree_abs_path, language), lsp_symbols) in
1466 language_servers.into_values().zip(responses)
1467 {
1468 symbols.extend(lsp_symbols.into_iter().flatten().filter_map(
1469 |lsp_symbol| {
1470 let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
1471 let mut worktree_id = source_worktree_id;
1472 let path;
1473 if let Some((worktree, rel_path)) =
1474 this.find_local_worktree(&abs_path, cx)
1475 {
1476 worktree_id = worktree.read(cx).id();
1477 path = rel_path;
1478 } else {
1479 path = relativize_path(&worktree_abs_path, &abs_path);
1480 }
1481
1482 let label = language
1483 .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
1484 .unwrap_or_else(|| {
1485 CodeLabel::plain(lsp_symbol.name.clone(), None)
1486 });
1487 let signature = this.symbol_signature(worktree_id, &path);
1488
1489 Some(Symbol {
1490 source_worktree_id,
1491 worktree_id,
1492 language_name: language.name().to_string(),
1493 name: lsp_symbol.name,
1494 kind: lsp_symbol.kind,
1495 label,
1496 path,
1497 range: range_from_lsp(lsp_symbol.location.range),
1498 signature,
1499 })
1500 },
1501 ));
1502 }
1503 })
1504 }
1505
1506 Ok(symbols)
1507 })
1508 } else if let Some(project_id) = self.remote_id() {
1509 let request = self.client.request(proto::GetProjectSymbols {
1510 project_id,
1511 query: query.to_string(),
1512 });
1513 cx.spawn_weak(|this, cx| async move {
1514 let response = request.await?;
1515 let mut symbols = Vec::new();
1516 if let Some(this) = this.upgrade(&cx) {
1517 this.read_with(&cx, |this, _| {
1518 symbols.extend(
1519 response
1520 .symbols
1521 .into_iter()
1522 .filter_map(|symbol| this.deserialize_symbol(symbol).log_err()),
1523 );
1524 })
1525 }
1526 Ok(symbols)
1527 })
1528 } else {
1529 Task::ready(Ok(Default::default()))
1530 }
1531 }
1532
1533 pub fn open_buffer_for_symbol(
1534 &mut self,
1535 symbol: &Symbol,
1536 cx: &mut ModelContext<Self>,
1537 ) -> Task<Result<ModelHandle<Buffer>>> {
1538 if self.is_local() {
1539 let language_server = if let Some(server) = self
1540 .language_servers
1541 .get(&(symbol.source_worktree_id, symbol.language_name.clone()))
1542 {
1543 server.clone()
1544 } else {
1545 return Task::ready(Err(anyhow!(
1546 "language server for worktree and language not found"
1547 )));
1548 };
1549
1550 let worktree_abs_path = if let Some(worktree_abs_path) = self
1551 .worktree_for_id(symbol.worktree_id, cx)
1552 .and_then(|worktree| worktree.read(cx).as_local())
1553 .map(|local_worktree| local_worktree.abs_path())
1554 {
1555 worktree_abs_path
1556 } else {
1557 return Task::ready(Err(anyhow!("worktree not found for symbol")));
1558 };
1559 let symbol_abs_path = worktree_abs_path.join(&symbol.path);
1560 let symbol_uri = if let Ok(uri) = lsp::Url::from_file_path(symbol_abs_path) {
1561 uri
1562 } else {
1563 return Task::ready(Err(anyhow!("invalid symbol path")));
1564 };
1565
1566 self.open_local_buffer_via_lsp(
1567 symbol_uri,
1568 symbol.language_name.clone(),
1569 language_server,
1570 cx,
1571 )
1572 } else if let Some(project_id) = self.remote_id() {
1573 let request = self.client.request(proto::OpenBufferForSymbol {
1574 project_id,
1575 symbol: Some(serialize_symbol(symbol)),
1576 });
1577 cx.spawn(|this, mut cx| async move {
1578 let response = request.await?;
1579 let buffer = response.buffer.ok_or_else(|| anyhow!("invalid buffer"))?;
1580 this.update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
1581 .await
1582 })
1583 } else {
1584 Task::ready(Err(anyhow!("project does not have a remote id")))
1585 }
1586 }
1587
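    /// Requests completions at the given position from the buffer's language
    /// server, or from the remote host for remote projects.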
1588 pub fn completions<T: ToPointUtf16>(
1589 &self,
1590 source_buffer_handle: &ModelHandle<Buffer>,
1591 position: T,
1592 cx: &mut ModelContext<Self>,
1593 ) -> Task<Result<Vec<Completion>>> {
1594 let source_buffer_handle = source_buffer_handle.clone();
1595 let source_buffer = source_buffer_handle.read(cx);
1596 let buffer_id = source_buffer.remote_id();
1597 let language = source_buffer.language().cloned();
1598 let worktree;
1599 let buffer_abs_path;
1600 if let Some(file) = File::from_dyn(source_buffer.file()) {
1601 worktree = file.worktree.clone();
1602 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1603 } else {
1604 return Task::ready(Ok(Default::default()));
1605 };
1606
1607 let position = position.to_point_utf16(source_buffer);
1608 let anchor = source_buffer.anchor_after(position);
1609
1610 if worktree.read(cx).as_local().is_some() {
1611 let buffer_abs_path = buffer_abs_path.unwrap();
1612 let lang_server = if let Some(server) = source_buffer.language_server().cloned() {
1613 server
1614 } else {
1615 return Task::ready(Ok(Default::default()));
1616 };
1617
1618 cx.spawn(|_, cx| async move {
1619 let completions = lang_server
1620 .request::<lsp::request::Completion>(lsp::CompletionParams {
1621 text_document_position: lsp::TextDocumentPositionParams::new(
1622 lsp::TextDocumentIdentifier::new(
1623 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1624 ),
1625 position.to_lsp_position(),
1626 ),
1627 context: Default::default(),
1628 work_done_progress_params: Default::default(),
1629 partial_result_params: Default::default(),
1630 })
1631 .await
1632 .context("lsp completion request failed")?;
1633
1634 let completions = if let Some(completions) = completions {
1635 match completions {
1636 lsp::CompletionResponse::Array(completions) => completions,
1637 lsp::CompletionResponse::List(list) => list.items,
1638 }
1639 } else {
1640 Default::default()
1641 };
1642
1643 source_buffer_handle.read_with(&cx, |this, _| {
1644 Ok(completions
1645 .into_iter()
1646 .filter_map(|lsp_completion| {
1647 let (old_range, new_text) = match lsp_completion.text_edit.as_ref()? {
1648 lsp::CompletionTextEdit::Edit(edit) => {
1649 (range_from_lsp(edit.range), edit.new_text.clone())
1650 }
1651 lsp::CompletionTextEdit::InsertAndReplace(_) => {
1652 log::info!("unsupported insert/replace completion");
1653 return None;
1654 }
1655 };
1656
1657 let clipped_start = this.clip_point_utf16(old_range.start, Bias::Left);
1658 let clipped_end = this.clip_point_utf16(old_range.end, Bias::Left);
1659 if clipped_start == old_range.start && clipped_end == old_range.end {
1660 Some(Completion {
1661 old_range: this.anchor_before(old_range.start)
1662 ..this.anchor_after(old_range.end),
1663 new_text,
1664 label: language
1665 .as_ref()
1666 .and_then(|l| l.label_for_completion(&lsp_completion))
1667 .unwrap_or_else(|| {
1668 CodeLabel::plain(
1669 lsp_completion.label.clone(),
1670 lsp_completion.filter_text.as_deref(),
1671 )
1672 }),
1673 lsp_completion,
1674 })
1675 } else {
1676 None
1677 }
1678 })
1679 .collect())
1680 })
1681 })
1682 } else if let Some(project_id) = self.remote_id() {
1683 let rpc = self.client.clone();
1684 let message = proto::GetCompletions {
1685 project_id,
1686 buffer_id,
1687 position: Some(language::proto::serialize_anchor(&anchor)),
1688 version: (&source_buffer.version()).into(),
1689 };
1690 cx.spawn_weak(|_, mut cx| async move {
1691 let response = rpc.request(message).await?;
1692
1693 source_buffer_handle
1694 .update(&mut cx, |buffer, _| {
1695 buffer.wait_for_version(response.version.into())
1696 })
1697 .await;
1698
1699 response
1700 .completions
1701 .into_iter()
1702 .map(|completion| {
1703 language::proto::deserialize_completion(completion, language.as_ref())
1704 })
1705 .collect()
1706 })
1707 } else {
1708 Task::ready(Ok(Default::default()))
1709 }
1710 }
1711
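    /// Resolves the given completion and applies any additional text edits it
    /// carries, returning the resulting transaction if edits were made.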
1712 pub fn apply_additional_edits_for_completion(
1713 &self,
1714 buffer_handle: ModelHandle<Buffer>,
1715 completion: Completion,
1716 push_to_history: bool,
1717 cx: &mut ModelContext<Self>,
1718 ) -> Task<Result<Option<Transaction>>> {
1719 let buffer = buffer_handle.read(cx);
1720 let buffer_id = buffer.remote_id();
1721
1722 if self.is_local() {
1723 let lang_server = if let Some(language_server) = buffer.language_server() {
1724 language_server.clone()
1725 } else {
1726 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1727 };
1728
1729 cx.spawn(|_, mut cx| async move {
1730 let resolved_completion = lang_server
1731 .request::<lsp::request::ResolveCompletionItem>(completion.lsp_completion)
1732 .await?;
1733 if let Some(edits) = resolved_completion.additional_text_edits {
1734 let edits = buffer_handle
1735 .update(&mut cx, |buffer, cx| buffer.edits_from_lsp(edits, None, cx))
1736 .await?;
1737 buffer_handle.update(&mut cx, |buffer, cx| {
1738 buffer.finalize_last_transaction();
1739 buffer.start_transaction();
1740 for (range, text) in edits {
1741 buffer.edit([range], text, cx);
1742 }
1743 let transaction = if buffer.end_transaction(cx).is_some() {
1744 let transaction = buffer.finalize_last_transaction().unwrap().clone();
1745 if !push_to_history {
1746 buffer.forget_transaction(transaction.id);
1747 }
1748 Some(transaction)
1749 } else {
1750 None
1751 };
1752 Ok(transaction)
1753 })
1754 } else {
1755 Ok(None)
1756 }
1757 })
1758 } else if let Some(project_id) = self.remote_id() {
1759 let client = self.client.clone();
1760 cx.spawn(|_, mut cx| async move {
1761 let response = client
1762 .request(proto::ApplyCompletionAdditionalEdits {
1763 project_id,
1764 buffer_id,
1765 completion: Some(language::proto::serialize_completion(&completion)),
1766 })
1767 .await?;
1768
1769 if let Some(transaction) = response.transaction {
1770 let transaction = language::proto::deserialize_transaction(transaction)?;
1771 buffer_handle
1772 .update(&mut cx, |buffer, _| {
1773 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
1774 })
1775 .await;
1776 if push_to_history {
1777 buffer_handle.update(&mut cx, |buffer, _| {
1778 buffer.push_transaction(transaction.clone(), Instant::now());
1779 });
1780 }
1781 Ok(Some(transaction))
1782 } else {
1783 Ok(None)
1784 }
1785 })
1786 } else {
1787 Task::ready(Err(anyhow!("project does not have a remote id")))
1788 }
1789 }
1790
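    /// Requests the code actions (quickfix and refactor kinds) available within
    /// the given range.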
1791 pub fn code_actions<T: ToOffset>(
1792 &self,
1793 buffer_handle: &ModelHandle<Buffer>,
1794 range: Range<T>,
1795 cx: &mut ModelContext<Self>,
1796 ) -> Task<Result<Vec<CodeAction>>> {
1797 let buffer_handle = buffer_handle.clone();
1798 let buffer = buffer_handle.read(cx);
1799 let buffer_id = buffer.remote_id();
1800 let worktree;
1801 let buffer_abs_path;
1802 if let Some(file) = File::from_dyn(buffer.file()) {
1803 worktree = file.worktree.clone();
1804 buffer_abs_path = file.as_local().map(|f| f.abs_path(cx));
1805 } else {
1806 return Task::ready(Ok(Default::default()));
1807 };
1808 let range = buffer.anchor_before(range.start)..buffer.anchor_before(range.end);
1809
1810 if worktree.read(cx).as_local().is_some() {
1811 let buffer_abs_path = buffer_abs_path.unwrap();
1812 let lang_name;
1813 let lang_server;
1814 if let Some(lang) = buffer.language() {
1815 lang_name = lang.name().to_string();
1816 if let Some(server) = self
1817 .language_servers
1818 .get(&(worktree.read(cx).id(), lang_name.clone()))
1819 {
1820 lang_server = server.clone();
1821 } else {
1822 return Task::ready(Ok(Default::default()));
1823 };
1824 } else {
1825 return Task::ready(Ok(Default::default()));
1826 }
1827
1828 let lsp_range = lsp::Range::new(
1829 range.start.to_point_utf16(buffer).to_lsp_position(),
1830 range.end.to_point_utf16(buffer).to_lsp_position(),
1831 );
1832 cx.foreground().spawn(async move {
1833 if !lang_server
1834 .capabilities()
1835 .await
1836 .map_or(false, |capabilities| {
1837 capabilities.code_action_provider.is_some()
1838 })
1839 {
1840 return Ok(Default::default());
1841 }
1842
1843 Ok(lang_server
1844 .request::<lsp::request::CodeActionRequest>(lsp::CodeActionParams {
1845 text_document: lsp::TextDocumentIdentifier::new(
1846 lsp::Url::from_file_path(buffer_abs_path).unwrap(),
1847 ),
1848 range: lsp_range,
1849 work_done_progress_params: Default::default(),
1850 partial_result_params: Default::default(),
1851 context: lsp::CodeActionContext {
1852 diagnostics: Default::default(),
1853 only: Some(vec![
1854 lsp::CodeActionKind::QUICKFIX,
1855 lsp::CodeActionKind::REFACTOR,
1856 lsp::CodeActionKind::REFACTOR_EXTRACT,
1857 ]),
1858 },
1859 })
1860 .await?
1861 .unwrap_or_default()
1862 .into_iter()
1863 .filter_map(|entry| {
1864 if let lsp::CodeActionOrCommand::CodeAction(lsp_action) = entry {
1865 Some(CodeAction {
1866 range: range.clone(),
1867 lsp_action,
1868 })
1869 } else {
1870 None
1871 }
1872 })
1873 .collect())
1874 })
1875 } else if let Some(project_id) = self.remote_id() {
1876 let rpc = self.client.clone();
1877 let version = buffer.version();
1878 cx.spawn_weak(|_, mut cx| async move {
1879 let response = rpc
1880 .request(proto::GetCodeActions {
1881 project_id,
1882 buffer_id,
1883 start: Some(language::proto::serialize_anchor(&range.start)),
1884 end: Some(language::proto::serialize_anchor(&range.end)),
1885 version: (&version).into(),
1886 })
1887 .await?;
1888
1889 buffer_handle
1890 .update(&mut cx, |buffer, _| {
1891 buffer.wait_for_version(response.version.into())
1892 })
1893 .await;
1894
1895 response
1896 .actions
1897 .into_iter()
1898 .map(language::proto::deserialize_code_action)
1899 .collect()
1900 })
1901 } else {
1902 Task::ready(Ok(Default::default()))
1903 }
1904 }
1905
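    /// Applies a code action previously returned by [`Self::code_actions`],
    /// resolving it with the language server if needed and returning the
    /// resulting edits grouped by buffer.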
1906 pub fn apply_code_action(
1907 &self,
1908 buffer_handle: ModelHandle<Buffer>,
1909 mut action: CodeAction,
1910 push_to_history: bool,
1911 cx: &mut ModelContext<Self>,
1912 ) -> Task<Result<ProjectTransaction>> {
1913 if self.is_local() {
1914 let buffer = buffer_handle.read(cx);
1915 let lang_name = if let Some(lang) = buffer.language() {
1916 lang.name().to_string()
1917 } else {
1918 return Task::ready(Ok(Default::default()));
1919 };
1920 let lang_server = if let Some(language_server) = buffer.language_server() {
1921 language_server.clone()
1922 } else {
1923 return Task::ready(Err(anyhow!("buffer does not have a language server")));
1924 };
1925 let range = action.range.to_point_utf16(buffer);
1926
1927 cx.spawn(|this, mut cx| async move {
1928 if let Some(lsp_range) = action
1929 .lsp_action
1930 .data
1931 .as_mut()
1932 .and_then(|d| d.get_mut("codeActionParams"))
1933 .and_then(|d| d.get_mut("range"))
1934 {
1935 *lsp_range = serde_json::to_value(&lsp::Range::new(
1936 range.start.to_lsp_position(),
1937 range.end.to_lsp_position(),
1938 ))
1939 .unwrap();
1940 action.lsp_action = lang_server
1941 .request::<lsp::request::CodeActionResolveRequest>(action.lsp_action)
1942 .await?;
1943 } else {
1944 let actions = this
1945 .update(&mut cx, |this, cx| {
1946 this.code_actions(&buffer_handle, action.range, cx)
1947 })
1948 .await?;
1949 action.lsp_action = actions
1950 .into_iter()
1951 .find(|a| a.lsp_action.title == action.lsp_action.title)
1952 .ok_or_else(|| anyhow!("code action is outdated"))?
1953 .lsp_action;
1954 }
1955
1956 if let Some(edit) = action.lsp_action.edit {
1957 Self::deserialize_workspace_edit(
1958 this,
1959 edit,
1960 push_to_history,
1961 lang_name,
1962 lang_server,
1963 &mut cx,
1964 )
1965 .await
1966 } else {
1967 Ok(ProjectTransaction::default())
1968 }
1969 })
1970 } else if let Some(project_id) = self.remote_id() {
1971 let client = self.client.clone();
1972 let request = proto::ApplyCodeAction {
1973 project_id,
1974 buffer_id: buffer_handle.read(cx).remote_id(),
1975 action: Some(language::proto::serialize_code_action(&action)),
1976 };
1977 cx.spawn(|this, mut cx| async move {
1978 let response = client
1979 .request(request)
1980 .await?
1981 .transaction
1982 .ok_or_else(|| anyhow!("missing transaction"))?;
1983 this.update(&mut cx, |this, cx| {
1984 this.deserialize_project_transaction(response, push_to_history, cx)
1985 })
1986 .await
1987 })
1988 } else {
1989 Task::ready(Err(anyhow!("project does not have a remote id")))
1990 }
1991 }
1992
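    /// Applies an LSP workspace edit to the project: performs any file
    /// creations, renames, and deletions it describes, edits the affected
    /// buffers, and returns the resulting transactions grouped by buffer.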
1993 async fn deserialize_workspace_edit(
1994 this: ModelHandle<Self>,
1995 edit: lsp::WorkspaceEdit,
1996 push_to_history: bool,
1997 language_name: String,
1998 language_server: Arc<LanguageServer>,
1999 cx: &mut AsyncAppContext,
2000 ) -> Result<ProjectTransaction> {
2001 let fs = this.read_with(cx, |this, _| this.fs.clone());
2002 let mut operations = Vec::new();
2003 if let Some(document_changes) = edit.document_changes {
2004 match document_changes {
2005 lsp::DocumentChanges::Edits(edits) => {
2006 operations.extend(edits.into_iter().map(lsp::DocumentChangeOperation::Edit))
2007 }
2008 lsp::DocumentChanges::Operations(ops) => operations = ops,
2009 }
2010 } else if let Some(changes) = edit.changes {
2011 operations.extend(changes.into_iter().map(|(uri, edits)| {
2012 lsp::DocumentChangeOperation::Edit(lsp::TextDocumentEdit {
2013 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
2014 uri,
2015 version: None,
2016 },
2017 edits: edits.into_iter().map(lsp::OneOf::Left).collect(),
2018 })
2019 }));
2020 }
2021
2022 let mut project_transaction = ProjectTransaction::default();
2023 for operation in operations {
2024 match operation {
2025 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Create(op)) => {
2026 let abs_path = op
2027 .uri
2028 .to_file_path()
2029 .map_err(|_| anyhow!("can't convert URI to path"))?;
2030
2031 if let Some(parent_path) = abs_path.parent() {
2032 fs.create_dir(parent_path).await?;
2033 }
2034 if abs_path.ends_with("/") {
2035 fs.create_dir(&abs_path).await?;
2036 } else {
2037 fs.create_file(&abs_path, op.options.map(Into::into).unwrap_or_default())
2038 .await?;
2039 }
2040 }
2041 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Rename(op)) => {
2042 let source_abs_path = op
2043 .old_uri
2044 .to_file_path()
2045 .map_err(|_| anyhow!("can't convert URI to path"))?;
2046 let target_abs_path = op
2047 .new_uri
2048 .to_file_path()
2049 .map_err(|_| anyhow!("can't convert URI to path"))?;
2050 fs.rename(
2051 &source_abs_path,
2052 &target_abs_path,
2053 op.options.map(Into::into).unwrap_or_default(),
2054 )
2055 .await?;
2056 }
2057 lsp::DocumentChangeOperation::Op(lsp::ResourceOp::Delete(op)) => {
2058 let abs_path = op
2059 .uri
2060 .to_file_path()
2061 .map_err(|_| anyhow!("can't convert URI to path"))?;
2062 let options = op.options.map(Into::into).unwrap_or_default();
2063 if abs_path.ends_with("/") {
2064 fs.remove_dir(&abs_path, options).await?;
2065 } else {
2066 fs.remove_file(&abs_path, options).await?;
2067 }
2068 }
2069 lsp::DocumentChangeOperation::Edit(op) => {
2070 let buffer_to_edit = this
2071 .update(cx, |this, cx| {
2072 this.open_local_buffer_via_lsp(
2073 op.text_document.uri,
2074 language_name.clone(),
2075 language_server.clone(),
2076 cx,
2077 )
2078 })
2079 .await?;
2080
2081 let edits = buffer_to_edit
2082 .update(cx, |buffer, cx| {
2083 let edits = op.edits.into_iter().map(|edit| match edit {
2084 lsp::OneOf::Left(edit) => edit,
2085 lsp::OneOf::Right(edit) => edit.text_edit,
2086 });
2087 buffer.edits_from_lsp(edits, op.text_document.version, cx)
2088 })
2089 .await?;
2090
                    let transaction = buffer_to_edit.update(cx, |buffer, cx| {
                        buffer.finalize_last_transaction();
                        buffer.start_transaction();
                        for (range, text) in edits {
                            buffer.edit([range], text, cx);
                        }
                        if buffer.end_transaction(cx).is_some() {
                            let transaction = buffer.finalize_last_transaction().unwrap().clone();
                            if !push_to_history {
                                buffer.forget_transaction(transaction.id);
                            }
                            Some(transaction)
                        } else {
                            None
                        }
                    });
2109 if let Some(transaction) = transaction {
2110 project_transaction.0.insert(buffer_to_edit, transaction);
2111 }
2112 }
2113 }
2114 }
2115
2116 Ok(project_transaction)
2117 }
2118
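    /// Asks the language server whether the symbol at the given position can
    /// be renamed, returning the range of the symbol if it can.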
2119 pub fn prepare_rename<T: ToPointUtf16>(
2120 &self,
2121 buffer: ModelHandle<Buffer>,
2122 position: T,
2123 cx: &mut ModelContext<Self>,
2124 ) -> Task<Result<Option<Range<Anchor>>>> {
2125 let position = position.to_point_utf16(buffer.read(cx));
2126 self.request_lsp(buffer, PrepareRename { position }, cx)
2127 }
2128
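    /// Renames the symbol at the given position to `new_name`, applying the
    /// resulting edits to every affected buffer.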
2129 pub fn perform_rename<T: ToPointUtf16>(
2130 &self,
2131 buffer: ModelHandle<Buffer>,
2132 position: T,
2133 new_name: String,
2134 push_to_history: bool,
2135 cx: &mut ModelContext<Self>,
2136 ) -> Task<Result<ProjectTransaction>> {
2137 let position = position.to_point_utf16(buffer.read(cx));
2138 self.request_lsp(
2139 buffer,
2140 PerformRename {
2141 position,
2142 new_name,
2143 push_to_history,
2144 },
2145 cx,
2146 )
2147 }
2148
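    /// Searches the project for the given query, returning the matching ranges
    /// grouped by buffer. Local projects scan candidate files on background
    /// threads and open the ones that match; remote projects forward the query
    /// to the project host.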
2149 pub fn search(
2150 &self,
2151 query: SearchQuery,
2152 cx: &mut ModelContext<Self>,
2153 ) -> Task<Result<HashMap<ModelHandle<Buffer>, Vec<Range<Anchor>>>>> {
2154 if self.is_local() {
2155 let snapshots = self
2156 .visible_worktrees(cx)
2157 .filter_map(|tree| {
2158 let tree = tree.read(cx).as_local()?;
2159 Some(tree.snapshot())
2160 })
2161 .collect::<Vec<_>>();
2162
2163 let background = cx.background().clone();
2164 let path_count: usize = snapshots.iter().map(|s| s.visible_file_count()).sum();
2165 if path_count == 0 {
2166 return Task::ready(Ok(Default::default()));
2167 }
2168 let workers = background.num_cpus().min(path_count);
2169 let (matching_paths_tx, mut matching_paths_rx) = smol::channel::bounded(1024);
2170 cx.background()
2171 .spawn({
2172 let fs = self.fs.clone();
2173 let background = cx.background().clone();
2174 let query = query.clone();
2175 async move {
2176 let fs = &fs;
2177 let query = &query;
2178 let matching_paths_tx = &matching_paths_tx;
2179 let paths_per_worker = (path_count + workers - 1) / workers;
2180 let snapshots = &snapshots;
2181 background
2182 .scoped(|scope| {
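                                // Distribute the visible files evenly across the
                                // workers; each worker scans a contiguous slice of
                                // paths for query matches.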
2183 for worker_ix in 0..workers {
2184 let worker_start_ix = worker_ix * paths_per_worker;
2185 let worker_end_ix = worker_start_ix + paths_per_worker;
2186 scope.spawn(async move {
2187 let mut snapshot_start_ix = 0;
2188 let mut abs_path = PathBuf::new();
2189 for snapshot in snapshots {
2190 let snapshot_end_ix =
2191 snapshot_start_ix + snapshot.visible_file_count();
2192 if worker_end_ix <= snapshot_start_ix {
2193 break;
2194 } else if worker_start_ix > snapshot_end_ix {
2195 snapshot_start_ix = snapshot_end_ix;
2196 continue;
2197 } else {
2198 let start_in_snapshot = worker_start_ix
2199 .saturating_sub(snapshot_start_ix);
2200 let end_in_snapshot =
2201 cmp::min(worker_end_ix, snapshot_end_ix)
2202 - snapshot_start_ix;
2203
2204 for entry in snapshot
2205 .files(false, start_in_snapshot)
2206 .take(end_in_snapshot - start_in_snapshot)
2207 {
2208 if matching_paths_tx.is_closed() {
2209 break;
2210 }
2211
2212 abs_path.clear();
2213 abs_path.push(&snapshot.abs_path());
2214 abs_path.push(&entry.path);
2215 let matches = if let Some(file) =
2216 fs.open_sync(&abs_path).await.log_err()
2217 {
2218 query.detect(file).unwrap_or(false)
2219 } else {
2220 false
2221 };
2222
2223 if matches {
2224 let project_path =
2225 (snapshot.id(), entry.path.clone());
2226 if matching_paths_tx
2227 .send(project_path)
2228 .await
2229 .is_err()
2230 {
2231 break;
2232 }
2233 }
2234 }
2235
2236 snapshot_start_ix = snapshot_end_ix;
2237 }
2238 }
2239 });
2240 }
2241 })
2242 .await;
2243 }
2244 })
2245 .detach();
2246
2247 let (buffers_tx, buffers_rx) = smol::channel::bounded(1024);
2248 let open_buffers = self
2249 .opened_buffers
2250 .values()
2251 .filter_map(|b| b.upgrade(cx))
2252 .collect::<HashSet<_>>();
2253 cx.spawn(|this, cx| async move {
2254 for buffer in &open_buffers {
2255 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2256 buffers_tx.send((buffer.clone(), snapshot)).await?;
2257 }
2258
2259 let open_buffers = Rc::new(RefCell::new(open_buffers));
2260 while let Some(project_path) = matching_paths_rx.next().await {
2261 if buffers_tx.is_closed() {
2262 break;
2263 }
2264
2265 let this = this.clone();
2266 let open_buffers = open_buffers.clone();
2267 let buffers_tx = buffers_tx.clone();
2268 cx.spawn(|mut cx| async move {
2269 if let Some(buffer) = this
2270 .update(&mut cx, |this, cx| this.open_buffer(project_path, cx))
2271 .await
2272 .log_err()
2273 {
2274 if open_buffers.borrow_mut().insert(buffer.clone()) {
2275 let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot());
2276 buffers_tx.send((buffer, snapshot)).await?;
2277 }
2278 }
2279
2280 Ok::<_, anyhow::Error>(())
2281 })
2282 .detach();
2283 }
2284
2285 Ok::<_, anyhow::Error>(())
2286 })
2287 .detach_and_log_err(cx);
2288
2289 let background = cx.background().clone();
2290 cx.background().spawn(async move {
2291 let query = &query;
2292 let mut matched_buffers = Vec::new();
2293 for _ in 0..workers {
2294 matched_buffers.push(HashMap::default());
2295 }
2296 background
2297 .scoped(|scope| {
2298 for worker_matched_buffers in matched_buffers.iter_mut() {
2299 let mut buffers_rx = buffers_rx.clone();
2300 scope.spawn(async move {
2301 while let Some((buffer, snapshot)) = buffers_rx.next().await {
2302 let buffer_matches = query
2303 .search(snapshot.as_rope())
2304 .await
2305 .iter()
2306 .map(|range| {
2307 snapshot.anchor_before(range.start)
2308 ..snapshot.anchor_after(range.end)
2309 })
2310 .collect::<Vec<_>>();
2311 if !buffer_matches.is_empty() {
2312 worker_matched_buffers
2313 .insert(buffer.clone(), buffer_matches);
2314 }
2315 }
2316 });
2317 }
2318 })
2319 .await;
2320 Ok(matched_buffers.into_iter().flatten().collect())
2321 })
2322 } else if let Some(project_id) = self.remote_id() {
2323 let request = self.client.request(query.to_proto(project_id));
2324 cx.spawn(|this, mut cx| async move {
2325 let response = request.await?;
2326 let mut result = HashMap::default();
2327 for location in response.locations {
2328 let buffer = location.buffer.ok_or_else(|| anyhow!("missing buffer"))?;
2329 let target_buffer = this
2330 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
2331 .await?;
2332 let start = location
2333 .start
2334 .and_then(deserialize_anchor)
2335 .ok_or_else(|| anyhow!("missing target start"))?;
2336 let end = location
2337 .end
2338 .and_then(deserialize_anchor)
2339 .ok_or_else(|| anyhow!("missing target end"))?;
                    result
                        .entry(target_buffer)
                        .or_default()
                        .push(start..end);
2344 }
2345 Ok(result)
2346 })
2347 } else {
2348 Task::ready(Ok(Default::default()))
2349 }
2350 }
2351
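    /// Sends a request to the buffer's language server when the project is
    /// local, or forwards it to the project host over RPC otherwise. Returns a
    /// default response when the server doesn't advertise the required
    /// capability.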
2352 fn request_lsp<R: LspCommand>(
2353 &self,
2354 buffer_handle: ModelHandle<Buffer>,
2355 request: R,
2356 cx: &mut ModelContext<Self>,
2357 ) -> Task<Result<R::Response>>
2358 where
2359 <R::LspRequest as lsp::request::Request>::Result: Send,
2360 {
2361 let buffer = buffer_handle.read(cx);
2362 if self.is_local() {
2363 let file = File::from_dyn(buffer.file()).and_then(File::as_local);
2364 if let Some((file, language_server)) = file.zip(buffer.language_server().cloned()) {
2365 let lsp_params = request.to_lsp(&file.abs_path(cx), cx);
2366 return cx.spawn(|this, cx| async move {
2367 if !language_server
2368 .capabilities()
2369 .await
2370 .map_or(false, |capabilities| {
2371 request.check_capabilities(&capabilities)
2372 })
2373 {
2374 return Ok(Default::default());
2375 }
2376
2377 let response = language_server
2378 .request::<R::LspRequest>(lsp_params)
2379 .await
2380 .context("lsp request failed")?;
2381 request
2382 .response_from_lsp(response, this, buffer_handle, cx)
2383 .await
2384 });
2385 }
2386 } else if let Some(project_id) = self.remote_id() {
2387 let rpc = self.client.clone();
2388 let message = request.to_proto(project_id, buffer);
2389 return cx.spawn(|this, cx| async move {
2390 let response = rpc.request(message).await?;
2391 request
2392 .response_from_proto(response, this, buffer_handle, cx)
2393 .await
2394 });
2395 }
2396 Task::ready(Ok(Default::default()))
2397 }
2398
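    /// Returns the worktree containing `abs_path` along with the path relative
    /// to the worktree's root, creating a new worktree rooted at `abs_path` if
    /// no existing worktree contains it.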
2399 pub fn find_or_create_local_worktree(
2400 &mut self,
2401 abs_path: impl AsRef<Path>,
2402 visible: bool,
2403 cx: &mut ModelContext<Self>,
2404 ) -> Task<Result<(ModelHandle<Worktree>, PathBuf)>> {
2405 let abs_path = abs_path.as_ref();
2406 if let Some((tree, relative_path)) = self.find_local_worktree(abs_path, cx) {
2407 Task::ready(Ok((tree.clone(), relative_path.into())))
2408 } else {
2409 let worktree = self.create_local_worktree(abs_path, visible, cx);
2410 cx.foreground()
2411 .spawn(async move { Ok((worktree.await?, PathBuf::new())) })
2412 }
2413 }
2414
2415 pub fn find_local_worktree(
2416 &self,
2417 abs_path: &Path,
2418 cx: &AppContext,
2419 ) -> Option<(ModelHandle<Worktree>, PathBuf)> {
2420 for tree in self.worktrees(cx) {
2421 if let Some(relative_path) = tree
2422 .read(cx)
2423 .as_local()
2424 .and_then(|t| abs_path.strip_prefix(t.abs_path()).ok())
2425 {
2426 return Some((tree.clone(), relative_path.into()));
2427 }
2428 }
2429 None
2430 }
2431
2432 pub fn is_shared(&self) -> bool {
2433 match &self.client_state {
2434 ProjectClientState::Local { is_shared, .. } => *is_shared,
2435 ProjectClientState::Remote { .. } => false,
2436 }
2437 }
2438
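    /// Creates a local worktree rooted at `abs_path`, deduplicating concurrent
    /// requests for the same path and registering or sharing the new worktree
    /// with the server when the project has a remote id.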
2439 fn create_local_worktree(
2440 &mut self,
2441 abs_path: impl AsRef<Path>,
2442 visible: bool,
2443 cx: &mut ModelContext<Self>,
2444 ) -> Task<Result<ModelHandle<Worktree>>> {
2445 let fs = self.fs.clone();
2446 let client = self.client.clone();
2447 let path: Arc<Path> = abs_path.as_ref().into();
2448 let task = self
2449 .loading_local_worktrees
2450 .entry(path.clone())
2451 .or_insert_with(|| {
2452 cx.spawn(|project, mut cx| {
2453 async move {
2454 let worktree =
2455 Worktree::local(client.clone(), path.clone(), visible, fs, &mut cx)
2456 .await;
2457 project.update(&mut cx, |project, _| {
2458 project.loading_local_worktrees.remove(&path);
2459 });
2460 let worktree = worktree?;
2461
2462 let (remote_project_id, is_shared) =
2463 project.update(&mut cx, |project, cx| {
2464 project.add_worktree(&worktree, cx);
2465 (project.remote_id(), project.is_shared())
2466 });
2467
2468 if let Some(project_id) = remote_project_id {
2469 if is_shared {
2470 worktree
2471 .update(&mut cx, |worktree, cx| {
2472 worktree.as_local_mut().unwrap().share(project_id, cx)
2473 })
2474 .await?;
2475 } else {
2476 worktree
2477 .update(&mut cx, |worktree, cx| {
2478 worktree.as_local_mut().unwrap().register(project_id, cx)
2479 })
2480 .await?;
2481 }
2482 }
2483
2484 Ok(worktree)
2485 }
                    .map_err(Arc::new)
2487 })
2488 .shared()
2489 })
2490 .clone();
2491 cx.foreground().spawn(async move {
2492 match task.await {
2493 Ok(worktree) => Ok(worktree),
2494 Err(err) => Err(anyhow!("{}", err)),
2495 }
2496 })
2497 }
2498
2499 pub fn remove_worktree(&mut self, id: WorktreeId, cx: &mut ModelContext<Self>) {
2500 self.worktrees.retain(|worktree| {
2501 worktree
2502 .upgrade(cx)
2503 .map_or(false, |w| w.read(cx).id() != id)
2504 });
2505 cx.notify();
2506 }
2507
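    /// Adds a worktree to the project, holding it strongly if it is visible,
    /// remote, or the project is shared, and weakly otherwise so that hidden
    /// local worktrees can be released when they are no longer used.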
2508 fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
2509 cx.observe(&worktree, |_, _, cx| cx.notify()).detach();
2510 if worktree.read(cx).is_local() {
2511 cx.subscribe(&worktree, |this, worktree, _, cx| {
2512 this.update_local_worktree_buffers(worktree, cx);
2513 })
2514 .detach();
2515 }
2516
2517 let push_strong_handle = {
2518 let worktree = worktree.read(cx);
2519 self.is_shared() || worktree.is_visible() || worktree.is_remote()
2520 };
2521 if push_strong_handle {
2522 self.worktrees
2523 .push(WorktreeHandle::Strong(worktree.clone()));
2524 } else {
2525 cx.observe_release(&worktree, |this, cx| {
2526 this.worktrees
2527 .retain(|worktree| worktree.upgrade(cx).is_some());
2528 cx.notify();
2529 })
2530 .detach();
2531 self.worktrees
2532 .push(WorktreeHandle::Weak(worktree.downgrade()));
2533 }
2534 cx.notify();
2535 }
2536
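    /// Updates the file metadata of open buffers after a local worktree's
    /// snapshot changes, notifying collaborators of the new file state and
    /// dropping entries for buffers that have been released.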
2537 fn update_local_worktree_buffers(
2538 &mut self,
2539 worktree_handle: ModelHandle<Worktree>,
2540 cx: &mut ModelContext<Self>,
2541 ) {
2542 let snapshot = worktree_handle.read(cx).snapshot();
2543 let mut buffers_to_delete = Vec::new();
2544 for (buffer_id, buffer) in &self.opened_buffers {
2545 if let Some(buffer) = buffer.upgrade(cx) {
2546 buffer.update(cx, |buffer, cx| {
2547 if let Some(old_file) = File::from_dyn(buffer.file()) {
2548 if old_file.worktree != worktree_handle {
2549 return;
2550 }
2551
2552 let new_file = if let Some(entry) = old_file
2553 .entry_id
2554 .and_then(|entry_id| snapshot.entry_for_id(entry_id))
2555 {
2556 File {
2557 is_local: true,
2558 entry_id: Some(entry.id),
2559 mtime: entry.mtime,
2560 path: entry.path.clone(),
2561 worktree: worktree_handle.clone(),
2562 }
2563 } else if let Some(entry) =
2564 snapshot.entry_for_path(old_file.path().as_ref())
2565 {
2566 File {
2567 is_local: true,
2568 entry_id: Some(entry.id),
2569 mtime: entry.mtime,
2570 path: entry.path.clone(),
2571 worktree: worktree_handle.clone(),
2572 }
2573 } else {
2574 File {
2575 is_local: true,
2576 entry_id: None,
2577 path: old_file.path().clone(),
2578 mtime: old_file.mtime(),
2579 worktree: worktree_handle.clone(),
2580 }
2581 };
2582
2583 if let Some(project_id) = self.remote_id() {
2584 self.client
2585 .send(proto::UpdateBufferFile {
2586 project_id,
2587 buffer_id: *buffer_id as u64,
2588 file: Some(new_file.to_proto()),
2589 })
2590 .log_err();
2591 }
2592 buffer.file_updated(Box::new(new_file), cx).detach();
2593 }
2594 });
2595 } else {
2596 buffers_to_delete.push(*buffer_id);
2597 }
2598 }
2599
2600 for buffer_id in buffers_to_delete {
2601 self.opened_buffers.remove(&buffer_id);
2602 }
2603 }
2604
2605 pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
2606 let new_active_entry = entry.and_then(|project_path| {
2607 let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
2608 let entry = worktree.read(cx).entry_for_path(project_path.path)?;
2609 Some(ProjectEntry {
2610 worktree_id: project_path.worktree_id,
2611 entry_id: entry.id,
2612 })
2613 });
2614 if new_active_entry != self.active_entry {
2615 self.active_entry = new_active_entry;
2616 cx.emit(Event::ActiveEntryChanged(new_active_entry));
2617 }
2618 }
2619
2620 pub fn is_running_disk_based_diagnostics(&self) -> bool {
2621 self.language_servers_with_diagnostics_running > 0
2622 }
2623
2624 pub fn diagnostic_summary(&self, cx: &AppContext) -> DiagnosticSummary {
2625 let mut summary = DiagnosticSummary::default();
2626 for (_, path_summary) in self.diagnostic_summaries(cx) {
2627 summary.error_count += path_summary.error_count;
2628 summary.warning_count += path_summary.warning_count;
2629 summary.info_count += path_summary.info_count;
2630 summary.hint_count += path_summary.hint_count;
2631 }
2632 summary
2633 }
2634
2635 pub fn diagnostic_summaries<'a>(
2636 &'a self,
2637 cx: &'a AppContext,
2638 ) -> impl Iterator<Item = (ProjectPath, DiagnosticSummary)> + 'a {
2639 self.worktrees(cx).flat_map(move |worktree| {
2640 let worktree = worktree.read(cx);
2641 let worktree_id = worktree.id();
2642 worktree
2643 .diagnostic_summaries()
2644 .map(move |(path, summary)| (ProjectPath { worktree_id, path }, summary))
2645 })
2646 }
2647
2648 pub fn disk_based_diagnostics_started(&mut self, cx: &mut ModelContext<Self>) {
2649 self.language_servers_with_diagnostics_running += 1;
2650 if self.language_servers_with_diagnostics_running == 1 {
2651 cx.emit(Event::DiskBasedDiagnosticsStarted);
2652 }
2653 }
2654
2655 pub fn disk_based_diagnostics_finished(&mut self, cx: &mut ModelContext<Self>) {
2656 cx.emit(Event::DiskBasedDiagnosticsUpdated);
2657 self.language_servers_with_diagnostics_running -= 1;
2658 if self.language_servers_with_diagnostics_running == 0 {
2659 cx.emit(Event::DiskBasedDiagnosticsFinished);
2660 }
2661 }
2662
2663 pub fn active_entry(&self) -> Option<ProjectEntry> {
2664 self.active_entry
2665 }
2666
2667 // RPC message handlers
2668
2669 async fn handle_unshare_project(
2670 this: ModelHandle<Self>,
2671 _: TypedEnvelope<proto::UnshareProject>,
2672 _: Arc<Client>,
2673 mut cx: AsyncAppContext,
2674 ) -> Result<()> {
2675 this.update(&mut cx, |this, cx| {
2676 if let ProjectClientState::Remote {
2677 sharing_has_stopped,
2678 ..
2679 } = &mut this.client_state
2680 {
2681 *sharing_has_stopped = true;
2682 this.collaborators.clear();
2683 cx.notify();
2684 } else {
2685 unreachable!()
2686 }
2687 });
2688
2689 Ok(())
2690 }
2691
2692 async fn handle_add_collaborator(
2693 this: ModelHandle<Self>,
2694 mut envelope: TypedEnvelope<proto::AddProjectCollaborator>,
2695 _: Arc<Client>,
2696 mut cx: AsyncAppContext,
2697 ) -> Result<()> {
2698 let user_store = this.read_with(&cx, |this, _| this.user_store.clone());
2699 let collaborator = envelope
2700 .payload
2701 .collaborator
2702 .take()
2703 .ok_or_else(|| anyhow!("empty collaborator"))?;
2704
2705 let collaborator = Collaborator::from_proto(collaborator, &user_store, &mut cx).await?;
2706 this.update(&mut cx, |this, cx| {
2707 this.collaborators
2708 .insert(collaborator.peer_id, collaborator);
2709 cx.notify();
2710 });
2711
2712 Ok(())
2713 }
2714
2715 async fn handle_remove_collaborator(
2716 this: ModelHandle<Self>,
2717 envelope: TypedEnvelope<proto::RemoveProjectCollaborator>,
2718 _: Arc<Client>,
2719 mut cx: AsyncAppContext,
2720 ) -> Result<()> {
2721 this.update(&mut cx, |this, cx| {
2722 let peer_id = PeerId(envelope.payload.peer_id);
2723 let replica_id = this
2724 .collaborators
2725 .remove(&peer_id)
2726 .ok_or_else(|| anyhow!("unknown peer {:?}", peer_id))?
2727 .replica_id;
2728 for (_, buffer) in &this.opened_buffers {
2729 if let Some(buffer) = buffer.upgrade(cx) {
2730 buffer.update(cx, |buffer, cx| buffer.remove_peer(replica_id, cx));
2731 }
2732 }
2733 cx.notify();
2734 Ok(())
2735 })
2736 }
2737
2738 async fn handle_register_worktree(
2739 this: ModelHandle<Self>,
2740 envelope: TypedEnvelope<proto::RegisterWorktree>,
2741 client: Arc<Client>,
2742 mut cx: AsyncAppContext,
2743 ) -> Result<()> {
2744 this.update(&mut cx, |this, cx| {
2745 let remote_id = this.remote_id().ok_or_else(|| anyhow!("invalid project"))?;
2746 let replica_id = this.replica_id();
2747 let worktree = proto::Worktree {
2748 id: envelope.payload.worktree_id,
2749 root_name: envelope.payload.root_name,
2750 entries: Default::default(),
2751 diagnostic_summaries: Default::default(),
2752 visible: envelope.payload.visible,
2753 };
2754 let (worktree, load_task) =
2755 Worktree::remote(remote_id, replica_id, worktree, client, cx);
2756 this.add_worktree(&worktree, cx);
2757 load_task.detach();
2758 Ok(())
2759 })
2760 }
2761
2762 async fn handle_unregister_worktree(
2763 this: ModelHandle<Self>,
2764 envelope: TypedEnvelope<proto::UnregisterWorktree>,
2765 _: Arc<Client>,
2766 mut cx: AsyncAppContext,
2767 ) -> Result<()> {
2768 this.update(&mut cx, |this, cx| {
2769 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2770 this.remove_worktree(worktree_id, cx);
2771 Ok(())
2772 })
2773 }
2774
2775 async fn handle_update_worktree(
2776 this: ModelHandle<Self>,
2777 envelope: TypedEnvelope<proto::UpdateWorktree>,
2778 _: Arc<Client>,
2779 mut cx: AsyncAppContext,
2780 ) -> Result<()> {
2781 this.update(&mut cx, |this, cx| {
2782 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2783 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2784 worktree.update(cx, |worktree, _| {
2785 let worktree = worktree.as_remote_mut().unwrap();
2786 worktree.update_from_remote(envelope)
2787 })?;
2788 }
2789 Ok(())
2790 })
2791 }
2792
2793 async fn handle_update_diagnostic_summary(
2794 this: ModelHandle<Self>,
2795 envelope: TypedEnvelope<proto::UpdateDiagnosticSummary>,
2796 _: Arc<Client>,
2797 mut cx: AsyncAppContext,
2798 ) -> Result<()> {
2799 this.update(&mut cx, |this, cx| {
2800 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
2801 if let Some(worktree) = this.worktree_for_id(worktree_id, cx) {
2802 if let Some(summary) = envelope.payload.summary {
2803 let project_path = ProjectPath {
2804 worktree_id,
2805 path: Path::new(&summary.path).into(),
2806 };
2807 worktree.update(cx, |worktree, _| {
2808 worktree
2809 .as_remote_mut()
2810 .unwrap()
2811 .update_diagnostic_summary(project_path.path.clone(), &summary);
2812 });
2813 cx.emit(Event::DiagnosticsUpdated(project_path));
2814 }
2815 }
2816 Ok(())
2817 })
2818 }
2819
2820 async fn handle_disk_based_diagnostics_updating(
2821 this: ModelHandle<Self>,
2822 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdating>,
2823 _: Arc<Client>,
2824 mut cx: AsyncAppContext,
2825 ) -> Result<()> {
2826 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_started(cx));
2827 Ok(())
2828 }
2829
2830 async fn handle_disk_based_diagnostics_updated(
2831 this: ModelHandle<Self>,
2832 _: TypedEnvelope<proto::DiskBasedDiagnosticsUpdated>,
2833 _: Arc<Client>,
2834 mut cx: AsyncAppContext,
2835 ) -> Result<()> {
2836 this.update(&mut cx, |this, cx| this.disk_based_diagnostics_finished(cx));
2837 Ok(())
2838 }
2839
2840 async fn handle_update_buffer(
2841 this: ModelHandle<Self>,
2842 envelope: TypedEnvelope<proto::UpdateBuffer>,
2843 _: Arc<Client>,
2844 mut cx: AsyncAppContext,
2845 ) -> Result<()> {
2846 this.update(&mut cx, |this, cx| {
2847 let payload = envelope.payload.clone();
2848 let buffer_id = payload.buffer_id;
2849 let ops = payload
2850 .operations
2851 .into_iter()
                .map(language::proto::deserialize_operation)
2853 .collect::<Result<Vec<_>, _>>()?;
2854 match this.opened_buffers.entry(buffer_id) {
2855 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2856 OpenBuffer::Strong(buffer) => {
2857 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx))?;
2858 }
2859 OpenBuffer::Loading(operations) => operations.extend_from_slice(&ops),
2860 OpenBuffer::Weak(_) => {}
2861 },
2862 hash_map::Entry::Vacant(e) => {
2863 e.insert(OpenBuffer::Loading(ops));
2864 }
2865 }
2866 Ok(())
2867 })
2868 }
2869
2870 async fn handle_update_buffer_file(
2871 this: ModelHandle<Self>,
2872 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2873 _: Arc<Client>,
2874 mut cx: AsyncAppContext,
2875 ) -> Result<()> {
2876 this.update(&mut cx, |this, cx| {
2877 let payload = envelope.payload.clone();
2878 let buffer_id = payload.buffer_id;
2879 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2880 let worktree = this
2881 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2882 .ok_or_else(|| anyhow!("no such worktree"))?;
2883 let file = File::from_proto(file, worktree.clone(), cx)?;
2884 let buffer = this
2885 .opened_buffers
2886 .get_mut(&buffer_id)
2887 .and_then(|b| b.upgrade(cx))
2888 .ok_or_else(|| anyhow!("no such buffer"))?;
2889 buffer.update(cx, |buffer, cx| {
2890 buffer.file_updated(Box::new(file), cx).detach();
2891 });
2892 Ok(())
2893 })
2894 }
2895
2896 async fn handle_save_buffer(
2897 this: ModelHandle<Self>,
2898 envelope: TypedEnvelope<proto::SaveBuffer>,
2899 _: Arc<Client>,
2900 mut cx: AsyncAppContext,
2901 ) -> Result<proto::BufferSaved> {
2902 let buffer_id = envelope.payload.buffer_id;
2903 let requested_version = envelope.payload.version.try_into()?;
2904
2905 let (project_id, buffer) = this.update(&mut cx, |this, cx| {
2906 let project_id = this.remote_id().ok_or_else(|| anyhow!("not connected"))?;
2907 let buffer = this
2908 .opened_buffers
2909 .get(&buffer_id)
2910 .map(|buffer| buffer.upgrade(cx).unwrap())
2911 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?;
2912 Ok::<_, anyhow::Error>((project_id, buffer))
2913 })?;
2914 buffer
2915 .update(&mut cx, |buffer, _| {
2916 buffer.wait_for_version(requested_version)
2917 })
2918 .await;
2919
2920 let (saved_version, mtime) = buffer.update(&mut cx, |buffer, cx| buffer.save(cx)).await?;
2921 Ok(proto::BufferSaved {
2922 project_id,
2923 buffer_id,
2924 version: (&saved_version).into(),
2925 mtime: Some(mtime.into()),
2926 })
2927 }
2928
2929 async fn handle_format_buffers(
2930 this: ModelHandle<Self>,
2931 envelope: TypedEnvelope<proto::FormatBuffers>,
2932 _: Arc<Client>,
2933 mut cx: AsyncAppContext,
2934 ) -> Result<proto::FormatBuffersResponse> {
2935 let sender_id = envelope.original_sender_id()?;
2936 let format = this.update(&mut cx, |this, cx| {
2937 let mut buffers = HashSet::default();
2938 for buffer_id in &envelope.payload.buffer_ids {
2939 buffers.insert(
2940 this.opened_buffers
2941 .get(buffer_id)
2942 .map(|buffer| buffer.upgrade(cx).unwrap())
2943 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))?,
2944 );
2945 }
2946 Ok::<_, anyhow::Error>(this.format(buffers, false, cx))
2947 })?;
2948
2949 let project_transaction = format.await?;
2950 let project_transaction = this.update(&mut cx, |this, cx| {
2951 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2952 });
2953 Ok(proto::FormatBuffersResponse {
2954 transaction: Some(project_transaction),
2955 })
2956 }
2957
2958 async fn handle_get_completions(
2959 this: ModelHandle<Self>,
2960 envelope: TypedEnvelope<proto::GetCompletions>,
2961 _: Arc<Client>,
2962 mut cx: AsyncAppContext,
2963 ) -> Result<proto::GetCompletionsResponse> {
2964 let position = envelope
2965 .payload
2966 .position
2967 .and_then(language::proto::deserialize_anchor)
2968 .ok_or_else(|| anyhow!("invalid position"))?;
2969 let version = clock::Global::from(envelope.payload.version);
2970 let buffer = this.read_with(&cx, |this, cx| {
2971 this.opened_buffers
2972 .get(&envelope.payload.buffer_id)
2973 .map(|buffer| buffer.upgrade(cx).unwrap())
2974 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
2975 })?;
2976 buffer
2977 .update(&mut cx, |buffer, _| buffer.wait_for_version(version))
2978 .await;
2979 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
2980 let completions = this
2981 .update(&mut cx, |this, cx| this.completions(&buffer, position, cx))
2982 .await?;
2983
2984 Ok(proto::GetCompletionsResponse {
2985 completions: completions
2986 .iter()
2987 .map(language::proto::serialize_completion)
2988 .collect(),
2989 version: (&version).into(),
2990 })
2991 }
2992
2993 async fn handle_apply_additional_edits_for_completion(
2994 this: ModelHandle<Self>,
2995 envelope: TypedEnvelope<proto::ApplyCompletionAdditionalEdits>,
2996 _: Arc<Client>,
2997 mut cx: AsyncAppContext,
2998 ) -> Result<proto::ApplyCompletionAdditionalEditsResponse> {
2999 let apply_additional_edits = this.update(&mut cx, |this, cx| {
3000 let buffer = this
3001 .opened_buffers
3002 .get(&envelope.payload.buffer_id)
3003 .map(|buffer| buffer.upgrade(cx).unwrap())
3004 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3005 let language = buffer.read(cx).language();
3006 let completion = language::proto::deserialize_completion(
3007 envelope
3008 .payload
3009 .completion
3010 .ok_or_else(|| anyhow!("invalid completion"))?,
3011 language,
3012 )?;
3013 Ok::<_, anyhow::Error>(
3014 this.apply_additional_edits_for_completion(buffer, completion, false, cx),
3015 )
3016 })?;
3017
3018 Ok(proto::ApplyCompletionAdditionalEditsResponse {
3019 transaction: apply_additional_edits
3020 .await?
3021 .as_ref()
3022 .map(language::proto::serialize_transaction),
3023 })
3024 }
3025
3026 async fn handle_get_code_actions(
3027 this: ModelHandle<Self>,
3028 envelope: TypedEnvelope<proto::GetCodeActions>,
3029 _: Arc<Client>,
3030 mut cx: AsyncAppContext,
3031 ) -> Result<proto::GetCodeActionsResponse> {
3032 let start = envelope
3033 .payload
3034 .start
3035 .and_then(language::proto::deserialize_anchor)
3036 .ok_or_else(|| anyhow!("invalid start"))?;
3037 let end = envelope
3038 .payload
3039 .end
3040 .and_then(language::proto::deserialize_anchor)
3041 .ok_or_else(|| anyhow!("invalid end"))?;
3042 let buffer = this.update(&mut cx, |this, cx| {
3043 this.opened_buffers
3044 .get(&envelope.payload.buffer_id)
3045 .map(|buffer| buffer.upgrade(cx).unwrap())
3046 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))
3047 })?;
3048 buffer
3049 .update(&mut cx, |buffer, _| {
3050 buffer.wait_for_version(envelope.payload.version.into())
3051 })
3052 .await;
3053
3054 let version = buffer.read_with(&cx, |buffer, _| buffer.version());
3055 let code_actions = this.update(&mut cx, |this, cx| {
3056 Ok::<_, anyhow::Error>(this.code_actions(&buffer, start..end, cx))
3057 })?;
3058
3059 Ok(proto::GetCodeActionsResponse {
3060 actions: code_actions
3061 .await?
3062 .iter()
3063 .map(language::proto::serialize_code_action)
3064 .collect(),
3065 version: (&version).into(),
3066 })
3067 }
3068
3069 async fn handle_apply_code_action(
3070 this: ModelHandle<Self>,
3071 envelope: TypedEnvelope<proto::ApplyCodeAction>,
3072 _: Arc<Client>,
3073 mut cx: AsyncAppContext,
3074 ) -> Result<proto::ApplyCodeActionResponse> {
3075 let sender_id = envelope.original_sender_id()?;
3076 let action = language::proto::deserialize_code_action(
3077 envelope
3078 .payload
3079 .action
3080 .ok_or_else(|| anyhow!("invalid action"))?,
3081 )?;
3082 let apply_code_action = this.update(&mut cx, |this, cx| {
3083 let buffer = this
3084 .opened_buffers
3085 .get(&envelope.payload.buffer_id)
3086 .map(|buffer| buffer.upgrade(cx).unwrap())
3087 .ok_or_else(|| anyhow!("unknown buffer id {}", envelope.payload.buffer_id))?;
3088 Ok::<_, anyhow::Error>(this.apply_code_action(buffer, action, false, cx))
3089 })?;
3090
3091 let project_transaction = apply_code_action.await?;
3092 let project_transaction = this.update(&mut cx, |this, cx| {
3093 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
3094 });
3095 Ok(proto::ApplyCodeActionResponse {
3096 transaction: Some(project_transaction),
3097 })
3098 }
3099
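    /// Handles an LSP-backed request forwarded by a collaborator by replaying
    /// it against this project's language server and serializing the response
    /// for the requesting peer.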
3100 async fn handle_lsp_command<T: LspCommand>(
3101 this: ModelHandle<Self>,
3102 envelope: TypedEnvelope<T::ProtoRequest>,
3103 _: Arc<Client>,
3104 mut cx: AsyncAppContext,
3105 ) -> Result<<T::ProtoRequest as proto::RequestMessage>::Response>
3106 where
3107 <T::LspRequest as lsp::request::Request>::Result: Send,
3108 {
3109 let sender_id = envelope.original_sender_id()?;
3110 let buffer_id = T::buffer_id_from_proto(&envelope.payload);
3111 let buffer_handle = this.read_with(&cx, |this, _| {
3112 this.opened_buffers
3113 .get(&buffer_id)
3114 .map(|buffer| buffer.upgrade(&cx).unwrap())
3115 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
3116 })?;
3117 let request = T::from_proto(
3118 envelope.payload,
3119 this.clone(),
3120 buffer_handle.clone(),
3121 cx.clone(),
3122 )
3123 .await?;
3124 let buffer_version = buffer_handle.read_with(&cx, |buffer, _| buffer.version());
3125 let response = this
3126 .update(&mut cx, |this, cx| {
3127 this.request_lsp(buffer_handle, request, cx)
3128 })
3129 .await?;
3130 this.update(&mut cx, |this, cx| {
3131 Ok(T::response_to_proto(
3132 response,
3133 this,
3134 sender_id,
3135 &buffer_version,
3136 cx,
3137 ))
3138 })
3139 }
3140
3141 async fn handle_get_project_symbols(
3142 this: ModelHandle<Self>,
3143 envelope: TypedEnvelope<proto::GetProjectSymbols>,
3144 _: Arc<Client>,
3145 mut cx: AsyncAppContext,
3146 ) -> Result<proto::GetProjectSymbolsResponse> {
3147 let symbols = this
3148 .update(&mut cx, |this, cx| {
3149 this.symbols(&envelope.payload.query, cx)
3150 })
3151 .await?;
3152
3153 Ok(proto::GetProjectSymbolsResponse {
3154 symbols: symbols.iter().map(serialize_symbol).collect(),
3155 })
3156 }
3157
3158 async fn handle_search_project(
3159 this: ModelHandle<Self>,
3160 envelope: TypedEnvelope<proto::SearchProject>,
3161 _: Arc<Client>,
3162 mut cx: AsyncAppContext,
3163 ) -> Result<proto::SearchProjectResponse> {
3164 let peer_id = envelope.original_sender_id()?;
3165 let query = SearchQuery::from_proto(envelope.payload)?;
3166 let result = this
3167 .update(&mut cx, |this, cx| this.search(query, cx))
3168 .await?;
3169
3170 this.update(&mut cx, |this, cx| {
3171 let mut locations = Vec::new();
3172 for (buffer, ranges) in result {
3173 for range in ranges {
3174 let start = serialize_anchor(&range.start);
3175 let end = serialize_anchor(&range.end);
3176 let buffer = this.serialize_buffer_for_peer(&buffer, peer_id, cx);
3177 locations.push(proto::Location {
3178 buffer: Some(buffer),
3179 start: Some(start),
3180 end: Some(end),
3181 });
3182 }
3183 }
3184 Ok(proto::SearchProjectResponse { locations })
3185 })
3186 }
3187
3188 async fn handle_open_buffer_for_symbol(
3189 this: ModelHandle<Self>,
3190 envelope: TypedEnvelope<proto::OpenBufferForSymbol>,
3191 _: Arc<Client>,
3192 mut cx: AsyncAppContext,
3193 ) -> Result<proto::OpenBufferForSymbolResponse> {
3194 let peer_id = envelope.original_sender_id()?;
3195 let symbol = envelope
3196 .payload
3197 .symbol
3198 .ok_or_else(|| anyhow!("invalid symbol"))?;
3199 let symbol = this.read_with(&cx, |this, _| {
3200 let symbol = this.deserialize_symbol(symbol)?;
3201 let signature = this.symbol_signature(symbol.worktree_id, &symbol.path);
3202 if signature == symbol.signature {
3203 Ok(symbol)
3204 } else {
3205 Err(anyhow!("invalid symbol signature"))
3206 }
3207 })?;
3208 let buffer = this
3209 .update(&mut cx, |this, cx| this.open_buffer_for_symbol(&symbol, cx))
3210 .await?;
3211
3212 Ok(proto::OpenBufferForSymbolResponse {
3213 buffer: Some(this.update(&mut cx, |this, cx| {
3214 this.serialize_buffer_for_peer(&buffer, peer_id, cx)
3215 })),
3216 })
3217 }
3218
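    /// Computes a signature for a symbol's path, letting the host verify that
    /// symbols sent back by peers were previously produced by this project.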
3219 fn symbol_signature(&self, worktree_id: WorktreeId, path: &Path) -> [u8; 32] {
3220 let mut hasher = Sha256::new();
3221 hasher.update(worktree_id.to_proto().to_be_bytes());
3222 hasher.update(path.to_string_lossy().as_bytes());
3223 hasher.update(self.nonce.to_be_bytes());
3224 hasher.finalize().as_slice().try_into().unwrap()
3225 }
3226
3227 async fn handle_open_buffer(
3228 this: ModelHandle<Self>,
3229 envelope: TypedEnvelope<proto::OpenBuffer>,
3230 _: Arc<Client>,
3231 mut cx: AsyncAppContext,
3232 ) -> Result<proto::OpenBufferResponse> {
3233 let peer_id = envelope.original_sender_id()?;
3234 let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
3235 let open_buffer = this.update(&mut cx, |this, cx| {
3236 this.open_buffer(
3237 ProjectPath {
3238 worktree_id,
3239 path: PathBuf::from(envelope.payload.path).into(),
3240 },
3241 cx,
3242 )
3243 });
3244
3245 let buffer = open_buffer.await?;
3246 this.update(&mut cx, |this, cx| {
3247 Ok(proto::OpenBufferResponse {
3248 buffer: Some(this.serialize_buffer_for_peer(&buffer, peer_id, cx)),
3249 })
3250 })
3251 }
3252
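    /// Converts a project transaction into its wire representation for the
    /// given peer, serializing each affected buffer along with its transaction.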
3253 fn serialize_project_transaction_for_peer(
3254 &mut self,
3255 project_transaction: ProjectTransaction,
3256 peer_id: PeerId,
3257 cx: &AppContext,
3258 ) -> proto::ProjectTransaction {
3259 let mut serialized_transaction = proto::ProjectTransaction {
3260 buffers: Default::default(),
3261 transactions: Default::default(),
3262 };
3263 for (buffer, transaction) in project_transaction.0 {
3264 serialized_transaction
3265 .buffers
3266 .push(self.serialize_buffer_for_peer(&buffer, peer_id, cx));
3267 serialized_transaction
3268 .transactions
3269 .push(language::proto::serialize_transaction(&transaction));
3270 }
3271 serialized_transaction
3272 }
3273
3274 fn deserialize_project_transaction(
3275 &mut self,
3276 message: proto::ProjectTransaction,
3277 push_to_history: bool,
3278 cx: &mut ModelContext<Self>,
3279 ) -> Task<Result<ProjectTransaction>> {
3280 cx.spawn(|this, mut cx| async move {
3281 let mut project_transaction = ProjectTransaction::default();
3282 for (buffer, transaction) in message.buffers.into_iter().zip(message.transactions) {
3283 let buffer = this
3284 .update(&mut cx, |this, cx| this.deserialize_buffer(buffer, cx))
3285 .await?;
3286 let transaction = language::proto::deserialize_transaction(transaction)?;
3287 project_transaction.0.insert(buffer, transaction);
3288 }
3289
3290 for (buffer, transaction) in &project_transaction.0 {
3291 buffer
3292 .update(&mut cx, |buffer, _| {
3293 buffer.wait_for_edits(transaction.edit_ids.iter().copied())
3294 })
3295 .await;
3296
3297 if push_to_history {
3298 buffer.update(&mut cx, |buffer, _| {
3299 buffer.push_transaction(transaction.clone(), Instant::now());
3300 });
3301 }
3302 }
3303
3304 Ok(project_transaction)
3305 })
3306 }
3307
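    /// Serializes a buffer for the given peer, sending the full buffer state
    /// the first time this peer encounters the buffer and just its id on
    /// subsequent occasions.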
3308 fn serialize_buffer_for_peer(
3309 &mut self,
3310 buffer: &ModelHandle<Buffer>,
3311 peer_id: PeerId,
3312 cx: &AppContext,
3313 ) -> proto::Buffer {
3314 let buffer_id = buffer.read(cx).remote_id();
3315 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
3316 if shared_buffers.insert(buffer_id) {
3317 proto::Buffer {
3318 variant: Some(proto::buffer::Variant::State(buffer.read(cx).to_proto())),
3319 }
3320 } else {
3321 proto::Buffer {
3322 variant: Some(proto::buffer::Variant::Id(buffer_id)),
3323 }
3324 }
3325 }
3326
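    /// Resolves a serialized buffer into a buffer handle, either waiting for a
    /// buffer with the given id to finish opening or constructing a new buffer
    /// from the transmitted state.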
3327 fn deserialize_buffer(
3328 &mut self,
3329 buffer: proto::Buffer,
3330 cx: &mut ModelContext<Self>,
3331 ) -> Task<Result<ModelHandle<Buffer>>> {
3332 let replica_id = self.replica_id();
3333
3334 let opened_buffer_tx = self.opened_buffer.0.clone();
3335 let mut opened_buffer_rx = self.opened_buffer.1.clone();
3336 cx.spawn(|this, mut cx| async move {
3337 match buffer.variant.ok_or_else(|| anyhow!("missing buffer"))? {
3338 proto::buffer::Variant::Id(id) => {
3339 let buffer = loop {
3340 let buffer = this.read_with(&cx, |this, cx| {
3341 this.opened_buffers
3342 .get(&id)
3343 .and_then(|buffer| buffer.upgrade(cx))
3344 });
3345 if let Some(buffer) = buffer {
3346 break buffer;
3347 }
3348 opened_buffer_rx
3349 .next()
3350 .await
3351 .ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
3352 };
3353 Ok(buffer)
3354 }
3355 proto::buffer::Variant::State(mut buffer) => {
3356 let mut buffer_worktree = None;
3357 let mut buffer_file = None;
3358 if let Some(file) = buffer.file.take() {
3359 this.read_with(&cx, |this, cx| {
3360 let worktree_id = WorktreeId::from_proto(file.worktree_id);
3361 let worktree =
3362 this.worktree_for_id(worktree_id, cx).ok_or_else(|| {
3363 anyhow!("no worktree found for id {}", file.worktree_id)
3364 })?;
3365 buffer_file =
3366 Some(Box::new(File::from_proto(file, worktree.clone(), cx)?)
3367 as Box<dyn language::File>);
3368 buffer_worktree = Some(worktree);
3369 Ok::<_, anyhow::Error>(())
3370 })?;
3371 }
3372
3373 let buffer = cx.add_model(|cx| {
3374 Buffer::from_proto(replica_id, buffer, buffer_file, cx).unwrap()
3375 });
3376
3377 this.update(&mut cx, |this, cx| {
3378 this.register_buffer(&buffer, buffer_worktree.as_ref(), cx)
3379 })?;
3380
3381 *opened_buffer_tx.borrow_mut().borrow_mut() = ();
3382 Ok(buffer)
3383 }
3384 }
3385 })
3386 }
3387
3388 fn deserialize_symbol(&self, serialized_symbol: proto::Symbol) -> Result<Symbol> {
3389 let language = self
3390 .languages
3391 .get_language(&serialized_symbol.language_name);
3392 let start = serialized_symbol
3393 .start
3394 .ok_or_else(|| anyhow!("invalid start"))?;
3395 let end = serialized_symbol
3396 .end
3397 .ok_or_else(|| anyhow!("invalid end"))?;
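        // Assumes the serialized kind is a valid `lsp::SymbolKind` discriminant.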
3398 let kind = unsafe { mem::transmute(serialized_symbol.kind) };
3399 Ok(Symbol {
3400 source_worktree_id: WorktreeId::from_proto(serialized_symbol.source_worktree_id),
3401 worktree_id: WorktreeId::from_proto(serialized_symbol.worktree_id),
3402 language_name: serialized_symbol.language_name.clone(),
3403 label: language
3404 .and_then(|language| language.label_for_symbol(&serialized_symbol.name, kind))
3405 .unwrap_or_else(|| CodeLabel::plain(serialized_symbol.name.clone(), None)),
3406 name: serialized_symbol.name,
3407 path: PathBuf::from(serialized_symbol.path),
3408 range: PointUtf16::new(start.row, start.column)..PointUtf16::new(end.row, end.column),
3409 kind,
3410 signature: serialized_symbol
3411 .signature
3412 .try_into()
3413 .map_err(|_| anyhow!("invalid signature"))?,
3414 })
3415 }
3416
3417 async fn handle_close_buffer(
3418 _: ModelHandle<Self>,
3419 _: TypedEnvelope<proto::CloseBuffer>,
3420 _: Arc<Client>,
3421 _: AsyncAppContext,
3422 ) -> Result<()> {
3423 // TODO: use this for following
3424 Ok(())
3425 }
3426
3427 async fn handle_buffer_saved(
3428 this: ModelHandle<Self>,
3429 envelope: TypedEnvelope<proto::BufferSaved>,
3430 _: Arc<Client>,
3431 mut cx: AsyncAppContext,
3432 ) -> Result<()> {
3433 let version = envelope.payload.version.try_into()?;
3434 let mtime = envelope
3435 .payload
3436 .mtime
3437 .ok_or_else(|| anyhow!("missing mtime"))?
3438 .into();
3439
3440 this.update(&mut cx, |this, cx| {
3441 let buffer = this
3442 .opened_buffers
3443 .get(&envelope.payload.buffer_id)
3444 .and_then(|buffer| buffer.upgrade(cx));
3445 if let Some(buffer) = buffer {
3446 buffer.update(cx, |buffer, cx| {
3447 buffer.did_save(version, mtime, None, cx);
3448 });
3449 }
3450 Ok(())
3451 })
3452 }
3453
3454 async fn handle_buffer_reloaded(
3455 this: ModelHandle<Self>,
3456 envelope: TypedEnvelope<proto::BufferReloaded>,
3457 _: Arc<Client>,
3458 mut cx: AsyncAppContext,
3459 ) -> Result<()> {
3460 let payload = envelope.payload.clone();
3461 let version = payload.version.try_into()?;
3462 let mtime = payload
3463 .mtime
3464 .ok_or_else(|| anyhow!("missing mtime"))?
3465 .into();
3466 this.update(&mut cx, |this, cx| {
3467 let buffer = this
3468 .opened_buffers
3469 .get(&payload.buffer_id)
3470 .and_then(|buffer| buffer.upgrade(cx));
3471 if let Some(buffer) = buffer {
3472 buffer.update(cx, |buffer, cx| {
3473 buffer.did_reload(version, mtime, cx);
3474 });
3475 }
3476 Ok(())
3477 })
3478 }
3479
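    /// Fuzzy-matches `query` against the paths of all visible worktrees,
    /// returning up to `max_results` matches.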
3480 pub fn match_paths<'a>(
3481 &self,
3482 query: &'a str,
3483 include_ignored: bool,
3484 smart_case: bool,
3485 max_results: usize,
3486 cancel_flag: &'a AtomicBool,
3487 cx: &AppContext,
3488 ) -> impl 'a + Future<Output = Vec<PathMatch>> {
3489 let worktrees = self
3490 .worktrees(cx)
3491 .filter(|worktree| worktree.read(cx).is_visible())
3492 .collect::<Vec<_>>();
3493 let include_root_name = worktrees.len() > 1;
3494 let candidate_sets = worktrees
3495 .into_iter()
3496 .map(|worktree| CandidateSet {
3497 snapshot: worktree.read(cx).snapshot(),
3498 include_ignored,
3499 include_root_name,
3500 })
3501 .collect::<Vec<_>>();
3502
3503 let background = cx.background().clone();
3504 async move {
3505 fuzzy::match_paths(
3506 candidate_sets.as_slice(),
3507 query,
3508 smart_case,
3509 max_results,
3510 cancel_flag,
3511 background,
3512 )
3513 .await
3514 }
3515 }
3516}
3517
3518impl WorktreeHandle {
3519 pub fn upgrade(&self, cx: &AppContext) -> Option<ModelHandle<Worktree>> {
3520 match self {
3521 WorktreeHandle::Strong(handle) => Some(handle.clone()),
3522 WorktreeHandle::Weak(handle) => handle.upgrade(cx),
3523 }
3524 }
3525}
3526
3527impl OpenBuffer {
3528 pub fn upgrade(&self, cx: &impl UpgradeModelHandle) -> Option<ModelHandle<Buffer>> {
3529 match self {
3530 OpenBuffer::Strong(handle) => Some(handle.clone()),
3531 OpenBuffer::Weak(handle) => handle.upgrade(cx),
3532 OpenBuffer::Loading(_) => None,
3533 }
3534 }
3535}
3536
3537struct CandidateSet {
3538 snapshot: Snapshot,
3539 include_ignored: bool,
3540 include_root_name: bool,
3541}
3542
3543impl<'a> PathMatchCandidateSet<'a> for CandidateSet {
3544 type Candidates = CandidateSetIter<'a>;
3545
3546 fn id(&self) -> usize {
3547 self.snapshot.id().to_usize()
3548 }
3549
3550 fn len(&self) -> usize {
3551 if self.include_ignored {
3552 self.snapshot.file_count()
3553 } else {
3554 self.snapshot.visible_file_count()
3555 }
3556 }
3557
3558 fn prefix(&self) -> Arc<str> {
3559 if self.snapshot.root_entry().map_or(false, |e| e.is_file()) {
3560 self.snapshot.root_name().into()
3561 } else if self.include_root_name {
3562 format!("{}/", self.snapshot.root_name()).into()
3563 } else {
3564 "".into()
3565 }
3566 }
3567
3568 fn candidates(&'a self, start: usize) -> Self::Candidates {
3569 CandidateSetIter {
3570 traversal: self.snapshot.files(self.include_ignored, start),
3571 }
3572 }
3573}
3574
3575struct CandidateSetIter<'a> {
3576 traversal: Traversal<'a>,
3577}
3578
3579impl<'a> Iterator for CandidateSetIter<'a> {
3580 type Item = PathMatchCandidate<'a>;
3581
3582 fn next(&mut self) -> Option<Self::Item> {
3583 self.traversal.next().map(|entry| {
3584 if let EntryKind::File(char_bag) = entry.kind {
3585 PathMatchCandidate {
3586 path: &entry.path,
3587 char_bag,
3588 }
3589 } else {
3590 unreachable!()
3591 }
3592 })
3593 }
3594}
3595
3596impl Entity for Project {
3597 type Event = Event;
3598
3599 fn release(&mut self, _: &mut gpui::MutableAppContext) {
3600 match &self.client_state {
3601 ProjectClientState::Local { remote_id_rx, .. } => {
3602 if let Some(project_id) = *remote_id_rx.borrow() {
3603 self.client
3604 .send(proto::UnregisterProject { project_id })
3605 .log_err();
3606 }
3607 }
3608 ProjectClientState::Remote { remote_id, .. } => {
3609 self.client
3610 .send(proto::LeaveProject {
3611 project_id: *remote_id,
3612 })
3613 .log_err();
3614 }
3615 }
3616 }
3617
3618 fn app_will_quit(
3619 &mut self,
3620 _: &mut MutableAppContext,
3621 ) -> Option<std::pin::Pin<Box<dyn 'static + Future<Output = ()>>>> {
3622 let shutdown_futures = self
3623 .language_servers
3624 .drain()
3625 .filter_map(|(_, server)| server.shutdown())
3626 .collect::<Vec<_>>();
3627 Some(
3628 async move {
3629 futures::future::join_all(shutdown_futures).await;
3630 }
3631 .boxed(),
3632 )
3633 }
3634}
3635
3636impl Collaborator {
3637 fn from_proto(
3638 message: proto::Collaborator,
3639 user_store: &ModelHandle<UserStore>,
3640 cx: &mut AsyncAppContext,
3641 ) -> impl Future<Output = Result<Self>> {
3642 let user = user_store.update(cx, |user_store, cx| {
3643 user_store.fetch_user(message.user_id, cx)
3644 });
3645
3646 async move {
3647 Ok(Self {
3648 peer_id: PeerId(message.peer_id),
3649 user: user.await?,
3650 replica_id: message.replica_id as ReplicaId,
3651 })
3652 }
3653 }
3654}
3655
3656impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
3657 fn from((worktree_id, path): (WorktreeId, P)) -> Self {
3658 Self {
3659 worktree_id,
3660 path: path.as_ref().into(),
3661 }
3662 }
3663}
3664
3665impl From<lsp::CreateFileOptions> for fs::CreateOptions {
3666 fn from(options: lsp::CreateFileOptions) -> Self {
3667 Self {
3668 overwrite: options.overwrite.unwrap_or(false),
3669 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3670 }
3671 }
3672}
3673
3674impl From<lsp::RenameFileOptions> for fs::RenameOptions {
3675 fn from(options: lsp::RenameFileOptions) -> Self {
3676 Self {
3677 overwrite: options.overwrite.unwrap_or(false),
3678 ignore_if_exists: options.ignore_if_exists.unwrap_or(false),
3679 }
3680 }
3681}
3682
3683impl From<lsp::DeleteFileOptions> for fs::RemoveOptions {
3684 fn from(options: lsp::DeleteFileOptions) -> Self {
3685 Self {
3686 recursive: options.recursive.unwrap_or(false),
3687 ignore_if_not_exists: options.ignore_if_not_exists.unwrap_or(false),
3688 }
3689 }
3690}
3691
3692fn serialize_symbol(symbol: &Symbol) -> proto::Symbol {
3693 proto::Symbol {
3694 source_worktree_id: symbol.source_worktree_id.to_proto(),
3695 worktree_id: symbol.worktree_id.to_proto(),
3696 language_name: symbol.language_name.clone(),
3697 name: symbol.name.clone(),
3698 kind: unsafe { mem::transmute(symbol.kind) },
3699 path: symbol.path.to_string_lossy().to_string(),
3700 start: Some(proto::Point {
3701 row: symbol.range.start.row,
3702 column: symbol.range.start.column,
3703 }),
3704 end: Some(proto::Point {
3705 row: symbol.range.end.row,
3706 column: symbol.range.end.column,
3707 }),
3708 signature: symbol.signature.to_vec(),
3709 }
3710}
3711
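/// Computes the path to `path` relative to `base`, inserting `..` components
/// where needed.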
3712fn relativize_path(base: &Path, path: &Path) -> PathBuf {
3713 let mut path_components = path.components();
3714 let mut base_components = base.components();
3715 let mut components: Vec<Component> = Vec::new();
3716 loop {
3717 match (path_components.next(), base_components.next()) {
3718 (None, None) => break,
3719 (Some(a), None) => {
3720 components.push(a);
3721 components.extend(path_components.by_ref());
3722 break;
3723 }
3724 (None, _) => components.push(Component::ParentDir),
3725 (Some(a), Some(b)) if components.is_empty() && a == b => (),
3726 (Some(a), Some(b)) if b == Component::CurDir => components.push(a),
3727 (Some(a), Some(_)) => {
3728 components.push(Component::ParentDir);
3729 for _ in base_components {
3730 components.push(Component::ParentDir);
3731 }
3732 components.push(a);
3733 components.extend(path_components.by_ref());
3734 break;
3735 }
3736 }
3737 }
3738 components.iter().map(|c| c.as_os_str()).collect()
3739}
3740
3741#[cfg(test)]
3742mod tests {
3743 use super::{Event, *};
3744 use fs::RealFs;
3745 use futures::StreamExt;
3746 use gpui::test::subscribe;
3747 use language::{
3748 tree_sitter_rust, AnchorRangeExt, Diagnostic, LanguageConfig, LanguageServerConfig, Point,
3749 };
3750 use lsp::Url;
3751 use serde_json::json;
3752 use std::{cell::RefCell, os::unix, path::PathBuf, rc::Rc};
3753 use unindent::Unindent as _;
3754 use util::test::temp_tree;
3755 use worktree::WorktreeHandle as _;
3756
3757 #[gpui::test]
3758 async fn test_populate_and_search(cx: &mut gpui::TestAppContext) {
3759 let dir = temp_tree(json!({
3760 "root": {
3761 "apple": "",
3762 "banana": {
3763 "carrot": {
3764 "date": "",
3765 "endive": "",
3766 }
3767 },
3768 "fennel": {
3769 "grape": "",
3770 }
3771 }
3772 }));
3773
3774 let root_link_path = dir.path().join("root_link");
3775 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
3776 unix::fs::symlink(
3777 &dir.path().join("root/fennel"),
3778 &dir.path().join("root/finnochio"),
3779 )
3780 .unwrap();
3781
3782 let project = Project::test(Arc::new(RealFs), cx);
3783
3784 let (tree, _) = project
3785 .update(cx, |project, cx| {
3786 project.find_or_create_local_worktree(&root_link_path, true, cx)
3787 })
3788 .await
3789 .unwrap();
3790
3791 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3792 .await;
3793 cx.read(|cx| {
3794 let tree = tree.read(cx);
3795 assert_eq!(tree.file_count(), 5);
3796 assert_eq!(
3797 tree.inode_for_path("fennel/grape"),
3798 tree.inode_for_path("finnochio/grape")
3799 );
3800 });
3801
3802 let cancel_flag = Default::default();
3803 let results = project
3804 .read_with(cx, |project, cx| {
3805 project.match_paths("bna", false, false, 10, &cancel_flag, cx)
3806 })
3807 .await;
3808 assert_eq!(
3809 results
3810 .into_iter()
3811 .map(|result| result.path)
3812 .collect::<Vec<Arc<Path>>>(),
3813 vec![
3814 PathBuf::from("banana/carrot/date").into(),
3815 PathBuf::from("banana/carrot/endive").into(),
3816 ]
3817 );
3818 }
3819
3820 #[gpui::test]
3821 async fn test_language_server_diagnostics(cx: &mut gpui::TestAppContext) {
3822 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3823 let progress_token = language_server_config
3824 .disk_based_diagnostics_progress_token
3825 .clone()
3826 .unwrap();
3827
3828 let language = Arc::new(Language::new(
3829 LanguageConfig {
3830 name: "Rust".into(),
3831 path_suffixes: vec!["rs".to_string()],
3832 language_server: Some(language_server_config),
3833 ..Default::default()
3834 },
3835 Some(tree_sitter_rust::language()),
3836 ));
3837
3838 let fs = FakeFs::new(cx.background());
3839 fs.insert_tree(
3840 "/dir",
3841 json!({
3842 "a.rs": "fn a() { A }",
3843 "b.rs": "const y: i32 = 1",
3844 }),
3845 )
3846 .await;
3847
3848 let project = Project::test(fs, cx);
3849 project.update(cx, |project, _| {
3850 Arc::get_mut(&mut project.languages).unwrap().add(language);
3851 });
3852
3853 let (tree, _) = project
3854 .update(cx, |project, cx| {
3855 project.find_or_create_local_worktree("/dir", true, cx)
3856 })
3857 .await
3858 .unwrap();
3859 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
3860
3861 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3862 .await;
3863
3864        // Cause the worktree to start the fake language server.
3865 let _buffer = project
3866 .update(cx, |project, cx| {
3867 project.open_buffer((worktree_id, Path::new("b.rs")), cx)
3868 })
3869 .await
3870 .unwrap();
3871
3872 let mut events = subscribe(&project, cx);
3873
3874 let mut fake_server = fake_servers.next().await.unwrap();
3875 fake_server.start_progress(&progress_token).await;
3876 assert_eq!(
3877 events.next().await.unwrap(),
3878 Event::DiskBasedDiagnosticsStarted
3879 );
3880
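        // Simulate overlapping progress cycles for the disk-based diagnostics token; the
        // Updated and Finished events should only fire once the last cycle ends.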
3881 fake_server.start_progress(&progress_token).await;
3882 fake_server.end_progress(&progress_token).await;
3883 fake_server.start_progress(&progress_token).await;
3884
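        // Publish a diagnostic while disk-based diagnostics are still in progress.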
3885 fake_server
3886 .notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
3887 uri: Url::from_file_path("/dir/a.rs").unwrap(),
3888 version: None,
3889 diagnostics: vec![lsp::Diagnostic {
3890 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
3891 severity: Some(lsp::DiagnosticSeverity::ERROR),
3892 message: "undefined variable 'A'".to_string(),
3893 ..Default::default()
3894 }],
3895 })
3896 .await;
3897 assert_eq!(
3898 events.next().await.unwrap(),
3899 Event::DiagnosticsUpdated((worktree_id, Path::new("a.rs")).into())
3900 );
3901
3902 fake_server.end_progress(&progress_token).await;
3903 fake_server.end_progress(&progress_token).await;
3904 assert_eq!(
3905 events.next().await.unwrap(),
3906 Event::DiskBasedDiagnosticsUpdated
3907 );
3908 assert_eq!(
3909 events.next().await.unwrap(),
3910 Event::DiskBasedDiagnosticsFinished
3911 );
3912
3913 let buffer = project
3914 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
3915 .await
3916 .unwrap();
3917
3918 buffer.read_with(cx, |buffer, _| {
3919 let snapshot = buffer.snapshot();
3920 let diagnostics = snapshot
3921 .diagnostics_in_range::<_, Point>(0..buffer.len())
3922 .collect::<Vec<_>>();
3923 assert_eq!(
3924 diagnostics,
3925 &[DiagnosticEntry {
3926 range: Point::new(0, 9)..Point::new(0, 10),
3927 diagnostic: Diagnostic {
3928 severity: lsp::DiagnosticSeverity::ERROR,
3929 message: "undefined variable 'A'".to_string(),
3930 group_id: 0,
3931 is_primary: true,
3932 ..Default::default()
3933 }
3934 }]
3935 )
3936 });
3937 }
3938
3939 #[gpui::test]
3940 async fn test_search_worktree_without_files(cx: &mut gpui::TestAppContext) {
3941 let dir = temp_tree(json!({
3942 "root": {
3943 "dir1": {},
3944 "dir2": {
3945 "dir3": {}
3946 }
3947 }
3948 }));
3949
3950 let project = Project::test(Arc::new(RealFs), cx);
3951 let (tree, _) = project
3952 .update(cx, |project, cx| {
3953 project.find_or_create_local_worktree(&dir.path(), true, cx)
3954 })
3955 .await
3956 .unwrap();
3957
3958 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
3959 .await;
3960
3961 let cancel_flag = Default::default();
3962 let results = project
3963 .read_with(cx, |project, cx| {
3964 project.match_paths("dir", false, false, 10, &cancel_flag, cx)
3965 })
3966 .await;
3967
3968 assert!(results.is_empty());
3969 }
3970
3971 #[gpui::test]
3972 async fn test_definition(cx: &mut gpui::TestAppContext) {
3973 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
3974 let language = Arc::new(Language::new(
3975 LanguageConfig {
3976 name: "Rust".into(),
3977 path_suffixes: vec!["rs".to_string()],
3978 language_server: Some(language_server_config),
3979 ..Default::default()
3980 },
3981 Some(tree_sitter_rust::language()),
3982 ));
3983
3984 let fs = FakeFs::new(cx.background());
3985 fs.insert_tree(
3986 "/dir",
3987 json!({
3988 "a.rs": "const fn a() { A }",
3989 "b.rs": "const y: i32 = crate::a()",
3990 }),
3991 )
3992 .await;
3993
3994 let project = Project::test(fs, cx);
3995 project.update(cx, |project, _| {
3996 Arc::get_mut(&mut project.languages).unwrap().add(language);
3997 });
3998
3999 let (tree, _) = project
4000 .update(cx, |project, cx| {
4001 project.find_or_create_local_worktree("/dir/b.rs", true, cx)
4002 })
4003 .await
4004 .unwrap();
4005 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4006 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4007 .await;
4008
4009 let buffer = project
4010 .update(cx, |project, cx| {
4011 project.open_buffer(
4012 ProjectPath {
4013 worktree_id,
4014 path: Path::new("").into(),
4015 },
4016 cx,
4017 )
4018 })
4019 .await
4020 .unwrap();
4021
4022 let mut fake_server = fake_servers.next().await.unwrap();
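        // Respond to the definition request with a location in a file that isn't yet
        // part of the project.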
4023 fake_server.handle_request::<lsp::request::GotoDefinition, _>(move |params, _| {
4024 let params = params.text_document_position_params;
4025 assert_eq!(
4026 params.text_document.uri.to_file_path().unwrap(),
4027 Path::new("/dir/b.rs"),
4028 );
4029 assert_eq!(params.position, lsp::Position::new(0, 22));
4030
4031 Some(lsp::GotoDefinitionResponse::Scalar(lsp::Location::new(
4032 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
4033 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
4034 )))
4035 });
4036
4037 let mut definitions = project
4038 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
4039 .await
4040 .unwrap();
4041
4042 assert_eq!(definitions.len(), 1);
4043 let definition = definitions.pop().unwrap();
4044 cx.update(|cx| {
4045 let target_buffer = definition.buffer.read(cx);
4046 assert_eq!(
4047 target_buffer
4048 .file()
4049 .unwrap()
4050 .as_local()
4051 .unwrap()
4052 .abs_path(cx),
4053 Path::new("/dir/a.rs"),
4054 );
4055 assert_eq!(definition.range.to_offset(target_buffer), 9..10);
4056 assert_eq!(
4057 list_worktrees(&project, cx),
4058 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
4059 );
4060
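            // Dropping the definition should release the invisible worktree that was
            // created to hold the target buffer.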
4061 drop(definition);
4062 });
4063 cx.read(|cx| {
4064 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
4065 });
4066
4067 fn list_worktrees<'a>(
4068 project: &'a ModelHandle<Project>,
4069 cx: &'a AppContext,
4070 ) -> Vec<(&'a Path, bool)> {
4071 project
4072 .read(cx)
4073 .worktrees(cx)
4074 .map(|worktree| {
4075 let worktree = worktree.read(cx);
4076 (
4077 worktree.as_local().unwrap().abs_path().as_ref(),
4078 worktree.is_visible(),
4079 )
4080 })
4081 .collect::<Vec<_>>()
4082 }
4083 }
4084
4085 #[gpui::test]
4086 async fn test_save_file(cx: &mut gpui::TestAppContext) {
4087 let fs = FakeFs::new(cx.background());
4088 fs.insert_tree(
4089 "/dir",
4090 json!({
4091 "file1": "the old contents",
4092 }),
4093 )
4094 .await;
4095
4096 let project = Project::test(fs.clone(), cx);
4097 let worktree_id = project
4098 .update(cx, |p, cx| {
4099 p.find_or_create_local_worktree("/dir", true, cx)
4100 })
4101 .await
4102 .unwrap()
4103 .0
4104 .read_with(cx, |tree, _| tree.id());
4105
4106 let buffer = project
4107 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4108 .await
4109 .unwrap();
4110 buffer
4111 .update(cx, |buffer, cx| {
4112 assert_eq!(buffer.text(), "the old contents");
4113 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4114 buffer.save(cx)
4115 })
4116 .await
4117 .unwrap();
4118
4119 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4120 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4121 }
4122
4123 #[gpui::test]
4124 async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4125 let fs = FakeFs::new(cx.background());
4126 fs.insert_tree(
4127 "/dir",
4128 json!({
4129 "file1": "the old contents",
4130 }),
4131 )
4132 .await;
4133
4134 let project = Project::test(fs.clone(), cx);
4135 let worktree_id = project
4136 .update(cx, |p, cx| {
4137 p.find_or_create_local_worktree("/dir/file1", true, cx)
4138 })
4139 .await
4140 .unwrap()
4141 .0
4142 .read_with(cx, |tree, _| tree.id());
4143
4144 let buffer = project
4145 .update(cx, |p, cx| p.open_buffer((worktree_id, ""), cx))
4146 .await
4147 .unwrap();
4148 buffer
4149 .update(cx, |buffer, cx| {
4150 buffer.edit(Some(0..0), "a line of text.\n".repeat(10 * 1024), cx);
4151 buffer.save(cx)
4152 })
4153 .await
4154 .unwrap();
4155
4156 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
4157 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
4158 }
4159
4160 #[gpui::test]
4161 async fn test_save_as(cx: &mut gpui::TestAppContext) {
4162 let fs = FakeFs::new(cx.background());
4163 fs.insert_tree("/dir", json!({})).await;
4164
4165 let project = Project::test(fs.clone(), cx);
4166 let (worktree, _) = project
4167 .update(cx, |project, cx| {
4168 project.find_or_create_local_worktree("/dir", true, cx)
4169 })
4170 .await
4171 .unwrap();
4172 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4173
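        // Create an untitled buffer and save it to a new path inside the worktree.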
4174 let buffer = project.update(cx, |project, cx| project.create_buffer(cx).unwrap());
4175 buffer.update(cx, |buffer, cx| {
4176 buffer.edit([0..0], "abc", cx);
4177 assert!(buffer.is_dirty());
4178 assert!(!buffer.has_conflict());
4179 });
4180 project
4181 .update(cx, |project, cx| {
4182 project.save_buffer_as(buffer.clone(), "/dir/file1".into(), cx)
4183 })
4184 .await
4185 .unwrap();
4186 assert_eq!(fs.load(Path::new("/dir/file1")).await.unwrap(), "abc");
4187 buffer.read_with(cx, |buffer, cx| {
4188 assert_eq!(buffer.file().unwrap().full_path(cx), Path::new("dir/file1"));
4189 assert!(!buffer.is_dirty());
4190 assert!(!buffer.has_conflict());
4191 });
4192
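        // Re-opening the same path should return the buffer that was just saved.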
4193 let opened_buffer = project
4194 .update(cx, |project, cx| {
4195 project.open_buffer((worktree_id, "file1"), cx)
4196 })
4197 .await
4198 .unwrap();
4199 assert_eq!(opened_buffer, buffer);
4200 }
4201
4202 #[gpui::test(retries = 5)]
4203 async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4204 let dir = temp_tree(json!({
4205 "a": {
4206 "file1": "",
4207 "file2": "",
4208 "file3": "",
4209 },
4210 "b": {
4211 "c": {
4212 "file4": "",
4213 "file5": "",
4214 }
4215 }
4216 }));
4217
4218 let project = Project::test(Arc::new(RealFs), cx);
4219 let rpc = project.read_with(cx, |p, _| p.client.clone());
4220
4221 let (tree, _) = project
4222 .update(cx, |p, cx| {
4223 p.find_or_create_local_worktree(dir.path(), true, cx)
4224 })
4225 .await
4226 .unwrap();
4227 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4228
4229 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4230 let buffer = project.update(cx, |p, cx| p.open_buffer((worktree_id, path), cx));
4231 async move { buffer.await.unwrap() }
4232 };
4233 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
4234 tree.read_with(cx, |tree, _| {
4235 tree.entry_for_path(path)
4236                    .unwrap_or_else(|| panic!("no entry for path {}", path))
4237 .id
4238 })
4239 };
4240
4241 let buffer2 = buffer_for_path("a/file2", cx).await;
4242 let buffer3 = buffer_for_path("a/file3", cx).await;
4243 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4244 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4245
4246 let file2_id = id_for_path("a/file2", &cx);
4247 let file3_id = id_for_path("a/file3", &cx);
4248 let file4_id = id_for_path("b/c/file4", &cx);
4249
4250 // Wait for the initial scan.
4251 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4252 .await;
4253
4254 // Create a remote copy of this worktree.
4255 let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
4256 let (remote, load_task) = cx.update(|cx| {
4257 Worktree::remote(
4258 1,
4259 1,
4260 initial_snapshot.to_proto(&Default::default(), true),
4261 rpc.clone(),
4262 cx,
4263 )
4264 });
4265 load_task.await;
4266
4267 cx.read(|cx| {
4268 assert!(!buffer2.read(cx).is_dirty());
4269 assert!(!buffer3.read(cx).is_dirty());
4270 assert!(!buffer4.read(cx).is_dirty());
4271 assert!(!buffer5.read(cx).is_dirty());
4272 });
4273
4274 // Rename and delete files and directories.
4275 tree.flush_fs_events(&cx).await;
4276 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4277 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4278 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4279 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4280 tree.flush_fs_events(&cx).await;
4281
4282 let expected_paths = vec![
4283 "a",
4284 "a/file1",
4285 "a/file2.new",
4286 "b",
4287 "d",
4288 "d/file3",
4289 "d/file4",
4290 ];
4291
4292 cx.read(|app| {
4293 assert_eq!(
4294 tree.read(app)
4295 .paths()
4296 .map(|p| p.to_str().unwrap())
4297 .collect::<Vec<_>>(),
4298 expected_paths
4299 );
4300
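            // Entries should keep their ids across the renames and moves.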
4301 assert_eq!(id_for_path("a/file2.new", &cx), file2_id);
4302 assert_eq!(id_for_path("d/file3", &cx), file3_id);
4303 assert_eq!(id_for_path("d/file4", &cx), file4_id);
4304
4305 assert_eq!(
4306 buffer2.read(app).file().unwrap().path().as_ref(),
4307 Path::new("a/file2.new")
4308 );
4309 assert_eq!(
4310 buffer3.read(app).file().unwrap().path().as_ref(),
4311 Path::new("d/file3")
4312 );
4313 assert_eq!(
4314 buffer4.read(app).file().unwrap().path().as_ref(),
4315 Path::new("d/file4")
4316 );
4317 assert_eq!(
4318 buffer5.read(app).file().unwrap().path().as_ref(),
4319 Path::new("b/c/file5")
4320 );
4321
4322 assert!(!buffer2.read(app).file().unwrap().is_deleted());
4323 assert!(!buffer3.read(app).file().unwrap().is_deleted());
4324 assert!(!buffer4.read(app).file().unwrap().is_deleted());
4325 assert!(buffer5.read(app).file().unwrap().is_deleted());
4326 });
4327
4328 // Update the remote worktree. Check that it becomes consistent with the
4329 // local worktree.
4330 remote.update(cx, |remote, cx| {
4331 let update_message = tree.read(cx).as_local().unwrap().snapshot().build_update(
4332 &initial_snapshot,
4333 1,
4334 1,
4335 true,
4336 );
4337 remote
4338 .as_remote_mut()
4339 .unwrap()
4340 .snapshot
4341 .apply_remote_update(update_message)
4342 .unwrap();
4343
4344 assert_eq!(
4345 remote
4346 .paths()
4347 .map(|p| p.to_str().unwrap())
4348 .collect::<Vec<_>>(),
4349 expected_paths
4350 );
4351 });
4352 }
4353
4354 #[gpui::test]
4355 async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4356 let fs = FakeFs::new(cx.background());
4357 fs.insert_tree(
4358 "/the-dir",
4359 json!({
4360 "a.txt": "a-contents",
4361 "b.txt": "b-contents",
4362 }),
4363 )
4364 .await;
4365
4366 let project = Project::test(fs.clone(), cx);
4367 let worktree_id = project
4368 .update(cx, |p, cx| {
4369 p.find_or_create_local_worktree("/the-dir", true, cx)
4370 })
4371 .await
4372 .unwrap()
4373 .0
4374 .read_with(cx, |tree, _| tree.id());
4375
4376 // Spawn multiple tasks to open paths, repeating some paths.
4377 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4378 (
4379 p.open_buffer((worktree_id, "a.txt"), cx),
4380 p.open_buffer((worktree_id, "b.txt"), cx),
4381 p.open_buffer((worktree_id, "a.txt"), cx),
4382 )
4383 });
4384
4385 let buffer_a_1 = buffer_a_1.await.unwrap();
4386 let buffer_a_2 = buffer_a_2.await.unwrap();
4387 let buffer_b = buffer_b.await.unwrap();
4388 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
4389 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
4390
4391 // There is only one buffer per path.
4392 let buffer_a_id = buffer_a_1.id();
4393 assert_eq!(buffer_a_2.id(), buffer_a_id);
4394
4395 // Open the same path again while it is still open.
4396 drop(buffer_a_1);
4397 let buffer_a_3 = project
4398 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
4399 .await
4400 .unwrap();
4401
4402 // There's still only one buffer per path.
4403 assert_eq!(buffer_a_3.id(), buffer_a_id);
4404 }
4405
4406 #[gpui::test]
4407 async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4408 use std::fs;
4409
4410 let dir = temp_tree(json!({
4411 "file1": "abc",
4412 "file2": "def",
4413 "file3": "ghi",
4414 }));
4415
4416 let project = Project::test(Arc::new(RealFs), cx);
4417 let (worktree, _) = project
4418 .update(cx, |p, cx| {
4419 p.find_or_create_local_worktree(dir.path(), true, cx)
4420 })
4421 .await
4422 .unwrap();
4423 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
4424
4425 worktree.flush_fs_events(&cx).await;
4426 worktree
4427 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4428 .await;
4429
4430 let buffer1 = project
4431 .update(cx, |p, cx| p.open_buffer((worktree_id, "file1"), cx))
4432 .await
4433 .unwrap();
4434 let events = Rc::new(RefCell::new(Vec::new()));
4435
4436 // initially, the buffer isn't dirty.
4437 buffer1.update(cx, |buffer, cx| {
4438 cx.subscribe(&buffer1, {
4439 let events = events.clone();
4440 move |_, _, event, _| events.borrow_mut().push(event.clone())
4441 })
4442 .detach();
4443
4444 assert!(!buffer.is_dirty());
4445 assert!(events.borrow().is_empty());
4446
4447 buffer.edit(vec![1..2], "", cx);
4448 });
4449
4450 // after the first edit, the buffer is dirty, and emits a dirtied event.
4451 buffer1.update(cx, |buffer, cx| {
4452            assert_eq!(buffer.text(), "ac");
4453 assert!(buffer.is_dirty());
4454 assert_eq!(
4455 *events.borrow(),
4456 &[language::Event::Edited, language::Event::Dirtied]
4457 );
4458 events.borrow_mut().clear();
4459 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), None, cx);
4460 });
4461
4462 // after saving, the buffer is not dirty, and emits a saved event.
4463 buffer1.update(cx, |buffer, cx| {
4464 assert!(!buffer.is_dirty());
4465 assert_eq!(*events.borrow(), &[language::Event::Saved]);
4466 events.borrow_mut().clear();
4467
4468 buffer.edit(vec![1..1], "B", cx);
4469 buffer.edit(vec![2..2], "D", cx);
4470 });
4471
4472        // after editing again, the buffer is dirty, and emits another dirtied event.
4473 buffer1.update(cx, |buffer, cx| {
4474            assert_eq!(buffer.text(), "aBDc");
4475 assert!(buffer.is_dirty());
4476 assert_eq!(
4477 *events.borrow(),
4478 &[
4479 language::Event::Edited,
4480 language::Event::Dirtied,
4481 language::Event::Edited,
4482 ],
4483 );
4484 events.borrow_mut().clear();
4485
4486            // TODO - currently, after restoring the buffer to its
4487            // previously-saved state, the buffer is still considered dirty.
4488 buffer.edit([1..3], "", cx);
4489            assert_eq!(buffer.text(), "ac");
4490 assert!(buffer.is_dirty());
4491 });
4492
4493 assert_eq!(*events.borrow(), &[language::Event::Edited]);
4494
4495 // When a file is deleted, the buffer is considered dirty.
4496 let events = Rc::new(RefCell::new(Vec::new()));
4497 let buffer2 = project
4498 .update(cx, |p, cx| p.open_buffer((worktree_id, "file2"), cx))
4499 .await
4500 .unwrap();
4501 buffer2.update(cx, |_, cx| {
4502 cx.subscribe(&buffer2, {
4503 let events = events.clone();
4504 move |_, _, event, _| events.borrow_mut().push(event.clone())
4505 })
4506 .detach();
4507 });
4508
4509 fs::remove_file(dir.path().join("file2")).unwrap();
4510 buffer2.condition(&cx, |b, _| b.is_dirty()).await;
4511 assert_eq!(
4512 *events.borrow(),
4513 &[language::Event::Dirtied, language::Event::FileHandleChanged]
4514 );
4515
4516 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4517 let events = Rc::new(RefCell::new(Vec::new()));
4518 let buffer3 = project
4519 .update(cx, |p, cx| p.open_buffer((worktree_id, "file3"), cx))
4520 .await
4521 .unwrap();
4522 buffer3.update(cx, |_, cx| {
4523 cx.subscribe(&buffer3, {
4524 let events = events.clone();
4525 move |_, _, event, _| events.borrow_mut().push(event.clone())
4526 })
4527 .detach();
4528 });
4529
4530 worktree.flush_fs_events(&cx).await;
4531 buffer3.update(cx, |buffer, cx| {
4532 buffer.edit(Some(0..0), "x", cx);
4533 });
4534 events.borrow_mut().clear();
4535 fs::remove_file(dir.path().join("file3")).unwrap();
4536 buffer3
4537 .condition(&cx, |_, _| !events.borrow().is_empty())
4538 .await;
4539 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
4540 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
4541 }
4542
4543 #[gpui::test]
4544 async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4545 use std::fs;
4546
4547 let initial_contents = "aaa\nbbbbb\nc\n";
4548 let dir = temp_tree(json!({ "the-file": initial_contents }));
4549
4550 let project = Project::test(Arc::new(RealFs), cx);
4551 let (worktree, _) = project
4552 .update(cx, |p, cx| {
4553 p.find_or_create_local_worktree(dir.path(), true, cx)
4554 })
4555 .await
4556 .unwrap();
4557 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4558
4559 worktree
4560 .read_with(cx, |t, _| t.as_local().unwrap().scan_complete())
4561 .await;
4562
4563 let abs_path = dir.path().join("the-file");
4564 let buffer = project
4565 .update(cx, |p, cx| p.open_buffer((worktree_id, "the-file"), cx))
4566 .await
4567 .unwrap();
4568
4569 // TODO
4570 // Add a cursor on each row.
4571 // let selection_set_id = buffer.update(&mut cx, |buffer, cx| {
4572 // assert!(!buffer.is_dirty());
4573 // buffer.add_selection_set(
4574 // &(0..3)
4575 // .map(|row| Selection {
4576 // id: row as usize,
4577 // start: Point::new(row, 1),
4578 // end: Point::new(row, 1),
4579 // reversed: false,
4580 // goal: SelectionGoal::None,
4581 // })
4582 // .collect::<Vec<_>>(),
4583 // cx,
4584 // )
4585 // });
4586
4587 // Change the file on disk, adding two new lines of text, and removing
4588 // one line.
4589 buffer.read_with(cx, |buffer, _| {
4590 assert!(!buffer.is_dirty());
4591 assert!(!buffer.has_conflict());
4592 });
4593 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
4594 fs::write(&abs_path, new_contents).unwrap();
4595
4596 // Because the buffer was not modified, it is reloaded from disk. Its
4597 // contents are edited according to the diff between the old and new
4598 // file contents.
4599 buffer
4600 .condition(&cx, |buffer, _| buffer.text() == new_contents)
4601 .await;
4602
4603 buffer.update(cx, |buffer, _| {
4604 assert_eq!(buffer.text(), new_contents);
4605 assert!(!buffer.is_dirty());
4606 assert!(!buffer.has_conflict());
4607
4608 // TODO
4609 // let cursor_positions = buffer
4610 // .selection_set(selection_set_id)
4611 // .unwrap()
4612 // .selections::<Point>(&*buffer)
4613 // .map(|selection| {
4614 // assert_eq!(selection.start, selection.end);
4615 // selection.start
4616 // })
4617 // .collect::<Vec<_>>();
4618 // assert_eq!(
4619 // cursor_positions,
4620 // [Point::new(1, 1), Point::new(3, 1), Point::new(4, 0)]
4621 // );
4622 });
4623
4624 // Modify the buffer
4625 buffer.update(cx, |buffer, cx| {
4626 buffer.edit(vec![0..0], " ", cx);
4627 assert!(buffer.is_dirty());
4628 assert!(!buffer.has_conflict());
4629 });
4630
4631 // Change the file on disk again, adding blank lines to the beginning.
4632 fs::write(&abs_path, "\n\n\nAAAA\naaa\nBB\nbbbbb\n").unwrap();
4633
4634 // Because the buffer is modified, it doesn't reload from disk, but is
4635 // marked as having a conflict.
4636 buffer
4637 .condition(&cx, |buffer, _| buffer.has_conflict())
4638 .await;
4639 }
4640
4641 #[gpui::test]
4642 async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4643 let fs = FakeFs::new(cx.background());
4644 fs.insert_tree(
4645 "/the-dir",
4646 json!({
4647 "a.rs": "
4648 fn foo(mut v: Vec<usize>) {
4649 for x in &v {
4650 v.push(1);
4651 }
4652 }
4653 "
4654 .unindent(),
4655 }),
4656 )
4657 .await;
4658
4659 let project = Project::test(fs.clone(), cx);
4660 let (worktree, _) = project
4661 .update(cx, |p, cx| {
4662 p.find_or_create_local_worktree("/the-dir", true, cx)
4663 })
4664 .await
4665 .unwrap();
4666 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
4667
4668 let buffer = project
4669 .update(cx, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
4670 .await
4671 .unwrap();
4672
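        // Publish diagnostics in which the hint entries reference their primary
        // diagnostics via related information, so they should end up in the same groups.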
4673 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
4674 let message = lsp::PublishDiagnosticsParams {
4675 uri: buffer_uri.clone(),
4676 diagnostics: vec![
4677 lsp::Diagnostic {
4678 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4679 severity: Some(DiagnosticSeverity::WARNING),
4680 message: "error 1".to_string(),
4681 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4682 location: lsp::Location {
4683 uri: buffer_uri.clone(),
4684 range: lsp::Range::new(
4685 lsp::Position::new(1, 8),
4686 lsp::Position::new(1, 9),
4687 ),
4688 },
4689 message: "error 1 hint 1".to_string(),
4690 }]),
4691 ..Default::default()
4692 },
4693 lsp::Diagnostic {
4694 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4695 severity: Some(DiagnosticSeverity::HINT),
4696 message: "error 1 hint 1".to_string(),
4697 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4698 location: lsp::Location {
4699 uri: buffer_uri.clone(),
4700 range: lsp::Range::new(
4701 lsp::Position::new(1, 8),
4702 lsp::Position::new(1, 9),
4703 ),
4704 },
4705 message: "original diagnostic".to_string(),
4706 }]),
4707 ..Default::default()
4708 },
4709 lsp::Diagnostic {
4710 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4711 severity: Some(DiagnosticSeverity::ERROR),
4712 message: "error 2".to_string(),
4713 related_information: Some(vec![
4714 lsp::DiagnosticRelatedInformation {
4715 location: lsp::Location {
4716 uri: buffer_uri.clone(),
4717 range: lsp::Range::new(
4718 lsp::Position::new(1, 13),
4719 lsp::Position::new(1, 15),
4720 ),
4721 },
4722 message: "error 2 hint 1".to_string(),
4723 },
4724 lsp::DiagnosticRelatedInformation {
4725 location: lsp::Location {
4726 uri: buffer_uri.clone(),
4727 range: lsp::Range::new(
4728 lsp::Position::new(1, 13),
4729 lsp::Position::new(1, 15),
4730 ),
4731 },
4732 message: "error 2 hint 2".to_string(),
4733 },
4734 ]),
4735 ..Default::default()
4736 },
4737 lsp::Diagnostic {
4738 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4739 severity: Some(DiagnosticSeverity::HINT),
4740 message: "error 2 hint 1".to_string(),
4741 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4742 location: lsp::Location {
4743 uri: buffer_uri.clone(),
4744 range: lsp::Range::new(
4745 lsp::Position::new(2, 8),
4746 lsp::Position::new(2, 17),
4747 ),
4748 },
4749 message: "original diagnostic".to_string(),
4750 }]),
4751 ..Default::default()
4752 },
4753 lsp::Diagnostic {
4754 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4755 severity: Some(DiagnosticSeverity::HINT),
4756 message: "error 2 hint 2".to_string(),
4757 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4758 location: lsp::Location {
4759 uri: buffer_uri.clone(),
4760 range: lsp::Range::new(
4761 lsp::Position::new(2, 8),
4762 lsp::Position::new(2, 17),
4763 ),
4764 },
4765 message: "original diagnostic".to_string(),
4766 }]),
4767 ..Default::default()
4768 },
4769 ],
4770 version: None,
4771 };
4772
4773 project
4774 .update(cx, |p, cx| {
4775 p.update_diagnostics(message, &Default::default(), cx)
4776 })
4777 .unwrap();
4778 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
4779
4780 assert_eq!(
4781 buffer
4782 .diagnostics_in_range::<_, Point>(0..buffer.len())
4783 .collect::<Vec<_>>(),
4784 &[
4785 DiagnosticEntry {
4786 range: Point::new(1, 8)..Point::new(1, 9),
4787 diagnostic: Diagnostic {
4788 severity: DiagnosticSeverity::WARNING,
4789 message: "error 1".to_string(),
4790 group_id: 0,
4791 is_primary: true,
4792 ..Default::default()
4793 }
4794 },
4795 DiagnosticEntry {
4796 range: Point::new(1, 8)..Point::new(1, 9),
4797 diagnostic: Diagnostic {
4798 severity: DiagnosticSeverity::HINT,
4799 message: "error 1 hint 1".to_string(),
4800 group_id: 0,
4801 is_primary: false,
4802 ..Default::default()
4803 }
4804 },
4805 DiagnosticEntry {
4806 range: Point::new(1, 13)..Point::new(1, 15),
4807 diagnostic: Diagnostic {
4808 severity: DiagnosticSeverity::HINT,
4809 message: "error 2 hint 1".to_string(),
4810 group_id: 1,
4811 is_primary: false,
4812 ..Default::default()
4813 }
4814 },
4815 DiagnosticEntry {
4816 range: Point::new(1, 13)..Point::new(1, 15),
4817 diagnostic: Diagnostic {
4818 severity: DiagnosticSeverity::HINT,
4819 message: "error 2 hint 2".to_string(),
4820 group_id: 1,
4821 is_primary: false,
4822 ..Default::default()
4823 }
4824 },
4825 DiagnosticEntry {
4826 range: Point::new(2, 8)..Point::new(2, 17),
4827 diagnostic: Diagnostic {
4828 severity: DiagnosticSeverity::ERROR,
4829 message: "error 2".to_string(),
4830 group_id: 1,
4831 is_primary: true,
4832 ..Default::default()
4833 }
4834 }
4835 ]
4836 );
4837
4838 assert_eq!(
4839 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4840 &[
4841 DiagnosticEntry {
4842 range: Point::new(1, 8)..Point::new(1, 9),
4843 diagnostic: Diagnostic {
4844 severity: DiagnosticSeverity::WARNING,
4845 message: "error 1".to_string(),
4846 group_id: 0,
4847 is_primary: true,
4848 ..Default::default()
4849 }
4850 },
4851 DiagnosticEntry {
4852 range: Point::new(1, 8)..Point::new(1, 9),
4853 diagnostic: Diagnostic {
4854 severity: DiagnosticSeverity::HINT,
4855 message: "error 1 hint 1".to_string(),
4856 group_id: 0,
4857 is_primary: false,
4858 ..Default::default()
4859 }
4860 },
4861 ]
4862 );
4863 assert_eq!(
4864 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4865 &[
4866 DiagnosticEntry {
4867 range: Point::new(1, 13)..Point::new(1, 15),
4868 diagnostic: Diagnostic {
4869 severity: DiagnosticSeverity::HINT,
4870 message: "error 2 hint 1".to_string(),
4871 group_id: 1,
4872 is_primary: false,
4873 ..Default::default()
4874 }
4875 },
4876 DiagnosticEntry {
4877 range: Point::new(1, 13)..Point::new(1, 15),
4878 diagnostic: Diagnostic {
4879 severity: DiagnosticSeverity::HINT,
4880 message: "error 2 hint 2".to_string(),
4881 group_id: 1,
4882 is_primary: false,
4883 ..Default::default()
4884 }
4885 },
4886 DiagnosticEntry {
4887 range: Point::new(2, 8)..Point::new(2, 17),
4888 diagnostic: Diagnostic {
4889 severity: DiagnosticSeverity::ERROR,
4890 message: "error 2".to_string(),
4891 group_id: 1,
4892 is_primary: true,
4893 ..Default::default()
4894 }
4895 }
4896 ]
4897 );
4898 }
4899
4900 #[gpui::test]
4901 async fn test_rename(cx: &mut gpui::TestAppContext) {
4902 let (language_server_config, mut fake_servers) = LanguageServerConfig::fake();
4903 let language = Arc::new(Language::new(
4904 LanguageConfig {
4905 name: "Rust".into(),
4906 path_suffixes: vec!["rs".to_string()],
4907 language_server: Some(language_server_config),
4908 ..Default::default()
4909 },
4910 Some(tree_sitter_rust::language()),
4911 ));
4912
4913 let fs = FakeFs::new(cx.background());
4914 fs.insert_tree(
4915 "/dir",
4916 json!({
4917 "one.rs": "const ONE: usize = 1;",
4918 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4919 }),
4920 )
4921 .await;
4922
4923 let project = Project::test(fs.clone(), cx);
4924 project.update(cx, |project, _| {
4925 Arc::get_mut(&mut project.languages).unwrap().add(language);
4926 });
4927
4928 let (tree, _) = project
4929 .update(cx, |project, cx| {
4930 project.find_or_create_local_worktree("/dir", true, cx)
4931 })
4932 .await
4933 .unwrap();
4934 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
4935 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
4936 .await;
4937
4938 let buffer = project
4939 .update(cx, |project, cx| {
4940 project.open_buffer((worktree_id, Path::new("one.rs")), cx)
4941 })
4942 .await
4943 .unwrap();
4944
4945 let mut fake_server = fake_servers.next().await.unwrap();
4946
4947 let response = project.update(cx, |project, cx| {
4948 project.prepare_rename(buffer.clone(), 7, cx)
4949 });
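        // The fake server responds to the prepare-rename request with the range of the
        // symbol under the cursor.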
4950 fake_server
4951 .handle_request::<lsp::request::PrepareRenameRequest, _>(|params, _| {
4952 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4953 assert_eq!(params.position, lsp::Position::new(0, 7));
4954 Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4955 lsp::Position::new(0, 6),
4956 lsp::Position::new(0, 9),
4957 )))
4958 })
4959 .next()
4960 .await
4961 .unwrap();
4962 let range = response.await.unwrap().unwrap();
4963 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
4964 assert_eq!(range, 6..9);
4965
4966 let response = project.update(cx, |project, cx| {
4967 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
4968 });
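        // The fake server responds with edits to two files; applying the rename should
        // produce a transaction touching a buffer for each of them.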
4969 fake_server
4970 .handle_request::<lsp::request::Rename, _>(|params, _| {
4971 assert_eq!(
4972 params.text_document_position.text_document.uri.as_str(),
4973 "file:///dir/one.rs"
4974 );
4975 assert_eq!(
4976 params.text_document_position.position,
4977 lsp::Position::new(0, 7)
4978 );
4979 assert_eq!(params.new_name, "THREE");
4980 Some(lsp::WorkspaceEdit {
4981 changes: Some(
4982 [
4983 (
4984 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4985 vec![lsp::TextEdit::new(
4986 lsp::Range::new(
4987 lsp::Position::new(0, 6),
4988 lsp::Position::new(0, 9),
4989 ),
4990 "THREE".to_string(),
4991 )],
4992 ),
4993 (
4994 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4995 vec![
4996 lsp::TextEdit::new(
4997 lsp::Range::new(
4998 lsp::Position::new(0, 24),
4999 lsp::Position::new(0, 27),
5000 ),
5001 "THREE".to_string(),
5002 ),
5003 lsp::TextEdit::new(
5004 lsp::Range::new(
5005 lsp::Position::new(0, 35),
5006 lsp::Position::new(0, 38),
5007 ),
5008 "THREE".to_string(),
5009 ),
5010 ],
5011 ),
5012 ]
5013 .into_iter()
5014 .collect(),
5015 ),
5016 ..Default::default()
5017 })
5018 })
5019 .next()
5020 .await
5021 .unwrap();
5022 let mut transaction = response.await.unwrap().0;
5023 assert_eq!(transaction.len(), 2);
5024 assert_eq!(
5025 transaction
5026 .remove_entry(&buffer)
5027 .unwrap()
5028 .0
5029 .read_with(cx, |buffer, _| buffer.text()),
5030 "const THREE: usize = 1;"
5031 );
5032 assert_eq!(
5033 transaction
5034 .into_keys()
5035 .next()
5036 .unwrap()
5037 .read_with(cx, |buffer, _| buffer.text()),
5038 "const TWO: usize = one::THREE + one::THREE;"
5039 );
5040 }
5041
5042 #[gpui::test]
5043 async fn test_search(cx: &mut gpui::TestAppContext) {
5044 let fs = FakeFs::new(cx.background());
5045 fs.insert_tree(
5046 "/dir",
5047 json!({
5048 "one.rs": "const ONE: usize = 1;",
5049 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5050 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5051 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5052 }),
5053 )
5054 .await;
5055 let project = Project::test(fs.clone(), cx);
5056 let (tree, _) = project
5057 .update(cx, |project, cx| {
5058 project.find_or_create_local_worktree("/dir", true, cx)
5059 })
5060 .await
5061 .unwrap();
5062 let worktree_id = tree.read_with(cx, |tree, _| tree.id());
5063 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
5064 .await;
5065
5066 assert_eq!(
5067 search(&project, SearchQuery::text("TWO", false, true), cx)
5068 .await
5069 .unwrap(),
5070 HashMap::from_iter([
5071 ("two.rs".to_string(), vec![6..9]),
5072 ("three.rs".to_string(), vec![37..40])
5073 ])
5074 );
5075
5076 let buffer_4 = project
5077 .update(cx, |project, cx| {
5078 project.open_buffer((worktree_id, "four.rs"), cx)
5079 })
5080 .await
5081 .unwrap();
5082 buffer_4.update(cx, |buffer, cx| {
5083 buffer.edit([20..28, 31..43], "two::TWO", cx);
5084 });
5085
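        // The search results should reflect the unsaved edits in the open buffer.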
5086 assert_eq!(
5087 search(&project, SearchQuery::text("TWO", false, true), cx)
5088 .await
5089 .unwrap(),
5090 HashMap::from_iter([
5091 ("two.rs".to_string(), vec![6..9]),
5092 ("three.rs".to_string(), vec![37..40]),
5093 ("four.rs".to_string(), vec![25..28, 36..39])
5094 ])
5095 );
5096
5097 async fn search(
5098 project: &ModelHandle<Project>,
5099 query: SearchQuery,
5100 cx: &mut gpui::TestAppContext,
5101 ) -> Result<HashMap<String, Vec<Range<usize>>>> {
5102 let results = project
5103 .update(cx, |project, cx| project.search(query, cx))
5104 .await?;
5105
5106 Ok(results
5107 .into_iter()
5108 .map(|(buffer, ranges)| {
5109 buffer.read_with(cx, |buffer, _| {
5110 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
5111 let ranges = ranges
5112 .into_iter()
5113 .map(|range| range.to_offset(buffer))
5114 .collect::<Vec<_>>();
5115 (path, ranges)
5116 })
5117 })
5118 .collect())
5119 }
5120 }
5121}